memfilectx: call super.__init__ instead of duplicating code...
Sean Farley
r21689:503bb3af default
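The diffs below update every caller of context.memfilectx to pass the repository as the first argument, because memfilectx.__init__ now delegates to its base class instead of duplicating that initialization. As a minimal sketch of the before/after call shape, based only on the call sites visible in this changeset (the wrapper name and keyword defaults are illustrative, not part of the change):

from mercurial import context

def makefilectx(repo, path, data, islink=False, isexec=False, copied=None):
    # Old call shape (before this commit): the repo was not passed in,
    # e.g. context.memfilectx(path, data, islink, isexec, copied).
    # New call shape: memfilectx.__init__ now calls the base class
    # initializer, which needs the repo, so the repository becomes the
    # first positional argument at every call site.
    return context.memfilectx(repo, path, data, islink, isexec, copied)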
@@ -1,379 +1,379
1 # synthrepo.py - repo synthesis
1 # synthrepo.py - repo synthesis
2 #
2 #
3 # Copyright 2012 Facebook
3 # Copyright 2012 Facebook
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''synthesize structurally interesting change history
8 '''synthesize structurally interesting change history
9
9
10 This extension is useful for creating a repository with properties
10 This extension is useful for creating a repository with properties
11 that are statistically similar to an existing repository. During
11 that are statistically similar to an existing repository. During
12 analysis, a simple probability table is constructed from the history
12 analysis, a simple probability table is constructed from the history
13 of an existing repository. During synthesis, these properties are
13 of an existing repository. During synthesis, these properties are
14 reconstructed.
14 reconstructed.
15
15
16 Properties that are analyzed and synthesized include the following:
16 Properties that are analyzed and synthesized include the following:
17
17
18 - Lines added or removed when an existing file is modified
18 - Lines added or removed when an existing file is modified
19 - Number and sizes of files added
19 - Number and sizes of files added
20 - Number of files removed
20 - Number of files removed
21 - Line lengths
21 - Line lengths
22 - Topological distance to parent changeset(s)
22 - Topological distance to parent changeset(s)
23 - Probability of a commit being a merge
23 - Probability of a commit being a merge
24 - Probability of a newly added file being added to a new directory
24 - Probability of a newly added file being added to a new directory
25 - Interarrival time, and time zone, of commits
25 - Interarrival time, and time zone, of commits
26
26
27 A few obvious properties that are not currently handled realistically:
27 A few obvious properties that are not currently handled realistically:
28
28
29 - Merges are treated as regular commits with two parents, which is not
29 - Merges are treated as regular commits with two parents, which is not
30 realistic
30 realistic
31 - Modifications are not treated as operations on hunks of lines, but
31 - Modifications are not treated as operations on hunks of lines, but
32 as insertions and deletions of randomly chosen single lines
32 as insertions and deletions of randomly chosen single lines
33 - Committer ID (always random)
33 - Committer ID (always random)
34 - Executability of files
34 - Executability of files
35 - Symlinks and binary files are ignored
35 - Symlinks and binary files are ignored
36 '''
36 '''
37
37
38 import bisect, collections, json, os, random, time, sys
38 import bisect, collections, json, os, random, time, sys
39 from mercurial import cmdutil, context, patch, scmutil, util, hg
39 from mercurial import cmdutil, context, patch, scmutil, util, hg
40 from mercurial.i18n import _
40 from mercurial.i18n import _
41 from mercurial.node import nullrev, nullid
41 from mercurial.node import nullrev, nullid
42
42
43 testedwith = 'internal'
43 testedwith = 'internal'
44
44
45 cmdtable = {}
45 cmdtable = {}
46 command = cmdutil.command(cmdtable)
46 command = cmdutil.command(cmdtable)
47
47
48 newfile = set(('new fi', 'rename', 'copy f', 'copy t'))
48 newfile = set(('new fi', 'rename', 'copy f', 'copy t'))
49
49
50 def zerodict():
50 def zerodict():
51 return collections.defaultdict(lambda: 0)
51 return collections.defaultdict(lambda: 0)
52
52
53 def roundto(x, k):
53 def roundto(x, k):
54 if x > k * 2:
54 if x > k * 2:
55 return int(round(x / float(k)) * k)
55 return int(round(x / float(k)) * k)
56 return int(round(x))
56 return int(round(x))
57
57
58 def parsegitdiff(lines):
58 def parsegitdiff(lines):
59 filename, mar, lineadd, lineremove = None, None, zerodict(), 0
59 filename, mar, lineadd, lineremove = None, None, zerodict(), 0
60 binary = False
60 binary = False
61 for line in lines:
61 for line in lines:
62 start = line[:6]
62 start = line[:6]
63 if start == 'diff -':
63 if start == 'diff -':
64 if filename:
64 if filename:
65 yield filename, mar, lineadd, lineremove, binary
65 yield filename, mar, lineadd, lineremove, binary
66 mar, lineadd, lineremove, binary = 'm', zerodict(), 0, False
66 mar, lineadd, lineremove, binary = 'm', zerodict(), 0, False
67 filename = patch.gitre.match(line).group(1)
67 filename = patch.gitre.match(line).group(1)
68 elif start in newfile:
68 elif start in newfile:
69 mar = 'a'
69 mar = 'a'
70 elif start == 'GIT bi':
70 elif start == 'GIT bi':
71 binary = True
71 binary = True
72 elif start == 'delete':
72 elif start == 'delete':
73 mar = 'r'
73 mar = 'r'
74 elif start:
74 elif start:
75 s = start[0]
75 s = start[0]
76 if s == '-' and not line.startswith('--- '):
76 if s == '-' and not line.startswith('--- '):
77 lineremove += 1
77 lineremove += 1
78 elif s == '+' and not line.startswith('+++ '):
78 elif s == '+' and not line.startswith('+++ '):
79 lineadd[roundto(len(line) - 1, 5)] += 1
79 lineadd[roundto(len(line) - 1, 5)] += 1
80 if filename:
80 if filename:
81 yield filename, mar, lineadd, lineremove, binary
81 yield filename, mar, lineadd, lineremove, binary
82
82
83 @command('analyze',
83 @command('analyze',
84 [('o', 'output', [], _('write output to given file'), _('FILE')),
84 [('o', 'output', [], _('write output to given file'), _('FILE')),
85 ('r', 'rev', [], _('analyze specified revisions'), _('REV'))],
85 ('r', 'rev', [], _('analyze specified revisions'), _('REV'))],
86 _('hg analyze'))
86 _('hg analyze'))
87 def analyze(ui, repo, *revs, **opts):
87 def analyze(ui, repo, *revs, **opts):
88 '''create a simple model of a repository to use for later synthesis
88 '''create a simple model of a repository to use for later synthesis
89
89
90 This command examines every changeset in the given range (or all
90 This command examines every changeset in the given range (or all
91 of history if none are specified) and creates a simple statistical
91 of history if none are specified) and creates a simple statistical
92 model of the history of the repository.
92 model of the history of the repository.
93
93
94 The model is written out to a JSON file, and can be used by
94 The model is written out to a JSON file, and can be used by
95 :hg:`synthesize` to create or augment a repository with synthetic
95 :hg:`synthesize` to create or augment a repository with synthetic
96 commits that have a structure that is statistically similar to the
96 commits that have a structure that is statistically similar to the
97 analyzed repository.
97 analyzed repository.
98 '''
98 '''
99
99
100 revs = list(revs)
100 revs = list(revs)
101 revs.extend(opts['rev'])
101 revs.extend(opts['rev'])
102 if not revs:
102 if not revs:
103 revs = [':']
103 revs = [':']
104
104
105 output = opts['output']
105 output = opts['output']
106 if not output:
106 if not output:
107 output = os.path.basename(repo.root) + '.json'
107 output = os.path.basename(repo.root) + '.json'
108
108
109 if output == '-':
109 if output == '-':
110 fp = sys.stdout
110 fp = sys.stdout
111 else:
111 else:
112 fp = open(output, 'w')
112 fp = open(output, 'w')
113
113
114 revs = scmutil.revrange(repo, revs)
114 revs = scmutil.revrange(repo, revs)
115 revs.sort()
115 revs.sort()
116
116
117 lineschanged = zerodict()
117 lineschanged = zerodict()
118 children = zerodict()
118 children = zerodict()
119 p1distance = zerodict()
119 p1distance = zerodict()
120 p2distance = zerodict()
120 p2distance = zerodict()
121 linesinfilesadded = zerodict()
121 linesinfilesadded = zerodict()
122 fileschanged = zerodict()
122 fileschanged = zerodict()
123 filesadded = zerodict()
123 filesadded = zerodict()
124 filesremoved = zerodict()
124 filesremoved = zerodict()
125 linelengths = zerodict()
125 linelengths = zerodict()
126 interarrival = zerodict()
126 interarrival = zerodict()
127 parents = zerodict()
127 parents = zerodict()
128 dirsadded = zerodict()
128 dirsadded = zerodict()
129 tzoffset = zerodict()
129 tzoffset = zerodict()
130
130
131 progress = ui.progress
131 progress = ui.progress
132 _analyzing = _('analyzing')
132 _analyzing = _('analyzing')
133 _changesets = _('changesets')
133 _changesets = _('changesets')
134 _total = len(revs)
134 _total = len(revs)
135
135
136 for i, rev in enumerate(revs):
136 for i, rev in enumerate(revs):
137 progress(_analyzing, i, unit=_changesets, total=_total)
137 progress(_analyzing, i, unit=_changesets, total=_total)
138 ctx = repo[rev]
138 ctx = repo[rev]
139 pl = ctx.parents()
139 pl = ctx.parents()
140 pctx = pl[0]
140 pctx = pl[0]
141 prev = pctx.rev()
141 prev = pctx.rev()
142 children[prev] += 1
142 children[prev] += 1
143 p1distance[rev - prev] += 1
143 p1distance[rev - prev] += 1
144 parents[len(pl)] += 1
144 parents[len(pl)] += 1
145 tzoffset[ctx.date()[1]] += 1
145 tzoffset[ctx.date()[1]] += 1
146 if len(pl) > 1:
146 if len(pl) > 1:
147 p2distance[rev - pl[1].rev()] += 1
147 p2distance[rev - pl[1].rev()] += 1
148 if prev == rev - 1:
148 if prev == rev - 1:
149 lastctx = pctx
149 lastctx = pctx
150 else:
150 else:
151 lastctx = repo[rev - 1]
151 lastctx = repo[rev - 1]
152 if lastctx.rev() != nullrev:
152 if lastctx.rev() != nullrev:
153 interarrival[roundto(ctx.date()[0] - lastctx.date()[0], 300)] += 1
153 interarrival[roundto(ctx.date()[0] - lastctx.date()[0], 300)] += 1
154 diff = sum((d.splitlines()
154 diff = sum((d.splitlines()
155 for d in ctx.diff(pctx, opts={'git': True})), [])
155 for d in ctx.diff(pctx, opts={'git': True})), [])
156 fileadds, diradds, fileremoves, filechanges = 0, 0, 0, 0
156 fileadds, diradds, fileremoves, filechanges = 0, 0, 0, 0
157 for filename, mar, lineadd, lineremove, binary in parsegitdiff(diff):
157 for filename, mar, lineadd, lineremove, binary in parsegitdiff(diff):
158 if binary:
158 if binary:
159 continue
159 continue
160 added = sum(lineadd.itervalues(), 0)
160 added = sum(lineadd.itervalues(), 0)
161 if mar == 'm':
161 if mar == 'm':
162 if added and lineremove:
162 if added and lineremove:
163 lineschanged[roundto(added, 5), roundto(lineremove, 5)] += 1
163 lineschanged[roundto(added, 5), roundto(lineremove, 5)] += 1
164 filechanges += 1
164 filechanges += 1
165 elif mar == 'a':
165 elif mar == 'a':
166 fileadds += 1
166 fileadds += 1
167 if '/' in filename:
167 if '/' in filename:
168 filedir = filename.rsplit('/', 1)[0]
168 filedir = filename.rsplit('/', 1)[0]
169 if filedir not in pctx.dirs():
169 if filedir not in pctx.dirs():
170 diradds += 1
170 diradds += 1
171 linesinfilesadded[roundto(added, 5)] += 1
171 linesinfilesadded[roundto(added, 5)] += 1
172 elif mar == 'r':
172 elif mar == 'r':
173 fileremoves += 1
173 fileremoves += 1
174 for length, count in lineadd.iteritems():
174 for length, count in lineadd.iteritems():
175 linelengths[length] += count
175 linelengths[length] += count
176 fileschanged[filechanges] += 1
176 fileschanged[filechanges] += 1
177 filesadded[fileadds] += 1
177 filesadded[fileadds] += 1
178 dirsadded[diradds] += 1
178 dirsadded[diradds] += 1
179 filesremoved[fileremoves] += 1
179 filesremoved[fileremoves] += 1
180
180
181 invchildren = zerodict()
181 invchildren = zerodict()
182
182
183 for rev, count in children.iteritems():
183 for rev, count in children.iteritems():
184 invchildren[count] += 1
184 invchildren[count] += 1
185
185
186 if output != '-':
186 if output != '-':
187 ui.status(_('writing output to %s\n') % output)
187 ui.status(_('writing output to %s\n') % output)
188
188
189 def pronk(d):
189 def pronk(d):
190 return sorted(d.iteritems(), key=lambda x: x[1], reverse=True)
190 return sorted(d.iteritems(), key=lambda x: x[1], reverse=True)
191
191
192 json.dump({'revs': len(revs),
192 json.dump({'revs': len(revs),
193 'lineschanged': pronk(lineschanged),
193 'lineschanged': pronk(lineschanged),
194 'children': pronk(invchildren),
194 'children': pronk(invchildren),
195 'fileschanged': pronk(fileschanged),
195 'fileschanged': pronk(fileschanged),
196 'filesadded': pronk(filesadded),
196 'filesadded': pronk(filesadded),
197 'linesinfilesadded': pronk(linesinfilesadded),
197 'linesinfilesadded': pronk(linesinfilesadded),
198 'dirsadded': pronk(dirsadded),
198 'dirsadded': pronk(dirsadded),
199 'filesremoved': pronk(filesremoved),
199 'filesremoved': pronk(filesremoved),
200 'linelengths': pronk(linelengths),
200 'linelengths': pronk(linelengths),
201 'parents': pronk(parents),
201 'parents': pronk(parents),
202 'p1distance': pronk(p1distance),
202 'p1distance': pronk(p1distance),
203 'p2distance': pronk(p2distance),
203 'p2distance': pronk(p2distance),
204 'interarrival': pronk(interarrival),
204 'interarrival': pronk(interarrival),
205 'tzoffset': pronk(tzoffset),
205 'tzoffset': pronk(tzoffset),
206 },
206 },
207 fp)
207 fp)
208 fp.close()
208 fp.close()
209
209
210 @command('synthesize',
210 @command('synthesize',
211 [('c', 'count', 0, _('create given number of commits'), _('COUNT')),
211 [('c', 'count', 0, _('create given number of commits'), _('COUNT')),
212 ('', 'dict', '', _('path to a dictionary of words'), _('FILE'))],
212 ('', 'dict', '', _('path to a dictionary of words'), _('FILE'))],
213 _('hg synthesize [OPTION].. DESCFILE'))
213 _('hg synthesize [OPTION].. DESCFILE'))
214 def synthesize(ui, repo, descpath, **opts):
214 def synthesize(ui, repo, descpath, **opts):
215 '''synthesize commits based on a model of an existing repository
215 '''synthesize commits based on a model of an existing repository
216
216
217 The model must have been generated by :hg:`analyze`. Commits will
217 The model must have been generated by :hg:`analyze`. Commits will
218 be generated randomly according to the probabilities described in
218 be generated randomly according to the probabilities described in
219 the model.
219 the model.
220
220
221 When synthesizing new content, commit descriptions, and user
221 When synthesizing new content, commit descriptions, and user
222 names, words will be chosen randomly from a dictionary that is
222 names, words will be chosen randomly from a dictionary that is
223 presumed to contain one word per line. Use --dict to specify the
223 presumed to contain one word per line. Use --dict to specify the
224 path to an alternate dictionary to use.
224 path to an alternate dictionary to use.
225 '''
225 '''
226 try:
226 try:
227 fp = hg.openpath(ui, descpath)
227 fp = hg.openpath(ui, descpath)
228 except Exception, err:
228 except Exception, err:
229 raise util.Abort('%s: %s' % (descpath, err[0].strerror))
229 raise util.Abort('%s: %s' % (descpath, err[0].strerror))
230 desc = json.load(fp)
230 desc = json.load(fp)
231 fp.close()
231 fp.close()
232
232
233 def cdf(l):
233 def cdf(l):
234 if not l:
234 if not l:
235 return [], []
235 return [], []
236 vals, probs = zip(*sorted(l, key=lambda x: x[1], reverse=True))
236 vals, probs = zip(*sorted(l, key=lambda x: x[1], reverse=True))
237 t = float(sum(probs, 0))
237 t = float(sum(probs, 0))
238 s, cdfs = 0, []
238 s, cdfs = 0, []
239 for v in probs:
239 for v in probs:
240 s += v
240 s += v
241 cdfs.append(s / t)
241 cdfs.append(s / t)
242 return vals, cdfs
242 return vals, cdfs
243
243
244 lineschanged = cdf(desc['lineschanged'])
244 lineschanged = cdf(desc['lineschanged'])
245 fileschanged = cdf(desc['fileschanged'])
245 fileschanged = cdf(desc['fileschanged'])
246 filesadded = cdf(desc['filesadded'])
246 filesadded = cdf(desc['filesadded'])
247 dirsadded = cdf(desc['dirsadded'])
247 dirsadded = cdf(desc['dirsadded'])
248 filesremoved = cdf(desc['filesremoved'])
248 filesremoved = cdf(desc['filesremoved'])
249 linelengths = cdf(desc['linelengths'])
249 linelengths = cdf(desc['linelengths'])
250 parents = cdf(desc['parents'])
250 parents = cdf(desc['parents'])
251 p1distance = cdf(desc['p1distance'])
251 p1distance = cdf(desc['p1distance'])
252 p2distance = cdf(desc['p2distance'])
252 p2distance = cdf(desc['p2distance'])
253 interarrival = cdf(desc['interarrival'])
253 interarrival = cdf(desc['interarrival'])
254 linesinfilesadded = cdf(desc['linesinfilesadded'])
254 linesinfilesadded = cdf(desc['linesinfilesadded'])
255 tzoffset = cdf(desc['tzoffset'])
255 tzoffset = cdf(desc['tzoffset'])
256
256
257 dictfile = opts.get('dict') or '/usr/share/dict/words'
257 dictfile = opts.get('dict') or '/usr/share/dict/words'
258 try:
258 try:
259 fp = open(dictfile, 'rU')
259 fp = open(dictfile, 'rU')
260 except IOError, err:
260 except IOError, err:
261 raise util.Abort('%s: %s' % (dictfile, err.strerror))
261 raise util.Abort('%s: %s' % (dictfile, err.strerror))
262 words = fp.read().splitlines()
262 words = fp.read().splitlines()
263 fp.close()
263 fp.close()
264
264
265 def pick(cdf):
265 def pick(cdf):
266 return cdf[0][bisect.bisect_left(cdf[1], random.random())]
266 return cdf[0][bisect.bisect_left(cdf[1], random.random())]
267
267
268 def makeline(minimum=0):
268 def makeline(minimum=0):
269 total = max(minimum, pick(linelengths))
269 total = max(minimum, pick(linelengths))
270 c, l = 0, []
270 c, l = 0, []
271 while c < total:
271 while c < total:
272 w = random.choice(words)
272 w = random.choice(words)
273 c += len(w) + 1
273 c += len(w) + 1
274 l.append(w)
274 l.append(w)
275 return ' '.join(l)
275 return ' '.join(l)
276
276
277 wlock = repo.wlock()
277 wlock = repo.wlock()
278 lock = repo.lock()
278 lock = repo.lock()
279
279
280 nevertouch = set(('.hgsub', '.hgignore', '.hgtags'))
280 nevertouch = set(('.hgsub', '.hgignore', '.hgtags'))
281
281
282 progress = ui.progress
282 progress = ui.progress
283 _synthesizing = _('synthesizing')
283 _synthesizing = _('synthesizing')
284 _changesets = _('changesets')
284 _changesets = _('changesets')
285
285
286 count = int(opts['count'])
286 count = int(opts['count'])
287 heads = set(map(repo.changelog.rev, repo.heads()))
287 heads = set(map(repo.changelog.rev, repo.heads()))
288 for i in xrange(count):
288 for i in xrange(count):
289 progress(_synthesizing, i, unit=_changesets, total=count)
289 progress(_synthesizing, i, unit=_changesets, total=count)
290
290
291 node = repo.changelog.node
291 node = repo.changelog.node
292 revs = len(repo)
292 revs = len(repo)
293
293
294 def pickhead(heads, distance):
294 def pickhead(heads, distance):
295 if heads:
295 if heads:
296 lheads = sorted(heads)
296 lheads = sorted(heads)
297 rev = revs - min(pick(distance), revs)
297 rev = revs - min(pick(distance), revs)
298 if rev < lheads[-1]:
298 if rev < lheads[-1]:
299 rev = lheads[bisect.bisect_left(lheads, rev)]
299 rev = lheads[bisect.bisect_left(lheads, rev)]
300 else:
300 else:
301 rev = lheads[-1]
301 rev = lheads[-1]
302 return rev, node(rev)
302 return rev, node(rev)
303 return nullrev, nullid
303 return nullrev, nullid
304
304
305 r1 = revs - min(pick(p1distance), revs)
305 r1 = revs - min(pick(p1distance), revs)
306 p1 = node(r1)
306 p1 = node(r1)
307
307
308 # the number of heads will grow without bound if we use a pure
308 # the number of heads will grow without bound if we use a pure
309 # model, so artificially constrain their proliferation
309 # model, so artificially constrain their proliferation
310 if pick(parents) == 2 or len(heads) > random.randint(1, 20):
310 if pick(parents) == 2 or len(heads) > random.randint(1, 20):
311 r2, p2 = pickhead(heads.difference([r1]), p2distance)
311 r2, p2 = pickhead(heads.difference([r1]), p2distance)
312 else:
312 else:
313 r2, p2 = nullrev, nullid
313 r2, p2 = nullrev, nullid
314
314
315 pl = [p1, p2]
315 pl = [p1, p2]
316 pctx = repo[r1]
316 pctx = repo[r1]
317 mf = pctx.manifest()
317 mf = pctx.manifest()
318 mfk = mf.keys()
318 mfk = mf.keys()
319 changes = {}
319 changes = {}
320 if mfk:
320 if mfk:
321 for __ in xrange(pick(fileschanged)):
321 for __ in xrange(pick(fileschanged)):
322 for __ in xrange(10):
322 for __ in xrange(10):
323 fctx = pctx.filectx(random.choice(mfk))
323 fctx = pctx.filectx(random.choice(mfk))
324 path = fctx.path()
324 path = fctx.path()
325 if not (path in nevertouch or fctx.isbinary() or
325 if not (path in nevertouch or fctx.isbinary() or
326 'l' in fctx.flags()):
326 'l' in fctx.flags()):
327 break
327 break
328 lines = fctx.data().splitlines()
328 lines = fctx.data().splitlines()
329 add, remove = pick(lineschanged)
329 add, remove = pick(lineschanged)
330 for __ in xrange(remove):
330 for __ in xrange(remove):
331 if not lines:
331 if not lines:
332 break
332 break
333 del lines[random.randrange(0, len(lines))]
333 del lines[random.randrange(0, len(lines))]
334 for __ in xrange(add):
334 for __ in xrange(add):
335 lines.insert(random.randint(0, len(lines)), makeline())
335 lines.insert(random.randint(0, len(lines)), makeline())
336 path = fctx.path()
336 path = fctx.path()
337 changes[path] = context.memfilectx(path,
337 changes[path] = context.memfilectx(repo, path,
338 '\n'.join(lines) + '\n')
338 '\n'.join(lines) + '\n')
339 for __ in xrange(pick(filesremoved)):
339 for __ in xrange(pick(filesremoved)):
340 path = random.choice(mfk)
340 path = random.choice(mfk)
341 for __ in xrange(10):
341 for __ in xrange(10):
342 path = random.choice(mfk)
342 path = random.choice(mfk)
343 if path not in changes:
343 if path not in changes:
344 changes[path] = None
344 changes[path] = None
345 break
345 break
346 if filesadded:
346 if filesadded:
347 dirs = list(pctx.dirs())
347 dirs = list(pctx.dirs())
348 dirs.append('')
348 dirs.append('')
349 for __ in xrange(pick(filesadded)):
349 for __ in xrange(pick(filesadded)):
350 path = [random.choice(dirs)]
350 path = [random.choice(dirs)]
351 if pick(dirsadded):
351 if pick(dirsadded):
352 path.append(random.choice(words))
352 path.append(random.choice(words))
353 path.append(random.choice(words))
353 path.append(random.choice(words))
354 path = '/'.join(filter(None, path))
354 path = '/'.join(filter(None, path))
355 data = '\n'.join(makeline()
355 data = '\n'.join(makeline()
356 for __ in xrange(pick(linesinfilesadded))) + '\n'
356 for __ in xrange(pick(linesinfilesadded))) + '\n'
357 changes[path] = context.memfilectx(path, data)
357 changes[path] = context.memfilectx(repo, path, data)
358 def filectxfn(repo, memctx, path):
358 def filectxfn(repo, memctx, path):
359 data = changes[path]
359 data = changes[path]
360 if data is None:
360 if data is None:
361 raise IOError
361 raise IOError
362 return data
362 return data
363 if not changes:
363 if not changes:
364 continue
364 continue
365 if revs:
365 if revs:
366 date = repo['tip'].date()[0] + pick(interarrival)
366 date = repo['tip'].date()[0] + pick(interarrival)
367 else:
367 else:
368 date = time.time() - (86400 * count)
368 date = time.time() - (86400 * count)
369 user = random.choice(words) + '@' + random.choice(words)
369 user = random.choice(words) + '@' + random.choice(words)
370 mc = context.memctx(repo, pl, makeline(minimum=2),
370 mc = context.memctx(repo, pl, makeline(minimum=2),
371 sorted(changes.iterkeys()),
371 sorted(changes.iterkeys()),
372 filectxfn, user, '%d %d' % (date, pick(tzoffset)))
372 filectxfn, user, '%d %d' % (date, pick(tzoffset)))
373 newnode = mc.commit()
373 newnode = mc.commit()
374 heads.add(repo.changelog.rev(newnode))
374 heads.add(repo.changelog.rev(newnode))
375 heads.discard(r1)
375 heads.discard(r1)
376 heads.discard(r2)
376 heads.discard(r2)
377
377
378 lock.release()
378 lock.release()
379 wlock.release()
379 wlock.release()
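For context on the hunk above: synthesize() builds each commit entirely in memory by mapping paths to memfilectx objects (or None for removals) and handing a filectxfn to memctx. Below is a condensed sketch of that pattern under the new signature; the helper name and its simplified arguments are illustrative, and it assumes the same Python 2-era Mercurial API used in this file.

from mercurial import context

def commitinmemory(repo, parents, changes, text, user, date):
    # 'changes' maps path -> file data, or None for a file to be removed,
    # mirroring the dict built in synthesize() above.
    def filectxfn(repo, memctx, path):
        data = changes[path]
        if data is None:
            raise IOError  # in this API era, IOError marks the file as removed
        return context.memfilectx(repo, path, data)
    mc = context.memctx(repo, parents, text, sorted(changes), filectxfn,
                        user, date)
    return mc.commit()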
@@ -1,452 +1,452
1 # hg.py - hg backend for convert extension
1 # hg.py - hg backend for convert extension
2 #
2 #
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 # Notes for hg->hg conversion:
8 # Notes for hg->hg conversion:
9 #
9 #
10 # * Old versions of Mercurial didn't trim the whitespace from the ends
10 # * Old versions of Mercurial didn't trim the whitespace from the ends
11 # of commit messages, but new versions do. Changesets created by
11 # of commit messages, but new versions do. Changesets created by
12 # those older versions, then converted, may thus have different
12 # those older versions, then converted, may thus have different
13 # hashes for changesets that are otherwise identical.
13 # hashes for changesets that are otherwise identical.
14 #
14 #
15 # * Using "--config convert.hg.saverev=true" will make the source
15 # * Using "--config convert.hg.saverev=true" will make the source
16 # identifier to be stored in the converted revision. This will cause
16 # identifier to be stored in the converted revision. This will cause
17 # the converted revision to have a different identity than the
17 # the converted revision to have a different identity than the
18 # source.
18 # source.
19
19
20
20
21 import os, time, cStringIO
21 import os, time, cStringIO
22 from mercurial.i18n import _
22 from mercurial.i18n import _
23 from mercurial.node import bin, hex, nullid
23 from mercurial.node import bin, hex, nullid
24 from mercurial import hg, util, context, bookmarks, error, scmutil
24 from mercurial import hg, util, context, bookmarks, error, scmutil
25
25
26 from common import NoRepo, commit, converter_source, converter_sink
26 from common import NoRepo, commit, converter_source, converter_sink
27
27
28 import re
28 import re
29 sha1re = re.compile(r'\b[0-9a-f]{6,40}\b')
29 sha1re = re.compile(r'\b[0-9a-f]{6,40}\b')
30
30
31 class mercurial_sink(converter_sink):
31 class mercurial_sink(converter_sink):
32 def __init__(self, ui, path):
32 def __init__(self, ui, path):
33 converter_sink.__init__(self, ui, path)
33 converter_sink.__init__(self, ui, path)
34 self.branchnames = ui.configbool('convert', 'hg.usebranchnames', True)
34 self.branchnames = ui.configbool('convert', 'hg.usebranchnames', True)
35 self.clonebranches = ui.configbool('convert', 'hg.clonebranches', False)
35 self.clonebranches = ui.configbool('convert', 'hg.clonebranches', False)
36 self.tagsbranch = ui.config('convert', 'hg.tagsbranch', 'default')
36 self.tagsbranch = ui.config('convert', 'hg.tagsbranch', 'default')
37 self.lastbranch = None
37 self.lastbranch = None
38 if os.path.isdir(path) and len(os.listdir(path)) > 0:
38 if os.path.isdir(path) and len(os.listdir(path)) > 0:
39 try:
39 try:
40 self.repo = hg.repository(self.ui, path)
40 self.repo = hg.repository(self.ui, path)
41 if not self.repo.local():
41 if not self.repo.local():
42 raise NoRepo(_('%s is not a local Mercurial repository')
42 raise NoRepo(_('%s is not a local Mercurial repository')
43 % path)
43 % path)
44 except error.RepoError, err:
44 except error.RepoError, err:
45 ui.traceback()
45 ui.traceback()
46 raise NoRepo(err.args[0])
46 raise NoRepo(err.args[0])
47 else:
47 else:
48 try:
48 try:
49 ui.status(_('initializing destination %s repository\n') % path)
49 ui.status(_('initializing destination %s repository\n') % path)
50 self.repo = hg.repository(self.ui, path, create=True)
50 self.repo = hg.repository(self.ui, path, create=True)
51 if not self.repo.local():
51 if not self.repo.local():
52 raise NoRepo(_('%s is not a local Mercurial repository')
52 raise NoRepo(_('%s is not a local Mercurial repository')
53 % path)
53 % path)
54 self.created.append(path)
54 self.created.append(path)
55 except error.RepoError:
55 except error.RepoError:
56 ui.traceback()
56 ui.traceback()
57 raise NoRepo(_("could not create hg repository %s as sink")
57 raise NoRepo(_("could not create hg repository %s as sink")
58 % path)
58 % path)
59 self.lock = None
59 self.lock = None
60 self.wlock = None
60 self.wlock = None
61 self.filemapmode = False
61 self.filemapmode = False
62
62
63 def before(self):
63 def before(self):
64 self.ui.debug('run hg sink pre-conversion action\n')
64 self.ui.debug('run hg sink pre-conversion action\n')
65 self.wlock = self.repo.wlock()
65 self.wlock = self.repo.wlock()
66 self.lock = self.repo.lock()
66 self.lock = self.repo.lock()
67
67
68 def after(self):
68 def after(self):
69 self.ui.debug('run hg sink post-conversion action\n')
69 self.ui.debug('run hg sink post-conversion action\n')
70 if self.lock:
70 if self.lock:
71 self.lock.release()
71 self.lock.release()
72 if self.wlock:
72 if self.wlock:
73 self.wlock.release()
73 self.wlock.release()
74
74
75 def revmapfile(self):
75 def revmapfile(self):
76 return self.repo.join("shamap")
76 return self.repo.join("shamap")
77
77
78 def authorfile(self):
78 def authorfile(self):
79 return self.repo.join("authormap")
79 return self.repo.join("authormap")
80
80
81 def setbranch(self, branch, pbranches):
81 def setbranch(self, branch, pbranches):
82 if not self.clonebranches:
82 if not self.clonebranches:
83 return
83 return
84
84
85 setbranch = (branch != self.lastbranch)
85 setbranch = (branch != self.lastbranch)
86 self.lastbranch = branch
86 self.lastbranch = branch
87 if not branch:
87 if not branch:
88 branch = 'default'
88 branch = 'default'
89 pbranches = [(b[0], b[1] and b[1] or 'default') for b in pbranches]
89 pbranches = [(b[0], b[1] and b[1] or 'default') for b in pbranches]
90 pbranch = pbranches and pbranches[0][1] or 'default'
90 pbranch = pbranches and pbranches[0][1] or 'default'
91
91
92 branchpath = os.path.join(self.path, branch)
92 branchpath = os.path.join(self.path, branch)
93 if setbranch:
93 if setbranch:
94 self.after()
94 self.after()
95 try:
95 try:
96 self.repo = hg.repository(self.ui, branchpath)
96 self.repo = hg.repository(self.ui, branchpath)
97 except Exception:
97 except Exception:
98 self.repo = hg.repository(self.ui, branchpath, create=True)
98 self.repo = hg.repository(self.ui, branchpath, create=True)
99 self.before()
99 self.before()
100
100
101 # pbranches may bring revisions from other branches (merge parents)
101 # pbranches may bring revisions from other branches (merge parents)
102 # Make sure we have them, or pull them.
102 # Make sure we have them, or pull them.
103 missings = {}
103 missings = {}
104 for b in pbranches:
104 for b in pbranches:
105 try:
105 try:
106 self.repo.lookup(b[0])
106 self.repo.lookup(b[0])
107 except Exception:
107 except Exception:
108 missings.setdefault(b[1], []).append(b[0])
108 missings.setdefault(b[1], []).append(b[0])
109
109
110 if missings:
110 if missings:
111 self.after()
111 self.after()
112 for pbranch, heads in sorted(missings.iteritems()):
112 for pbranch, heads in sorted(missings.iteritems()):
113 pbranchpath = os.path.join(self.path, pbranch)
113 pbranchpath = os.path.join(self.path, pbranch)
114 prepo = hg.peer(self.ui, {}, pbranchpath)
114 prepo = hg.peer(self.ui, {}, pbranchpath)
115 self.ui.note(_('pulling from %s into %s\n') % (pbranch, branch))
115 self.ui.note(_('pulling from %s into %s\n') % (pbranch, branch))
116 self.repo.pull(prepo, [prepo.lookup(h) for h in heads])
116 self.repo.pull(prepo, [prepo.lookup(h) for h in heads])
117 self.before()
117 self.before()
118
118
119 def _rewritetags(self, source, revmap, data):
119 def _rewritetags(self, source, revmap, data):
120 fp = cStringIO.StringIO()
120 fp = cStringIO.StringIO()
121 for line in data.splitlines():
121 for line in data.splitlines():
122 s = line.split(' ', 1)
122 s = line.split(' ', 1)
123 if len(s) != 2:
123 if len(s) != 2:
124 continue
124 continue
125 revid = revmap.get(source.lookuprev(s[0]))
125 revid = revmap.get(source.lookuprev(s[0]))
126 if not revid:
126 if not revid:
127 continue
127 continue
128 fp.write('%s %s\n' % (revid, s[1]))
128 fp.write('%s %s\n' % (revid, s[1]))
129 return fp.getvalue()
129 return fp.getvalue()
130
130
131 def putcommit(self, files, copies, parents, commit, source, revmap):
131 def putcommit(self, files, copies, parents, commit, source, revmap):
132
132
133 files = dict(files)
133 files = dict(files)
134 def getfilectx(repo, memctx, f):
134 def getfilectx(repo, memctx, f):
135 v = files[f]
135 v = files[f]
136 data, mode = source.getfile(f, v)
136 data, mode = source.getfile(f, v)
137 if f == '.hgtags':
137 if f == '.hgtags':
138 data = self._rewritetags(source, revmap, data)
138 data = self._rewritetags(source, revmap, data)
139 return context.memfilectx(f, data, 'l' in mode, 'x' in mode,
139 return context.memfilectx(self.repo, f, data, 'l' in mode,
140 copies.get(f))
140 'x' in mode, copies.get(f))
141
141
142 pl = []
142 pl = []
143 for p in parents:
143 for p in parents:
144 if p not in pl:
144 if p not in pl:
145 pl.append(p)
145 pl.append(p)
146 parents = pl
146 parents = pl
147 nparents = len(parents)
147 nparents = len(parents)
148 if self.filemapmode and nparents == 1:
148 if self.filemapmode and nparents == 1:
149 m1node = self.repo.changelog.read(bin(parents[0]))[0]
149 m1node = self.repo.changelog.read(bin(parents[0]))[0]
150 parent = parents[0]
150 parent = parents[0]
151
151
152 if len(parents) < 2:
152 if len(parents) < 2:
153 parents.append(nullid)
153 parents.append(nullid)
154 if len(parents) < 2:
154 if len(parents) < 2:
155 parents.append(nullid)
155 parents.append(nullid)
156 p2 = parents.pop(0)
156 p2 = parents.pop(0)
157
157
158 text = commit.desc
158 text = commit.desc
159
159
160 sha1s = re.findall(sha1re, text)
160 sha1s = re.findall(sha1re, text)
161 for sha1 in sha1s:
161 for sha1 in sha1s:
162 oldrev = source.lookuprev(sha1)
162 oldrev = source.lookuprev(sha1)
163 newrev = revmap.get(oldrev)
163 newrev = revmap.get(oldrev)
164 if newrev is not None:
164 if newrev is not None:
165 text = text.replace(sha1, newrev[:len(sha1)])
165 text = text.replace(sha1, newrev[:len(sha1)])
166
166
167 extra = commit.extra.copy()
167 extra = commit.extra.copy()
168 if self.branchnames and commit.branch:
168 if self.branchnames and commit.branch:
169 extra['branch'] = commit.branch
169 extra['branch'] = commit.branch
170 if commit.rev:
170 if commit.rev:
171 extra['convert_revision'] = commit.rev
171 extra['convert_revision'] = commit.rev
172
172
173 while parents:
173 while parents:
174 p1 = p2
174 p1 = p2
175 p2 = parents.pop(0)
175 p2 = parents.pop(0)
176 ctx = context.memctx(self.repo, (p1, p2), text, files.keys(),
176 ctx = context.memctx(self.repo, (p1, p2), text, files.keys(),
177 getfilectx, commit.author, commit.date, extra)
177 getfilectx, commit.author, commit.date, extra)
178 self.repo.commitctx(ctx)
178 self.repo.commitctx(ctx)
179 text = "(octopus merge fixup)\n"
179 text = "(octopus merge fixup)\n"
180 p2 = hex(self.repo.changelog.tip())
180 p2 = hex(self.repo.changelog.tip())
181
181
182 if self.filemapmode and nparents == 1:
182 if self.filemapmode and nparents == 1:
183 man = self.repo.manifest
183 man = self.repo.manifest
184 mnode = self.repo.changelog.read(bin(p2))[0]
184 mnode = self.repo.changelog.read(bin(p2))[0]
185 closed = 'close' in commit.extra
185 closed = 'close' in commit.extra
186 if not closed and not man.cmp(m1node, man.revision(mnode)):
186 if not closed and not man.cmp(m1node, man.revision(mnode)):
187 self.ui.status(_("filtering out empty revision\n"))
187 self.ui.status(_("filtering out empty revision\n"))
188 self.repo.rollback(force=True)
188 self.repo.rollback(force=True)
189 return parent
189 return parent
190 return p2
190 return p2
191
191
192 def puttags(self, tags):
192 def puttags(self, tags):
193 try:
193 try:
194 parentctx = self.repo[self.tagsbranch]
194 parentctx = self.repo[self.tagsbranch]
195 tagparent = parentctx.node()
195 tagparent = parentctx.node()
196 except error.RepoError:
196 except error.RepoError:
197 parentctx = None
197 parentctx = None
198 tagparent = nullid
198 tagparent = nullid
199
199
200 oldlines = set()
200 oldlines = set()
201 for branch, heads in self.repo.branchmap().iteritems():
201 for branch, heads in self.repo.branchmap().iteritems():
202 for h in heads:
202 for h in heads:
203 if '.hgtags' in self.repo[h]:
203 if '.hgtags' in self.repo[h]:
204 oldlines.update(
204 oldlines.update(
205 set(self.repo[h]['.hgtags'].data().splitlines(True)))
205 set(self.repo[h]['.hgtags'].data().splitlines(True)))
206 oldlines = sorted(list(oldlines))
206 oldlines = sorted(list(oldlines))
207
207
208 newlines = sorted([("%s %s\n" % (tags[tag], tag)) for tag in tags])
208 newlines = sorted([("%s %s\n" % (tags[tag], tag)) for tag in tags])
209 if newlines == oldlines:
209 if newlines == oldlines:
210 return None, None
210 return None, None
211
211
212 # if the old and new tags match, then there is nothing to update
212 # if the old and new tags match, then there is nothing to update
213 oldtags = set()
213 oldtags = set()
214 newtags = set()
214 newtags = set()
215 for line in oldlines:
215 for line in oldlines:
216 s = line.strip().split(' ', 1)
216 s = line.strip().split(' ', 1)
217 if len(s) != 2:
217 if len(s) != 2:
218 continue
218 continue
219 oldtags.add(s[1])
219 oldtags.add(s[1])
220 for line in newlines:
220 for line in newlines:
221 s = line.strip().split(' ', 1)
221 s = line.strip().split(' ', 1)
222 if len(s) != 2:
222 if len(s) != 2:
223 continue
223 continue
224 if s[1] not in oldtags:
224 if s[1] not in oldtags:
225 newtags.add(s[1].strip())
225 newtags.add(s[1].strip())
226
226
227 if not newtags:
227 if not newtags:
228 return None, None
228 return None, None
229
229
230 data = "".join(newlines)
230 data = "".join(newlines)
231 def getfilectx(repo, memctx, f):
231 def getfilectx(repo, memctx, f):
232 return context.memfilectx(f, data, False, False, None)
232 return context.memfilectx(repo, f, data, False, False, None)
233
233
234 self.ui.status(_("updating tags\n"))
234 self.ui.status(_("updating tags\n"))
235 date = "%s 0" % int(time.mktime(time.gmtime()))
235 date = "%s 0" % int(time.mktime(time.gmtime()))
236 extra = {'branch': self.tagsbranch}
236 extra = {'branch': self.tagsbranch}
237 ctx = context.memctx(self.repo, (tagparent, None), "update tags",
237 ctx = context.memctx(self.repo, (tagparent, None), "update tags",
238 [".hgtags"], getfilectx, "convert-repo", date,
238 [".hgtags"], getfilectx, "convert-repo", date,
239 extra)
239 extra)
240 self.repo.commitctx(ctx)
240 self.repo.commitctx(ctx)
241 return hex(self.repo.changelog.tip()), hex(tagparent)
241 return hex(self.repo.changelog.tip()), hex(tagparent)
242
242
243 def setfilemapmode(self, active):
243 def setfilemapmode(self, active):
244 self.filemapmode = active
244 self.filemapmode = active
245
245
246 def putbookmarks(self, updatedbookmark):
246 def putbookmarks(self, updatedbookmark):
247 if not len(updatedbookmark):
247 if not len(updatedbookmark):
248 return
248 return
249
249
250 self.ui.status(_("updating bookmarks\n"))
250 self.ui.status(_("updating bookmarks\n"))
251 destmarks = self.repo._bookmarks
251 destmarks = self.repo._bookmarks
252 for bookmark in updatedbookmark:
252 for bookmark in updatedbookmark:
253 destmarks[bookmark] = bin(updatedbookmark[bookmark])
253 destmarks[bookmark] = bin(updatedbookmark[bookmark])
254 destmarks.write()
254 destmarks.write()
255
255
256 def hascommitfrommap(self, rev):
256 def hascommitfrommap(self, rev):
257 # the exact semantics of clonebranches is unclear so we can't say no
257 # the exact semantics of clonebranches is unclear so we can't say no
258 return rev in self.repo or self.clonebranches
258 return rev in self.repo or self.clonebranches
259
259
260 def hascommitforsplicemap(self, rev):
260 def hascommitforsplicemap(self, rev):
261 if rev not in self.repo and self.clonebranches:
261 if rev not in self.repo and self.clonebranches:
262 raise util.Abort(_('revision %s not found in destination '
262 raise util.Abort(_('revision %s not found in destination '
263 'repository (lookups with clonebranches=true '
263 'repository (lookups with clonebranches=true '
264 'are not implemented)') % rev)
264 'are not implemented)') % rev)
265 return rev in self.repo
265 return rev in self.repo
266
266
267 class mercurial_source(converter_source):
267 class mercurial_source(converter_source):
268 def __init__(self, ui, path, rev=None):
268 def __init__(self, ui, path, rev=None):
269 converter_source.__init__(self, ui, path, rev)
269 converter_source.__init__(self, ui, path, rev)
270 self.ignoreerrors = ui.configbool('convert', 'hg.ignoreerrors', False)
270 self.ignoreerrors = ui.configbool('convert', 'hg.ignoreerrors', False)
271 self.ignored = set()
271 self.ignored = set()
272 self.saverev = ui.configbool('convert', 'hg.saverev', False)
272 self.saverev = ui.configbool('convert', 'hg.saverev', False)
273 try:
273 try:
274 self.repo = hg.repository(self.ui, path)
274 self.repo = hg.repository(self.ui, path)
275 # try to provoke an exception if this isn't really a hg
275 # try to provoke an exception if this isn't really a hg
276 # repo, but some other bogus compatible-looking url
276 # repo, but some other bogus compatible-looking url
277 if not self.repo.local():
277 if not self.repo.local():
278 raise error.RepoError
278 raise error.RepoError
279 except error.RepoError:
279 except error.RepoError:
280 ui.traceback()
280 ui.traceback()
281 raise NoRepo(_("%s is not a local Mercurial repository") % path)
281 raise NoRepo(_("%s is not a local Mercurial repository") % path)
282 self.lastrev = None
282 self.lastrev = None
283 self.lastctx = None
283 self.lastctx = None
284 self._changescache = None
284 self._changescache = None
285 self.convertfp = None
285 self.convertfp = None
286 # Restrict converted revisions to startrev descendants
286 # Restrict converted revisions to startrev descendants
287 startnode = ui.config('convert', 'hg.startrev')
287 startnode = ui.config('convert', 'hg.startrev')
288 hgrevs = ui.config('convert', 'hg.revs')
288 hgrevs = ui.config('convert', 'hg.revs')
289 if hgrevs is None:
289 if hgrevs is None:
290 if startnode is not None:
290 if startnode is not None:
291 try:
291 try:
292 startnode = self.repo.lookup(startnode)
292 startnode = self.repo.lookup(startnode)
293 except error.RepoError:
293 except error.RepoError:
294 raise util.Abort(_('%s is not a valid start revision')
294 raise util.Abort(_('%s is not a valid start revision')
295 % startnode)
295 % startnode)
296 startrev = self.repo.changelog.rev(startnode)
296 startrev = self.repo.changelog.rev(startnode)
297 children = {startnode: 1}
297 children = {startnode: 1}
298 for r in self.repo.changelog.descendants([startrev]):
298 for r in self.repo.changelog.descendants([startrev]):
299 children[self.repo.changelog.node(r)] = 1
299 children[self.repo.changelog.node(r)] = 1
300 self.keep = children.__contains__
300 self.keep = children.__contains__
301 else:
301 else:
302 self.keep = util.always
302 self.keep = util.always
303 if rev:
303 if rev:
304 self._heads = [self.repo[rev].node()]
304 self._heads = [self.repo[rev].node()]
305 else:
305 else:
306 self._heads = self.repo.heads()
306 self._heads = self.repo.heads()
307 else:
307 else:
308 if rev or startnode is not None:
308 if rev or startnode is not None:
309 raise util.Abort(_('hg.revs cannot be combined with '
309 raise util.Abort(_('hg.revs cannot be combined with '
310 'hg.startrev or --rev'))
310 'hg.startrev or --rev'))
311 nodes = set()
311 nodes = set()
312 parents = set()
312 parents = set()
313 for r in scmutil.revrange(self.repo, [hgrevs]):
313 for r in scmutil.revrange(self.repo, [hgrevs]):
314 ctx = self.repo[r]
314 ctx = self.repo[r]
315 nodes.add(ctx.node())
315 nodes.add(ctx.node())
316 parents.update(p.node() for p in ctx.parents())
316 parents.update(p.node() for p in ctx.parents())
317 self.keep = nodes.__contains__
317 self.keep = nodes.__contains__
318 self._heads = nodes - parents
318 self._heads = nodes - parents
319
319
320 def changectx(self, rev):
320 def changectx(self, rev):
321 if self.lastrev != rev:
321 if self.lastrev != rev:
322 self.lastctx = self.repo[rev]
322 self.lastctx = self.repo[rev]
323 self.lastrev = rev
323 self.lastrev = rev
324 return self.lastctx
324 return self.lastctx
325
325
326 def parents(self, ctx):
326 def parents(self, ctx):
327 return [p for p in ctx.parents() if p and self.keep(p.node())]
327 return [p for p in ctx.parents() if p and self.keep(p.node())]
328
328
329 def getheads(self):
329 def getheads(self):
330 return [hex(h) for h in self._heads if self.keep(h)]
330 return [hex(h) for h in self._heads if self.keep(h)]
331
331
332 def getfile(self, name, rev):
332 def getfile(self, name, rev):
333 try:
333 try:
334 fctx = self.changectx(rev)[name]
334 fctx = self.changectx(rev)[name]
335 return fctx.data(), fctx.flags()
335 return fctx.data(), fctx.flags()
336 except error.LookupError, err:
336 except error.LookupError, err:
337 raise IOError(err)
337 raise IOError(err)
338
338
339 def getchanges(self, rev):
339 def getchanges(self, rev):
340 ctx = self.changectx(rev)
340 ctx = self.changectx(rev)
341 parents = self.parents(ctx)
341 parents = self.parents(ctx)
342 if not parents:
342 if not parents:
343 files = sorted(ctx.manifest())
343 files = sorted(ctx.manifest())
344 # getcopies() is not needed for roots, but it is a simple way to
344 # getcopies() is not needed for roots, but it is a simple way to
345 # detect missing revlogs and abort on errors or populate
345 # detect missing revlogs and abort on errors or populate
346 # self.ignored
346 # self.ignored
347 self.getcopies(ctx, parents, files)
347 self.getcopies(ctx, parents, files)
348 return [(f, rev) for f in files if f not in self.ignored], {}
348 return [(f, rev) for f in files if f not in self.ignored], {}
349 if self._changescache and self._changescache[0] == rev:
349 if self._changescache and self._changescache[0] == rev:
350 m, a, r = self._changescache[1]
350 m, a, r = self._changescache[1]
351 else:
351 else:
352 m, a, r = self.repo.status(parents[0].node(), ctx.node())[:3]
352 m, a, r = self.repo.status(parents[0].node(), ctx.node())[:3]
353 # getcopies() detects missing revlogs early, run it before
353 # getcopies() detects missing revlogs early, run it before
354 # filtering the changes.
354 # filtering the changes.
355 copies = self.getcopies(ctx, parents, m + a)
355 copies = self.getcopies(ctx, parents, m + a)
356 changes = [(name, rev) for name in m + a + r
356 changes = [(name, rev) for name in m + a + r
357 if name not in self.ignored]
357 if name not in self.ignored]
358 return sorted(changes), copies
358 return sorted(changes), copies
359
359
360 def getcopies(self, ctx, parents, files):
360 def getcopies(self, ctx, parents, files):
361 copies = {}
361 copies = {}
362 for name in files:
362 for name in files:
363 if name in self.ignored:
363 if name in self.ignored:
364 continue
364 continue
365 try:
365 try:
366 copysource, _copynode = ctx.filectx(name).renamed()
366 copysource, _copynode = ctx.filectx(name).renamed()
367 if copysource in self.ignored:
367 if copysource in self.ignored:
368 continue
368 continue
369 # Ignore copy sources not in parent revisions
369 # Ignore copy sources not in parent revisions
370 found = False
370 found = False
371 for p in parents:
371 for p in parents:
372 if copysource in p:
372 if copysource in p:
373 found = True
373 found = True
374 break
374 break
375 if not found:
375 if not found:
376 continue
376 continue
377 copies[name] = copysource
377 copies[name] = copysource
378 except TypeError:
378 except TypeError:
379 pass
379 pass
380 except error.LookupError, e:
380 except error.LookupError, e:
381 if not self.ignoreerrors:
381 if not self.ignoreerrors:
382 raise
382 raise
383 self.ignored.add(name)
383 self.ignored.add(name)
384 self.ui.warn(_('ignoring: %s\n') % e)
384 self.ui.warn(_('ignoring: %s\n') % e)
385 return copies
385 return copies
386
386
387 def getcommit(self, rev):
387 def getcommit(self, rev):
388 ctx = self.changectx(rev)
388 ctx = self.changectx(rev)
389 parents = [p.hex() for p in self.parents(ctx)]
389 parents = [p.hex() for p in self.parents(ctx)]
390 if self.saverev:
390 if self.saverev:
391 crev = rev
391 crev = rev
392 else:
392 else:
393 crev = None
393 crev = None
394 return commit(author=ctx.user(),
394 return commit(author=ctx.user(),
395 date=util.datestr(ctx.date(), '%Y-%m-%d %H:%M:%S %1%2'),
395 date=util.datestr(ctx.date(), '%Y-%m-%d %H:%M:%S %1%2'),
396 desc=ctx.description(), rev=crev, parents=parents,
396 desc=ctx.description(), rev=crev, parents=parents,
397 branch=ctx.branch(), extra=ctx.extra(),
397 branch=ctx.branch(), extra=ctx.extra(),
398 sortkey=ctx.rev())
398 sortkey=ctx.rev())
399
399
400 def gettags(self):
400 def gettags(self):
401 # This will get written to .hgtags, filter non global tags out.
401 # This will get written to .hgtags, filter non global tags out.
402 tags = [t for t in self.repo.tagslist()
402 tags = [t for t in self.repo.tagslist()
403 if self.repo.tagtype(t[0]) == 'global']
403 if self.repo.tagtype(t[0]) == 'global']
404 return dict([(name, hex(node)) for name, node in tags
404 return dict([(name, hex(node)) for name, node in tags
405 if self.keep(node)])
405 if self.keep(node)])
406
406
407 def getchangedfiles(self, rev, i):
407 def getchangedfiles(self, rev, i):
408 ctx = self.changectx(rev)
408 ctx = self.changectx(rev)
409 parents = self.parents(ctx)
409 parents = self.parents(ctx)
410 if not parents and i is None:
410 if not parents and i is None:
411 i = 0
411 i = 0
412 changes = [], ctx.manifest().keys(), []
412 changes = [], ctx.manifest().keys(), []
413 else:
413 else:
414 i = i or 0
414 i = i or 0
415 changes = self.repo.status(parents[i].node(), ctx.node())[:3]
415 changes = self.repo.status(parents[i].node(), ctx.node())[:3]
416 changes = [[f for f in l if f not in self.ignored] for l in changes]
416 changes = [[f for f in l if f not in self.ignored] for l in changes]
417
417
418 if i == 0:
418 if i == 0:
419 self._changescache = (rev, changes)
419 self._changescache = (rev, changes)
420
420
421 return changes[0] + changes[1] + changes[2]
421 return changes[0] + changes[1] + changes[2]
422
422
423 def converted(self, rev, destrev):
423 def converted(self, rev, destrev):
424 if self.convertfp is None:
424 if self.convertfp is None:
425 self.convertfp = open(self.repo.join('shamap'), 'a')
425 self.convertfp = open(self.repo.join('shamap'), 'a')
426 self.convertfp.write('%s %s\n' % (destrev, rev))
426 self.convertfp.write('%s %s\n' % (destrev, rev))
427 self.convertfp.flush()
427 self.convertfp.flush()
428
428
429 def before(self):
429 def before(self):
430 self.ui.debug('run hg source pre-conversion action\n')
430 self.ui.debug('run hg source pre-conversion action\n')
431
431
432 def after(self):
432 def after(self):
433 self.ui.debug('run hg source post-conversion action\n')
433 self.ui.debug('run hg source post-conversion action\n')
434
434
435 def hasnativeorder(self):
435 def hasnativeorder(self):
436 return True
436 return True
437
437
438 def hasnativeclose(self):
438 def hasnativeclose(self):
439 return True
439 return True
440
440
441 def lookuprev(self, rev):
441 def lookuprev(self, rev):
442 try:
442 try:
443 return hex(self.repo.lookup(rev))
443 return hex(self.repo.lookup(rev))
444 except error.RepoError:
444 except error.RepoError:
445 return None
445 return None
446
446
447 def getbookmarks(self):
447 def getbookmarks(self):
448 return bookmarks.listbookmarks(self.repo)
448 return bookmarks.listbookmarks(self.repo)
449
449
450 def checkrevformat(self, revstr, mapname='splicemap'):
450 def checkrevformat(self, revstr, mapname='splicemap'):
451 """ Mercurial, revision string is a 40 byte hex """
451 """ Mercurial, revision string is a 40 byte hex """
452 self.checkhexformat(revstr, mapname)
452 self.checkhexformat(revstr, mapname)
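The same signature change shows up twice in the hg sink above: both getfilectx closures now thread a repository into memfilectx. A compact restatement of the putcommit variant follows; the factory function and its parameter list are illustrative only, while the getfile/mode conventions are the ones visible in the hunk.

from mercurial import context

def makegetfilectx(repo, source, files, copies):
    # Condensed from mercurial_sink.putcommit's inner getfilectx above;
    # the factory itself is not part of the convert extension.
    def getfilectx(_repo, memctx, f):
        data, mode = source.getfile(f, files[f])
        return context.memfilectx(repo, f, data,
                                  'l' in mode,    # symlink flag
                                  'x' in mode,    # executable flag
                                  copies.get(f))
    return getfilectx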
@@ -1,926 +1,927
1 # histedit.py - interactive history editing for mercurial
1 # histedit.py - interactive history editing for mercurial
2 #
2 #
3 # Copyright 2009 Augie Fackler <raf@durin42.com>
3 # Copyright 2009 Augie Fackler <raf@durin42.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """interactive history editing
7 """interactive history editing
8
8
9 With this extension installed, Mercurial gains one new command: histedit. Usage
9 With this extension installed, Mercurial gains one new command: histedit. Usage
10 is as follows, assuming the following history::
10 is as follows, assuming the following history::
11
11
12 @ 3[tip] 7c2fd3b9020c 2009-04-27 18:04 -0500 durin42
12 @ 3[tip] 7c2fd3b9020c 2009-04-27 18:04 -0500 durin42
13 | Add delta
13 | Add delta
14 |
14 |
15 o 2 030b686bedc4 2009-04-27 18:04 -0500 durin42
15 o 2 030b686bedc4 2009-04-27 18:04 -0500 durin42
16 | Add gamma
16 | Add gamma
17 |
17 |
18 o 1 c561b4e977df 2009-04-27 18:04 -0500 durin42
18 o 1 c561b4e977df 2009-04-27 18:04 -0500 durin42
19 | Add beta
19 | Add beta
20 |
20 |
21 o 0 d8d2fcd0e319 2009-04-27 18:04 -0500 durin42
21 o 0 d8d2fcd0e319 2009-04-27 18:04 -0500 durin42
22 Add alpha
22 Add alpha
23
23
24 If you were to run ``hg histedit c561b4e977df``, you would see the following
24 If you were to run ``hg histedit c561b4e977df``, you would see the following
25 file open in your editor::
26
27 pick c561b4e977df Add beta
28 pick 030b686bedc4 Add gamma
29 pick 7c2fd3b9020c Add delta
30
31 # Edit history between c561b4e977df and 7c2fd3b9020c
32 #
33 # Commits are listed from least to most recent
34 #
35 # Commands:
36 # p, pick = use commit
37 # e, edit = use commit, but stop for amending
38 # f, fold = use commit, but combine it with the one above
39 # d, drop = remove commit from history
40 # m, mess = edit message without changing commit content
41 #
42
43 In this file, lines beginning with ``#`` are ignored. You must specify a rule
44 for each revision in your history. For example, if you had meant to add gamma
45 before beta, and then wanted to add delta in the same revision as beta, you
46 would reorganize the file to look like this::
47
48 pick 030b686bedc4 Add gamma
49 pick c561b4e977df Add beta
50 fold 7c2fd3b9020c Add delta
51
52 # Edit history between c561b4e977df and 7c2fd3b9020c
53 #
54 # Commits are listed from least to most recent
55 #
56 # Commands:
57 # p, pick = use commit
58 # e, edit = use commit, but stop for amending
59 # f, fold = use commit, but combine it with the one above
60 # d, drop = remove commit from history
61 # m, mess = edit message without changing commit content
62 #
63
64 Once you close the editor, ``histedit`` starts working. When you
65 specify a ``fold`` operation, ``histedit`` will open an editor when it folds
66 those revisions together, offering you a chance to clean up the commit message::
67
68 Add beta
69 ***
70 Add delta
71
72 Edit the commit message to your liking, then close the editor. For
73 this example, let's assume that the commit message was changed to
74 ``Add beta and delta.`` After histedit has run and had a chance to
75 remove any old or temporary revisions it needed to, the history looks
76 like this::
77
78 @ 2[tip] 989b4d060121 2009-04-27 18:04 -0500 durin42
79 | Add beta and delta.
80 |
81 o 1 081603921c3f 2009-04-27 18:04 -0500 durin42
82 | Add gamma
83 |
84 o 0 d8d2fcd0e319 2009-04-27 18:04 -0500 durin42
85 Add alpha
86
87 Note that ``histedit`` does *not* remove any revisions (even its own temporary
88 ones) until after it has completed all the editing operations, so it will
89 probably perform several strip operations when it's done. For the above example,
90 it had to run strip twice. Strip can be slow depending on a variety of factors,
91 so you might need to be a little patient. You can choose to keep the original
92 revisions by passing the ``--keep`` flag.
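For instance, assuming the same repository as in the first example, a run that
keeps the original revisions alongside the edited ones might look like this::

    $ hg histedit --keep c561b4e977df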
93
94 The ``edit`` operation will drop you back to a command prompt,
95 allowing you to edit files freely, or even use ``hg record`` to commit
96 some changes as a separate commit. When you are done, run ``hg
97 histedit --continue`` to finish this step; any remaining uncommitted
98 changes will be committed as well at that point. You'll be prompted for a
99 new commit message, but the default commit message will be the
100 original message for the revision being edited.
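As a rough sketch of that workflow (assuming the rule for the revision in
question was changed from ``pick`` to ``edit``)::

    $ hg histedit c561b4e977df     # stops at the revision marked 'edit'
    $ hg record                    # optionally commit some changes separately
    $ hg histedit --continue       # commit the rest and resume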
101
102 The ``message`` operation will give you a chance to revise a commit
103 message without changing the contents. It's a shortcut for doing
104 ``edit`` immediately followed by ``hg histedit --continue``.
105
106 If ``histedit`` encounters a conflict when moving a revision (while
107 handling ``pick`` or ``fold``), it will stop in a similar manner to
108 ``edit``, except that it won't prompt you for a commit
109 message when done. If you decide at this point that you don't like how
110 much work it will be to rearrange history, or that you made a mistake,
111 you can use ``hg histedit --abort`` to abandon the new changes you
112 have made and return to the state before you attempted to edit your
113 history.
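In other words, when ``histedit`` stops on such a conflict you have two ways
forward, roughly::

    $ hg histedit --continue    # after fixing up the conflicting files
    $ hg histedit --abort       # or give up and restore the original history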
114
115 If we clone the histedit-ed example repository above and add four more
116 changes, such that we have the following history::
117
118 @ 6[tip] 038383181893 2009-04-27 18:04 -0500 stefan
119 | Add theta
120 |
121 o 5 140988835471 2009-04-27 18:04 -0500 stefan
122 | Add eta
123 |
124 o 4 122930637314 2009-04-27 18:04 -0500 stefan
125 | Add zeta
126 |
127 o 3 836302820282 2009-04-27 18:04 -0500 stefan
128 | Add epsilon
129 |
130 o 2 989b4d060121 2009-04-27 18:04 -0500 durin42
131 | Add beta and delta.
132 |
133 o 1 081603921c3f 2009-04-27 18:04 -0500 durin42
134 | Add gamma
135 |
136 o 0 d8d2fcd0e319 2009-04-27 18:04 -0500 durin42
137 Add alpha
138
139 If you run ``hg histedit --outgoing`` on the clone, then it is the same
140 as running ``hg histedit 836302820282``. If you plan to push to a
141 repository that Mercurial does not detect to be related to the source
142 repo, you can add a ``--force`` option.
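For example, on such a clone either of the following should select the same
revisions to edit (the revset form is handy when the outgoing set would
otherwise be ambiguous)::

    $ hg histedit --outgoing
    $ hg histedit "min(outgoing() and ::.)"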
143 """
144
144
145 try:
145 try:
146 import cPickle as pickle
146 import cPickle as pickle
147 pickle.dump # import now
147 pickle.dump # import now
148 except ImportError:
148 except ImportError:
149 import pickle
149 import pickle
150 import os
150 import os
151 import sys
151 import sys
152
152
153 from mercurial import cmdutil
153 from mercurial import cmdutil
154 from mercurial import discovery
154 from mercurial import discovery
155 from mercurial import error
155 from mercurial import error
156 from mercurial import copies
156 from mercurial import copies
157 from mercurial import context
157 from mercurial import context
158 from mercurial import hg
158 from mercurial import hg
159 from mercurial import node
159 from mercurial import node
160 from mercurial import repair
160 from mercurial import repair
161 from mercurial import util
161 from mercurial import util
162 from mercurial import obsolete
162 from mercurial import obsolete
163 from mercurial import merge as mergemod
163 from mercurial import merge as mergemod
164 from mercurial.lock import release
164 from mercurial.lock import release
165 from mercurial.i18n import _
165 from mercurial.i18n import _
166
166
167 cmdtable = {}
167 cmdtable = {}
168 command = cmdutil.command(cmdtable)
168 command = cmdutil.command(cmdtable)
169
169
170 testedwith = 'internal'
170 testedwith = 'internal'
171
171
172 # i18n: command names and abbreviations must remain untranslated
172 # i18n: command names and abbreviations must remain untranslated
173 editcomment = _("""# Edit history between %s and %s
173 editcomment = _("""# Edit history between %s and %s
174 #
174 #
175 # Commits are listed from least to most recent
175 # Commits are listed from least to most recent
176 #
176 #
177 # Commands:
177 # Commands:
178 # p, pick = use commit
178 # p, pick = use commit
179 # e, edit = use commit, but stop for amending
179 # e, edit = use commit, but stop for amending
180 # f, fold = use commit, but combine it with the one above
180 # f, fold = use commit, but combine it with the one above
181 # d, drop = remove commit from history
181 # d, drop = remove commit from history
182 # m, mess = edit message without changing commit content
182 # m, mess = edit message without changing commit content
183 #
183 #
184 """)
184 """)
185
185
186 def commitfuncfor(repo, src):
186 def commitfuncfor(repo, src):
187 """Build a commit function for the replacement of <src>
187 """Build a commit function for the replacement of <src>
188
188
189 This function ensure we apply the same treatment to all changesets.
189 This function ensure we apply the same treatment to all changesets.
190
190
191 - Add a 'histedit_source' entry in extra.
191 - Add a 'histedit_source' entry in extra.
192
192
193 Note that fold have its own separated logic because its handling is a bit
193 Note that fold have its own separated logic because its handling is a bit
194 different and not easily factored out of the fold method.
194 different and not easily factored out of the fold method.
195 """
195 """
196 phasemin = src.phase()
196 phasemin = src.phase()
197 def commitfunc(**kwargs):
197 def commitfunc(**kwargs):
198 phasebackup = repo.ui.backupconfig('phases', 'new-commit')
198 phasebackup = repo.ui.backupconfig('phases', 'new-commit')
199 try:
199 try:
200 repo.ui.setconfig('phases', 'new-commit', phasemin,
200 repo.ui.setconfig('phases', 'new-commit', phasemin,
201 'histedit')
201 'histedit')
202 extra = kwargs.get('extra', {}).copy()
202 extra = kwargs.get('extra', {}).copy()
203 extra['histedit_source'] = src.hex()
203 extra['histedit_source'] = src.hex()
204 kwargs['extra'] = extra
204 kwargs['extra'] = extra
205 return repo.commit(**kwargs)
205 return repo.commit(**kwargs)
206 finally:
206 finally:
207 repo.ui.restoreconfig(phasebackup)
207 repo.ui.restoreconfig(phasebackup)
208 return commitfunc
208 return commitfunc
209
209
210
210
211
211
212 def applychanges(ui, repo, ctx, opts):
212 def applychanges(ui, repo, ctx, opts):
213 """Merge changeset from ctx (only) in the current working directory"""
213 """Merge changeset from ctx (only) in the current working directory"""
214 wcpar = repo.dirstate.parents()[0]
214 wcpar = repo.dirstate.parents()[0]
215 if ctx.p1().node() == wcpar:
215 if ctx.p1().node() == wcpar:
216 # edition ar "in place" we do not need to make any merge,
216 # edition ar "in place" we do not need to make any merge,
217 # just applies changes on parent for edition
217 # just applies changes on parent for edition
218 cmdutil.revert(ui, repo, ctx, (wcpar, node.nullid), all=True)
218 cmdutil.revert(ui, repo, ctx, (wcpar, node.nullid), all=True)
219 stats = None
219 stats = None
220 else:
220 else:
221 try:
221 try:
222 # ui.forcemerge is an internal variable, do not document
222 # ui.forcemerge is an internal variable, do not document
223 repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
223 repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
224 'histedit')
224 'histedit')
225 stats = mergemod.update(repo, ctx.node(), True, True, False,
225 stats = mergemod.update(repo, ctx.node(), True, True, False,
226 ctx.p1().node())
226 ctx.p1().node())
227 finally:
227 finally:
228 repo.ui.setconfig('ui', 'forcemerge', '', 'histedit')
228 repo.ui.setconfig('ui', 'forcemerge', '', 'histedit')
229 repo.setparents(wcpar, node.nullid)
229 repo.setparents(wcpar, node.nullid)
230 repo.dirstate.write()
230 repo.dirstate.write()
231 # fix up dirstate for copies and renames
231 # fix up dirstate for copies and renames
232 cmdutil.duplicatecopies(repo, ctx.rev(), ctx.p1().rev())
232 cmdutil.duplicatecopies(repo, ctx.rev(), ctx.p1().rev())
233 return stats
233 return stats
234
234
235 def collapse(repo, first, last, commitopts):
235 def collapse(repo, first, last, commitopts):
236 """collapse the set of revisions from first to last as new one.
236 """collapse the set of revisions from first to last as new one.
237
237
238 Expected commit options are:
238 Expected commit options are:
239 - message
239 - message
240 - date
240 - date
241 - username
241 - username
242 Commit message is edited in all cases.
242 Commit message is edited in all cases.
243
243
244 This function works in memory."""
244 This function works in memory."""
245 ctxs = list(repo.set('%d::%d', first, last))
245 ctxs = list(repo.set('%d::%d', first, last))
246 if not ctxs:
246 if not ctxs:
247 return None
247 return None
248 base = first.parents()[0]
248 base = first.parents()[0]
249
249
250 # commit a new version of the old changeset, including the update
250 # commit a new version of the old changeset, including the update
251 # collect all files which might be affected
251 # collect all files which might be affected
252 files = set()
252 files = set()
253 for ctx in ctxs:
253 for ctx in ctxs:
254 files.update(ctx.files())
254 files.update(ctx.files())
255
255
256 # Recompute copies (avoid recording a -> b -> a)
256 # Recompute copies (avoid recording a -> b -> a)
257 copied = copies.pathcopies(base, last)
257 copied = copies.pathcopies(base, last)
258
258
259 # prune files which were reverted by the updates
259 # prune files which were reverted by the updates
260 def samefile(f):
260 def samefile(f):
261 if f in last.manifest():
261 if f in last.manifest():
262 a = last.filectx(f)
262 a = last.filectx(f)
263 if f in base.manifest():
263 if f in base.manifest():
264 b = base.filectx(f)
264 b = base.filectx(f)
265 return (a.data() == b.data()
265 return (a.data() == b.data()
266 and a.flags() == b.flags())
266 and a.flags() == b.flags())
267 else:
267 else:
268 return False
268 return False
269 else:
269 else:
270 return f not in base.manifest()
270 return f not in base.manifest()
271 files = [f for f in files if not samefile(f)]
271 files = [f for f in files if not samefile(f)]
272 # commit version of these files as defined by head
272 # commit version of these files as defined by head
273 headmf = last.manifest()
273 headmf = last.manifest()
274 def filectxfn(repo, ctx, path):
274 def filectxfn(repo, ctx, path):
275 if path in headmf:
275 if path in headmf:
276 fctx = last[path]
276 fctx = last[path]
277 flags = fctx.flags()
277 flags = fctx.flags()
278 mctx = context.memfilectx(fctx.path(), fctx.data(),
278 mctx = context.memfilectx(repo,
279 fctx.path(), fctx.data(),
279 islink='l' in flags,
280 islink='l' in flags,
280 isexec='x' in flags,
281 isexec='x' in flags,
281 copied=copied.get(path))
282 copied=copied.get(path))
282 return mctx
283 return mctx
283 raise IOError()
284 raise IOError()
284
285
285 if commitopts.get('message'):
286 if commitopts.get('message'):
286 message = commitopts['message']
287 message = commitopts['message']
287 else:
288 else:
288 message = first.description()
289 message = first.description()
289 user = commitopts.get('user')
290 user = commitopts.get('user')
290 date = commitopts.get('date')
291 date = commitopts.get('date')
291 extra = commitopts.get('extra')
292 extra = commitopts.get('extra')
292
293
293 parents = (first.p1().node(), first.p2().node())
294 parents = (first.p1().node(), first.p2().node())
294 new = context.memctx(repo,
295 new = context.memctx(repo,
295 parents=parents,
296 parents=parents,
296 text=message,
297 text=message,
297 files=files,
298 files=files,
298 filectxfn=filectxfn,
299 filectxfn=filectxfn,
299 user=user,
300 user=user,
300 date=date,
301 date=date,
301 extra=extra,
302 extra=extra,
302 editor=cmdutil.getcommiteditor(edit=True))
303 editor=cmdutil.getcommiteditor(edit=True))
303 return repo.commitctx(new)
304 return repo.commitctx(new)
304
305
305 def pick(ui, repo, ctx, ha, opts):
306 def pick(ui, repo, ctx, ha, opts):
306 oldctx = repo[ha]
307 oldctx = repo[ha]
307 if oldctx.parents()[0] == ctx:
308 if oldctx.parents()[0] == ctx:
308 ui.debug('node %s unchanged\n' % ha)
309 ui.debug('node %s unchanged\n' % ha)
309 return oldctx, []
310 return oldctx, []
310 hg.update(repo, ctx.node())
311 hg.update(repo, ctx.node())
311 stats = applychanges(ui, repo, oldctx, opts)
312 stats = applychanges(ui, repo, oldctx, opts)
312 if stats and stats[3] > 0:
313 if stats and stats[3] > 0:
313 raise error.InterventionRequired(_('Fix up the change and run '
314 raise error.InterventionRequired(_('Fix up the change and run '
314 'hg histedit --continue'))
315 'hg histedit --continue'))
315 # drop the second merge parent
316 # drop the second merge parent
316 commit = commitfuncfor(repo, oldctx)
317 commit = commitfuncfor(repo, oldctx)
317 n = commit(text=oldctx.description(), user=oldctx.user(),
318 n = commit(text=oldctx.description(), user=oldctx.user(),
318 date=oldctx.date(), extra=oldctx.extra())
319 date=oldctx.date(), extra=oldctx.extra())
319 if n is None:
320 if n is None:
320 ui.warn(_('%s: empty changeset\n')
321 ui.warn(_('%s: empty changeset\n')
321 % node.hex(ha))
322 % node.hex(ha))
322 return ctx, []
323 return ctx, []
323 new = repo[n]
324 new = repo[n]
324 return new, [(oldctx.node(), (n,))]
325 return new, [(oldctx.node(), (n,))]
325
326
326
327
327 def edit(ui, repo, ctx, ha, opts):
328 def edit(ui, repo, ctx, ha, opts):
328 oldctx = repo[ha]
329 oldctx = repo[ha]
329 hg.update(repo, ctx.node())
330 hg.update(repo, ctx.node())
330 applychanges(ui, repo, oldctx, opts)
331 applychanges(ui, repo, oldctx, opts)
331 raise error.InterventionRequired(
332 raise error.InterventionRequired(
332 _('Make changes as needed, you may commit or record as needed now.\n'
333 _('Make changes as needed, you may commit or record as needed now.\n'
333 'When you are finished, run hg histedit --continue to resume.'))
334 'When you are finished, run hg histedit --continue to resume.'))
334
335
335 def fold(ui, repo, ctx, ha, opts):
336 def fold(ui, repo, ctx, ha, opts):
336 oldctx = repo[ha]
337 oldctx = repo[ha]
337 hg.update(repo, ctx.node())
338 hg.update(repo, ctx.node())
338 stats = applychanges(ui, repo, oldctx, opts)
339 stats = applychanges(ui, repo, oldctx, opts)
339 if stats and stats[3] > 0:
340 if stats and stats[3] > 0:
340 raise error.InterventionRequired(
341 raise error.InterventionRequired(
341 _('Fix up the change and run hg histedit --continue'))
342 _('Fix up the change and run hg histedit --continue'))
342 n = repo.commit(text='fold-temp-revision %s' % ha, user=oldctx.user(),
343 n = repo.commit(text='fold-temp-revision %s' % ha, user=oldctx.user(),
343 date=oldctx.date(), extra=oldctx.extra())
344 date=oldctx.date(), extra=oldctx.extra())
344 if n is None:
345 if n is None:
345 ui.warn(_('%s: empty changeset')
346 ui.warn(_('%s: empty changeset')
346 % node.hex(ha))
347 % node.hex(ha))
347 return ctx, []
348 return ctx, []
348 return finishfold(ui, repo, ctx, oldctx, n, opts, [])
349 return finishfold(ui, repo, ctx, oldctx, n, opts, [])
349
350
350 def finishfold(ui, repo, ctx, oldctx, newnode, opts, internalchanges):
351 def finishfold(ui, repo, ctx, oldctx, newnode, opts, internalchanges):
351 parent = ctx.parents()[0].node()
352 parent = ctx.parents()[0].node()
352 hg.update(repo, parent)
353 hg.update(repo, parent)
353 ### prepare new commit data
354 ### prepare new commit data
354 commitopts = opts.copy()
355 commitopts = opts.copy()
355 # username
356 # username
356 if ctx.user() == oldctx.user():
357 if ctx.user() == oldctx.user():
357 username = ctx.user()
358 username = ctx.user()
358 else:
359 else:
359 username = ui.username()
360 username = ui.username()
360 commitopts['user'] = username
361 commitopts['user'] = username
361 # commit message
362 # commit message
362 newmessage = '\n***\n'.join(
363 newmessage = '\n***\n'.join(
363 [ctx.description()] +
364 [ctx.description()] +
364 [repo[r].description() for r in internalchanges] +
365 [repo[r].description() for r in internalchanges] +
365 [oldctx.description()]) + '\n'
366 [oldctx.description()]) + '\n'
366 commitopts['message'] = newmessage
367 commitopts['message'] = newmessage
367 # date
368 # date
368 commitopts['date'] = max(ctx.date(), oldctx.date())
369 commitopts['date'] = max(ctx.date(), oldctx.date())
369 extra = ctx.extra().copy()
370 extra = ctx.extra().copy()
370 # histedit_source
371 # histedit_source
371 # note: ctx is likely a temporary commit but that the best we can do here
372 # note: ctx is likely a temporary commit but that the best we can do here
372 # This is sufficient to solve issue3681 anyway
373 # This is sufficient to solve issue3681 anyway
373 extra['histedit_source'] = '%s,%s' % (ctx.hex(), oldctx.hex())
374 extra['histedit_source'] = '%s,%s' % (ctx.hex(), oldctx.hex())
374 commitopts['extra'] = extra
375 commitopts['extra'] = extra
375 phasebackup = repo.ui.backupconfig('phases', 'new-commit')
376 phasebackup = repo.ui.backupconfig('phases', 'new-commit')
376 try:
377 try:
377 phasemin = max(ctx.phase(), oldctx.phase())
378 phasemin = max(ctx.phase(), oldctx.phase())
378 repo.ui.setconfig('phases', 'new-commit', phasemin, 'histedit')
379 repo.ui.setconfig('phases', 'new-commit', phasemin, 'histedit')
379 n = collapse(repo, ctx, repo[newnode], commitopts)
380 n = collapse(repo, ctx, repo[newnode], commitopts)
380 finally:
381 finally:
381 repo.ui.restoreconfig(phasebackup)
382 repo.ui.restoreconfig(phasebackup)
382 if n is None:
383 if n is None:
383 return ctx, []
384 return ctx, []
384 hg.update(repo, n)
385 hg.update(repo, n)
385 replacements = [(oldctx.node(), (newnode,)),
386 replacements = [(oldctx.node(), (newnode,)),
386 (ctx.node(), (n,)),
387 (ctx.node(), (n,)),
387 (newnode, (n,)),
388 (newnode, (n,)),
388 ]
389 ]
389 for ich in internalchanges:
390 for ich in internalchanges:
390 replacements.append((ich, (n,)))
391 replacements.append((ich, (n,)))
391 return repo[n], replacements
392 return repo[n], replacements
392
393
393 def drop(ui, repo, ctx, ha, opts):
394 def drop(ui, repo, ctx, ha, opts):
394 return ctx, [(repo[ha].node(), ())]
395 return ctx, [(repo[ha].node(), ())]
395
396
396
397
397 def message(ui, repo, ctx, ha, opts):
398 def message(ui, repo, ctx, ha, opts):
398 oldctx = repo[ha]
399 oldctx = repo[ha]
399 hg.update(repo, ctx.node())
400 hg.update(repo, ctx.node())
400 stats = applychanges(ui, repo, oldctx, opts)
401 stats = applychanges(ui, repo, oldctx, opts)
401 if stats and stats[3] > 0:
402 if stats and stats[3] > 0:
402 raise error.InterventionRequired(
403 raise error.InterventionRequired(
403 _('Fix up the change and run hg histedit --continue'))
404 _('Fix up the change and run hg histedit --continue'))
404 message = oldctx.description()
405 message = oldctx.description()
405 commit = commitfuncfor(repo, oldctx)
406 commit = commitfuncfor(repo, oldctx)
406 new = commit(text=message, user=oldctx.user(), date=oldctx.date(),
407 new = commit(text=message, user=oldctx.user(), date=oldctx.date(),
407 extra=oldctx.extra(),
408 extra=oldctx.extra(),
408 editor=cmdutil.getcommiteditor(edit=True))
409 editor=cmdutil.getcommiteditor(edit=True))
409 newctx = repo[new]
410 newctx = repo[new]
410 if oldctx.node() != newctx.node():
411 if oldctx.node() != newctx.node():
411 return newctx, [(oldctx.node(), (new,))]
412 return newctx, [(oldctx.node(), (new,))]
412 # We didn't make an edit, so just indicate no replaced nodes
413 # We didn't make an edit, so just indicate no replaced nodes
413 return newctx, []
414 return newctx, []
414
415
415 def findoutgoing(ui, repo, remote=None, force=False, opts={}):
416 def findoutgoing(ui, repo, remote=None, force=False, opts={}):
416 """utility function to find the first outgoing changeset
417 """utility function to find the first outgoing changeset
417
418
418 Used by initialisation code"""
419 Used by initialisation code"""
419 dest = ui.expandpath(remote or 'default-push', remote or 'default')
420 dest = ui.expandpath(remote or 'default-push', remote or 'default')
420 dest, revs = hg.parseurl(dest, None)[:2]
421 dest, revs = hg.parseurl(dest, None)[:2]
421 ui.status(_('comparing with %s\n') % util.hidepassword(dest))
422 ui.status(_('comparing with %s\n') % util.hidepassword(dest))
422
423
423 revs, checkout = hg.addbranchrevs(repo, repo, revs, None)
424 revs, checkout = hg.addbranchrevs(repo, repo, revs, None)
424 other = hg.peer(repo, opts, dest)
425 other = hg.peer(repo, opts, dest)
425
426
426 if revs:
427 if revs:
427 revs = [repo.lookup(rev) for rev in revs]
428 revs = [repo.lookup(rev) for rev in revs]
428
429
429 outgoing = discovery.findcommonoutgoing(repo, other, revs, force=force)
430 outgoing = discovery.findcommonoutgoing(repo, other, revs, force=force)
430 if not outgoing.missing:
431 if not outgoing.missing:
431 raise util.Abort(_('no outgoing ancestors'))
432 raise util.Abort(_('no outgoing ancestors'))
432 roots = list(repo.revs("roots(%ln)", outgoing.missing))
433 roots = list(repo.revs("roots(%ln)", outgoing.missing))
433 if 1 < len(roots):
434 if 1 < len(roots):
434 msg = _('there are ambiguous outgoing revisions')
435 msg = _('there are ambiguous outgoing revisions')
435 hint = _('see "hg help histedit" for more detail')
436 hint = _('see "hg help histedit" for more detail')
436 raise util.Abort(msg, hint=hint)
437 raise util.Abort(msg, hint=hint)
437 return repo.lookup(roots[0])
438 return repo.lookup(roots[0])
438
439
439 actiontable = {'p': pick,
440 actiontable = {'p': pick,
440 'pick': pick,
441 'pick': pick,
441 'e': edit,
442 'e': edit,
442 'edit': edit,
443 'edit': edit,
443 'f': fold,
444 'f': fold,
444 'fold': fold,
445 'fold': fold,
445 'd': drop,
446 'd': drop,
446 'drop': drop,
447 'drop': drop,
447 'm': message,
448 'm': message,
448 'mess': message,
449 'mess': message,
449 }
450 }
450
451
451 @command('histedit',
452 @command('histedit',
452 [('', 'commands', '',
453 [('', 'commands', '',
453 _('Read history edits from the specified file.')),
454 _('Read history edits from the specified file.')),
454 ('c', 'continue', False, _('continue an edit already in progress')),
455 ('c', 'continue', False, _('continue an edit already in progress')),
455 ('k', 'keep', False,
456 ('k', 'keep', False,
456 _("don't strip old nodes after edit is complete")),
457 _("don't strip old nodes after edit is complete")),
457 ('', 'abort', False, _('abort an edit in progress')),
458 ('', 'abort', False, _('abort an edit in progress')),
458 ('o', 'outgoing', False, _('changesets not found in destination')),
459 ('o', 'outgoing', False, _('changesets not found in destination')),
459 ('f', 'force', False,
460 ('f', 'force', False,
460 _('force outgoing even for unrelated repositories')),
461 _('force outgoing even for unrelated repositories')),
461 ('r', 'rev', [], _('first revision to be edited'))],
462 ('r', 'rev', [], _('first revision to be edited'))],
462 _("ANCESTOR | --outgoing [URL]"))
463 _("ANCESTOR | --outgoing [URL]"))
463 def histedit(ui, repo, *freeargs, **opts):
464 def histedit(ui, repo, *freeargs, **opts):
464 """interactively edit changeset history
465 """interactively edit changeset history
465
466
466 This command edits changesets between ANCESTOR and the parent of
467 This command edits changesets between ANCESTOR and the parent of
467 the working directory.
468 the working directory.
468
469
469 With --outgoing, this edits changesets not found in the
470 With --outgoing, this edits changesets not found in the
470 destination repository. If URL of the destination is omitted, the
471 destination repository. If URL of the destination is omitted, the
471 'default-push' (or 'default') path will be used.
472 'default-push' (or 'default') path will be used.
472
473
473 For safety, this command is aborted, also if there are ambiguous
474 For safety, this command is aborted, also if there are ambiguous
474 outgoing revisions which may confuse users: for example, there are
475 outgoing revisions which may confuse users: for example, there are
475 multiple branches containing outgoing revisions.
476 multiple branches containing outgoing revisions.
476
477
477 Use "min(outgoing() and ::.)" or similar revset specification
478 Use "min(outgoing() and ::.)" or similar revset specification
478 instead of --outgoing to specify edit target revision exactly in
479 instead of --outgoing to specify edit target revision exactly in
479 such ambiguous situation. See :hg:`help revsets` for detail about
480 such ambiguous situation. See :hg:`help revsets` for detail about
480 selecting revisions.
481 selecting revisions.
481
482
482 Returns 0 on success, 1 if user intervention is required (not only
483 Returns 0 on success, 1 if user intervention is required (not only
483 for intentional "edit" command, but also for resolving unexpected
484 for intentional "edit" command, but also for resolving unexpected
484 conflicts).
485 conflicts).
485 """
486 """
486 lock = wlock = None
487 lock = wlock = None
487 try:
488 try:
488 wlock = repo.wlock()
489 wlock = repo.wlock()
489 lock = repo.lock()
490 lock = repo.lock()
490 _histedit(ui, repo, *freeargs, **opts)
491 _histedit(ui, repo, *freeargs, **opts)
491 finally:
492 finally:
492 release(lock, wlock)
493 release(lock, wlock)
493
494
494 def _histedit(ui, repo, *freeargs, **opts):
495 def _histedit(ui, repo, *freeargs, **opts):
495 # TODO only abort if we try and histedit mq patches, not just
496 # TODO only abort if we try and histedit mq patches, not just
496 # blanket if mq patches are applied somewhere
497 # blanket if mq patches are applied somewhere
497 mq = getattr(repo, 'mq', None)
498 mq = getattr(repo, 'mq', None)
498 if mq and mq.applied:
499 if mq and mq.applied:
499 raise util.Abort(_('source has mq patches applied'))
500 raise util.Abort(_('source has mq patches applied'))
500
501
501 # basic argument incompatibility processing
502 # basic argument incompatibility processing
502 outg = opts.get('outgoing')
503 outg = opts.get('outgoing')
503 cont = opts.get('continue')
504 cont = opts.get('continue')
504 abort = opts.get('abort')
505 abort = opts.get('abort')
505 force = opts.get('force')
506 force = opts.get('force')
506 rules = opts.get('commands', '')
507 rules = opts.get('commands', '')
507 revs = opts.get('rev', [])
508 revs = opts.get('rev', [])
508 goal = 'new' # This invocation goal, in new, continue, abort
509 goal = 'new' # This invocation goal, in new, continue, abort
509 if force and not outg:
510 if force and not outg:
510 raise util.Abort(_('--force only allowed with --outgoing'))
511 raise util.Abort(_('--force only allowed with --outgoing'))
511 if cont:
512 if cont:
512 if util.any((outg, abort, revs, freeargs, rules)):
513 if util.any((outg, abort, revs, freeargs, rules)):
513 raise util.Abort(_('no arguments allowed with --continue'))
514 raise util.Abort(_('no arguments allowed with --continue'))
514 goal = 'continue'
515 goal = 'continue'
515 elif abort:
516 elif abort:
516 if util.any((outg, revs, freeargs, rules)):
517 if util.any((outg, revs, freeargs, rules)):
517 raise util.Abort(_('no arguments allowed with --abort'))
518 raise util.Abort(_('no arguments allowed with --abort'))
518 goal = 'abort'
519 goal = 'abort'
519 else:
520 else:
520 if os.path.exists(os.path.join(repo.path, 'histedit-state')):
521 if os.path.exists(os.path.join(repo.path, 'histedit-state')):
521 raise util.Abort(_('history edit already in progress, try '
522 raise util.Abort(_('history edit already in progress, try '
522 '--continue or --abort'))
523 '--continue or --abort'))
523 if outg:
524 if outg:
524 if revs:
525 if revs:
525 raise util.Abort(_('no revisions allowed with --outgoing'))
526 raise util.Abort(_('no revisions allowed with --outgoing'))
526 if len(freeargs) > 1:
527 if len(freeargs) > 1:
527 raise util.Abort(
528 raise util.Abort(
528 _('only one repo argument allowed with --outgoing'))
529 _('only one repo argument allowed with --outgoing'))
529 else:
530 else:
530 revs.extend(freeargs)
531 revs.extend(freeargs)
531 if len(revs) != 1:
532 if len(revs) != 1:
532 raise util.Abort(
533 raise util.Abort(
533 _('histedit requires exactly one ancestor revision'))
534 _('histedit requires exactly one ancestor revision'))
534
535
535
536
536 if goal == 'continue':
537 if goal == 'continue':
537 (parentctxnode, rules, keep, topmost, replacements) = readstate(repo)
538 (parentctxnode, rules, keep, topmost, replacements) = readstate(repo)
538 parentctx = repo[parentctxnode]
539 parentctx = repo[parentctxnode]
539 parentctx, repl = bootstrapcontinue(ui, repo, parentctx, rules, opts)
540 parentctx, repl = bootstrapcontinue(ui, repo, parentctx, rules, opts)
540 replacements.extend(repl)
541 replacements.extend(repl)
541 elif goal == 'abort':
542 elif goal == 'abort':
542 (parentctxnode, rules, keep, topmost, replacements) = readstate(repo)
543 (parentctxnode, rules, keep, topmost, replacements) = readstate(repo)
543 mapping, tmpnodes, leafs, _ntm = processreplacement(repo, replacements)
544 mapping, tmpnodes, leafs, _ntm = processreplacement(repo, replacements)
544 ui.debug('restore wc to old parent %s\n' % node.short(topmost))
545 ui.debug('restore wc to old parent %s\n' % node.short(topmost))
545 # check whether we should update away
546 # check whether we should update away
546 parentnodes = [c.node() for c in repo[None].parents()]
547 parentnodes = [c.node() for c in repo[None].parents()]
547 for n in leafs | set([parentctxnode]):
548 for n in leafs | set([parentctxnode]):
548 if n in parentnodes:
549 if n in parentnodes:
549 hg.clean(repo, topmost)
550 hg.clean(repo, topmost)
550 break
551 break
551 else:
552 else:
552 pass
553 pass
553 cleanupnode(ui, repo, 'created', tmpnodes)
554 cleanupnode(ui, repo, 'created', tmpnodes)
554 cleanupnode(ui, repo, 'temp', leafs)
555 cleanupnode(ui, repo, 'temp', leafs)
555 os.unlink(os.path.join(repo.path, 'histedit-state'))
556 os.unlink(os.path.join(repo.path, 'histedit-state'))
556 return
557 return
557 else:
558 else:
558 cmdutil.checkunfinished(repo)
559 cmdutil.checkunfinished(repo)
559 cmdutil.bailifchanged(repo)
560 cmdutil.bailifchanged(repo)
560
561
561 topmost, empty = repo.dirstate.parents()
562 topmost, empty = repo.dirstate.parents()
562 if outg:
563 if outg:
563 if freeargs:
564 if freeargs:
564 remote = freeargs[0]
565 remote = freeargs[0]
565 else:
566 else:
566 remote = None
567 remote = None
567 root = findoutgoing(ui, repo, remote, force, opts)
568 root = findoutgoing(ui, repo, remote, force, opts)
568 else:
569 else:
569 rootrevs = list(repo.set('roots(%lr)', revs))
570 rootrevs = list(repo.set('roots(%lr)', revs))
570 if len(rootrevs) != 1:
571 if len(rootrevs) != 1:
571 raise util.Abort(_('The specified revisions must have '
572 raise util.Abort(_('The specified revisions must have '
572 'exactly one common root'))
573 'exactly one common root'))
573 root = rootrevs[0].node()
574 root = rootrevs[0].node()
574
575
575 keep = opts.get('keep', False)
576 keep = opts.get('keep', False)
576 revs = between(repo, root, topmost, keep)
577 revs = between(repo, root, topmost, keep)
577 if not revs:
578 if not revs:
578 raise util.Abort(_('%s is not an ancestor of working directory') %
579 raise util.Abort(_('%s is not an ancestor of working directory') %
579 node.short(root))
580 node.short(root))
580
581
581 ctxs = [repo[r] for r in revs]
582 ctxs = [repo[r] for r in revs]
582 if not rules:
583 if not rules:
583 rules = '\n'.join([makedesc(c) for c in ctxs])
584 rules = '\n'.join([makedesc(c) for c in ctxs])
584 rules += '\n\n'
585 rules += '\n\n'
585 rules += editcomment % (node.short(root), node.short(topmost))
586 rules += editcomment % (node.short(root), node.short(topmost))
586 rules = ui.edit(rules, ui.username())
587 rules = ui.edit(rules, ui.username())
587 # Save edit rules in .hg/histedit-last-edit.txt in case
588 # Save edit rules in .hg/histedit-last-edit.txt in case
588 # the user needs to ask for help after something
589 # the user needs to ask for help after something
589 # surprising happens.
590 # surprising happens.
590 f = open(repo.join('histedit-last-edit.txt'), 'w')
591 f = open(repo.join('histedit-last-edit.txt'), 'w')
591 f.write(rules)
592 f.write(rules)
592 f.close()
593 f.close()
593 else:
594 else:
594 if rules == '-':
595 if rules == '-':
595 f = sys.stdin
596 f = sys.stdin
596 else:
597 else:
597 f = open(rules)
598 f = open(rules)
598 rules = f.read()
599 rules = f.read()
599 f.close()
600 f.close()
600 rules = [l for l in (r.strip() for r in rules.splitlines())
601 rules = [l for l in (r.strip() for r in rules.splitlines())
601 if l and not l[0] == '#']
602 if l and not l[0] == '#']
602 rules = verifyrules(rules, repo, ctxs)
603 rules = verifyrules(rules, repo, ctxs)
603
604
604 parentctx = repo[root].parents()[0]
605 parentctx = repo[root].parents()[0]
605 keep = opts.get('keep', False)
606 keep = opts.get('keep', False)
606 replacements = []
607 replacements = []
607
608
608
609
609 while rules:
610 while rules:
610 writestate(repo, parentctx.node(), rules, keep, topmost, replacements)
611 writestate(repo, parentctx.node(), rules, keep, topmost, replacements)
611 action, ha = rules.pop(0)
612 action, ha = rules.pop(0)
612 ui.debug('histedit: processing %s %s\n' % (action, ha))
613 ui.debug('histedit: processing %s %s\n' % (action, ha))
613 actfunc = actiontable[action]
614 actfunc = actiontable[action]
614 parentctx, replacement_ = actfunc(ui, repo, parentctx, ha, opts)
615 parentctx, replacement_ = actfunc(ui, repo, parentctx, ha, opts)
615 replacements.extend(replacement_)
616 replacements.extend(replacement_)
616
617
617 hg.update(repo, parentctx.node())
618 hg.update(repo, parentctx.node())
618
619
619 mapping, tmpnodes, created, ntm = processreplacement(repo, replacements)
620 mapping, tmpnodes, created, ntm = processreplacement(repo, replacements)
620 if mapping:
621 if mapping:
621 for prec, succs in mapping.iteritems():
622 for prec, succs in mapping.iteritems():
622 if not succs:
623 if not succs:
623 ui.debug('histedit: %s is dropped\n' % node.short(prec))
624 ui.debug('histedit: %s is dropped\n' % node.short(prec))
624 else:
625 else:
625 ui.debug('histedit: %s is replaced by %s\n' % (
626 ui.debug('histedit: %s is replaced by %s\n' % (
626 node.short(prec), node.short(succs[0])))
627 node.short(prec), node.short(succs[0])))
627 if len(succs) > 1:
628 if len(succs) > 1:
628 m = 'histedit: %s'
629 m = 'histedit: %s'
629 for n in succs[1:]:
630 for n in succs[1:]:
630 ui.debug(m % node.short(n))
631 ui.debug(m % node.short(n))
631
632
632 if not keep:
633 if not keep:
633 if mapping:
634 if mapping:
634 movebookmarks(ui, repo, mapping, topmost, ntm)
635 movebookmarks(ui, repo, mapping, topmost, ntm)
635 # TODO update mq state
636 # TODO update mq state
636 if obsolete._enabled:
637 if obsolete._enabled:
637 markers = []
638 markers = []
638 # sort by revision number because it sound "right"
639 # sort by revision number because it sound "right"
639 for prec in sorted(mapping, key=repo.changelog.rev):
640 for prec in sorted(mapping, key=repo.changelog.rev):
640 succs = mapping[prec]
641 succs = mapping[prec]
641 markers.append((repo[prec],
642 markers.append((repo[prec],
642 tuple(repo[s] for s in succs)))
643 tuple(repo[s] for s in succs)))
643 if markers:
644 if markers:
644 obsolete.createmarkers(repo, markers)
645 obsolete.createmarkers(repo, markers)
645 else:
646 else:
646 cleanupnode(ui, repo, 'replaced', mapping)
647 cleanupnode(ui, repo, 'replaced', mapping)
647
648
648 cleanupnode(ui, repo, 'temp', tmpnodes)
649 cleanupnode(ui, repo, 'temp', tmpnodes)
649 os.unlink(os.path.join(repo.path, 'histedit-state'))
650 os.unlink(os.path.join(repo.path, 'histedit-state'))
650 if os.path.exists(repo.sjoin('undo')):
651 if os.path.exists(repo.sjoin('undo')):
651 os.unlink(repo.sjoin('undo'))
652 os.unlink(repo.sjoin('undo'))
652
653
653 def gatherchildren(repo, ctx):
654 def gatherchildren(repo, ctx):
654 # is there any new commit between the expected parent and "."
655 # is there any new commit between the expected parent and "."
655 #
656 #
656 # note: does not take non linear new change in account (but previous
657 # note: does not take non linear new change in account (but previous
657 # implementation didn't used them anyway (issue3655)
658 # implementation didn't used them anyway (issue3655)
658 newchildren = [c.node() for c in repo.set('(%d::.)', ctx)]
659 newchildren = [c.node() for c in repo.set('(%d::.)', ctx)]
659 if ctx.node() != node.nullid:
660 if ctx.node() != node.nullid:
660 if not newchildren:
661 if not newchildren:
661 # `ctx` should match but no result. This means that
662 # `ctx` should match but no result. This means that
662 # currentnode is not a descendant from ctx.
663 # currentnode is not a descendant from ctx.
663 msg = _('%s is not an ancestor of working directory')
664 msg = _('%s is not an ancestor of working directory')
664 hint = _('use "histedit --abort" to clear broken state')
665 hint = _('use "histedit --abort" to clear broken state')
665 raise util.Abort(msg % ctx, hint=hint)
666 raise util.Abort(msg % ctx, hint=hint)
666 newchildren.pop(0) # remove ctx
667 newchildren.pop(0) # remove ctx
667 return newchildren
668 return newchildren
668
669
669 def bootstrapcontinue(ui, repo, parentctx, rules, opts):
670 def bootstrapcontinue(ui, repo, parentctx, rules, opts):
670 action, currentnode = rules.pop(0)
671 action, currentnode = rules.pop(0)
671 ctx = repo[currentnode]
672 ctx = repo[currentnode]
672
673
673 newchildren = gatherchildren(repo, parentctx)
674 newchildren = gatherchildren(repo, parentctx)
674
675
675 # Commit dirty working directory if necessary
676 # Commit dirty working directory if necessary
676 new = None
677 new = None
677 m, a, r, d = repo.status()[:4]
678 m, a, r, d = repo.status()[:4]
678 if m or a or r or d:
679 if m or a or r or d:
679 # prepare the message for the commit to comes
680 # prepare the message for the commit to comes
680 if action in ('f', 'fold'):
681 if action in ('f', 'fold'):
681 message = 'fold-temp-revision %s' % currentnode
682 message = 'fold-temp-revision %s' % currentnode
682 else:
683 else:
683 message = ctx.description()
684 message = ctx.description()
684 editopt = action in ('e', 'edit', 'm', 'mess')
685 editopt = action in ('e', 'edit', 'm', 'mess')
685 editor = cmdutil.getcommiteditor(edit=editopt)
686 editor = cmdutil.getcommiteditor(edit=editopt)
686 commit = commitfuncfor(repo, ctx)
687 commit = commitfuncfor(repo, ctx)
687 new = commit(text=message, user=ctx.user(),
688 new = commit(text=message, user=ctx.user(),
688 date=ctx.date(), extra=ctx.extra(),
689 date=ctx.date(), extra=ctx.extra(),
689 editor=editor)
690 editor=editor)
690 if new is not None:
691 if new is not None:
691 newchildren.append(new)
692 newchildren.append(new)
692
693
693 replacements = []
694 replacements = []
694 # track replacements
695 # track replacements
695 if ctx.node() not in newchildren:
696 if ctx.node() not in newchildren:
696 # note: new children may be empty when the changeset is dropped.
697 # note: new children may be empty when the changeset is dropped.
697 # this happen e.g during conflicting pick where we revert content
698 # this happen e.g during conflicting pick where we revert content
698 # to parent.
699 # to parent.
699 replacements.append((ctx.node(), tuple(newchildren)))
700 replacements.append((ctx.node(), tuple(newchildren)))
700
701
701 if action in ('f', 'fold'):
702 if action in ('f', 'fold'):
702 if newchildren:
703 if newchildren:
703 # finalize fold operation if applicable
704 # finalize fold operation if applicable
704 if new is None:
705 if new is None:
705 new = newchildren[-1]
706 new = newchildren[-1]
706 else:
707 else:
707 newchildren.pop() # remove new from internal changes
708 newchildren.pop() # remove new from internal changes
708 parentctx, repl = finishfold(ui, repo, parentctx, ctx, new, opts,
709 parentctx, repl = finishfold(ui, repo, parentctx, ctx, new, opts,
709 newchildren)
710 newchildren)
710 replacements.extend(repl)
711 replacements.extend(repl)
711 else:
712 else:
712 # newchildren is empty if the fold did not result in any commit
713 # newchildren is empty if the fold did not result in any commit
713 # this happen when all folded change are discarded during the
714 # this happen when all folded change are discarded during the
714 # merge.
715 # merge.
715 replacements.append((ctx.node(), (parentctx.node(),)))
716 replacements.append((ctx.node(), (parentctx.node(),)))
716 elif newchildren:
717 elif newchildren:
717 # otherwise update "parentctx" before proceeding to further operation
718 # otherwise update "parentctx" before proceeding to further operation
718 parentctx = repo[newchildren[-1]]
719 parentctx = repo[newchildren[-1]]
719 return parentctx, replacements
720 return parentctx, replacements
720
721
721
722
722 def between(repo, old, new, keep):
723 def between(repo, old, new, keep):
723 """select and validate the set of revision to edit
724 """select and validate the set of revision to edit
724
725
725 When keep is false, the specified set can't have children."""
726 When keep is false, the specified set can't have children."""
726 ctxs = list(repo.set('%n::%n', old, new))
727 ctxs = list(repo.set('%n::%n', old, new))
727 if ctxs and not keep:
728 if ctxs and not keep:
728 if (not obsolete._enabled and
729 if (not obsolete._enabled and
729 repo.revs('(%ld::) - (%ld)', ctxs, ctxs)):
730 repo.revs('(%ld::) - (%ld)', ctxs, ctxs)):
730 raise util.Abort(_('cannot edit history that would orphan nodes'))
731 raise util.Abort(_('cannot edit history that would orphan nodes'))
731 if repo.revs('(%ld) and merge()', ctxs):
732 if repo.revs('(%ld) and merge()', ctxs):
732 raise util.Abort(_('cannot edit history that contains merges'))
733 raise util.Abort(_('cannot edit history that contains merges'))
733 root = ctxs[0] # list is already sorted by repo.set
734 root = ctxs[0] # list is already sorted by repo.set
734 if not root.phase():
735 if not root.phase():
735 raise util.Abort(_('cannot edit immutable changeset: %s') % root)
736 raise util.Abort(_('cannot edit immutable changeset: %s') % root)
736 return [c.node() for c in ctxs]
737 return [c.node() for c in ctxs]
737
738
738
739
739 def writestate(repo, parentnode, rules, keep, topmost, replacements):
740 def writestate(repo, parentnode, rules, keep, topmost, replacements):
740 fp = open(os.path.join(repo.path, 'histedit-state'), 'w')
741 fp = open(os.path.join(repo.path, 'histedit-state'), 'w')
741 pickle.dump((parentnode, rules, keep, topmost, replacements), fp)
742 pickle.dump((parentnode, rules, keep, topmost, replacements), fp)
742 fp.close()
743 fp.close()
743
744
744 def readstate(repo):
745 def readstate(repo):
745 """Returns a tuple of (parentnode, rules, keep, topmost, replacements).
746 """Returns a tuple of (parentnode, rules, keep, topmost, replacements).
746 """
747 """
747 fp = open(os.path.join(repo.path, 'histedit-state'))
748 fp = open(os.path.join(repo.path, 'histedit-state'))
748 return pickle.load(fp)
749 return pickle.load(fp)
749
750
750
751
751 def makedesc(c):
752 def makedesc(c):
752 """build a initial action line for a ctx `c`
753 """build a initial action line for a ctx `c`
753
754
754 line are in the form:
755 line are in the form:
755
756
756 pick <hash> <rev> <summary>
757 pick <hash> <rev> <summary>
757 """
758 """
758 summary = ''
759 summary = ''
759 if c.description():
760 if c.description():
760 summary = c.description().splitlines()[0]
761 summary = c.description().splitlines()[0]
761 line = 'pick %s %d %s' % (c, c.rev(), summary)
762 line = 'pick %s %d %s' % (c, c.rev(), summary)
762 return line[:80] # trim to 80 chars so it's not stupidly wide in my editor
763 return line[:80] # trim to 80 chars so it's not stupidly wide in my editor
763
764
764 def verifyrules(rules, repo, ctxs):
765 def verifyrules(rules, repo, ctxs):
765 """Verify that there exists exactly one edit rule per given changeset.
766 """Verify that there exists exactly one edit rule per given changeset.
766
767
767 Will abort if there are to many or too few rules, a malformed rule,
768 Will abort if there are to many or too few rules, a malformed rule,
768 or a rule on a changeset outside of the user-given range.
769 or a rule on a changeset outside of the user-given range.
769 """
770 """
770 parsed = []
771 parsed = []
771 expected = set(str(c) for c in ctxs)
772 expected = set(str(c) for c in ctxs)
772 seen = set()
773 seen = set()
773 for r in rules:
774 for r in rules:
774 if ' ' not in r:
775 if ' ' not in r:
775 raise util.Abort(_('malformed line "%s"') % r)
776 raise util.Abort(_('malformed line "%s"') % r)
776 action, rest = r.split(' ', 1)
777 action, rest = r.split(' ', 1)
777 ha = rest.strip().split(' ', 1)[0]
778 ha = rest.strip().split(' ', 1)[0]
778 try:
779 try:
779 ha = str(repo[ha]) # ensure its a short hash
780 ha = str(repo[ha]) # ensure its a short hash
780 except error.RepoError:
781 except error.RepoError:
781 raise util.Abort(_('unknown changeset %s listed') % ha)
782 raise util.Abort(_('unknown changeset %s listed') % ha)
782 if ha not in expected:
783 if ha not in expected:
783 raise util.Abort(
784 raise util.Abort(
784 _('may not use changesets other than the ones listed'))
785 _('may not use changesets other than the ones listed'))
785 if ha in seen:
786 if ha in seen:
786 raise util.Abort(_('duplicated command for changeset %s') % ha)
787 raise util.Abort(_('duplicated command for changeset %s') % ha)
787 seen.add(ha)
788 seen.add(ha)
788 if action not in actiontable:
789 if action not in actiontable:
789 raise util.Abort(_('unknown action "%s"') % action)
790 raise util.Abort(_('unknown action "%s"') % action)
790 parsed.append([action, ha])
791 parsed.append([action, ha])
791 missing = sorted(expected - seen) # sort to stabilize output
792 missing = sorted(expected - seen) # sort to stabilize output
792 if missing:
793 if missing:
793 raise util.Abort(_('missing rules for changeset %s') % missing[0],
794 raise util.Abort(_('missing rules for changeset %s') % missing[0],
794 hint=_('do you want to use the drop action?'))
795 hint=_('do you want to use the drop action?'))
795 return parsed
796 return parsed
796
797
797 def processreplacement(repo, replacements):
798 def processreplacement(repo, replacements):
798 """process the list of replacements to return
799 """process the list of replacements to return
799
800
800 1) the final mapping between original and created nodes
801 1) the final mapping between original and created nodes
801 2) the list of temporary node created by histedit
802 2) the list of temporary node created by histedit
802 3) the list of new commit created by histedit"""
803 3) the list of new commit created by histedit"""
803 allsuccs = set()
804 allsuccs = set()
804 replaced = set()
805 replaced = set()
805 fullmapping = {}
806 fullmapping = {}
806 # initialise basic set
807 # initialise basic set
807 # fullmapping record all operation recorded in replacement
808 # fullmapping record all operation recorded in replacement
808 for rep in replacements:
809 for rep in replacements:
809 allsuccs.update(rep[1])
810 allsuccs.update(rep[1])
810 replaced.add(rep[0])
811 replaced.add(rep[0])
811 fullmapping.setdefault(rep[0], set()).update(rep[1])
812 fullmapping.setdefault(rep[0], set()).update(rep[1])
812 new = allsuccs - replaced
813 new = allsuccs - replaced
813 tmpnodes = allsuccs & replaced
814 tmpnodes = allsuccs & replaced
814 # Reduce content fullmapping into direct relation between original nodes
815 # Reduce content fullmapping into direct relation between original nodes
815 # and final node created during history edition
816 # and final node created during history edition
816 # Dropped changeset are replaced by an empty list
817 # Dropped changeset are replaced by an empty list
817 toproceed = set(fullmapping)
818 toproceed = set(fullmapping)
818 final = {}
819 final = {}
819 while toproceed:
820 while toproceed:
820 for x in list(toproceed):
821 for x in list(toproceed):
821 succs = fullmapping[x]
822 succs = fullmapping[x]
822 for s in list(succs):
823 for s in list(succs):
823 if s in toproceed:
824 if s in toproceed:
824 # non final node with unknown closure
825 # non final node with unknown closure
825 # We can't process this now
826 # We can't process this now
826 break
827 break
827 elif s in final:
828 elif s in final:
828 # non final node, replace with closure
829 # non final node, replace with closure
829 succs.remove(s)
830 succs.remove(s)
830 succs.update(final[s])
831 succs.update(final[s])
831 else:
832 else:
832 final[x] = succs
833 final[x] = succs
833 toproceed.remove(x)
834 toproceed.remove(x)
834 # remove tmpnodes from final mapping
835 # remove tmpnodes from final mapping
835 for n in tmpnodes:
836 for n in tmpnodes:
836 del final[n]
837 del final[n]
837 # we expect all changes involved in final to exist in the repo
838 # we expect all changes involved in final to exist in the repo
838 # turn `final` into list (topologically sorted)
839 # turn `final` into list (topologically sorted)
839 nm = repo.changelog.nodemap
840 nm = repo.changelog.nodemap
840 for prec, succs in final.items():
841 for prec, succs in final.items():
841 final[prec] = sorted(succs, key=nm.get)
842 final[prec] = sorted(succs, key=nm.get)
842
843
843 # computed topmost element (necessary for bookmark)
844 # computed topmost element (necessary for bookmark)
844 if new:
845 if new:
845 newtopmost = sorted(new, key=repo.changelog.rev)[-1]
846 newtopmost = sorted(new, key=repo.changelog.rev)[-1]
846 elif not final:
847 elif not final:
847 # Nothing rewritten at all. we won't need `newtopmost`
848 # Nothing rewritten at all. we won't need `newtopmost`
848 # It is the same as `oldtopmost` and `processreplacement` know it
849 # It is the same as `oldtopmost` and `processreplacement` know it
849 newtopmost = None
850 newtopmost = None
850 else:
851 else:
851 # every body died. The newtopmost is the parent of the root.
852 # every body died. The newtopmost is the parent of the root.
852 newtopmost = repo[sorted(final, key=repo.changelog.rev)[0]].p1().node()
853 newtopmost = repo[sorted(final, key=repo.changelog.rev)[0]].p1().node()
853
854
854 return final, tmpnodes, new, newtopmost
855 return final, tmpnodes, new, newtopmost
855
856
856 def movebookmarks(ui, repo, mapping, oldtopmost, newtopmost):
857 def movebookmarks(ui, repo, mapping, oldtopmost, newtopmost):
857 """Move bookmark from old to newly created node"""
858 """Move bookmark from old to newly created node"""
858 if not mapping:
859 if not mapping:
859 # if nothing got rewritten there is not purpose for this function
860 # if nothing got rewritten there is not purpose for this function
860 return
861 return
861 moves = []
862 moves = []
862 for bk, old in sorted(repo._bookmarks.iteritems()):
863 for bk, old in sorted(repo._bookmarks.iteritems()):
863 if old == oldtopmost:
864 if old == oldtopmost:
864 # special case ensure bookmark stay on tip.
865 # special case ensure bookmark stay on tip.
865 #
866 #
866 # This is arguably a feature and we may only want that for the
867 # This is arguably a feature and we may only want that for the
867 # active bookmark. But the behavior is kept compatible with the old
868 # active bookmark. But the behavior is kept compatible with the old
868 # version for now.
869 # version for now.
869 moves.append((bk, newtopmost))
870 moves.append((bk, newtopmost))
870 continue
871 continue
871 base = old
872 base = old
872 new = mapping.get(base, None)
873 new = mapping.get(base, None)
873 if new is None:
874 if new is None:
874 continue
875 continue
875 while not new:
876 while not new:
876 # base is killed, trying with parent
877 # base is killed, trying with parent
877 base = repo[base].p1().node()
878 base = repo[base].p1().node()
878 new = mapping.get(base, (base,))
879 new = mapping.get(base, (base,))
879 # nothing to move
880 # nothing to move
880 moves.append((bk, new[-1]))
881 moves.append((bk, new[-1]))
881 if moves:
882 if moves:
882 marks = repo._bookmarks
883 marks = repo._bookmarks
883 for mark, new in moves:
884 for mark, new in moves:
884 old = marks[mark]
885 old = marks[mark]
885 ui.note(_('histedit: moving bookmarks %s from %s to %s\n')
886 ui.note(_('histedit: moving bookmarks %s from %s to %s\n')
886 % (mark, node.short(old), node.short(new)))
887 % (mark, node.short(old), node.short(new)))
887 marks[mark] = new
888 marks[mark] = new
888 marks.write()
889 marks.write()
889
890
890 def cleanupnode(ui, repo, name, nodes):
891 def cleanupnode(ui, repo, name, nodes):
891 """strip a group of nodes from the repository
892 """strip a group of nodes from the repository
892
893
893 The set of node to strip may contains unknown nodes."""
894 The set of node to strip may contains unknown nodes."""
894 ui.debug('should strip %s nodes %s\n' %
895 ui.debug('should strip %s nodes %s\n' %
895 (name, ', '.join([node.short(n) for n in nodes])))
896 (name, ', '.join([node.short(n) for n in nodes])))
896 lock = None
897 lock = None
897 try:
898 try:
898 lock = repo.lock()
899 lock = repo.lock()
899 # Find all node that need to be stripped
900 # Find all node that need to be stripped
900 # (we hg %lr instead of %ln to silently ignore unknown item
901 # (we hg %lr instead of %ln to silently ignore unknown item
901 nm = repo.changelog.nodemap
902 nm = repo.changelog.nodemap
902 nodes = [n for n in nodes if n in nm]
903 nodes = [n for n in nodes if n in nm]
903 roots = [c.node() for c in repo.set("roots(%ln)", nodes)]
904 roots = [c.node() for c in repo.set("roots(%ln)", nodes)]
904 for c in roots:
905 for c in roots:
905 # We should process nodes in reverse order to strip the tipmost first,
906 # We should process nodes in reverse order to strip the tipmost first,
906 # but this triggers a bug in the changegroup hook.
907 # but this triggers a bug in the changegroup hook.
907 # Stripping tipmost first would also reduce bundle overhead.
908 # Stripping tipmost first would also reduce bundle overhead.
908 repair.strip(ui, repo, c)
909 repair.strip(ui, repo, c)
909 finally:
910 finally:
910 release(lock)
911 release(lock)
911
912
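cleanupnode only hands the roots of the filtered set to repair.strip, because stripping a root also removes every descendant; unknown nodes are dropped first through the nodemap lookup. A rough stand-alone illustration of the root computation, with a parents dict invented for the example:

def roots(nodes, parents):
    """Members of `nodes` none of whose parents are also in `nodes`."""
    nodeset = set(nodes)
    return [n for n in nodes
            if not any(p in nodeset for p in parents.get(n, ()))]

# stripping the two roots 'b' and 'x' removes all four nodes
parents = {'b': ('a',), 'c': ('b',), 'x': ('a',), 'y': ('x',)}
assert sorted(roots(['b', 'c', 'x', 'y'], parents)) == ['b', 'x']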
912 def summaryhook(ui, repo):
913 def summaryhook(ui, repo):
913 if not os.path.exists(repo.join('histedit-state')):
914 if not os.path.exists(repo.join('histedit-state')):
914 return
915 return
915 (parentctxnode, rules, keep, topmost, replacements) = readstate(repo)
916 (parentctxnode, rules, keep, topmost, replacements) = readstate(repo)
916 if rules:
917 if rules:
917 # i18n: column positioning for "hg summary"
918 # i18n: column positioning for "hg summary"
918 ui.write(_('hist: %s (histedit --continue)\n') %
919 ui.write(_('hist: %s (histedit --continue)\n') %
919 (ui.label(_('%d remaining'), 'histedit.remaining') %
920 (ui.label(_('%d remaining'), 'histedit.remaining') %
920 len(rules)))
921 len(rules)))
921
922
922 def extsetup(ui):
923 def extsetup(ui):
923 cmdutil.summaryhooks.add('histedit', summaryhook)
924 cmdutil.summaryhooks.add('histedit', summaryhook)
924 cmdutil.unfinishedstates.append(
925 cmdutil.unfinishedstates.append(
925 ['histedit-state', False, True, _('histedit in progress'),
926 ['histedit-state', False, True, _('histedit in progress'),
926 _("use 'hg histedit --continue' or 'hg histedit --abort'")])
927 _("use 'hg histedit --continue' or 'hg histedit --abort'")])
@@ -1,571 +1,572
1 # Copyright 2009-2010 Gregory P. Ward
1 # Copyright 2009-2010 Gregory P. Ward
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 # Copyright 2010-2011 Fog Creek Software
3 # Copyright 2010-2011 Fog Creek Software
4 # Copyright 2010-2011 Unity Technologies
4 # Copyright 2010-2011 Unity Technologies
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 '''High-level command function for lfconvert, plus the cmdtable.'''
9 '''High-level command function for lfconvert, plus the cmdtable.'''
10
10
11 import os, errno
11 import os, errno
12 import shutil
12 import shutil
13
13
14 from mercurial import util, match as match_, hg, node, context, error, \
14 from mercurial import util, match as match_, hg, node, context, error, \
15 cmdutil, scmutil, commands
15 cmdutil, scmutil, commands
16 from mercurial.i18n import _
16 from mercurial.i18n import _
17 from mercurial.lock import release
17 from mercurial.lock import release
18
18
19 import lfutil
19 import lfutil
20 import basestore
20 import basestore
21
21
22 # -- Commands ----------------------------------------------------------
22 # -- Commands ----------------------------------------------------------
23
23
24 cmdtable = {}
24 cmdtable = {}
25 command = cmdutil.command(cmdtable)
25 command = cmdutil.command(cmdtable)
26
26
27 commands.inferrepo += " lfconvert"
27 commands.inferrepo += " lfconvert"
28
28
29 @command('lfconvert',
29 @command('lfconvert',
30 [('s', 'size', '',
30 [('s', 'size', '',
31 _('minimum size (MB) for files to be converted as largefiles'), 'SIZE'),
31 _('minimum size (MB) for files to be converted as largefiles'), 'SIZE'),
32 ('', 'to-normal', False,
32 ('', 'to-normal', False,
33 _('convert from a largefiles repo to a normal repo')),
33 _('convert from a largefiles repo to a normal repo')),
34 ],
34 ],
35 _('hg lfconvert SOURCE DEST [FILE ...]'))
35 _('hg lfconvert SOURCE DEST [FILE ...]'))
36 def lfconvert(ui, src, dest, *pats, **opts):
36 def lfconvert(ui, src, dest, *pats, **opts):
37 '''convert a normal repository to a largefiles repository
37 '''convert a normal repository to a largefiles repository
38
38
39 Convert repository SOURCE to a new repository DEST, identical to
39 Convert repository SOURCE to a new repository DEST, identical to
40 SOURCE except that certain files will be converted as largefiles:
40 SOURCE except that certain files will be converted as largefiles:
41 specifically, any file that matches any PATTERN *or* whose size is
41 specifically, any file that matches any PATTERN *or* whose size is
42 above the minimum size threshold is converted as a largefile. The
42 above the minimum size threshold is converted as a largefile. The
43 size used to determine whether or not to track a file as a
43 size used to determine whether or not to track a file as a
44 largefile is the size of the first version of the file. The
44 largefile is the size of the first version of the file. The
45 minimum size can be specified either with --size or in
45 minimum size can be specified either with --size or in
46 configuration as ``largefiles.size``.
46 configuration as ``largefiles.size``.
47
47
48 After running this command you will need to make sure that
48 After running this command you will need to make sure that
49 largefiles is enabled anywhere you intend to push the new
49 largefiles is enabled anywhere you intend to push the new
50 repository.
50 repository.
51
51
52 Use --to-normal to convert largefiles back to normal files; after
52 Use --to-normal to convert largefiles back to normal files; after
53 this, the DEST repository can be used without largefiles at all.'''
53 this, the DEST repository can be used without largefiles at all.'''
54
54
55 if opts['to_normal']:
55 if opts['to_normal']:
56 tolfile = False
56 tolfile = False
57 else:
57 else:
58 tolfile = True
58 tolfile = True
59 size = lfutil.getminsize(ui, True, opts.get('size'), default=None)
59 size = lfutil.getminsize(ui, True, opts.get('size'), default=None)
60
60
61 if not hg.islocal(src):
61 if not hg.islocal(src):
62 raise util.Abort(_('%s is not a local Mercurial repo') % src)
62 raise util.Abort(_('%s is not a local Mercurial repo') % src)
63 if not hg.islocal(dest):
63 if not hg.islocal(dest):
64 raise util.Abort(_('%s is not a local Mercurial repo') % dest)
64 raise util.Abort(_('%s is not a local Mercurial repo') % dest)
65
65
66 rsrc = hg.repository(ui, src)
66 rsrc = hg.repository(ui, src)
67 ui.status(_('initializing destination %s\n') % dest)
67 ui.status(_('initializing destination %s\n') % dest)
68 rdst = hg.repository(ui, dest, create=True)
68 rdst = hg.repository(ui, dest, create=True)
69
69
70 success = False
70 success = False
71 dstwlock = dstlock = None
71 dstwlock = dstlock = None
72 try:
72 try:
73 # Lock destination to prevent modification while it is converted to.
73 # Lock destination to prevent modification while it is converted to.
74 # Don't need to lock src because we are just reading from its history
74 # Don't need to lock src because we are just reading from its history
75 # which can't change.
75 # which can't change.
76 dstwlock = rdst.wlock()
76 dstwlock = rdst.wlock()
77 dstlock = rdst.lock()
77 dstlock = rdst.lock()
78
78
79 # Get a list of all changesets in the source. The easy way to do this
79 # Get a list of all changesets in the source. The easy way to do this
80 # is to simply walk the changelog, using changelog.nodesbetween().
80 # is to simply walk the changelog, using changelog.nodesbetween().
81 # Take a look at mercurial/revlog.py:639 for more details.
81 # Take a look at mercurial/revlog.py:639 for more details.
82 # Use a generator instead of a list to decrease memory usage
82 # Use a generator instead of a list to decrease memory usage
83 ctxs = (rsrc[ctx] for ctx in rsrc.changelog.nodesbetween(None,
83 ctxs = (rsrc[ctx] for ctx in rsrc.changelog.nodesbetween(None,
84 rsrc.heads())[0])
84 rsrc.heads())[0])
85 revmap = {node.nullid: node.nullid}
85 revmap = {node.nullid: node.nullid}
86 if tolfile:
86 if tolfile:
87 lfiles = set()
87 lfiles = set()
88 normalfiles = set()
88 normalfiles = set()
89 if not pats:
89 if not pats:
90 pats = ui.configlist(lfutil.longname, 'patterns', default=[])
90 pats = ui.configlist(lfutil.longname, 'patterns', default=[])
91 if pats:
91 if pats:
92 matcher = match_.match(rsrc.root, '', list(pats))
92 matcher = match_.match(rsrc.root, '', list(pats))
93 else:
93 else:
94 matcher = None
94 matcher = None
95
95
96 lfiletohash = {}
96 lfiletohash = {}
97 for ctx in ctxs:
97 for ctx in ctxs:
98 ui.progress(_('converting revisions'), ctx.rev(),
98 ui.progress(_('converting revisions'), ctx.rev(),
99 unit=_('revision'), total=rsrc['tip'].rev())
99 unit=_('revision'), total=rsrc['tip'].rev())
100 _lfconvert_addchangeset(rsrc, rdst, ctx, revmap,
100 _lfconvert_addchangeset(rsrc, rdst, ctx, revmap,
101 lfiles, normalfiles, matcher, size, lfiletohash)
101 lfiles, normalfiles, matcher, size, lfiletohash)
102 ui.progress(_('converting revisions'), None)
102 ui.progress(_('converting revisions'), None)
103
103
104 if os.path.exists(rdst.wjoin(lfutil.shortname)):
104 if os.path.exists(rdst.wjoin(lfutil.shortname)):
105 shutil.rmtree(rdst.wjoin(lfutil.shortname))
105 shutil.rmtree(rdst.wjoin(lfutil.shortname))
106
106
107 for f in lfiletohash.keys():
107 for f in lfiletohash.keys():
108 if os.path.isfile(rdst.wjoin(f)):
108 if os.path.isfile(rdst.wjoin(f)):
109 os.unlink(rdst.wjoin(f))
109 os.unlink(rdst.wjoin(f))
110 try:
110 try:
111 os.removedirs(os.path.dirname(rdst.wjoin(f)))
111 os.removedirs(os.path.dirname(rdst.wjoin(f)))
112 except OSError:
112 except OSError:
113 pass
113 pass
114
114
115 # If there were any files converted to largefiles, add largefiles
115 # If there were any files converted to largefiles, add largefiles
116 # to the destination repository's requirements.
116 # to the destination repository's requirements.
117 if lfiles:
117 if lfiles:
118 rdst.requirements.add('largefiles')
118 rdst.requirements.add('largefiles')
119 rdst._writerequirements()
119 rdst._writerequirements()
120 else:
120 else:
121 for ctx in ctxs:
121 for ctx in ctxs:
122 ui.progress(_('converting revisions'), ctx.rev(),
122 ui.progress(_('converting revisions'), ctx.rev(),
123 unit=_('revision'), total=rsrc['tip'].rev())
123 unit=_('revision'), total=rsrc['tip'].rev())
124 _addchangeset(ui, rsrc, rdst, ctx, revmap)
124 _addchangeset(ui, rsrc, rdst, ctx, revmap)
125
125
126 ui.progress(_('converting revisions'), None)
126 ui.progress(_('converting revisions'), None)
127 success = True
127 success = True
128 finally:
128 finally:
129 rdst.dirstate.clear()
129 rdst.dirstate.clear()
130 release(dstlock, dstwlock)
130 release(dstlock, dstwlock)
131 if not success:
131 if not success:
132 # we failed, remove the new directory
132 # we failed, remove the new directory
133 shutil.rmtree(rdst.root)
133 shutil.rmtree(rdst.root)
134
134
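For reference, typical invocations of the command defined above look like the examples in the lfpull docstring further down; the repository names, the 10 MB threshold and the pattern are placeholders:

  hg lfconvert --size 10 normal-repo largefiles-repo "*.dat"
  hg lfconvert --to-normal largefiles-repo plain-repo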
135 def _addchangeset(ui, rsrc, rdst, ctx, revmap):
135 def _addchangeset(ui, rsrc, rdst, ctx, revmap):
136 # Convert src parents to dst parents
136 # Convert src parents to dst parents
137 parents = _convertparents(ctx, revmap)
137 parents = _convertparents(ctx, revmap)
138
138
139 # Generate list of changed files
139 # Generate list of changed files
140 files = _getchangedfiles(ctx, parents)
140 files = _getchangedfiles(ctx, parents)
141
141
142 def getfilectx(repo, memctx, f):
142 def getfilectx(repo, memctx, f):
143 if lfutil.standin(f) in files:
143 if lfutil.standin(f) in files:
144 # if the file isn't in the manifest then it was removed
144 # if the file isn't in the manifest then it was removed
145 # or renamed, raise IOError to indicate this
145 # or renamed, raise IOError to indicate this
146 try:
146 try:
147 fctx = ctx.filectx(lfutil.standin(f))
147 fctx = ctx.filectx(lfutil.standin(f))
148 except error.LookupError:
148 except error.LookupError:
149 raise IOError
149 raise IOError
150 renamed = fctx.renamed()
150 renamed = fctx.renamed()
151 if renamed:
151 if renamed:
152 renamed = lfutil.splitstandin(renamed[0])
152 renamed = lfutil.splitstandin(renamed[0])
153
153
154 hash = fctx.data().strip()
154 hash = fctx.data().strip()
155 path = lfutil.findfile(rsrc, hash)
155 path = lfutil.findfile(rsrc, hash)
156
156
157 # If one file is missing, likely all files from this rev are missing
157 # If one file is missing, likely all files from this rev are missing
158 if path is None:
158 if path is None:
159 cachelfiles(ui, rsrc, ctx.node())
159 cachelfiles(ui, rsrc, ctx.node())
160 path = lfutil.findfile(rsrc, hash)
160 path = lfutil.findfile(rsrc, hash)
161
161
162 if path is None:
162 if path is None:
163 raise util.Abort(
163 raise util.Abort(
164 _("missing largefile \'%s\' from revision %s")
164 _("missing largefile \'%s\' from revision %s")
165 % (f, node.hex(ctx.node())))
165 % (f, node.hex(ctx.node())))
166
166
167 data = ''
167 data = ''
168 fd = None
168 fd = None
169 try:
169 try:
170 fd = open(path, 'rb')
170 fd = open(path, 'rb')
171 data = fd.read()
171 data = fd.read()
172 finally:
172 finally:
173 if fd:
173 if fd:
174 fd.close()
174 fd.close()
175 return context.memfilectx(f, data, 'l' in fctx.flags(),
175 return context.memfilectx(repo, f, data, 'l' in fctx.flags(),
176 'x' in fctx.flags(), renamed)
176 'x' in fctx.flags(), renamed)
177 else:
177 else:
178 return _getnormalcontext(repo.ui, ctx, f, revmap)
178 return _getnormalcontext(repo, ctx, f, revmap)
179
179
180 dstfiles = []
180 dstfiles = []
181 for file in files:
181 for file in files:
182 if lfutil.isstandin(file):
182 if lfutil.isstandin(file):
183 dstfiles.append(lfutil.splitstandin(file))
183 dstfiles.append(lfutil.splitstandin(file))
184 else:
184 else:
185 dstfiles.append(file)
185 dstfiles.append(file)
186 # Commit
186 # Commit
187 _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap)
187 _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap)
188
188
189 def _lfconvert_addchangeset(rsrc, rdst, ctx, revmap, lfiles, normalfiles,
189 def _lfconvert_addchangeset(rsrc, rdst, ctx, revmap, lfiles, normalfiles,
190 matcher, size, lfiletohash):
190 matcher, size, lfiletohash):
191 # Convert src parents to dst parents
191 # Convert src parents to dst parents
192 parents = _convertparents(ctx, revmap)
192 parents = _convertparents(ctx, revmap)
193
193
194 # Generate list of changed files
194 # Generate list of changed files
195 files = _getchangedfiles(ctx, parents)
195 files = _getchangedfiles(ctx, parents)
196
196
197 dstfiles = []
197 dstfiles = []
198 for f in files:
198 for f in files:
199 if f not in lfiles and f not in normalfiles:
199 if f not in lfiles and f not in normalfiles:
200 islfile = _islfile(f, ctx, matcher, size)
200 islfile = _islfile(f, ctx, matcher, size)
201 # If this file was renamed or copied then copy
201 # If this file was renamed or copied then copy
202 # the largefile-ness of its predecessor
202 # the largefile-ness of its predecessor
203 if f in ctx.manifest():
203 if f in ctx.manifest():
204 fctx = ctx.filectx(f)
204 fctx = ctx.filectx(f)
205 renamed = fctx.renamed()
205 renamed = fctx.renamed()
206 renamedlfile = renamed and renamed[0] in lfiles
206 renamedlfile = renamed and renamed[0] in lfiles
207 islfile |= renamedlfile
207 islfile |= renamedlfile
208 if 'l' in fctx.flags():
208 if 'l' in fctx.flags():
209 if renamedlfile:
209 if renamedlfile:
210 raise util.Abort(
210 raise util.Abort(
211 _('renamed/copied largefile %s becomes symlink')
211 _('renamed/copied largefile %s becomes symlink')
212 % f)
212 % f)
213 islfile = False
213 islfile = False
214 if islfile:
214 if islfile:
215 lfiles.add(f)
215 lfiles.add(f)
216 else:
216 else:
217 normalfiles.add(f)
217 normalfiles.add(f)
218
218
219 if f in lfiles:
219 if f in lfiles:
220 dstfiles.append(lfutil.standin(f))
220 dstfiles.append(lfutil.standin(f))
221 # the largefile is still in the manifest if it has not been removed/renamed
221 # the largefile is still in the manifest if it has not been removed/renamed
222 if f in ctx.manifest():
222 if f in ctx.manifest():
223 fctx = ctx.filectx(f)
223 fctx = ctx.filectx(f)
224 if 'l' in fctx.flags():
224 if 'l' in fctx.flags():
225 renamed = fctx.renamed()
225 renamed = fctx.renamed()
226 if renamed and renamed[0] in lfiles:
226 if renamed and renamed[0] in lfiles:
227 raise util.Abort(_('largefile %s becomes symlink') % f)
227 raise util.Abort(_('largefile %s becomes symlink') % f)
228
228
229 # largefile was modified, update standins
229 # largefile was modified, update standins
230 m = util.sha1('')
230 m = util.sha1('')
231 m.update(ctx[f].data())
231 m.update(ctx[f].data())
232 hash = m.hexdigest()
232 hash = m.hexdigest()
233 if f not in lfiletohash or lfiletohash[f] != hash:
233 if f not in lfiletohash or lfiletohash[f] != hash:
234 rdst.wwrite(f, ctx[f].data(), ctx[f].flags())
234 rdst.wwrite(f, ctx[f].data(), ctx[f].flags())
235 executable = 'x' in ctx[f].flags()
235 executable = 'x' in ctx[f].flags()
236 lfutil.writestandin(rdst, lfutil.standin(f), hash,
236 lfutil.writestandin(rdst, lfutil.standin(f), hash,
237 executable)
237 executable)
238 lfiletohash[f] = hash
238 lfiletohash[f] = hash
239 else:
239 else:
240 # normal file
240 # normal file
241 dstfiles.append(f)
241 dstfiles.append(f)
242
242
243 def getfilectx(repo, memctx, f):
243 def getfilectx(repo, memctx, f):
244 if lfutil.isstandin(f):
244 if lfutil.isstandin(f):
245 # if the file isn't in the manifest then it was removed
245 # if the file isn't in the manifest then it was removed
246 # or renamed, raise IOError to indicate this
246 # or renamed, raise IOError to indicate this
247 srcfname = lfutil.splitstandin(f)
247 srcfname = lfutil.splitstandin(f)
248 try:
248 try:
249 fctx = ctx.filectx(srcfname)
249 fctx = ctx.filectx(srcfname)
250 except error.LookupError:
250 except error.LookupError:
251 raise IOError
251 raise IOError
252 renamed = fctx.renamed()
252 renamed = fctx.renamed()
253 if renamed:
253 if renamed:
254 # standin is always a largefile because largefile-ness
254 # standin is always a largefile because largefile-ness
255 # doesn't change after rename or copy
255 # doesn't change after rename or copy
256 renamed = lfutil.standin(renamed[0])
256 renamed = lfutil.standin(renamed[0])
257
257
258 return context.memfilectx(f, lfiletohash[srcfname] + '\n', 'l' in
258 return context.memfilectx(repo, f, lfiletohash[srcfname] + '\n',
259 fctx.flags(), 'x' in fctx.flags(), renamed)
259 'l' in fctx.flags(), 'x' in fctx.flags(),
260 renamed)
260 else:
261 else:
261 return _getnormalcontext(repo.ui, ctx, f, revmap)
262 return _getnormalcontext(repo, ctx, f, revmap)
262
263
263 # Commit
264 # Commit
264 _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap)
265 _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap)
265
266
266 def _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap):
267 def _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap):
267 mctx = context.memctx(rdst, parents, ctx.description(), dstfiles,
268 mctx = context.memctx(rdst, parents, ctx.description(), dstfiles,
268 getfilectx, ctx.user(), ctx.date(), ctx.extra())
269 getfilectx, ctx.user(), ctx.date(), ctx.extra())
269 ret = rdst.commitctx(mctx)
270 ret = rdst.commitctx(mctx)
270 rdst.setparents(ret)
271 rdst.setparents(ret)
271 revmap[ctx.node()] = rdst.changelog.tip()
272 revmap[ctx.node()] = rdst.changelog.tip()
272
273
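The memfilectx signature change that this commit propagates is visible in the hunks above: the file context constructor now takes the repo as its first argument, and _commitcontext hands each getfilectx closure to context.memctx. A minimal sketch of a callback that satisfies that contract, relying on the module's existing imports; makegetfilectx and lookupdata are hypothetical helpers used only for illustration:

from mercurial import context   # already imported at the top of this module

def makegetfilectx(srcctx, lookupdata):
    """Build a getfilectx callback for context.memctx (illustrative only)."""
    def getfilectx(repo, memctx, f):
        try:
            # lookupdata is a stand-in for the standin/largefile lookups above
            data, islink, isexec, copied = lookupdata(srcctx, f)
        except LookupError:
            raise IOError       # memctx treats this file as removed
        # note the repo argument now comes first
        return context.memfilectx(repo, f, data, islink, isexec, copied)
    return getfilectx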
273 # Generate list of changed files
274 # Generate list of changed files
274 def _getchangedfiles(ctx, parents):
275 def _getchangedfiles(ctx, parents):
275 files = set(ctx.files())
276 files = set(ctx.files())
276 if node.nullid not in parents:
277 if node.nullid not in parents:
277 mc = ctx.manifest()
278 mc = ctx.manifest()
278 mp1 = ctx.parents()[0].manifest()
279 mp1 = ctx.parents()[0].manifest()
279 mp2 = ctx.parents()[1].manifest()
280 mp2 = ctx.parents()[1].manifest()
280 files |= (set(mp1) | set(mp2)) - set(mc)
281 files |= (set(mp1) | set(mp2)) - set(mc)
281 for f in mc:
282 for f in mc:
282 if mc[f] != mp1.get(f, None) or mc[f] != mp2.get(f, None):
283 if mc[f] != mp1.get(f, None) or mc[f] != mp2.get(f, None):
283 files.add(f)
284 files.add(f)
284 return files
285 return files
285
286
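For merge commits, _getchangedfiles widens ctx.files(): anything present in either parent manifest but missing from the merge manifest, plus anything whose manifest entry differs from either parent, counts as changed. The same set arithmetic on toy dict manifests (illustrative only):

def changedfiles(mc, mp1, mp2, reported):
    """Mimic the merge-case file list using plain dict manifests."""
    files = set(reported)
    files |= (set(mp1) | set(mp2)) - set(mc)        # dropped by the merge
    files |= {f for f in mc
              if mc[f] != mp1.get(f) or mc[f] != mp2.get(f)}
    return files

mp1 = {'a': 1, 'b': 2}
mp2 = {'a': 1, 'c': 3}
mc = {'a': 1, 'b': 9}                               # 'c' dropped, 'b' changed
assert changedfiles(mc, mp1, mp2, []) == {'b', 'c'}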
286 # Convert src parents to dst parents
287 # Convert src parents to dst parents
287 def _convertparents(ctx, revmap):
288 def _convertparents(ctx, revmap):
288 parents = []
289 parents = []
289 for p in ctx.parents():
290 for p in ctx.parents():
290 parents.append(revmap[p.node()])
291 parents.append(revmap[p.node()])
291 while len(parents) < 2:
292 while len(parents) < 2:
292 parents.append(node.nullid)
293 parents.append(node.nullid)
293 return parents
294 return parents
294
295
295 # Get memfilectx for a normal file
296 # Get memfilectx for a normal file
296 def _getnormalcontext(ui, ctx, f, revmap):
297 def _getnormalcontext(repo, ctx, f, revmap):
297 try:
298 try:
298 fctx = ctx.filectx(f)
299 fctx = ctx.filectx(f)
299 except error.LookupError:
300 except error.LookupError:
300 raise IOError
301 raise IOError
301 renamed = fctx.renamed()
302 renamed = fctx.renamed()
302 if renamed:
303 if renamed:
303 renamed = renamed[0]
304 renamed = renamed[0]
304
305
305 data = fctx.data()
306 data = fctx.data()
306 if f == '.hgtags':
307 if f == '.hgtags':
307 data = _converttags (ui, revmap, data)
308 data = _converttags (repo.ui, revmap, data)
308 return context.memfilectx(f, data, 'l' in fctx.flags(),
309 return context.memfilectx(repo, f, data, 'l' in fctx.flags(),
309 'x' in fctx.flags(), renamed)
310 'x' in fctx.flags(), renamed)
310
311
311 # Remap tag data using a revision map
312 # Remap tag data using a revision map
312 def _converttags(ui, revmap, data):
313 def _converttags(ui, revmap, data):
313 newdata = []
314 newdata = []
314 for line in data.splitlines():
315 for line in data.splitlines():
315 try:
316 try:
316 id, name = line.split(' ', 1)
317 id, name = line.split(' ', 1)
317 except ValueError:
318 except ValueError:
318 ui.warn(_('skipping incorrectly formatted tag %s\n')
319 ui.warn(_('skipping incorrectly formatted tag %s\n')
319 % line)
320 % line)
320 continue
321 continue
321 try:
322 try:
322 newid = node.bin(id)
323 newid = node.bin(id)
323 except TypeError:
324 except TypeError:
324 ui.warn(_('skipping incorrectly formatted id %s\n')
325 ui.warn(_('skipping incorrectly formatted id %s\n')
325 % id)
326 % id)
326 continue
327 continue
327 try:
328 try:
328 newdata.append('%s %s\n' % (node.hex(revmap[newid]),
329 newdata.append('%s %s\n' % (node.hex(revmap[newid]),
329 name))
330 name))
330 except KeyError:
331 except KeyError:
331 ui.warn(_('no mapping for id %s\n') % id)
332 ui.warn(_('no mapping for id %s\n') % id)
332 continue
333 continue
333 return ''.join(newdata)
334 return ''.join(newdata)
334
335
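_converttags rewrites each '<node> <tag>' line of .hgtags through revmap, warning about (rather than aborting on) malformed or unmapped entries. The same transformation sketched on hex strings, skipping the binary node round trip; converttags here is an illustrative stand-in, not the function above:

def converttags(revmap, data):
    """Rewrite '<hexnode> <tag>' lines using a hex-keyed revmap."""
    out = []
    for line in data.splitlines():
        try:
            oldid, name = line.split(' ', 1)
        except ValueError:
            continue                      # skip malformed line
        newid = revmap.get(oldid)
        if newid is None:
            continue                      # no mapping for this node
        out.append('%s %s\n' % (newid, name))
    return ''.join(out)

revmap = {'aaaa': 'bbbb'}
assert converttags(revmap, 'aaaa v1.0\ncccc v2.0') == 'bbbb v1.0\n'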
335 def _islfile(file, ctx, matcher, size):
336 def _islfile(file, ctx, matcher, size):
336 '''Return true if file should be considered a largefile, i.e.
337 '''Return true if file should be considered a largefile, i.e.
337 matcher matches it or it is larger than size.'''
338 matcher matches it or it is larger than size.'''
338 # never store special .hg* files as largefiles
339 # never store special .hg* files as largefiles
339 if file == '.hgtags' or file == '.hgignore' or file == '.hgsigs':
340 if file == '.hgtags' or file == '.hgignore' or file == '.hgsigs':
340 return False
341 return False
341 if matcher and matcher(file):
342 if matcher and matcher(file):
342 return True
343 return True
343 try:
344 try:
344 return ctx.filectx(file).size() >= size * 1024 * 1024
345 return ctx.filectx(file).size() >= size * 1024 * 1024
345 except error.LookupError:
346 except error.LookupError:
346 return False
347 return False
347
348
348 def uploadlfiles(ui, rsrc, rdst, files):
349 def uploadlfiles(ui, rsrc, rdst, files):
349 '''upload largefiles to the central store'''
350 '''upload largefiles to the central store'''
350
351
351 if not files:
352 if not files:
352 return
353 return
353
354
354 store = basestore._openstore(rsrc, rdst, put=True)
355 store = basestore._openstore(rsrc, rdst, put=True)
355
356
356 at = 0
357 at = 0
357 ui.debug("sending statlfile command for %d largefiles\n" % len(files))
358 ui.debug("sending statlfile command for %d largefiles\n" % len(files))
358 retval = store.exists(files)
359 retval = store.exists(files)
359 files = filter(lambda h: not retval[h], files)
360 files = filter(lambda h: not retval[h], files)
360 ui.debug("%d largefiles need to be uploaded\n" % len(files))
361 ui.debug("%d largefiles need to be uploaded\n" % len(files))
361
362
362 for hash in files:
363 for hash in files:
363 ui.progress(_('uploading largefiles'), at, unit='largefile',
364 ui.progress(_('uploading largefiles'), at, unit='largefile',
364 total=len(files))
365 total=len(files))
365 source = lfutil.findfile(rsrc, hash)
366 source = lfutil.findfile(rsrc, hash)
366 if not source:
367 if not source:
367 raise util.Abort(_('largefile %s missing from store'
368 raise util.Abort(_('largefile %s missing from store'
368 ' (needs to be uploaded)') % hash)
369 ' (needs to be uploaded)') % hash)
369 # XXX check for errors here
370 # XXX check for errors here
370 store.put(source, hash)
371 store.put(source, hash)
371 at += 1
372 at += 1
372 ui.progress(_('uploading largefiles'), None)
373 ui.progress(_('uploading largefiles'), None)
373
374
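uploadlfiles batches a single statlfile query through store.exists and then uploads only the hashes the remote store reports as missing. The filtering step in miniature, with the store replaced by a plain set of known hashes; missinghashes is a name made up for the sketch:

def missinghashes(store_hashes, candidate_hashes):
    """Keep only the hashes the (fake) store does not already have."""
    exists = {h: (h in store_hashes) for h in candidate_hashes}  # batched stat
    return [h for h in candidate_hashes if not exists[h]]

assert missinghashes({'h1', 'h3'}, ['h1', 'h2', 'h3']) == ['h2']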
374 def verifylfiles(ui, repo, all=False, contents=False):
375 def verifylfiles(ui, repo, all=False, contents=False):
375 '''Verify that every largefile revision in the current changeset
376 '''Verify that every largefile revision in the current changeset
376 exists in the central store. With --contents, also verify that
377 exists in the central store. With --contents, also verify that
377 the contents of each local largefile file revision are correct (SHA-1 hash
378 the contents of each local largefile file revision are correct (SHA-1 hash
378 matches the revision ID). With --all, check every changeset in
379 matches the revision ID). With --all, check every changeset in
379 this repository.'''
380 this repository.'''
380 if all:
381 if all:
381 # Pass a list to the function rather than an iterator because we know a
382 # Pass a list to the function rather than an iterator because we know a
382 # list will work.
383 # list will work.
383 revs = range(len(repo))
384 revs = range(len(repo))
384 else:
385 else:
385 revs = ['.']
386 revs = ['.']
386
387
387 store = basestore._openstore(repo)
388 store = basestore._openstore(repo)
388 return store.verify(revs, contents=contents)
389 return store.verify(revs, contents=contents)
389
390
390 def cachelfiles(ui, repo, node, filelist=None):
391 def cachelfiles(ui, repo, node, filelist=None):
391 '''cachelfiles ensures that all largefiles needed by the specified revision
392 '''cachelfiles ensures that all largefiles needed by the specified revision
392 are present in the repository's largefile cache.
393 are present in the repository's largefile cache.
393
394
394 returns a tuple (cached, missing). cached is the list of files downloaded
395 returns a tuple (cached, missing). cached is the list of files downloaded
395 by this operation; missing is the list of files that were needed but could
396 by this operation; missing is the list of files that were needed but could
396 not be found.'''
397 not be found.'''
397 lfiles = lfutil.listlfiles(repo, node)
398 lfiles = lfutil.listlfiles(repo, node)
398 if filelist:
399 if filelist:
399 lfiles = set(lfiles) & set(filelist)
400 lfiles = set(lfiles) & set(filelist)
400 toget = []
401 toget = []
401
402
402 for lfile in lfiles:
403 for lfile in lfiles:
403 try:
404 try:
404 expectedhash = repo[node][lfutil.standin(lfile)].data().strip()
405 expectedhash = repo[node][lfutil.standin(lfile)].data().strip()
405 except IOError, err:
406 except IOError, err:
406 if err.errno == errno.ENOENT:
407 if err.errno == errno.ENOENT:
407 continue # node must be None and standin wasn't found in wctx
408 continue # node must be None and standin wasn't found in wctx
408 raise
409 raise
409 if not lfutil.findfile(repo, expectedhash):
410 if not lfutil.findfile(repo, expectedhash):
410 toget.append((lfile, expectedhash))
411 toget.append((lfile, expectedhash))
411
412
412 if toget:
413 if toget:
413 store = basestore._openstore(repo)
414 store = basestore._openstore(repo)
414 ret = store.get(toget)
415 ret = store.get(toget)
415 return ret
416 return ret
416
417
417 return ([], [])
418 return ([], [])
418
419
419 def downloadlfiles(ui, repo, rev=None):
420 def downloadlfiles(ui, repo, rev=None):
420 matchfn = scmutil.match(repo[None],
421 matchfn = scmutil.match(repo[None],
421 [repo.wjoin(lfutil.shortname)], {})
422 [repo.wjoin(lfutil.shortname)], {})
422 def prepare(ctx, fns):
423 def prepare(ctx, fns):
423 pass
424 pass
424 totalsuccess = 0
425 totalsuccess = 0
425 totalmissing = 0
426 totalmissing = 0
426 if rev != []: # walkchangerevs on empty list would return all revs
427 if rev != []: # walkchangerevs on empty list would return all revs
427 for ctx in cmdutil.walkchangerevs(repo, matchfn, {'rev' : rev},
428 for ctx in cmdutil.walkchangerevs(repo, matchfn, {'rev' : rev},
428 prepare):
429 prepare):
429 success, missing = cachelfiles(ui, repo, ctx.node())
430 success, missing = cachelfiles(ui, repo, ctx.node())
430 totalsuccess += len(success)
431 totalsuccess += len(success)
431 totalmissing += len(missing)
432 totalmissing += len(missing)
432 ui.status(_("%d additional largefiles cached\n") % totalsuccess)
433 ui.status(_("%d additional largefiles cached\n") % totalsuccess)
433 if totalmissing > 0:
434 if totalmissing > 0:
434 ui.status(_("%d largefiles failed to download\n") % totalmissing)
435 ui.status(_("%d largefiles failed to download\n") % totalmissing)
435 return totalsuccess, totalmissing
436 return totalsuccess, totalmissing
436
437
437 def updatelfiles(ui, repo, filelist=None, printmessage=True):
438 def updatelfiles(ui, repo, filelist=None, printmessage=True):
438 wlock = repo.wlock()
439 wlock = repo.wlock()
439 try:
440 try:
440 lfdirstate = lfutil.openlfdirstate(ui, repo)
441 lfdirstate = lfutil.openlfdirstate(ui, repo)
441 lfiles = set(lfutil.listlfiles(repo)) | set(lfdirstate)
442 lfiles = set(lfutil.listlfiles(repo)) | set(lfdirstate)
442
443
443 if filelist is not None:
444 if filelist is not None:
444 lfiles = [f for f in lfiles if f in filelist]
445 lfiles = [f for f in lfiles if f in filelist]
445
446
446 update = {}
447 update = {}
447 updated, removed = 0, 0
448 updated, removed = 0, 0
448 for lfile in lfiles:
449 for lfile in lfiles:
449 abslfile = repo.wjoin(lfile)
450 abslfile = repo.wjoin(lfile)
450 absstandin = repo.wjoin(lfutil.standin(lfile))
451 absstandin = repo.wjoin(lfutil.standin(lfile))
451 if os.path.exists(absstandin):
452 if os.path.exists(absstandin):
452 if (os.path.exists(absstandin + '.orig') and
453 if (os.path.exists(absstandin + '.orig') and
453 os.path.exists(abslfile)):
454 os.path.exists(abslfile)):
454 shutil.copyfile(abslfile, abslfile + '.orig')
455 shutil.copyfile(abslfile, abslfile + '.orig')
455 util.unlinkpath(absstandin + '.orig')
456 util.unlinkpath(absstandin + '.orig')
456 expecthash = lfutil.readstandin(repo, lfile)
457 expecthash = lfutil.readstandin(repo, lfile)
457 if (expecthash != '' and
458 if (expecthash != '' and
458 (not os.path.exists(abslfile) or
459 (not os.path.exists(abslfile) or
459 expecthash != lfutil.hashfile(abslfile))):
460 expecthash != lfutil.hashfile(abslfile))):
460 if lfile not in repo[None]: # not switched to normal file
461 if lfile not in repo[None]: # not switched to normal file
461 util.unlinkpath(abslfile, ignoremissing=True)
462 util.unlinkpath(abslfile, ignoremissing=True)
462 # use normallookup() to allocate an entry in the largefiles
463 # use normallookup() to allocate an entry in the largefiles
463 # dirstate, because the lack of one misleads
464 # dirstate, because the lack of one misleads
464 # lfilesrepo.status() into reporting such cache-
465 # lfilesrepo.status() into reporting such cache-
465 # missing files as REMOVED.
466 # missing files as REMOVED.
466 lfdirstate.normallookup(lfile)
467 lfdirstate.normallookup(lfile)
467 update[lfile] = expecthash
468 update[lfile] = expecthash
468 else:
469 else:
469 # Remove lfiles for which the standin is deleted, unless the
470 # Remove lfiles for which the standin is deleted, unless the
470 # lfile is added to the repository again. This happens when a
471 # lfile is added to the repository again. This happens when a
471 # largefile is converted back to a normal file: the standin
472 # largefile is converted back to a normal file: the standin
472 # disappears, but a new (normal) file appears as the lfile.
473 # disappears, but a new (normal) file appears as the lfile.
473 if (os.path.exists(abslfile) and
474 if (os.path.exists(abslfile) and
474 repo.dirstate.normalize(lfile) not in repo[None]):
475 repo.dirstate.normalize(lfile) not in repo[None]):
475 util.unlinkpath(abslfile)
476 util.unlinkpath(abslfile)
476 removed += 1
477 removed += 1
477
478
478 # largefile processing might be slow and be interrupted - be prepared
479 # largefile processing might be slow and be interrupted - be prepared
479 lfdirstate.write()
480 lfdirstate.write()
480
481
481 if lfiles:
482 if lfiles:
482 if printmessage:
483 if printmessage:
483 ui.status(_('getting changed largefiles\n'))
484 ui.status(_('getting changed largefiles\n'))
484 cachelfiles(ui, repo, None, lfiles)
485 cachelfiles(ui, repo, None, lfiles)
485
486
486 for lfile in lfiles:
487 for lfile in lfiles:
487 update1 = 0
488 update1 = 0
488
489
489 expecthash = update.get(lfile)
490 expecthash = update.get(lfile)
490 if expecthash:
491 if expecthash:
491 if not lfutil.copyfromcache(repo, expecthash, lfile):
492 if not lfutil.copyfromcache(repo, expecthash, lfile):
492 # failed ... but already removed and set to normallookup
493 # failed ... but already removed and set to normallookup
493 continue
494 continue
494 # Synchronize largefile dirstate to the last modified
495 # Synchronize largefile dirstate to the last modified
495 # time of the file
496 # time of the file
496 lfdirstate.normal(lfile)
497 lfdirstate.normal(lfile)
497 update1 = 1
498 update1 = 1
498
499
499 # copy the state of largefile standin from the repository's
500 # copy the state of largefile standin from the repository's
500 # dirstate to its state in the lfdirstate.
501 # dirstate to its state in the lfdirstate.
501 abslfile = repo.wjoin(lfile)
502 abslfile = repo.wjoin(lfile)
502 absstandin = repo.wjoin(lfutil.standin(lfile))
503 absstandin = repo.wjoin(lfutil.standin(lfile))
503 if os.path.exists(absstandin):
504 if os.path.exists(absstandin):
504 mode = os.stat(absstandin).st_mode
505 mode = os.stat(absstandin).st_mode
505 if mode != os.stat(abslfile).st_mode:
506 if mode != os.stat(abslfile).st_mode:
506 os.chmod(abslfile, mode)
507 os.chmod(abslfile, mode)
507 update1 = 1
508 update1 = 1
508
509
509 updated += update1
510 updated += update1
510
511
511 state = repo.dirstate[lfutil.standin(lfile)]
512 state = repo.dirstate[lfutil.standin(lfile)]
512 if state == 'n':
513 if state == 'n':
513 # When rebasing, we need to synchronize the standin and the
514 # When rebasing, we need to synchronize the standin and the
514 # largefile, because otherwise the largefile will get reverted.
515 # largefile, because otherwise the largefile will get reverted.
515 # But for commit's sake, we have to mark the file as unclean.
516 # But for commit's sake, we have to mark the file as unclean.
516 if getattr(repo, "_isrebasing", False):
517 if getattr(repo, "_isrebasing", False):
517 lfdirstate.normallookup(lfile)
518 lfdirstate.normallookup(lfile)
518 else:
519 else:
519 lfdirstate.normal(lfile)
520 lfdirstate.normal(lfile)
520 elif state == 'r':
521 elif state == 'r':
521 lfdirstate.remove(lfile)
522 lfdirstate.remove(lfile)
522 elif state == 'a':
523 elif state == 'a':
523 lfdirstate.add(lfile)
524 lfdirstate.add(lfile)
524 elif state == '?':
525 elif state == '?':
525 lfdirstate.drop(lfile)
526 lfdirstate.drop(lfile)
526
527
527 lfdirstate.write()
528 lfdirstate.write()
528 if printmessage and lfiles:
529 if printmessage and lfiles:
529 ui.status(_('%d largefiles updated, %d removed\n') % (updated,
530 ui.status(_('%d largefiles updated, %d removed\n') % (updated,
530 removed))
531 removed))
531 finally:
532 finally:
532 wlock.release()
533 wlock.release()
533
534
534 @command('lfpull',
535 @command('lfpull',
535 [('r', 'rev', [], _('pull largefiles for these revisions'))
536 [('r', 'rev', [], _('pull largefiles for these revisions'))
536 ] + commands.remoteopts,
537 ] + commands.remoteopts,
537 _('-r REV... [-e CMD] [--remotecmd CMD] [SOURCE]'))
538 _('-r REV... [-e CMD] [--remotecmd CMD] [SOURCE]'))
538 def lfpull(ui, repo, source="default", **opts):
539 def lfpull(ui, repo, source="default", **opts):
539 """pull largefiles for the specified revisions from the specified source
540 """pull largefiles for the specified revisions from the specified source
540
541
541 Pull largefiles that are referenced from local changesets but missing
542 Pull largefiles that are referenced from local changesets but missing
542 locally, pulling from a remote repository to the local cache.
543 locally, pulling from a remote repository to the local cache.
543
544
544 If SOURCE is omitted, the 'default' path will be used.
545 If SOURCE is omitted, the 'default' path will be used.
545 See :hg:`help urls` for more information.
546 See :hg:`help urls` for more information.
546
547
547 .. container:: verbose
548 .. container:: verbose
548
549
549 Some examples:
550 Some examples:
550
551
551 - pull largefiles for all branch heads::
552 - pull largefiles for all branch heads::
552
553
553 hg lfpull -r "head() and not closed()"
554 hg lfpull -r "head() and not closed()"
554
555
555 - pull largefiles on the default branch::
556 - pull largefiles on the default branch::
556
557
557 hg lfpull -r "branch(default)"
558 hg lfpull -r "branch(default)"
558 """
559 """
559 repo.lfpullsource = source
560 repo.lfpullsource = source
560
561
561 revs = opts.get('rev', [])
562 revs = opts.get('rev', [])
562 if not revs:
563 if not revs:
563 raise util.Abort(_('no revisions specified'))
564 raise util.Abort(_('no revisions specified'))
564 revs = scmutil.revrange(repo, revs)
565 revs = scmutil.revrange(repo, revs)
565
566
566 numcached = 0
567 numcached = 0
567 for rev in revs:
568 for rev in revs:
568 ui.note(_('pulling largefiles for revision %s\n') % rev)
569 ui.note(_('pulling largefiles for revision %s\n') % rev)
569 (cached, missing) = cachelfiles(ui, repo, rev)
570 (cached, missing) = cachelfiles(ui, repo, rev)
570 numcached += len(cached)
571 numcached += len(cached)
571 ui.status(_("%d largefiles cached\n") % numcached)
572 ui.status(_("%d largefiles cached\n") % numcached)
@@ -1,2538 +1,2539
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import hex, nullid, nullrev, short
8 from node import hex, nullid, nullrev, short
9 from i18n import _
9 from i18n import _
10 import os, sys, errno, re, tempfile
10 import os, sys, errno, re, tempfile
11 import util, scmutil, templater, patch, error, templatekw, revlog, copies
11 import util, scmutil, templater, patch, error, templatekw, revlog, copies
12 import match as matchmod
12 import match as matchmod
13 import context, repair, graphmod, revset, phases, obsolete, pathutil
13 import context, repair, graphmod, revset, phases, obsolete, pathutil
14 import changelog
14 import changelog
15 import bookmarks
15 import bookmarks
16 import lock as lockmod
16 import lock as lockmod
17
17
18 def parsealiases(cmd):
18 def parsealiases(cmd):
19 return cmd.lstrip("^").split("|")
19 return cmd.lstrip("^").split("|")
20
20
21 def findpossible(cmd, table, strict=False):
21 def findpossible(cmd, table, strict=False):
22 """
22 """
23 Return cmd -> (aliases, command table entry)
23 Return cmd -> (aliases, command table entry)
24 for each matching command.
24 for each matching command.
25 Return debug commands (or their aliases) only if no normal command matches.
25 Return debug commands (or their aliases) only if no normal command matches.
26 """
26 """
27 choice = {}
27 choice = {}
28 debugchoice = {}
28 debugchoice = {}
29
29
30 if cmd in table:
30 if cmd in table:
31 # short-circuit exact matches, "log" alias beats "^log|history"
31 # short-circuit exact matches, "log" alias beats "^log|history"
32 keys = [cmd]
32 keys = [cmd]
33 else:
33 else:
34 keys = table.keys()
34 keys = table.keys()
35
35
36 for e in keys:
36 for e in keys:
37 aliases = parsealiases(e)
37 aliases = parsealiases(e)
38 found = None
38 found = None
39 if cmd in aliases:
39 if cmd in aliases:
40 found = cmd
40 found = cmd
41 elif not strict:
41 elif not strict:
42 for a in aliases:
42 for a in aliases:
43 if a.startswith(cmd):
43 if a.startswith(cmd):
44 found = a
44 found = a
45 break
45 break
46 if found is not None:
46 if found is not None:
47 if aliases[0].startswith("debug") or found.startswith("debug"):
47 if aliases[0].startswith("debug") or found.startswith("debug"):
48 debugchoice[found] = (aliases, table[e])
48 debugchoice[found] = (aliases, table[e])
49 else:
49 else:
50 choice[found] = (aliases, table[e])
50 choice[found] = (aliases, table[e])
51
51
52 if not choice and debugchoice:
52 if not choice and debugchoice:
53 choice = debugchoice
53 choice = debugchoice
54
54
55 return choice
55 return choice
56
56
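Command table keys such as "^log|history" pack the primary name and its aliases into one string; parsealiases strips the '^' marker and splits on '|', and findpossible then accepts either an exact alias or an unambiguous prefix. A tiny self-contained demo (it repeats the one-line helper above so it runs on its own):

def parsealiases(cmd):
    return cmd.lstrip("^").split("|")

assert parsealiases("^log|history") == ["log", "history"]
# a prefix such as "hist" would match the "history" alias via startswith()
assert any(a.startswith("hist") for a in parsealiases("^log|history"))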
57 def findcmd(cmd, table, strict=True):
57 def findcmd(cmd, table, strict=True):
58 """Return (aliases, command table entry) for command string."""
58 """Return (aliases, command table entry) for command string."""
59 choice = findpossible(cmd, table, strict)
59 choice = findpossible(cmd, table, strict)
60
60
61 if cmd in choice:
61 if cmd in choice:
62 return choice[cmd]
62 return choice[cmd]
63
63
64 if len(choice) > 1:
64 if len(choice) > 1:
65 clist = choice.keys()
65 clist = choice.keys()
66 clist.sort()
66 clist.sort()
67 raise error.AmbiguousCommand(cmd, clist)
67 raise error.AmbiguousCommand(cmd, clist)
68
68
69 if choice:
69 if choice:
70 return choice.values()[0]
70 return choice.values()[0]
71
71
72 raise error.UnknownCommand(cmd)
72 raise error.UnknownCommand(cmd)
73
73
74 def findrepo(p):
74 def findrepo(p):
75 while not os.path.isdir(os.path.join(p, ".hg")):
75 while not os.path.isdir(os.path.join(p, ".hg")):
76 oldp, p = p, os.path.dirname(p)
76 oldp, p = p, os.path.dirname(p)
77 if p == oldp:
77 if p == oldp:
78 return None
78 return None
79
79
80 return p
80 return p
81
81
82 def bailifchanged(repo):
82 def bailifchanged(repo):
83 if repo.dirstate.p2() != nullid:
83 if repo.dirstate.p2() != nullid:
84 raise util.Abort(_('outstanding uncommitted merge'))
84 raise util.Abort(_('outstanding uncommitted merge'))
85 modified, added, removed, deleted = repo.status()[:4]
85 modified, added, removed, deleted = repo.status()[:4]
86 if modified or added or removed or deleted:
86 if modified or added or removed or deleted:
87 raise util.Abort(_('uncommitted changes'))
87 raise util.Abort(_('uncommitted changes'))
88 ctx = repo[None]
88 ctx = repo[None]
89 for s in sorted(ctx.substate):
89 for s in sorted(ctx.substate):
90 if ctx.sub(s).dirty():
90 if ctx.sub(s).dirty():
91 raise util.Abort(_("uncommitted changes in subrepo %s") % s)
91 raise util.Abort(_("uncommitted changes in subrepo %s") % s)
92
92
93 def logmessage(ui, opts):
93 def logmessage(ui, opts):
94 """ get the log message according to -m and -l option """
94 """ get the log message according to -m and -l option """
95 message = opts.get('message')
95 message = opts.get('message')
96 logfile = opts.get('logfile')
96 logfile = opts.get('logfile')
97
97
98 if message and logfile:
98 if message and logfile:
99 raise util.Abort(_('options --message and --logfile are mutually '
99 raise util.Abort(_('options --message and --logfile are mutually '
100 'exclusive'))
100 'exclusive'))
101 if not message and logfile:
101 if not message and logfile:
102 try:
102 try:
103 if logfile == '-':
103 if logfile == '-':
104 message = ui.fin.read()
104 message = ui.fin.read()
105 else:
105 else:
106 message = '\n'.join(util.readfile(logfile).splitlines())
106 message = '\n'.join(util.readfile(logfile).splitlines())
107 except IOError, inst:
107 except IOError, inst:
108 raise util.Abort(_("can't read commit message '%s': %s") %
108 raise util.Abort(_("can't read commit message '%s': %s") %
109 (logfile, inst.strerror))
109 (logfile, inst.strerror))
110 return message
110 return message
111
111
112 def getcommiteditor(edit=False, finishdesc=None, extramsg=None, **opts):
112 def getcommiteditor(edit=False, finishdesc=None, extramsg=None, **opts):
113 """get appropriate commit message editor according to '--edit' option
113 """get appropriate commit message editor according to '--edit' option
114
114
115 'finishdesc' is a function to be called with the edited commit message
115 'finishdesc' is a function to be called with the edited commit message
116 (= the 'description' of the new changeset) just after editing, but
116 (= the 'description' of the new changeset) just after editing, but
117 before checking emptiness. It should return the actual text to be
117 before checking emptiness. It should return the actual text to be
118 stored into history. This allows the description to be changed before
118 stored into history. This allows the description to be changed before
119 it is stored.
119 it is stored.
120
120
121 'extramsg' is an extra message to be shown in the editor instead of
121 'extramsg' is an extra message to be shown in the editor instead of
122 the 'Leave message empty to abort commit' line. The 'HG: ' prefix and EOL
122 the 'Leave message empty to abort commit' line. The 'HG: ' prefix and EOL
123 are added automatically.
123 are added automatically.
124
124
125 'getcommiteditor' returns 'commitforceeditor' regardless of
125 'getcommiteditor' returns 'commitforceeditor' regardless of
126 'edit' if either 'finishdesc' or 'extramsg' is specified, because
126 'edit' if either 'finishdesc' or 'extramsg' is specified, because
127 they are specific to MQ usage.
127 they are specific to MQ usage.
128 """
128 """
129 if edit or finishdesc or extramsg:
129 if edit or finishdesc or extramsg:
130 return lambda r, c, s: commitforceeditor(r, c, s,
130 return lambda r, c, s: commitforceeditor(r, c, s,
131 finishdesc=finishdesc,
131 finishdesc=finishdesc,
132 extramsg=extramsg)
132 extramsg=extramsg)
133 else:
133 else:
134 return commiteditor
134 return commiteditor
135
135
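A caller-side sketch of the contract described in the docstring: supplying finishdesc (or extramsg) forces the commitforceeditor path even when edit is False, and the returned callable is later invoked with (repo, ctx, subs), matching the lambda above. The post-processing function and message here are made-up examples:

editor = getcommiteditor(edit=False,
                         finishdesc=lambda text: text.rstrip() + '\n',
                         extramsg=_('a hint line shown in the editor'))
# later called as editor(repo, ctx, subs) by the commit machinery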
136 def loglimit(opts):
136 def loglimit(opts):
137 """get the log limit according to option -l/--limit"""
137 """get the log limit according to option -l/--limit"""
138 limit = opts.get('limit')
138 limit = opts.get('limit')
139 if limit:
139 if limit:
140 try:
140 try:
141 limit = int(limit)
141 limit = int(limit)
142 except ValueError:
142 except ValueError:
143 raise util.Abort(_('limit must be a positive integer'))
143 raise util.Abort(_('limit must be a positive integer'))
144 if limit <= 0:
144 if limit <= 0:
145 raise util.Abort(_('limit must be positive'))
145 raise util.Abort(_('limit must be positive'))
146 else:
146 else:
147 limit = None
147 limit = None
148 return limit
148 return limit
149
149
150 def makefilename(repo, pat, node, desc=None,
150 def makefilename(repo, pat, node, desc=None,
151 total=None, seqno=None, revwidth=None, pathname=None):
151 total=None, seqno=None, revwidth=None, pathname=None):
152 node_expander = {
152 node_expander = {
153 'H': lambda: hex(node),
153 'H': lambda: hex(node),
154 'R': lambda: str(repo.changelog.rev(node)),
154 'R': lambda: str(repo.changelog.rev(node)),
155 'h': lambda: short(node),
155 'h': lambda: short(node),
156 'm': lambda: re.sub('[^\w]', '_', str(desc))
156 'm': lambda: re.sub('[^\w]', '_', str(desc))
157 }
157 }
158 expander = {
158 expander = {
159 '%': lambda: '%',
159 '%': lambda: '%',
160 'b': lambda: os.path.basename(repo.root),
160 'b': lambda: os.path.basename(repo.root),
161 }
161 }
162
162
163 try:
163 try:
164 if node:
164 if node:
165 expander.update(node_expander)
165 expander.update(node_expander)
166 if node:
166 if node:
167 expander['r'] = (lambda:
167 expander['r'] = (lambda:
168 str(repo.changelog.rev(node)).zfill(revwidth or 0))
168 str(repo.changelog.rev(node)).zfill(revwidth or 0))
169 if total is not None:
169 if total is not None:
170 expander['N'] = lambda: str(total)
170 expander['N'] = lambda: str(total)
171 if seqno is not None:
171 if seqno is not None:
172 expander['n'] = lambda: str(seqno)
172 expander['n'] = lambda: str(seqno)
173 if total is not None and seqno is not None:
173 if total is not None and seqno is not None:
174 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
174 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
175 if pathname is not None:
175 if pathname is not None:
176 expander['s'] = lambda: os.path.basename(pathname)
176 expander['s'] = lambda: os.path.basename(pathname)
177 expander['d'] = lambda: os.path.dirname(pathname) or '.'
177 expander['d'] = lambda: os.path.dirname(pathname) or '.'
178 expander['p'] = lambda: pathname
178 expander['p'] = lambda: pathname
179
179
180 newname = []
180 newname = []
181 patlen = len(pat)
181 patlen = len(pat)
182 i = 0
182 i = 0
183 while i < patlen:
183 while i < patlen:
184 c = pat[i]
184 c = pat[i]
185 if c == '%':
185 if c == '%':
186 i += 1
186 i += 1
187 c = pat[i]
187 c = pat[i]
188 c = expander[c]()
188 c = expander[c]()
189 newname.append(c)
189 newname.append(c)
190 i += 1
190 i += 1
191 return ''.join(newname)
191 return ''.join(newname)
192 except KeyError, inst:
192 except KeyError, inst:
193 raise util.Abort(_("invalid format spec '%%%s' in output filename") %
193 raise util.Abort(_("invalid format spec '%%%s' in output filename") %
194 inst.args[0])
194 inst.args[0])
195
195
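makefilename implements printf-style output-file patterns: %H/%h/%R expand to the full node, short node and revision number, %b to the repository basename, %n/%N to the sequence number and total, %s/%d/%p to parts of the path, and %% to a literal percent sign. A toy run of the same expansion loop with a hard-coded table; the values are made up:

def expandpattern(pat, table):
    """Expand %X specs in `pat` using callables from `table`."""
    out, i = [], 0
    while i < len(pat):
        c = pat[i]
        if c == '%':
            i += 1
            c = table[pat[i]]()          # KeyError -> invalid format spec
        out.append(c)
        i += 1
    return ''.join(out)

table = {'R': lambda: '42', 'h': lambda: 'abc123def456', '%': lambda: '%'}
assert expandpattern('export-%R-%h.patch', table) == 'export-42-abc123def456.patch'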
196 def makefileobj(repo, pat, node=None, desc=None, total=None,
196 def makefileobj(repo, pat, node=None, desc=None, total=None,
197 seqno=None, revwidth=None, mode='wb', modemap=None,
197 seqno=None, revwidth=None, mode='wb', modemap=None,
198 pathname=None):
198 pathname=None):
199
199
200 writable = mode not in ('r', 'rb')
200 writable = mode not in ('r', 'rb')
201
201
202 if not pat or pat == '-':
202 if not pat or pat == '-':
203 fp = writable and repo.ui.fout or repo.ui.fin
203 fp = writable and repo.ui.fout or repo.ui.fin
204 if util.safehasattr(fp, 'fileno'):
204 if util.safehasattr(fp, 'fileno'):
205 return os.fdopen(os.dup(fp.fileno()), mode)
205 return os.fdopen(os.dup(fp.fileno()), mode)
206 else:
206 else:
207 # if this fp can't be duped properly, return
207 # if this fp can't be duped properly, return
208 # a dummy object that can be closed
208 # a dummy object that can be closed
209 class wrappedfileobj(object):
209 class wrappedfileobj(object):
210 noop = lambda x: None
210 noop = lambda x: None
211 def __init__(self, f):
211 def __init__(self, f):
212 self.f = f
212 self.f = f
213 def __getattr__(self, attr):
213 def __getattr__(self, attr):
214 if attr == 'close':
214 if attr == 'close':
215 return self.noop
215 return self.noop
216 else:
216 else:
217 return getattr(self.f, attr)
217 return getattr(self.f, attr)
218
218
219 return wrappedfileobj(fp)
219 return wrappedfileobj(fp)
220 if util.safehasattr(pat, 'write') and writable:
220 if util.safehasattr(pat, 'write') and writable:
221 return pat
221 return pat
222 if util.safehasattr(pat, 'read') and 'r' in mode:
222 if util.safehasattr(pat, 'read') and 'r' in mode:
223 return pat
223 return pat
224 fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
224 fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
225 if modemap is not None:
225 if modemap is not None:
226 mode = modemap.get(fn, mode)
226 mode = modemap.get(fn, mode)
227 if mode == 'wb':
227 if mode == 'wb':
228 modemap[fn] = 'ab'
228 modemap[fn] = 'ab'
229 return open(fn, mode)
229 return open(fn, mode)
230
230
231 def openrevlog(repo, cmd, file_, opts):
231 def openrevlog(repo, cmd, file_, opts):
232 """opens the changelog, manifest, a filelog or a given revlog"""
232 """opens the changelog, manifest, a filelog or a given revlog"""
233 cl = opts['changelog']
233 cl = opts['changelog']
234 mf = opts['manifest']
234 mf = opts['manifest']
235 msg = None
235 msg = None
236 if cl and mf:
236 if cl and mf:
237 msg = _('cannot specify --changelog and --manifest at the same time')
237 msg = _('cannot specify --changelog and --manifest at the same time')
238 elif cl or mf:
238 elif cl or mf:
239 if file_:
239 if file_:
240 msg = _('cannot specify filename with --changelog or --manifest')
240 msg = _('cannot specify filename with --changelog or --manifest')
241 elif not repo:
241 elif not repo:
242 msg = _('cannot specify --changelog or --manifest '
242 msg = _('cannot specify --changelog or --manifest '
243 'without a repository')
243 'without a repository')
244 if msg:
244 if msg:
245 raise util.Abort(msg)
245 raise util.Abort(msg)
246
246
247 r = None
247 r = None
248 if repo:
248 if repo:
249 if cl:
249 if cl:
250 r = repo.unfiltered().changelog
250 r = repo.unfiltered().changelog
251 elif mf:
251 elif mf:
252 r = repo.manifest
252 r = repo.manifest
253 elif file_:
253 elif file_:
254 filelog = repo.file(file_)
254 filelog = repo.file(file_)
255 if len(filelog):
255 if len(filelog):
256 r = filelog
256 r = filelog
257 if not r:
257 if not r:
258 if not file_:
258 if not file_:
259 raise error.CommandError(cmd, _('invalid arguments'))
259 raise error.CommandError(cmd, _('invalid arguments'))
260 if not os.path.isfile(file_):
260 if not os.path.isfile(file_):
261 raise util.Abort(_("revlog '%s' not found") % file_)
261 raise util.Abort(_("revlog '%s' not found") % file_)
262 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False),
262 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False),
263 file_[:-2] + ".i")
263 file_[:-2] + ".i")
264 return r
264 return r
265
265
266 def copy(ui, repo, pats, opts, rename=False):
266 def copy(ui, repo, pats, opts, rename=False):
267 # called with the repo lock held
267 # called with the repo lock held
268 #
268 #
269 # hgsep => pathname that uses "/" to separate directories
269 # hgsep => pathname that uses "/" to separate directories
270 # ossep => pathname that uses os.sep to separate directories
270 # ossep => pathname that uses os.sep to separate directories
271 cwd = repo.getcwd()
271 cwd = repo.getcwd()
272 targets = {}
272 targets = {}
273 after = opts.get("after")
273 after = opts.get("after")
274 dryrun = opts.get("dry_run")
274 dryrun = opts.get("dry_run")
275 wctx = repo[None]
275 wctx = repo[None]
276
276
277 def walkpat(pat):
277 def walkpat(pat):
278 srcs = []
278 srcs = []
279 badstates = after and '?' or '?r'
279 badstates = after and '?' or '?r'
280 m = scmutil.match(repo[None], [pat], opts, globbed=True)
280 m = scmutil.match(repo[None], [pat], opts, globbed=True)
281 for abs in repo.walk(m):
281 for abs in repo.walk(m):
282 state = repo.dirstate[abs]
282 state = repo.dirstate[abs]
283 rel = m.rel(abs)
283 rel = m.rel(abs)
284 exact = m.exact(abs)
284 exact = m.exact(abs)
285 if state in badstates:
285 if state in badstates:
286 if exact and state == '?':
286 if exact and state == '?':
287 ui.warn(_('%s: not copying - file is not managed\n') % rel)
287 ui.warn(_('%s: not copying - file is not managed\n') % rel)
288 if exact and state == 'r':
288 if exact and state == 'r':
289 ui.warn(_('%s: not copying - file has been marked for'
289 ui.warn(_('%s: not copying - file has been marked for'
290 ' remove\n') % rel)
290 ' remove\n') % rel)
291 continue
291 continue
292 # abs: hgsep
292 # abs: hgsep
293 # rel: ossep
293 # rel: ossep
294 srcs.append((abs, rel, exact))
294 srcs.append((abs, rel, exact))
295 return srcs
295 return srcs
296
296
297 # abssrc: hgsep
297 # abssrc: hgsep
298 # relsrc: ossep
298 # relsrc: ossep
299 # otarget: ossep
299 # otarget: ossep
300 def copyfile(abssrc, relsrc, otarget, exact):
300 def copyfile(abssrc, relsrc, otarget, exact):
301 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
301 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
302 if '/' in abstarget:
302 if '/' in abstarget:
303 # We cannot normalize abstarget itself, this would prevent
303 # We cannot normalize abstarget itself, this would prevent
304 # case only renames, like a => A.
304 # case only renames, like a => A.
305 abspath, absname = abstarget.rsplit('/', 1)
305 abspath, absname = abstarget.rsplit('/', 1)
306 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
306 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
307 reltarget = repo.pathto(abstarget, cwd)
307 reltarget = repo.pathto(abstarget, cwd)
308 target = repo.wjoin(abstarget)
308 target = repo.wjoin(abstarget)
309 src = repo.wjoin(abssrc)
309 src = repo.wjoin(abssrc)
310 state = repo.dirstate[abstarget]
310 state = repo.dirstate[abstarget]
311
311
312 scmutil.checkportable(ui, abstarget)
312 scmutil.checkportable(ui, abstarget)
313
313
314 # check for collisions
314 # check for collisions
315 prevsrc = targets.get(abstarget)
315 prevsrc = targets.get(abstarget)
316 if prevsrc is not None:
316 if prevsrc is not None:
317 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
317 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
318 (reltarget, repo.pathto(abssrc, cwd),
318 (reltarget, repo.pathto(abssrc, cwd),
319 repo.pathto(prevsrc, cwd)))
319 repo.pathto(prevsrc, cwd)))
320 return
320 return
321
321
322 # check for overwrites
322 # check for overwrites
323 exists = os.path.lexists(target)
323 exists = os.path.lexists(target)
324 samefile = False
324 samefile = False
325 if exists and abssrc != abstarget:
325 if exists and abssrc != abstarget:
326 if (repo.dirstate.normalize(abssrc) ==
326 if (repo.dirstate.normalize(abssrc) ==
327 repo.dirstate.normalize(abstarget)):
327 repo.dirstate.normalize(abstarget)):
328 if not rename:
328 if not rename:
329 ui.warn(_("%s: can't copy - same file\n") % reltarget)
329 ui.warn(_("%s: can't copy - same file\n") % reltarget)
330 return
330 return
331 exists = False
331 exists = False
332 samefile = True
332 samefile = True
333
333
334 if not after and exists or after and state in 'mn':
334 if not after and exists or after and state in 'mn':
335 if not opts['force']:
335 if not opts['force']:
336 ui.warn(_('%s: not overwriting - file exists\n') %
336 ui.warn(_('%s: not overwriting - file exists\n') %
337 reltarget)
337 reltarget)
338 return
338 return
339
339
340 if after:
340 if after:
341 if not exists:
341 if not exists:
342 if rename:
342 if rename:
343 ui.warn(_('%s: not recording move - %s does not exist\n') %
343 ui.warn(_('%s: not recording move - %s does not exist\n') %
344 (relsrc, reltarget))
344 (relsrc, reltarget))
345 else:
345 else:
346 ui.warn(_('%s: not recording copy - %s does not exist\n') %
346 ui.warn(_('%s: not recording copy - %s does not exist\n') %
347 (relsrc, reltarget))
347 (relsrc, reltarget))
348 return
348 return
349 elif not dryrun:
349 elif not dryrun:
350 try:
350 try:
351 if exists:
351 if exists:
352 os.unlink(target)
352 os.unlink(target)
353 targetdir = os.path.dirname(target) or '.'
353 targetdir = os.path.dirname(target) or '.'
354 if not os.path.isdir(targetdir):
354 if not os.path.isdir(targetdir):
355 os.makedirs(targetdir)
355 os.makedirs(targetdir)
356 if samefile:
356 if samefile:
357 tmp = target + "~hgrename"
357 tmp = target + "~hgrename"
358 os.rename(src, tmp)
358 os.rename(src, tmp)
359 os.rename(tmp, target)
359 os.rename(tmp, target)
360 else:
360 else:
361 util.copyfile(src, target)
361 util.copyfile(src, target)
362 srcexists = True
362 srcexists = True
363 except IOError, inst:
363 except IOError, inst:
364 if inst.errno == errno.ENOENT:
364 if inst.errno == errno.ENOENT:
365 ui.warn(_('%s: deleted in working copy\n') % relsrc)
365 ui.warn(_('%s: deleted in working copy\n') % relsrc)
366 srcexists = False
366 srcexists = False
367 else:
367 else:
368 ui.warn(_('%s: cannot copy - %s\n') %
368 ui.warn(_('%s: cannot copy - %s\n') %
369 (relsrc, inst.strerror))
369 (relsrc, inst.strerror))
370 return True # report a failure
370 return True # report a failure
371
371
372 if ui.verbose or not exact:
372 if ui.verbose or not exact:
373 if rename:
373 if rename:
374 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
374 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
375 else:
375 else:
376 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
376 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
377
377
378 targets[abstarget] = abssrc
378 targets[abstarget] = abssrc
379
379
380 # fix up dirstate
380 # fix up dirstate
381 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
381 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
382 dryrun=dryrun, cwd=cwd)
382 dryrun=dryrun, cwd=cwd)
383 if rename and not dryrun:
383 if rename and not dryrun:
384 if not after and srcexists and not samefile:
384 if not after and srcexists and not samefile:
385 util.unlinkpath(repo.wjoin(abssrc))
385 util.unlinkpath(repo.wjoin(abssrc))
386 wctx.forget([abssrc])
386 wctx.forget([abssrc])
387
387
388 # pat: ossep
388 # pat: ossep
389 # dest: ossep
389 # dest: ossep
390 # srcs: list of (hgsep, hgsep, ossep, bool)
390 # srcs: list of (hgsep, hgsep, ossep, bool)
391 # return: function that takes hgsep and returns ossep
391 # return: function that takes hgsep and returns ossep
392 def targetpathfn(pat, dest, srcs):
392 def targetpathfn(pat, dest, srcs):
393 if os.path.isdir(pat):
393 if os.path.isdir(pat):
394 abspfx = pathutil.canonpath(repo.root, cwd, pat)
394 abspfx = pathutil.canonpath(repo.root, cwd, pat)
395 abspfx = util.localpath(abspfx)
395 abspfx = util.localpath(abspfx)
396 if destdirexists:
396 if destdirexists:
397 striplen = len(os.path.split(abspfx)[0])
397 striplen = len(os.path.split(abspfx)[0])
398 else:
398 else:
399 striplen = len(abspfx)
399 striplen = len(abspfx)
400 if striplen:
400 if striplen:
401 striplen += len(os.sep)
401 striplen += len(os.sep)
402 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
402 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
403 elif destdirexists:
403 elif destdirexists:
404 res = lambda p: os.path.join(dest,
404 res = lambda p: os.path.join(dest,
405 os.path.basename(util.localpath(p)))
405 os.path.basename(util.localpath(p)))
406 else:
406 else:
407 res = lambda p: dest
407 res = lambda p: dest
408 return res
408 return res
409
409
410 # pat: ossep
410 # pat: ossep
411 # dest: ossep
411 # dest: ossep
412 # srcs: list of (hgsep, hgsep, ossep, bool)
412 # srcs: list of (hgsep, hgsep, ossep, bool)
413 # return: function that takes hgsep and returns ossep
413 # return: function that takes hgsep and returns ossep
414 def targetpathafterfn(pat, dest, srcs):
414 def targetpathafterfn(pat, dest, srcs):
415 if matchmod.patkind(pat):
415 if matchmod.patkind(pat):
416 # a mercurial pattern
416 # a mercurial pattern
417 res = lambda p: os.path.join(dest,
417 res = lambda p: os.path.join(dest,
418 os.path.basename(util.localpath(p)))
418 os.path.basename(util.localpath(p)))
419 else:
419 else:
420 abspfx = pathutil.canonpath(repo.root, cwd, pat)
420 abspfx = pathutil.canonpath(repo.root, cwd, pat)
421 if len(abspfx) < len(srcs[0][0]):
421 if len(abspfx) < len(srcs[0][0]):
422 # A directory. Either the target path contains the last
422 # A directory. Either the target path contains the last
423 # component of the source path or it does not.
423 # component of the source path or it does not.
424 def evalpath(striplen):
424 def evalpath(striplen):
425 score = 0
425 score = 0
426 for s in srcs:
426 for s in srcs:
427 t = os.path.join(dest, util.localpath(s[0])[striplen:])
427 t = os.path.join(dest, util.localpath(s[0])[striplen:])
428 if os.path.lexists(t):
428 if os.path.lexists(t):
429 score += 1
429 score += 1
430 return score
430 return score
431
431
432 abspfx = util.localpath(abspfx)
432 abspfx = util.localpath(abspfx)
433 striplen = len(abspfx)
433 striplen = len(abspfx)
434 if striplen:
434 if striplen:
435 striplen += len(os.sep)
435 striplen += len(os.sep)
436 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
436 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
437 score = evalpath(striplen)
437 score = evalpath(striplen)
438 striplen1 = len(os.path.split(abspfx)[0])
438 striplen1 = len(os.path.split(abspfx)[0])
439 if striplen1:
439 if striplen1:
440 striplen1 += len(os.sep)
440 striplen1 += len(os.sep)
441 if evalpath(striplen1) > score:
441 if evalpath(striplen1) > score:
442 striplen = striplen1
442 striplen = striplen1
443 res = lambda p: os.path.join(dest,
443 res = lambda p: os.path.join(dest,
444 util.localpath(p)[striplen:])
444 util.localpath(p)[striplen:])
445 else:
445 else:
446 # a file
446 # a file
447 if destdirexists:
447 if destdirexists:
448 res = lambda p: os.path.join(dest,
448 res = lambda p: os.path.join(dest,
449 os.path.basename(util.localpath(p)))
449 os.path.basename(util.localpath(p)))
450 else:
450 else:
451 res = lambda p: dest
451 res = lambda p: dest
452 return res
452 return res
453
453
454
454
455 pats = scmutil.expandpats(pats)
455 pats = scmutil.expandpats(pats)
456 if not pats:
456 if not pats:
457 raise util.Abort(_('no source or destination specified'))
457 raise util.Abort(_('no source or destination specified'))
458 if len(pats) == 1:
458 if len(pats) == 1:
459 raise util.Abort(_('no destination specified'))
459 raise util.Abort(_('no destination specified'))
460 dest = pats.pop()
460 dest = pats.pop()
461 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
461 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
462 if not destdirexists:
462 if not destdirexists:
463 if len(pats) > 1 or matchmod.patkind(pats[0]):
463 if len(pats) > 1 or matchmod.patkind(pats[0]):
464 raise util.Abort(_('with multiple sources, destination must be an '
464 raise util.Abort(_('with multiple sources, destination must be an '
465 'existing directory'))
465 'existing directory'))
466 if util.endswithsep(dest):
466 if util.endswithsep(dest):
467 raise util.Abort(_('destination %s is not a directory') % dest)
467 raise util.Abort(_('destination %s is not a directory') % dest)
468
468
469 tfn = targetpathfn
469 tfn = targetpathfn
470 if after:
470 if after:
471 tfn = targetpathafterfn
471 tfn = targetpathafterfn
472 copylist = []
472 copylist = []
473 for pat in pats:
473 for pat in pats:
474 srcs = walkpat(pat)
474 srcs = walkpat(pat)
475 if not srcs:
475 if not srcs:
476 continue
476 continue
477 copylist.append((tfn(pat, dest, srcs), srcs))
477 copylist.append((tfn(pat, dest, srcs), srcs))
478 if not copylist:
478 if not copylist:
479 raise util.Abort(_('no files to copy'))
479 raise util.Abort(_('no files to copy'))
480
480
481 errors = 0
481 errors = 0
482 for targetpath, srcs in copylist:
482 for targetpath, srcs in copylist:
483 for abssrc, relsrc, exact in srcs:
483 for abssrc, relsrc, exact in srcs:
484 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
484 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
485 errors += 1
485 errors += 1
486
486
487 if errors:
487 if errors:
488 ui.warn(_('(consider using --after)\n'))
488 ui.warn(_('(consider using --after)\n'))
489
489
490 return errors != 0
490 return errors != 0
491
491
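# Usage sketch (editorial addition, not part of the original module): copy()
# backs both `hg copy` and `hg rename`. A caller holding the repo lock would
# pass the source patterns plus the destination as the final pattern, e.g.:
#
#     # copy a.txt into dir/, honouring --after/--dry-run/--force from opts
#     failed = copy(ui, repo, ['a.txt', 'dir/'],
#                   {'after': False, 'dry_run': False, 'force': False})
#
# The return value is truthy when at least one per-file copy failed.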
492 def service(opts, parentfn=None, initfn=None, runfn=None, logfile=None,
492 def service(opts, parentfn=None, initfn=None, runfn=None, logfile=None,
493 runargs=None, appendpid=False):
493 runargs=None, appendpid=False):
494 '''Run a command as a service.'''
494 '''Run a command as a service.'''
495
495
496 def writepid(pid):
496 def writepid(pid):
497 if opts['pid_file']:
497 if opts['pid_file']:
498 mode = appendpid and 'a' or 'w'
498 mode = appendpid and 'a' or 'w'
499 fp = open(opts['pid_file'], mode)
499 fp = open(opts['pid_file'], mode)
500 fp.write(str(pid) + '\n')
500 fp.write(str(pid) + '\n')
501 fp.close()
501 fp.close()
502
502
503 if opts['daemon'] and not opts['daemon_pipefds']:
503 if opts['daemon'] and not opts['daemon_pipefds']:
504 # Signal child process startup with file removal
504 # Signal child process startup with file removal
505 lockfd, lockpath = tempfile.mkstemp(prefix='hg-service-')
505 lockfd, lockpath = tempfile.mkstemp(prefix='hg-service-')
506 os.close(lockfd)
506 os.close(lockfd)
507 try:
507 try:
508 if not runargs:
508 if not runargs:
509 runargs = util.hgcmd() + sys.argv[1:]
509 runargs = util.hgcmd() + sys.argv[1:]
510 runargs.append('--daemon-pipefds=%s' % lockpath)
510 runargs.append('--daemon-pipefds=%s' % lockpath)
511 # Don't pass --cwd to the child process, because we've already
511 # Don't pass --cwd to the child process, because we've already
512 # changed directory.
512 # changed directory.
513 for i in xrange(1, len(runargs)):
513 for i in xrange(1, len(runargs)):
514 if runargs[i].startswith('--cwd='):
514 if runargs[i].startswith('--cwd='):
515 del runargs[i]
515 del runargs[i]
516 break
516 break
517 elif runargs[i].startswith('--cwd'):
517 elif runargs[i].startswith('--cwd'):
518 del runargs[i:i + 2]
518 del runargs[i:i + 2]
519 break
519 break
520 def condfn():
520 def condfn():
521 return not os.path.exists(lockpath)
521 return not os.path.exists(lockpath)
522 pid = util.rundetached(runargs, condfn)
522 pid = util.rundetached(runargs, condfn)
523 if pid < 0:
523 if pid < 0:
524 raise util.Abort(_('child process failed to start'))
524 raise util.Abort(_('child process failed to start'))
525 writepid(pid)
525 writepid(pid)
526 finally:
526 finally:
527 try:
527 try:
528 os.unlink(lockpath)
528 os.unlink(lockpath)
529 except OSError, e:
529 except OSError, e:
530 if e.errno != errno.ENOENT:
530 if e.errno != errno.ENOENT:
531 raise
531 raise
532 if parentfn:
532 if parentfn:
533 return parentfn(pid)
533 return parentfn(pid)
534 else:
534 else:
535 return
535 return
536
536
537 if initfn:
537 if initfn:
538 initfn()
538 initfn()
539
539
540 if not opts['daemon']:
540 if not opts['daemon']:
541 writepid(os.getpid())
541 writepid(os.getpid())
542
542
543 if opts['daemon_pipefds']:
543 if opts['daemon_pipefds']:
544 lockpath = opts['daemon_pipefds']
544 lockpath = opts['daemon_pipefds']
545 try:
545 try:
546 os.setsid()
546 os.setsid()
547 except AttributeError:
547 except AttributeError:
548 pass
548 pass
549 os.unlink(lockpath)
549 os.unlink(lockpath)
550 util.hidewindow()
550 util.hidewindow()
551 sys.stdout.flush()
551 sys.stdout.flush()
552 sys.stderr.flush()
552 sys.stderr.flush()
553
553
554 nullfd = os.open(os.devnull, os.O_RDWR)
554 nullfd = os.open(os.devnull, os.O_RDWR)
555 logfilefd = nullfd
555 logfilefd = nullfd
556 if logfile:
556 if logfile:
557 logfilefd = os.open(logfile, os.O_RDWR | os.O_CREAT | os.O_APPEND)
557 logfilefd = os.open(logfile, os.O_RDWR | os.O_CREAT | os.O_APPEND)
558 os.dup2(nullfd, 0)
558 os.dup2(nullfd, 0)
559 os.dup2(logfilefd, 1)
559 os.dup2(logfilefd, 1)
560 os.dup2(logfilefd, 2)
560 os.dup2(logfilefd, 2)
561 if nullfd not in (0, 1, 2):
561 if nullfd not in (0, 1, 2):
562 os.close(nullfd)
562 os.close(nullfd)
563 if logfile and logfilefd not in (0, 1, 2):
563 if logfile and logfilefd not in (0, 1, 2):
564 os.close(logfilefd)
564 os.close(logfilefd)
565
565
566 if runfn:
566 if runfn:
567 return runfn()
567 return runfn()
568
568
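# Usage sketch (editorial addition; the callbacks are assumed, only the
# option keys come from the code above): a long-running command hands its
# setup and main loop to service() and lets it handle --daemon/--pid-file:
#
#     def init():
#         pass  # e.g. bind sockets, open log files
#     def run():
#         pass  # blocking main loop
#     service(opts, initfn=init, runfn=run)
#
# With --daemon, the parent re-executes itself detached and waits for the
# child to unlink the temporary lock file before returning.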
569 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
569 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
570 """Utility function used by commands.import to import a single patch
570 """Utility function used by commands.import to import a single patch
571
571
572 This function is explicitly defined here to help the evolve extension to
572 This function is explicitly defined here to help the evolve extension to
573 wrap this part of the import logic.
573 wrap this part of the import logic.
574
574
575 The API is currently a bit ugly because it is a simple code translation from
575 The API is currently a bit ugly because it is a simple code translation from
576 the import command. Feel free to make it better.
576 the import command. Feel free to make it better.
577
577
578 :hunk: a patch (as a binary string)
578 :hunk: a patch (as a binary string)
579 :parents: nodes that will be parent of the created commit
579 :parents: nodes that will be parent of the created commit
580 :opts: the full dict of options passed to the import command
580 :opts: the full dict of options passed to the import command
581 :msgs: list to save commit message to.
581 :msgs: list to save commit message to.
582 (used in case we need to save it when failing)
582 (used in case we need to save it when failing)
583 :updatefunc: a function that updates a repo to a given node
583 :updatefunc: a function that updates a repo to a given node
584 updatefunc(<repo>, <node>)
584 updatefunc(<repo>, <node>)
585 """
585 """
586 tmpname, message, user, date, branch, nodeid, p1, p2 = \
586 tmpname, message, user, date, branch, nodeid, p1, p2 = \
587 patch.extract(ui, hunk)
587 patch.extract(ui, hunk)
588
588
589 editor = getcommiteditor(**opts)
589 editor = getcommiteditor(**opts)
590 update = not opts.get('bypass')
590 update = not opts.get('bypass')
591 strip = opts["strip"]
591 strip = opts["strip"]
592 sim = float(opts.get('similarity') or 0)
592 sim = float(opts.get('similarity') or 0)
593 if not tmpname:
593 if not tmpname:
594 return (None, None, False)
594 return (None, None, False)
595 msg = _('applied to working directory')
595 msg = _('applied to working directory')
596
596
597 rejects = False
597 rejects = False
598
598
599 try:
599 try:
600 cmdline_message = logmessage(ui, opts)
600 cmdline_message = logmessage(ui, opts)
601 if cmdline_message:
601 if cmdline_message:
602 # pickup the cmdline msg
602 # pickup the cmdline msg
603 message = cmdline_message
603 message = cmdline_message
604 elif message:
604 elif message:
605 # pickup the patch msg
605 # pickup the patch msg
606 message = message.strip()
606 message = message.strip()
607 else:
607 else:
608 # launch the editor
608 # launch the editor
609 message = None
609 message = None
610 ui.debug('message:\n%s\n' % message)
610 ui.debug('message:\n%s\n' % message)
611
611
612 if len(parents) == 1:
612 if len(parents) == 1:
613 parents.append(repo[nullid])
613 parents.append(repo[nullid])
614 if opts.get('exact'):
614 if opts.get('exact'):
615 if not nodeid or not p1:
615 if not nodeid or not p1:
616 raise util.Abort(_('not a Mercurial patch'))
616 raise util.Abort(_('not a Mercurial patch'))
617 p1 = repo[p1]
617 p1 = repo[p1]
618 p2 = repo[p2 or nullid]
618 p2 = repo[p2 or nullid]
619 elif p2:
619 elif p2:
620 try:
620 try:
621 p1 = repo[p1]
621 p1 = repo[p1]
622 p2 = repo[p2]
622 p2 = repo[p2]
623 # Without any options, consider p2 only if the
623 # Without any options, consider p2 only if the
624 # patch is being applied on top of the recorded
624 # patch is being applied on top of the recorded
625 # first parent.
625 # first parent.
626 if p1 != parents[0]:
626 if p1 != parents[0]:
627 p1 = parents[0]
627 p1 = parents[0]
628 p2 = repo[nullid]
628 p2 = repo[nullid]
629 except error.RepoError:
629 except error.RepoError:
630 p1, p2 = parents
630 p1, p2 = parents
631 else:
631 else:
632 p1, p2 = parents
632 p1, p2 = parents
633
633
634 n = None
634 n = None
635 if update:
635 if update:
636 if p1 != parents[0]:
636 if p1 != parents[0]:
637 updatefunc(repo, p1.node())
637 updatefunc(repo, p1.node())
638 if p2 != parents[1]:
638 if p2 != parents[1]:
639 repo.setparents(p1.node(), p2.node())
639 repo.setparents(p1.node(), p2.node())
640
640
641 if opts.get('exact') or opts.get('import_branch'):
641 if opts.get('exact') or opts.get('import_branch'):
642 repo.dirstate.setbranch(branch or 'default')
642 repo.dirstate.setbranch(branch or 'default')
643
643
644 partial = opts.get('partial', False)
644 partial = opts.get('partial', False)
645 files = set()
645 files = set()
646 try:
646 try:
647 patch.patch(ui, repo, tmpname, strip=strip, files=files,
647 patch.patch(ui, repo, tmpname, strip=strip, files=files,
648 eolmode=None, similarity=sim / 100.0)
648 eolmode=None, similarity=sim / 100.0)
649 except patch.PatchError, e:
649 except patch.PatchError, e:
650 if not partial:
650 if not partial:
651 raise util.Abort(str(e))
651 raise util.Abort(str(e))
652 if partial:
652 if partial:
653 rejects = True
653 rejects = True
654
654
655 files = list(files)
655 files = list(files)
656 if opts.get('no_commit'):
656 if opts.get('no_commit'):
657 if message:
657 if message:
658 msgs.append(message)
658 msgs.append(message)
659 else:
659 else:
660 if opts.get('exact') or p2:
660 if opts.get('exact') or p2:
661 # If you got here, you either used --force and know what
661 # If you got here, you either used --force and know what
662 # you are doing or used --exact or a merge patch while
662 # you are doing or used --exact or a merge patch while
663 # being updated to its first parent.
663 # being updated to its first parent.
664 m = None
664 m = None
665 else:
665 else:
666 m = scmutil.matchfiles(repo, files or [])
666 m = scmutil.matchfiles(repo, files or [])
667 n = repo.commit(message, opts.get('user') or user,
667 n = repo.commit(message, opts.get('user') or user,
668 opts.get('date') or date, match=m,
668 opts.get('date') or date, match=m,
669 editor=editor, force=partial)
669 editor=editor, force=partial)
670 else:
670 else:
671 if opts.get('exact') or opts.get('import_branch'):
671 if opts.get('exact') or opts.get('import_branch'):
672 branch = branch or 'default'
672 branch = branch or 'default'
673 else:
673 else:
674 branch = p1.branch()
674 branch = p1.branch()
675 store = patch.filestore()
675 store = patch.filestore()
676 try:
676 try:
677 files = set()
677 files = set()
678 try:
678 try:
679 patch.patchrepo(ui, repo, p1, store, tmpname, strip,
679 patch.patchrepo(ui, repo, p1, store, tmpname, strip,
680 files, eolmode=None)
680 files, eolmode=None)
681 except patch.PatchError, e:
681 except patch.PatchError, e:
682 raise util.Abort(str(e))
682 raise util.Abort(str(e))
683 memctx = context.makememctx(repo, (p1.node(), p2.node()),
683 memctx = context.makememctx(repo, (p1.node(), p2.node()),
684 message,
684 message,
685 opts.get('user') or user,
685 opts.get('user') or user,
686 opts.get('date') or date,
686 opts.get('date') or date,
687 branch, files, store,
687 branch, files, store,
688 editor=getcommiteditor())
688 editor=getcommiteditor())
689 n = memctx.commit()
689 n = memctx.commit()
690 finally:
690 finally:
691 store.close()
691 store.close()
692 if opts.get('exact') and hex(n) != nodeid:
692 if opts.get('exact') and hex(n) != nodeid:
693 raise util.Abort(_('patch is damaged or loses information'))
693 raise util.Abort(_('patch is damaged or loses information'))
694 if n:
694 if n:
695 # i18n: refers to a short changeset id
695 # i18n: refers to a short changeset id
696 msg = _('created %s') % short(n)
696 msg = _('created %s') % short(n)
697 return (msg, n, rejects)
697 return (msg, n, rejects)
698 finally:
698 finally:
699 os.unlink(tmpname)
699 os.unlink(tmpname)
700
700
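# Caller sketch (editorial addition; the surrounding loop is assumed, not
# taken from this file): commands.import feeds tryimportone() one patch
# hunk at a time and records the returned (msg, node, rejects) tuple:
#
#     msgs = []
#     for hunk in patch.split(patchfile):
#         msg, node, rejects = tryimportone(ui, repo, hunk, parents,
#                                           opts, msgs, hg.update)
#         if msg:
#             ui.note(msg + '\n')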
701 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
701 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
702 opts=None):
702 opts=None):
703 '''export changesets as hg patches.'''
703 '''export changesets as hg patches.'''
704
704
705 total = len(revs)
705 total = len(revs)
706 revwidth = max([len(str(rev)) for rev in revs])
706 revwidth = max([len(str(rev)) for rev in revs])
707 filemode = {}
707 filemode = {}
708
708
709 def single(rev, seqno, fp):
709 def single(rev, seqno, fp):
710 ctx = repo[rev]
710 ctx = repo[rev]
711 node = ctx.node()
711 node = ctx.node()
712 parents = [p.node() for p in ctx.parents() if p]
712 parents = [p.node() for p in ctx.parents() if p]
713 branch = ctx.branch()
713 branch = ctx.branch()
714 if switch_parent:
714 if switch_parent:
715 parents.reverse()
715 parents.reverse()
716 prev = (parents and parents[0]) or nullid
716 prev = (parents and parents[0]) or nullid
717
717
718 shouldclose = False
718 shouldclose = False
719 if not fp and len(template) > 0:
719 if not fp and len(template) > 0:
720 desc_lines = ctx.description().rstrip().split('\n')
720 desc_lines = ctx.description().rstrip().split('\n')
721 desc = desc_lines[0] # Commit always has a first line.
721 desc = desc_lines[0] # Commit always has a first line.
722 fp = makefileobj(repo, template, node, desc=desc, total=total,
722 fp = makefileobj(repo, template, node, desc=desc, total=total,
723 seqno=seqno, revwidth=revwidth, mode='wb',
723 seqno=seqno, revwidth=revwidth, mode='wb',
724 modemap=filemode)
724 modemap=filemode)
725 if fp != template:
725 if fp != template:
726 shouldclose = True
726 shouldclose = True
727 if fp and fp != sys.stdout and util.safehasattr(fp, 'name'):
727 if fp and fp != sys.stdout and util.safehasattr(fp, 'name'):
728 repo.ui.note("%s\n" % fp.name)
728 repo.ui.note("%s\n" % fp.name)
729
729
730 if not fp:
730 if not fp:
731 write = repo.ui.write
731 write = repo.ui.write
732 else:
732 else:
733 def write(s, **kw):
733 def write(s, **kw):
734 fp.write(s)
734 fp.write(s)
735
735
736
736
737 write("# HG changeset patch\n")
737 write("# HG changeset patch\n")
738 write("# User %s\n" % ctx.user())
738 write("# User %s\n" % ctx.user())
739 write("# Date %d %d\n" % ctx.date())
739 write("# Date %d %d\n" % ctx.date())
740 write("# %s\n" % util.datestr(ctx.date()))
740 write("# %s\n" % util.datestr(ctx.date()))
741 if branch and branch != 'default':
741 if branch and branch != 'default':
742 write("# Branch %s\n" % branch)
742 write("# Branch %s\n" % branch)
743 write("# Node ID %s\n" % hex(node))
743 write("# Node ID %s\n" % hex(node))
744 write("# Parent %s\n" % hex(prev))
744 write("# Parent %s\n" % hex(prev))
745 if len(parents) > 1:
745 if len(parents) > 1:
746 write("# Parent %s\n" % hex(parents[1]))
746 write("# Parent %s\n" % hex(parents[1]))
747 write(ctx.description().rstrip())
747 write(ctx.description().rstrip())
748 write("\n\n")
748 write("\n\n")
749
749
750 for chunk, label in patch.diffui(repo, prev, node, opts=opts):
750 for chunk, label in patch.diffui(repo, prev, node, opts=opts):
751 write(chunk, label=label)
751 write(chunk, label=label)
752
752
753 if shouldclose:
753 if shouldclose:
754 fp.close()
754 fp.close()
755
755
756 for seqno, rev in enumerate(revs):
756 for seqno, rev in enumerate(revs):
757 single(rev, seqno + 1, fp)
757 single(rev, seqno + 1, fp)
758
758
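# Usage sketch (editorial addition): write one patch file per revision using
# the default 'hg-%h.patch' filename template, or send everything to a
# single file object instead:
#
#     export(repo, [10, 11, 12], opts=patch.diffopts(repo.ui))
#     export(repo, [10, 11, 12], fp=open('all.patch', 'wb'),
#            opts=patch.diffopts(repo.ui))
#
# Passing an empty template (and no fp) writes the patches through repo.ui.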
759 def diffordiffstat(ui, repo, diffopts, node1, node2, match,
759 def diffordiffstat(ui, repo, diffopts, node1, node2, match,
760 changes=None, stat=False, fp=None, prefix='',
760 changes=None, stat=False, fp=None, prefix='',
761 listsubrepos=False):
761 listsubrepos=False):
762 '''show diff or diffstat.'''
762 '''show diff or diffstat.'''
763 if fp is None:
763 if fp is None:
764 write = ui.write
764 write = ui.write
765 else:
765 else:
766 def write(s, **kw):
766 def write(s, **kw):
767 fp.write(s)
767 fp.write(s)
768
768
769 if stat:
769 if stat:
770 diffopts = diffopts.copy(context=0)
770 diffopts = diffopts.copy(context=0)
771 width = 80
771 width = 80
772 if not ui.plain():
772 if not ui.plain():
773 width = ui.termwidth()
773 width = ui.termwidth()
774 chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
774 chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
775 prefix=prefix)
775 prefix=prefix)
776 for chunk, label in patch.diffstatui(util.iterlines(chunks),
776 for chunk, label in patch.diffstatui(util.iterlines(chunks),
777 width=width,
777 width=width,
778 git=diffopts.git):
778 git=diffopts.git):
779 write(chunk, label=label)
779 write(chunk, label=label)
780 else:
780 else:
781 for chunk, label in patch.diffui(repo, node1, node2, match,
781 for chunk, label in patch.diffui(repo, node1, node2, match,
782 changes, diffopts, prefix=prefix):
782 changes, diffopts, prefix=prefix):
783 write(chunk, label=label)
783 write(chunk, label=label)
784
784
785 if listsubrepos:
785 if listsubrepos:
786 ctx1 = repo[node1]
786 ctx1 = repo[node1]
787 ctx2 = repo[node2]
787 ctx2 = repo[node2]
788 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
788 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
789 tempnode2 = node2
789 tempnode2 = node2
790 try:
790 try:
791 if node2 is not None:
791 if node2 is not None:
792 tempnode2 = ctx2.substate[subpath][1]
792 tempnode2 = ctx2.substate[subpath][1]
793 except KeyError:
793 except KeyError:
794 # A subrepo that existed in node1 was deleted between node1 and
794 # A subrepo that existed in node1 was deleted between node1 and
795 # node2 (inclusive). Thus, ctx2's substate won't contain that
795 # node2 (inclusive). Thus, ctx2's substate won't contain that
796 # subpath. The best we can do is to ignore it.
796 # subpath. The best we can do is to ignore it.
797 tempnode2 = None
797 tempnode2 = None
798 submatch = matchmod.narrowmatcher(subpath, match)
798 submatch = matchmod.narrowmatcher(subpath, match)
799 sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
799 sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
800 stat=stat, fp=fp, prefix=prefix)
800 stat=stat, fp=fp, prefix=prefix)
801
801
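# Usage sketch (editorial addition): summarise the changes between the
# working copy's first parent and the working directory as a diffstat:
#
#     diffordiffstat(ui, repo, patch.diffopts(ui), repo['.'].node(), None,
#                    scmutil.matchall(repo), stat=True)
#
# The same call with stat=False emits the full diff instead.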
802 class changeset_printer(object):
802 class changeset_printer(object):
803 '''show changeset information when templating not requested.'''
803 '''show changeset information when templating not requested.'''
804
804
805 def __init__(self, ui, repo, patch, diffopts, buffered):
805 def __init__(self, ui, repo, patch, diffopts, buffered):
806 self.ui = ui
806 self.ui = ui
807 self.repo = repo
807 self.repo = repo
808 self.buffered = buffered
808 self.buffered = buffered
809 self.patch = patch
809 self.patch = patch
810 self.diffopts = diffopts
810 self.diffopts = diffopts
811 self.header = {}
811 self.header = {}
812 self.hunk = {}
812 self.hunk = {}
813 self.lastheader = None
813 self.lastheader = None
814 self.footer = None
814 self.footer = None
815
815
816 def flush(self, rev):
816 def flush(self, rev):
817 if rev in self.header:
817 if rev in self.header:
818 h = self.header[rev]
818 h = self.header[rev]
819 if h != self.lastheader:
819 if h != self.lastheader:
820 self.lastheader = h
820 self.lastheader = h
821 self.ui.write(h)
821 self.ui.write(h)
822 del self.header[rev]
822 del self.header[rev]
823 if rev in self.hunk:
823 if rev in self.hunk:
824 self.ui.write(self.hunk[rev])
824 self.ui.write(self.hunk[rev])
825 del self.hunk[rev]
825 del self.hunk[rev]
826 return 1
826 return 1
827 return 0
827 return 0
828
828
829 def close(self):
829 def close(self):
830 if self.footer:
830 if self.footer:
831 self.ui.write(self.footer)
831 self.ui.write(self.footer)
832
832
833 def show(self, ctx, copies=None, matchfn=None, **props):
833 def show(self, ctx, copies=None, matchfn=None, **props):
834 if self.buffered:
834 if self.buffered:
835 self.ui.pushbuffer()
835 self.ui.pushbuffer()
836 self._show(ctx, copies, matchfn, props)
836 self._show(ctx, copies, matchfn, props)
837 self.hunk[ctx.rev()] = self.ui.popbuffer(labeled=True)
837 self.hunk[ctx.rev()] = self.ui.popbuffer(labeled=True)
838 else:
838 else:
839 self._show(ctx, copies, matchfn, props)
839 self._show(ctx, copies, matchfn, props)
840
840
841 def _show(self, ctx, copies, matchfn, props):
841 def _show(self, ctx, copies, matchfn, props):
842 '''show a single changeset or file revision'''
842 '''show a single changeset or file revision'''
843 changenode = ctx.node()
843 changenode = ctx.node()
844 rev = ctx.rev()
844 rev = ctx.rev()
845
845
846 if self.ui.quiet:
846 if self.ui.quiet:
847 self.ui.write("%d:%s\n" % (rev, short(changenode)),
847 self.ui.write("%d:%s\n" % (rev, short(changenode)),
848 label='log.node')
848 label='log.node')
849 return
849 return
850
850
851 log = self.repo.changelog
851 log = self.repo.changelog
852 date = util.datestr(ctx.date())
852 date = util.datestr(ctx.date())
853
853
854 hexfunc = self.ui.debugflag and hex or short
854 hexfunc = self.ui.debugflag and hex or short
855
855
856 parents = [(p, hexfunc(log.node(p)))
856 parents = [(p, hexfunc(log.node(p)))
857 for p in self._meaningful_parentrevs(log, rev)]
857 for p in self._meaningful_parentrevs(log, rev)]
858
858
859 # i18n: column positioning for "hg log"
859 # i18n: column positioning for "hg log"
860 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)),
860 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)),
861 label='log.changeset changeset.%s' % ctx.phasestr())
861 label='log.changeset changeset.%s' % ctx.phasestr())
862
862
863 branch = ctx.branch()
863 branch = ctx.branch()
864 # don't show the default branch name
864 # don't show the default branch name
865 if branch != 'default':
865 if branch != 'default':
866 # i18n: column positioning for "hg log"
866 # i18n: column positioning for "hg log"
867 self.ui.write(_("branch: %s\n") % branch,
867 self.ui.write(_("branch: %s\n") % branch,
868 label='log.branch')
868 label='log.branch')
869 for bookmark in self.repo.nodebookmarks(changenode):
869 for bookmark in self.repo.nodebookmarks(changenode):
870 # i18n: column positioning for "hg log"
870 # i18n: column positioning for "hg log"
871 self.ui.write(_("bookmark: %s\n") % bookmark,
871 self.ui.write(_("bookmark: %s\n") % bookmark,
872 label='log.bookmark')
872 label='log.bookmark')
873 for tag in self.repo.nodetags(changenode):
873 for tag in self.repo.nodetags(changenode):
874 # i18n: column positioning for "hg log"
874 # i18n: column positioning for "hg log"
875 self.ui.write(_("tag: %s\n") % tag,
875 self.ui.write(_("tag: %s\n") % tag,
876 label='log.tag')
876 label='log.tag')
877 if self.ui.debugflag and ctx.phase():
877 if self.ui.debugflag and ctx.phase():
878 # i18n: column positioning for "hg log"
878 # i18n: column positioning for "hg log"
879 self.ui.write(_("phase: %s\n") % _(ctx.phasestr()),
879 self.ui.write(_("phase: %s\n") % _(ctx.phasestr()),
880 label='log.phase')
880 label='log.phase')
881 for parent in parents:
881 for parent in parents:
882 # i18n: column positioning for "hg log"
882 # i18n: column positioning for "hg log"
883 self.ui.write(_("parent: %d:%s\n") % parent,
883 self.ui.write(_("parent: %d:%s\n") % parent,
884 label='log.parent changeset.%s' % ctx.phasestr())
884 label='log.parent changeset.%s' % ctx.phasestr())
885
885
886 if self.ui.debugflag:
886 if self.ui.debugflag:
887 mnode = ctx.manifestnode()
887 mnode = ctx.manifestnode()
888 # i18n: column positioning for "hg log"
888 # i18n: column positioning for "hg log"
889 self.ui.write(_("manifest: %d:%s\n") %
889 self.ui.write(_("manifest: %d:%s\n") %
890 (self.repo.manifest.rev(mnode), hex(mnode)),
890 (self.repo.manifest.rev(mnode), hex(mnode)),
891 label='ui.debug log.manifest')
891 label='ui.debug log.manifest')
892 # i18n: column positioning for "hg log"
892 # i18n: column positioning for "hg log"
893 self.ui.write(_("user: %s\n") % ctx.user(),
893 self.ui.write(_("user: %s\n") % ctx.user(),
894 label='log.user')
894 label='log.user')
895 # i18n: column positioning for "hg log"
895 # i18n: column positioning for "hg log"
896 self.ui.write(_("date: %s\n") % date,
896 self.ui.write(_("date: %s\n") % date,
897 label='log.date')
897 label='log.date')
898
898
899 if self.ui.debugflag:
899 if self.ui.debugflag:
900 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
900 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
901 for key, value in zip([# i18n: column positioning for "hg log"
901 for key, value in zip([# i18n: column positioning for "hg log"
902 _("files:"),
902 _("files:"),
903 # i18n: column positioning for "hg log"
903 # i18n: column positioning for "hg log"
904 _("files+:"),
904 _("files+:"),
905 # i18n: column positioning for "hg log"
905 # i18n: column positioning for "hg log"
906 _("files-:")], files):
906 _("files-:")], files):
907 if value:
907 if value:
908 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
908 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
909 label='ui.debug log.files')
909 label='ui.debug log.files')
910 elif ctx.files() and self.ui.verbose:
910 elif ctx.files() and self.ui.verbose:
911 # i18n: column positioning for "hg log"
911 # i18n: column positioning for "hg log"
912 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
912 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
913 label='ui.note log.files')
913 label='ui.note log.files')
914 if copies and self.ui.verbose:
914 if copies and self.ui.verbose:
915 copies = ['%s (%s)' % c for c in copies]
915 copies = ['%s (%s)' % c for c in copies]
916 # i18n: column positioning for "hg log"
916 # i18n: column positioning for "hg log"
917 self.ui.write(_("copies: %s\n") % ' '.join(copies),
917 self.ui.write(_("copies: %s\n") % ' '.join(copies),
918 label='ui.note log.copies')
918 label='ui.note log.copies')
919
919
920 extra = ctx.extra()
920 extra = ctx.extra()
921 if extra and self.ui.debugflag:
921 if extra and self.ui.debugflag:
922 for key, value in sorted(extra.items()):
922 for key, value in sorted(extra.items()):
923 # i18n: column positioning for "hg log"
923 # i18n: column positioning for "hg log"
924 self.ui.write(_("extra: %s=%s\n")
924 self.ui.write(_("extra: %s=%s\n")
925 % (key, value.encode('string_escape')),
925 % (key, value.encode('string_escape')),
926 label='ui.debug log.extra')
926 label='ui.debug log.extra')
927
927
928 description = ctx.description().strip()
928 description = ctx.description().strip()
929 if description:
929 if description:
930 if self.ui.verbose:
930 if self.ui.verbose:
931 self.ui.write(_("description:\n"),
931 self.ui.write(_("description:\n"),
932 label='ui.note log.description')
932 label='ui.note log.description')
933 self.ui.write(description,
933 self.ui.write(description,
934 label='ui.note log.description')
934 label='ui.note log.description')
935 self.ui.write("\n\n")
935 self.ui.write("\n\n")
936 else:
936 else:
937 # i18n: column positioning for "hg log"
937 # i18n: column positioning for "hg log"
938 self.ui.write(_("summary: %s\n") %
938 self.ui.write(_("summary: %s\n") %
939 description.splitlines()[0],
939 description.splitlines()[0],
940 label='log.summary')
940 label='log.summary')
941 self.ui.write("\n")
941 self.ui.write("\n")
942
942
943 self.showpatch(changenode, matchfn)
943 self.showpatch(changenode, matchfn)
944
944
945 def showpatch(self, node, matchfn):
945 def showpatch(self, node, matchfn):
946 if not matchfn:
946 if not matchfn:
947 matchfn = self.patch
947 matchfn = self.patch
948 if matchfn:
948 if matchfn:
949 stat = self.diffopts.get('stat')
949 stat = self.diffopts.get('stat')
950 diff = self.diffopts.get('patch')
950 diff = self.diffopts.get('patch')
951 diffopts = patch.diffopts(self.ui, self.diffopts)
951 diffopts = patch.diffopts(self.ui, self.diffopts)
952 prev = self.repo.changelog.parents(node)[0]
952 prev = self.repo.changelog.parents(node)[0]
953 if stat:
953 if stat:
954 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
954 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
955 match=matchfn, stat=True)
955 match=matchfn, stat=True)
956 if diff:
956 if diff:
957 if stat:
957 if stat:
958 self.ui.write("\n")
958 self.ui.write("\n")
959 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
959 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
960 match=matchfn, stat=False)
960 match=matchfn, stat=False)
961 self.ui.write("\n")
961 self.ui.write("\n")
962
962
963 def _meaningful_parentrevs(self, log, rev):
963 def _meaningful_parentrevs(self, log, rev):
964 """Return list of meaningful (or all if debug) parentrevs for rev.
964 """Return list of meaningful (or all if debug) parentrevs for rev.
965
965
966 For merges (two non-nullrev revisions) both parents are meaningful.
966 For merges (two non-nullrev revisions) both parents are meaningful.
967 Otherwise the first parent revision is considered meaningful if it
967 Otherwise the first parent revision is considered meaningful if it
968 is not the preceding revision.
968 is not the preceding revision.
969 """
969 """
970 parents = log.parentrevs(rev)
970 parents = log.parentrevs(rev)
971 if not self.ui.debugflag and parents[1] == nullrev:
971 if not self.ui.debugflag and parents[1] == nullrev:
972 if parents[0] >= rev - 1:
972 if parents[0] >= rev - 1:
973 parents = []
973 parents = []
974 else:
974 else:
975 parents = [parents[0]]
975 parents = [parents[0]]
976 return parents
976 return parents
977
977
978
978
979 class changeset_templater(changeset_printer):
979 class changeset_templater(changeset_printer):
980 '''format changeset information.'''
980 '''format changeset information.'''
981
981
982 def __init__(self, ui, repo, patch, diffopts, tmpl, mapfile, buffered):
982 def __init__(self, ui, repo, patch, diffopts, tmpl, mapfile, buffered):
983 changeset_printer.__init__(self, ui, repo, patch, diffopts, buffered)
983 changeset_printer.__init__(self, ui, repo, patch, diffopts, buffered)
984 formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
984 formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
985 defaulttempl = {
985 defaulttempl = {
986 'parent': '{rev}:{node|formatnode} ',
986 'parent': '{rev}:{node|formatnode} ',
987 'manifest': '{rev}:{node|formatnode}',
987 'manifest': '{rev}:{node|formatnode}',
988 'file_copy': '{name} ({source})',
988 'file_copy': '{name} ({source})',
989 'extra': '{key}={value|stringescape}'
989 'extra': '{key}={value|stringescape}'
990 }
990 }
991 # filecopy is preserved for compatibility reasons
991 # filecopy is preserved for compatibility reasons
992 defaulttempl['filecopy'] = defaulttempl['file_copy']
992 defaulttempl['filecopy'] = defaulttempl['file_copy']
993 self.t = templater.templater(mapfile, {'formatnode': formatnode},
993 self.t = templater.templater(mapfile, {'formatnode': formatnode},
994 cache=defaulttempl)
994 cache=defaulttempl)
995 if tmpl:
995 if tmpl:
996 self.t.cache['changeset'] = tmpl
996 self.t.cache['changeset'] = tmpl
997
997
998 self.cache = {}
998 self.cache = {}
999
999
1000 def _meaningful_parentrevs(self, ctx):
1000 def _meaningful_parentrevs(self, ctx):
1001 """Return list of meaningful (or all if debug) parentrevs for rev.
1001 """Return list of meaningful (or all if debug) parentrevs for rev.
1002 """
1002 """
1003 parents = ctx.parents()
1003 parents = ctx.parents()
1004 if len(parents) > 1:
1004 if len(parents) > 1:
1005 return parents
1005 return parents
1006 if self.ui.debugflag:
1006 if self.ui.debugflag:
1007 return [parents[0], self.repo['null']]
1007 return [parents[0], self.repo['null']]
1008 if parents[0].rev() >= ctx.rev() - 1:
1008 if parents[0].rev() >= ctx.rev() - 1:
1009 return []
1009 return []
1010 return parents
1010 return parents
1011
1011
1012 def _show(self, ctx, copies, matchfn, props):
1012 def _show(self, ctx, copies, matchfn, props):
1013 '''show a single changeset or file revision'''
1013 '''show a single changeset or file revision'''
1014
1014
1015 showlist = templatekw.showlist
1015 showlist = templatekw.showlist
1016
1016
1017 # showparents() behaviour depends on ui trace level which
1017 # showparents() behaviour depends on ui trace level which
1018 # causes unexpected behaviours at templating level and makes
1018 # causes unexpected behaviours at templating level and makes
1019 # it harder to extract it into a standalone function. Its
1019 # it harder to extract it into a standalone function. Its
1020 # behaviour cannot be changed so leave it here for now.
1020 # behaviour cannot be changed so leave it here for now.
1021 def showparents(**args):
1021 def showparents(**args):
1022 ctx = args['ctx']
1022 ctx = args['ctx']
1023 parents = [[('rev', p.rev()), ('node', p.hex())]
1023 parents = [[('rev', p.rev()), ('node', p.hex())]
1024 for p in self._meaningful_parentrevs(ctx)]
1024 for p in self._meaningful_parentrevs(ctx)]
1025 return showlist('parent', parents, **args)
1025 return showlist('parent', parents, **args)
1026
1026
1027 props = props.copy()
1027 props = props.copy()
1028 props.update(templatekw.keywords)
1028 props.update(templatekw.keywords)
1029 props['parents'] = showparents
1029 props['parents'] = showparents
1030 props['templ'] = self.t
1030 props['templ'] = self.t
1031 props['ctx'] = ctx
1031 props['ctx'] = ctx
1032 props['repo'] = self.repo
1032 props['repo'] = self.repo
1033 props['revcache'] = {'copies': copies}
1033 props['revcache'] = {'copies': copies}
1034 props['cache'] = self.cache
1034 props['cache'] = self.cache
1035
1035
1036 # find correct templates for current mode
1036 # find correct templates for current mode
1037
1037
1038 tmplmodes = [
1038 tmplmodes = [
1039 (True, None),
1039 (True, None),
1040 (self.ui.verbose, 'verbose'),
1040 (self.ui.verbose, 'verbose'),
1041 (self.ui.quiet, 'quiet'),
1041 (self.ui.quiet, 'quiet'),
1042 (self.ui.debugflag, 'debug'),
1042 (self.ui.debugflag, 'debug'),
1043 ]
1043 ]
1044
1044
1045 types = {'header': '', 'footer':'', 'changeset': 'changeset'}
1045 types = {'header': '', 'footer':'', 'changeset': 'changeset'}
1046 for mode, postfix in tmplmodes:
1046 for mode, postfix in tmplmodes:
1047 for type in types:
1047 for type in types:
1048 cur = postfix and ('%s_%s' % (type, postfix)) or type
1048 cur = postfix and ('%s_%s' % (type, postfix)) or type
1049 if mode and cur in self.t:
1049 if mode and cur in self.t:
1050 types[type] = cur
1050 types[type] = cur
1051
1051
1052 try:
1052 try:
1053
1053
1054 # write header
1054 # write header
1055 if types['header']:
1055 if types['header']:
1056 h = templater.stringify(self.t(types['header'], **props))
1056 h = templater.stringify(self.t(types['header'], **props))
1057 if self.buffered:
1057 if self.buffered:
1058 self.header[ctx.rev()] = h
1058 self.header[ctx.rev()] = h
1059 else:
1059 else:
1060 if self.lastheader != h:
1060 if self.lastheader != h:
1061 self.lastheader = h
1061 self.lastheader = h
1062 self.ui.write(h)
1062 self.ui.write(h)
1063
1063
1064 # write changeset metadata, then patch if requested
1064 # write changeset metadata, then patch if requested
1065 key = types['changeset']
1065 key = types['changeset']
1066 self.ui.write(templater.stringify(self.t(key, **props)))
1066 self.ui.write(templater.stringify(self.t(key, **props)))
1067 self.showpatch(ctx.node(), matchfn)
1067 self.showpatch(ctx.node(), matchfn)
1068
1068
1069 if types['footer']:
1069 if types['footer']:
1070 if not self.footer:
1070 if not self.footer:
1071 self.footer = templater.stringify(self.t(types['footer'],
1071 self.footer = templater.stringify(self.t(types['footer'],
1072 **props))
1072 **props))
1073
1073
1074 except KeyError, inst:
1074 except KeyError, inst:
1075 msg = _("%s: no key named '%s'")
1075 msg = _("%s: no key named '%s'")
1076 raise util.Abort(msg % (self.t.mapfile, inst.args[0]))
1076 raise util.Abort(msg % (self.t.mapfile, inst.args[0]))
1077 except SyntaxError, inst:
1077 except SyntaxError, inst:
1078 raise util.Abort('%s: %s' % (self.t.mapfile, inst.args[0]))
1078 raise util.Abort('%s: %s' % (self.t.mapfile, inst.args[0]))
1079
1079
1080 def gettemplate(ui, tmpl, style):
1080 def gettemplate(ui, tmpl, style):
1081 """
1081 """
1082 Find the template matching the given template spec or style.
1082 Find the template matching the given template spec or style.
1083 """
1083 """
1084
1084
1085 # ui settings
1085 # ui settings
1086 if not tmpl and not style:
1086 if not tmpl and not style:
1087 tmpl = ui.config('ui', 'logtemplate')
1087 tmpl = ui.config('ui', 'logtemplate')
1088 if tmpl:
1088 if tmpl:
1089 try:
1089 try:
1090 tmpl = templater.parsestring(tmpl)
1090 tmpl = templater.parsestring(tmpl)
1091 except SyntaxError:
1091 except SyntaxError:
1092 tmpl = templater.parsestring(tmpl, quoted=False)
1092 tmpl = templater.parsestring(tmpl, quoted=False)
1093 return tmpl, None
1093 return tmpl, None
1094 else:
1094 else:
1095 style = util.expandpath(ui.config('ui', 'style', ''))
1095 style = util.expandpath(ui.config('ui', 'style', ''))
1096
1096
1097 if style:
1097 if style:
1098 mapfile = style
1098 mapfile = style
1099 if not os.path.split(mapfile)[0]:
1099 if not os.path.split(mapfile)[0]:
1100 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1100 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1101 or templater.templatepath(mapfile))
1101 or templater.templatepath(mapfile))
1102 if mapname:
1102 if mapname:
1103 mapfile = mapname
1103 mapfile = mapname
1104 return None, mapfile
1104 return None, mapfile
1105
1105
1106 if not tmpl:
1106 if not tmpl:
1107 return None, None
1107 return None, None
1108
1108
1109 # looks like a literal template?
1109 # looks like a literal template?
1110 if '{' in tmpl:
1110 if '{' in tmpl:
1111 return tmpl, None
1111 return tmpl, None
1112
1112
1113 # perhaps a stock style?
1113 # perhaps a stock style?
1114 if not os.path.split(tmpl)[0]:
1114 if not os.path.split(tmpl)[0]:
1115 mapname = (templater.templatepath('map-cmdline.' + tmpl)
1115 mapname = (templater.templatepath('map-cmdline.' + tmpl)
1116 or templater.templatepath(tmpl))
1116 or templater.templatepath(tmpl))
1117 if mapname and os.path.isfile(mapname):
1117 if mapname and os.path.isfile(mapname):
1118 return None, mapname
1118 return None, mapname
1119
1119
1120 # perhaps it's a reference to [templates]
1120 # perhaps it's a reference to [templates]
1121 t = ui.config('templates', tmpl)
1121 t = ui.config('templates', tmpl)
1122 if t:
1122 if t:
1123 try:
1123 try:
1124 tmpl = templater.parsestring(t)
1124 tmpl = templater.parsestring(t)
1125 except SyntaxError:
1125 except SyntaxError:
1126 tmpl = templater.parsestring(t, quoted=False)
1126 tmpl = templater.parsestring(t, quoted=False)
1127 return tmpl, None
1127 return tmpl, None
1128
1128
1129 # perhaps it's a path to a map or a template
1129 # perhaps it's a path to a map or a template
1130 if ('/' in tmpl or '\\' in tmpl) and os.path.isfile(tmpl):
1130 if ('/' in tmpl or '\\' in tmpl) and os.path.isfile(tmpl):
1131 # is it a mapfile for a style?
1131 # is it a mapfile for a style?
1132 if os.path.basename(tmpl).startswith("map-"):
1132 if os.path.basename(tmpl).startswith("map-"):
1133 return None, os.path.realpath(tmpl)
1133 return None, os.path.realpath(tmpl)
1134 tmpl = open(tmpl).read()
1134 tmpl = open(tmpl).read()
1135 return tmpl, None
1135 return tmpl, None
1136
1136
1137 # constant string?
1137 # constant string?
1138 return tmpl, None
1138 return tmpl, None
1139
1139
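# Illustrative return values (editorial addition; the 'compact' style ships
# with Mercurial, the literal template is just an example):
#
#     gettemplate(ui, '{rev}:{node|short}\n', None)  # -> (template, None)
#     gettemplate(ui, None, 'compact')   # -> (None, path to map-cmdline.compact)
#     gettemplate(ui, None, None)        # -> falls back to [ui] logtemplate/style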
1140 def show_changeset(ui, repo, opts, buffered=False):
1140 def show_changeset(ui, repo, opts, buffered=False):
1141 """show one changeset using template or regular display.
1141 """show one changeset using template or regular display.
1142
1142
1143 Display format will be the first non-empty hit of:
1143 Display format will be the first non-empty hit of:
1144 1. option 'template'
1144 1. option 'template'
1145 2. option 'style'
1145 2. option 'style'
1146 3. [ui] setting 'logtemplate'
1146 3. [ui] setting 'logtemplate'
1147 4. [ui] setting 'style'
1147 4. [ui] setting 'style'
1148 If all of these values are either unset or the empty string,
1148 If all of these values are either unset or the empty string,
1149 regular display via changeset_printer() is done.
1149 regular display via changeset_printer() is done.
1150 """
1150 """
1151 # options
1151 # options
1152 patch = None
1152 patch = None
1153 if opts.get('patch') or opts.get('stat'):
1153 if opts.get('patch') or opts.get('stat'):
1154 patch = scmutil.matchall(repo)
1154 patch = scmutil.matchall(repo)
1155
1155
1156 tmpl, mapfile = gettemplate(ui, opts.get('template'), opts.get('style'))
1156 tmpl, mapfile = gettemplate(ui, opts.get('template'), opts.get('style'))
1157
1157
1158 if not tmpl and not mapfile:
1158 if not tmpl and not mapfile:
1159 return changeset_printer(ui, repo, patch, opts, buffered)
1159 return changeset_printer(ui, repo, patch, opts, buffered)
1160
1160
1161 try:
1161 try:
1162 t = changeset_templater(ui, repo, patch, opts, tmpl, mapfile, buffered)
1162 t = changeset_templater(ui, repo, patch, opts, tmpl, mapfile, buffered)
1163 except SyntaxError, inst:
1163 except SyntaxError, inst:
1164 raise util.Abort(inst.args[0])
1164 raise util.Abort(inst.args[0])
1165 return t
1165 return t
1166
1166
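# Usage sketch (editorial addition, based on the printer classes above):
# log-style commands build one displayer and feed it every changectx:
#
#     displayer = show_changeset(ui, repo, opts, buffered=False)
#     for rev in revs:
#         displayer.show(repo[rev])
#     displayer.close()
#
# When buffered=True, callers additionally call displayer.flush(rev) to emit
# the buffered output for each revision.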
1167 def showmarker(ui, marker):
1167 def showmarker(ui, marker):
1168 """utility function to display obsolescence marker in a readable way
1168 """utility function to display obsolescence marker in a readable way
1169
1169
1170 To be used by debug functions."""
1170 To be used by debug functions."""
1171 ui.write(hex(marker.precnode()))
1171 ui.write(hex(marker.precnode()))
1172 for repl in marker.succnodes():
1172 for repl in marker.succnodes():
1173 ui.write(' ')
1173 ui.write(' ')
1174 ui.write(hex(repl))
1174 ui.write(hex(repl))
1175 ui.write(' %X ' % marker._data[2])
1175 ui.write(' %X ' % marker._data[2])
1176 ui.write('{%s}' % (', '.join('%r: %r' % t for t in
1176 ui.write('{%s}' % (', '.join('%r: %r' % t for t in
1177 sorted(marker.metadata().items()))))
1177 sorted(marker.metadata().items()))))
1178 ui.write('\n')
1178 ui.write('\n')
1179
1179
1180 def finddate(ui, repo, date):
1180 def finddate(ui, repo, date):
1181 """Find the tipmost changeset that matches the given date spec"""
1181 """Find the tipmost changeset that matches the given date spec"""
1182
1182
1183 df = util.matchdate(date)
1183 df = util.matchdate(date)
1184 m = scmutil.matchall(repo)
1184 m = scmutil.matchall(repo)
1185 results = {}
1185 results = {}
1186
1186
1187 def prep(ctx, fns):
1187 def prep(ctx, fns):
1188 d = ctx.date()
1188 d = ctx.date()
1189 if df(d[0]):
1189 if df(d[0]):
1190 results[ctx.rev()] = d
1190 results[ctx.rev()] = d
1191
1191
1192 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
1192 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
1193 rev = ctx.rev()
1193 rev = ctx.rev()
1194 if rev in results:
1194 if rev in results:
1195 ui.status(_("found revision %s from %s\n") %
1195 ui.status(_("found revision %s from %s\n") %
1196 (rev, util.datestr(results[rev])))
1196 (rev, util.datestr(results[rev])))
1197 return str(rev)
1197 return str(rev)
1198
1198
1199 raise util.Abort(_("revision matching date not found"))
1199 raise util.Abort(_("revision matching date not found"))
1200
1200
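# Example (editorial addition; the date spec is illustrative): commands that
# accept --date resolve it to a revision string with finddate():
#
#     rev = finddate(ui, repo, '<2014-05-01')  # tipmost rev matching the spec
#
# A util.Abort is raised when no changeset matches the date.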
1201 def increasingwindows(windowsize=8, sizelimit=512):
1201 def increasingwindows(windowsize=8, sizelimit=512):
1202 while True:
1202 while True:
1203 yield windowsize
1203 yield windowsize
1204 if windowsize < sizelimit:
1204 if windowsize < sizelimit:
1205 windowsize *= 2
1205 windowsize *= 2
1206
1206
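# Editorial note: increasingwindows() yields 8, 16, 32, ... up to the size
# limit and then repeats the limit forever, so revision walkers start with
# small batches and settle at 512:
#
#     sizes = increasingwindows()
#     [next(sizes) for _ in xrange(8)]  # -> [8, 16, 32, 64, 128, 256, 512, 512]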
1207 class FileWalkError(Exception):
1207 class FileWalkError(Exception):
1208 pass
1208 pass
1209
1209
1210 def walkfilerevs(repo, match, follow, revs, fncache):
1210 def walkfilerevs(repo, match, follow, revs, fncache):
1211 '''Walks the file history for the matched files.
1211 '''Walks the file history for the matched files.
1212
1212
1213 Returns the changeset revs that are involved in the file history.
1213 Returns the changeset revs that are involved in the file history.
1214
1214
1215 Throws FileWalkError if the file history can't be walked using
1215 Throws FileWalkError if the file history can't be walked using
1216 filelogs alone.
1216 filelogs alone.
1217 '''
1217 '''
1218 wanted = set()
1218 wanted = set()
1219 copies = []
1219 copies = []
1220 minrev, maxrev = min(revs), max(revs)
1220 minrev, maxrev = min(revs), max(revs)
1221 def filerevgen(filelog, last):
1221 def filerevgen(filelog, last):
1222 """
1222 """
1223 Only files, no patterns. Check the history of each file.
1223 Only files, no patterns. Check the history of each file.
1224
1224
1225 Examines filelog entries within the minrev, maxrev linkrev range.
1225 Examines filelog entries within the minrev, maxrev linkrev range.
1226 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
1226 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
1227 tuples in backwards order
1227 tuples in backwards order
1228 """
1228 """
1229 cl_count = len(repo)
1229 cl_count = len(repo)
1230 revs = []
1230 revs = []
1231 for j in xrange(0, last + 1):
1231 for j in xrange(0, last + 1):
1232 linkrev = filelog.linkrev(j)
1232 linkrev = filelog.linkrev(j)
1233 if linkrev < minrev:
1233 if linkrev < minrev:
1234 continue
1234 continue
1235 # only yield rev for which we have the changelog, it can
1235 # only yield rev for which we have the changelog, it can
1236 # happen while doing "hg log" during a pull or commit
1236 # happen while doing "hg log" during a pull or commit
1237 if linkrev >= cl_count:
1237 if linkrev >= cl_count:
1238 break
1238 break
1239
1239
1240 parentlinkrevs = []
1240 parentlinkrevs = []
1241 for p in filelog.parentrevs(j):
1241 for p in filelog.parentrevs(j):
1242 if p != nullrev:
1242 if p != nullrev:
1243 parentlinkrevs.append(filelog.linkrev(p))
1243 parentlinkrevs.append(filelog.linkrev(p))
1244 n = filelog.node(j)
1244 n = filelog.node(j)
1245 revs.append((linkrev, parentlinkrevs,
1245 revs.append((linkrev, parentlinkrevs,
1246 follow and filelog.renamed(n)))
1246 follow and filelog.renamed(n)))
1247
1247
1248 return reversed(revs)
1248 return reversed(revs)
1249 def iterfiles():
1249 def iterfiles():
1250 pctx = repo['.']
1250 pctx = repo['.']
1251 for filename in match.files():
1251 for filename in match.files():
1252 if follow:
1252 if follow:
1253 if filename not in pctx:
1253 if filename not in pctx:
1254 raise util.Abort(_('cannot follow file not in parent '
1254 raise util.Abort(_('cannot follow file not in parent '
1255 'revision: "%s"') % filename)
1255 'revision: "%s"') % filename)
1256 yield filename, pctx[filename].filenode()
1256 yield filename, pctx[filename].filenode()
1257 else:
1257 else:
1258 yield filename, None
1258 yield filename, None
1259 for filename_node in copies:
1259 for filename_node in copies:
1260 yield filename_node
1260 yield filename_node
1261
1261
1262 for file_, node in iterfiles():
1262 for file_, node in iterfiles():
1263 filelog = repo.file(file_)
1263 filelog = repo.file(file_)
1264 if not len(filelog):
1264 if not len(filelog):
1265 if node is None:
1265 if node is None:
1266 # A zero count may be a directory or deleted file, so
1266 # A zero count may be a directory or deleted file, so
1267 # try to find matching entries on the slow path.
1267 # try to find matching entries on the slow path.
1268 if follow:
1268 if follow:
1269 raise util.Abort(
1269 raise util.Abort(
1270 _('cannot follow nonexistent file: "%s"') % file_)
1270 _('cannot follow nonexistent file: "%s"') % file_)
1271 raise FileWalkError("Cannot walk via filelog")
1271 raise FileWalkError("Cannot walk via filelog")
1272 else:
1272 else:
1273 continue
1273 continue
1274
1274
1275 if node is None:
1275 if node is None:
1276 last = len(filelog) - 1
1276 last = len(filelog) - 1
1277 else:
1277 else:
1278 last = filelog.rev(node)
1278 last = filelog.rev(node)
1279
1279
1280
1280
1281 # keep track of all ancestors of the file
1281 # keep track of all ancestors of the file
1282 ancestors = set([filelog.linkrev(last)])
1282 ancestors = set([filelog.linkrev(last)])
1283
1283
1284 # iterate from latest to oldest revision
1284 # iterate from latest to oldest revision
1285 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
1285 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
1286 if not follow:
1286 if not follow:
1287 if rev > maxrev:
1287 if rev > maxrev:
1288 continue
1288 continue
1289 else:
1289 else:
1290 # Note that last might not be the first interesting
1290 # Note that last might not be the first interesting
1291 # rev to us:
1291 # rev to us:
1292 # if the file has been changed after maxrev, we'll
1292 # if the file has been changed after maxrev, we'll
1293 # have linkrev(last) > maxrev, and we still need
1293 # have linkrev(last) > maxrev, and we still need
1294 # to explore the file graph
1294 # to explore the file graph
1295 if rev not in ancestors:
1295 if rev not in ancestors:
1296 continue
1296 continue
1297 # XXX insert 1327 fix here
1297 # XXX insert 1327 fix here
1298 if flparentlinkrevs:
1298 if flparentlinkrevs:
1299 ancestors.update(flparentlinkrevs)
1299 ancestors.update(flparentlinkrevs)
1300
1300
1301 fncache.setdefault(rev, []).append(file_)
1301 fncache.setdefault(rev, []).append(file_)
1302 wanted.add(rev)
1302 wanted.add(rev)
1303 if copied:
1303 if copied:
1304 copies.append(copied)
1304 copies.append(copied)
1305
1305
1306 return wanted
1306 return wanted
1307
1307
1308 def walkchangerevs(repo, match, opts, prepare):
1308 def walkchangerevs(repo, match, opts, prepare):
1309 '''Iterate over files and the revs in which they changed.
1309 '''Iterate over files and the revs in which they changed.
1310
1310
1311 Callers most commonly need to iterate backwards over the history
1311 Callers most commonly need to iterate backwards over the history
1312 in which they are interested. Doing so has awful (quadratic-looking)
1312 in which they are interested. Doing so has awful (quadratic-looking)
1313 performance, so we use iterators in a "windowed" way.
1313 performance, so we use iterators in a "windowed" way.
1314
1314
1315 We walk a window of revisions in the desired order. Within the
1315 We walk a window of revisions in the desired order. Within the
1316 window, we first walk forwards to gather data, then in the desired
1316 window, we first walk forwards to gather data, then in the desired
1317 order (usually backwards) to display it.
1317 order (usually backwards) to display it.
1318
1318
1319 This function returns an iterator yielding contexts. Before
1319 This function returns an iterator yielding contexts. Before
1320 yielding each context, the iterator will first call the prepare
1320 yielding each context, the iterator will first call the prepare
1321 function on each context in the window in forward order.'''
1321 function on each context in the window in forward order.'''
1322
1322
1323 follow = opts.get('follow') or opts.get('follow_first')
1323 follow = opts.get('follow') or opts.get('follow_first')
1324
1324
1325 if opts.get('rev'):
1325 if opts.get('rev'):
1326 revs = scmutil.revrange(repo, opts.get('rev'))
1326 revs = scmutil.revrange(repo, opts.get('rev'))
1327 elif follow:
1327 elif follow:
1328 revs = repo.revs('reverse(:.)')
1328 revs = repo.revs('reverse(:.)')
1329 else:
1329 else:
1330 revs = revset.spanset(repo)
1330 revs = revset.spanset(repo)
1331 revs.reverse()
1331 revs.reverse()
1332 if not revs:
1332 if not revs:
1333 return []
1333 return []
1334 wanted = set()
1334 wanted = set()
1335 slowpath = match.anypats() or (match.files() and opts.get('removed'))
1335 slowpath = match.anypats() or (match.files() and opts.get('removed'))
1336 fncache = {}
1336 fncache = {}
1337 change = repo.changectx
1337 change = repo.changectx
1338
1338
1339 # First step is to fill wanted, the set of revisions that we want to yield.
1339 # First step is to fill wanted, the set of revisions that we want to yield.
1340 # When it does not induce extra cost, we also fill fncache for revisions in
1340 # When it does not induce extra cost, we also fill fncache for revisions in
1341 # wanted: a cache of filenames that were changed (ctx.files()) and that
1341 # wanted: a cache of filenames that were changed (ctx.files()) and that
1342 # match the file filtering conditions.
1342 # match the file filtering conditions.
1343
1343
1344 if not slowpath and not match.files():
1344 if not slowpath and not match.files():
1345 # No files, no patterns. Display all revs.
1345 # No files, no patterns. Display all revs.
1346 wanted = revs
1346 wanted = revs
1347
1347
1348 if not slowpath and match.files():
1348 if not slowpath and match.files():
1349 # We only have to read through the filelog to find wanted revisions
1349 # We only have to read through the filelog to find wanted revisions
1350
1350
1351 try:
1351 try:
1352 wanted = walkfilerevs(repo, match, follow, revs, fncache)
1352 wanted = walkfilerevs(repo, match, follow, revs, fncache)
1353 except FileWalkError:
1353 except FileWalkError:
1354 slowpath = True
1354 slowpath = True
1355
1355
1356 # We decided to fall back to the slowpath because at least one
1356 # We decided to fall back to the slowpath because at least one
1357 # of the paths was not a file. Check to see if at least one of them
1357 # of the paths was not a file. Check to see if at least one of them
1358 # existed in history, otherwise simply return
1358 # existed in history, otherwise simply return
1359 for path in match.files():
1359 for path in match.files():
1360 if path == '.' or path in repo.store:
1360 if path == '.' or path in repo.store:
1361 break
1361 break
1362 else:
1362 else:
1363 return []
1363 return []
1364
1364
1365 if slowpath:
1365 if slowpath:
1366 # We have to read the changelog to match filenames against
1366 # We have to read the changelog to match filenames against
1367 # changed files
1367 # changed files
1368
1368
1369 if follow:
1369 if follow:
1370 raise util.Abort(_('can only follow copies/renames for explicit '
1370 raise util.Abort(_('can only follow copies/renames for explicit '
1371 'filenames'))
1371 'filenames'))
1372
1372
1373 # The slow path checks files modified in every changeset.
1373 # The slow path checks files modified in every changeset.
1374 # This is really slow on large repos, so compute the set lazily.
1374 # This is really slow on large repos, so compute the set lazily.
1375 class lazywantedset(object):
1375 class lazywantedset(object):
1376 def __init__(self):
1376 def __init__(self):
1377 self.set = set()
1377 self.set = set()
1378 self.revs = set(revs)
1378 self.revs = set(revs)
1379
1379
1380 # No need to worry about locality here because it will be accessed
1380 # No need to worry about locality here because it will be accessed
1381 # in the same order as the increasing window below.
1381 # in the same order as the increasing window below.
1382 def __contains__(self, value):
1382 def __contains__(self, value):
1383 if value in self.set:
1383 if value in self.set:
1384 return True
1384 return True
1385 elif value not in self.revs:
1385 elif value not in self.revs:
1386 return False
1386 return False
1387 else:
1387 else:
1388 self.revs.discard(value)
1388 self.revs.discard(value)
1389 ctx = change(value)
1389 ctx = change(value)
1390 matches = filter(match, ctx.files())
1390 matches = filter(match, ctx.files())
1391 if matches:
1391 if matches:
1392 fncache[value] = matches
1392 fncache[value] = matches
1393 self.set.add(value)
1393 self.set.add(value)
1394 return True
1394 return True
1395 return False
1395 return False
1396
1396
1397 def discard(self, value):
1397 def discard(self, value):
1398 self.revs.discard(value)
1398 self.revs.discard(value)
1399 self.set.discard(value)
1399 self.set.discard(value)
1400
1400
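# Note: lazywantedset answers "rev in wanted" on demand. The first membership
# test for a revision reads that changeset via change(rev), filters
# ctx.files() through the matcher, records any matches in fncache and caches
# the verdict in self.set, so later windows never re-read the same changeset.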
1401 wanted = lazywantedset()
1401 wanted = lazywantedset()
1402
1402
1403 class followfilter(object):
1403 class followfilter(object):
1404 def __init__(self, onlyfirst=False):
1404 def __init__(self, onlyfirst=False):
1405 self.startrev = nullrev
1405 self.startrev = nullrev
1406 self.roots = set()
1406 self.roots = set()
1407 self.onlyfirst = onlyfirst
1407 self.onlyfirst = onlyfirst
1408
1408
1409 def match(self, rev):
1409 def match(self, rev):
1410 def realparents(rev):
1410 def realparents(rev):
1411 if self.onlyfirst:
1411 if self.onlyfirst:
1412 return repo.changelog.parentrevs(rev)[0:1]
1412 return repo.changelog.parentrevs(rev)[0:1]
1413 else:
1413 else:
1414 return filter(lambda x: x != nullrev,
1414 return filter(lambda x: x != nullrev,
1415 repo.changelog.parentrevs(rev))
1415 repo.changelog.parentrevs(rev))
1416
1416
1417 if self.startrev == nullrev:
1417 if self.startrev == nullrev:
1418 self.startrev = rev
1418 self.startrev = rev
1419 return True
1419 return True
1420
1420
1421 if rev > self.startrev:
1421 if rev > self.startrev:
1422 # forward: all descendants
1422 # forward: all descendants
1423 if not self.roots:
1423 if not self.roots:
1424 self.roots.add(self.startrev)
1424 self.roots.add(self.startrev)
1425 for parent in realparents(rev):
1425 for parent in realparents(rev):
1426 if parent in self.roots:
1426 if parent in self.roots:
1427 self.roots.add(rev)
1427 self.roots.add(rev)
1428 return True
1428 return True
1429 else:
1429 else:
1430 # backwards: all parents
1430 # backwards: all parents
1431 if not self.roots:
1431 if not self.roots:
1432 self.roots.update(realparents(self.startrev))
1432 self.roots.update(realparents(self.startrev))
1433 if rev in self.roots:
1433 if rev in self.roots:
1434 self.roots.remove(rev)
1434 self.roots.remove(rev)
1435 self.roots.update(realparents(rev))
1435 self.roots.update(realparents(rev))
1436 return True
1436 return True
1437
1437
1438 return False
1438 return False
1439
1439
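# Note: followfilter.match() is direction-sensitive. The first revision seen
# becomes startrev; afterwards a revision above startrev is accepted when one
# of its (real) parents is already in self.roots (descendant walk), while a
# revision below startrev is accepted when it is itself in self.roots, which
# is then replaced by its parents (ancestor walk).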
1440 # it might be worthwhile to do this in the iterator if the rev range
1440 # it might be worthwhile to do this in the iterator if the rev range
1441 # is descending and the prune args are all within that range
1441 # is descending and the prune args are all within that range
1442 for rev in opts.get('prune', ()):
1442 for rev in opts.get('prune', ()):
1443 rev = repo[rev].rev()
1443 rev = repo[rev].rev()
1444 ff = followfilter()
1444 ff = followfilter()
1445 stop = min(revs[0], revs[-1])
1445 stop = min(revs[0], revs[-1])
1446 for x in xrange(rev, stop - 1, -1):
1446 for x in xrange(rev, stop - 1, -1):
1447 if ff.match(x):
1447 if ff.match(x):
1448 wanted = wanted - [x]
1448 wanted = wanted - [x]
1449
1449
1450 # Now that wanted is correctly initialized, we can iterate over the
1450 # Now that wanted is correctly initialized, we can iterate over the
1451 # revision range, yielding only revisions in wanted.
1451 # revision range, yielding only revisions in wanted.
1452 def iterate():
1452 def iterate():
1453 if follow and not match.files():
1453 if follow and not match.files():
1454 ff = followfilter(onlyfirst=opts.get('follow_first'))
1454 ff = followfilter(onlyfirst=opts.get('follow_first'))
1455 def want(rev):
1455 def want(rev):
1456 return ff.match(rev) and rev in wanted
1456 return ff.match(rev) and rev in wanted
1457 else:
1457 else:
1458 def want(rev):
1458 def want(rev):
1459 return rev in wanted
1459 return rev in wanted
1460
1460
1461 it = iter(revs)
1461 it = iter(revs)
1462 stopiteration = False
1462 stopiteration = False
1463 for windowsize in increasingwindows():
1463 for windowsize in increasingwindows():
1464 nrevs = []
1464 nrevs = []
1465 for i in xrange(windowsize):
1465 for i in xrange(windowsize):
1466 try:
1466 try:
1467 rev = it.next()
1467 rev = it.next()
1468 if want(rev):
1468 if want(rev):
1469 nrevs.append(rev)
1469 nrevs.append(rev)
1470 except StopIteration:
1470 except StopIteration:
1471 stopiteration = True
1471 stopiteration = True
1472 break
1472 break
1473 for rev in sorted(nrevs):
1473 for rev in sorted(nrevs):
1474 fns = fncache.get(rev)
1474 fns = fncache.get(rev)
1475 ctx = change(rev)
1475 ctx = change(rev)
1476 if not fns:
1476 if not fns:
1477 def fns_generator():
1477 def fns_generator():
1478 for f in ctx.files():
1478 for f in ctx.files():
1479 if match(f):
1479 if match(f):
1480 yield f
1480 yield f
1481 fns = fns_generator()
1481 fns = fns_generator()
1482 prepare(ctx, fns)
1482 prepare(ctx, fns)
1483 for rev in nrevs:
1483 for rev in nrevs:
1484 yield change(rev)
1484 yield change(rev)
1485
1485
1486 if stopiteration:
1486 if stopiteration:
1487 break
1487 break
1488
1488
1489 return iterate()
1489 return iterate()
1490
1490
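# Illustrative sketch (hypothetical caller, mirroring the date-matching helper
# earlier in this module): walk every revision, let the prepare callback
# record per-window data, then consume the yielded contexts.
#
#     m = scmutil.matchall(repo)
#     touched = {}
#     def prep(ctx, fns):
#         touched[ctx.rev()] = list(fns)
#     for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
#         ui.note("%d touched %r\n" % (ctx.rev(), touched.get(ctx.rev(), [])))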
1491 def _makelogfilematcher(repo, pats, followfirst):
1491 def _makelogfilematcher(repo, pats, followfirst):
1492 # When displaying a revision with --patch --follow FILE, we have
1492 # When displaying a revision with --patch --follow FILE, we have
1493 # to know which file of the revision must be diffed. With
1493 # to know which file of the revision must be diffed. With
1494 # --follow, we want the names of the ancestors of FILE in the
1494 # --follow, we want the names of the ancestors of FILE in the
1495 # revision, stored in "fcache". "fcache" is populated by
1495 # revision, stored in "fcache". "fcache" is populated by
1496 # reproducing the graph traversal already done by --follow revset
1496 # reproducing the graph traversal already done by --follow revset
1497 # and relating linkrevs to file names (which is not "correct" but
1497 # and relating linkrevs to file names (which is not "correct" but
1498 # good enough).
1498 # good enough).
1499 fcache = {}
1499 fcache = {}
1500 fcacheready = [False]
1500 fcacheready = [False]
1501 pctx = repo['.']
1501 pctx = repo['.']
1502 wctx = repo[None]
1502 wctx = repo[None]
1503
1503
1504 def populate():
1504 def populate():
1505 for fn in pats:
1505 for fn in pats:
1506 for i in ((pctx[fn],), pctx[fn].ancestors(followfirst=followfirst)):
1506 for i in ((pctx[fn],), pctx[fn].ancestors(followfirst=followfirst)):
1507 for c in i:
1507 for c in i:
1508 fcache.setdefault(c.linkrev(), set()).add(c.path())
1508 fcache.setdefault(c.linkrev(), set()).add(c.path())
1509
1509
1510 def filematcher(rev):
1510 def filematcher(rev):
1511 if not fcacheready[0]:
1511 if not fcacheready[0]:
1512 # Lazy initialization
1512 # Lazy initialization
1513 fcacheready[0] = True
1513 fcacheready[0] = True
1514 populate()
1514 populate()
1515 return scmutil.match(wctx, fcache.get(rev, []), default='path')
1515 return scmutil.match(wctx, fcache.get(rev, []), default='path')
1516
1516
1517 return filematcher
1517 return filematcher
1518
1518
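# Note: the returned filematcher is lazy. Nothing is walked until its first
# call, at which point populate() follows each pattern from the working
# directory parent through its ancestors and indexes the touched paths by
# linkrev in fcache. A hypothetical caller would do something like:
#
#     fm = _makelogfilematcher(repo, ['path/to/file'], followfirst=False)
#     m = fm(somerev)   # match object over the files known at that linkrev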
1519 def _makelogrevset(repo, pats, opts, revs):
1519 def _makelogrevset(repo, pats, opts, revs):
1520 """Return (expr, filematcher) where expr is a revset string built
1520 """Return (expr, filematcher) where expr is a revset string built
1521 from log options and file patterns or None. If --stat or --patch
1521 from log options and file patterns or None. If --stat or --patch
1522 are not passed, filematcher is None. Otherwise it is a callable
1522 are not passed, filematcher is None. Otherwise it is a callable
1523 taking a revision number and returning a match object filtering
1523 taking a revision number and returning a match object filtering
1524 the files to be detailed when displaying the revision.
1524 the files to be detailed when displaying the revision.
1525 """
1525 """
1526 opt2revset = {
1526 opt2revset = {
1527 'no_merges': ('not merge()', None),
1527 'no_merges': ('not merge()', None),
1528 'only_merges': ('merge()', None),
1528 'only_merges': ('merge()', None),
1529 '_ancestors': ('ancestors(%(val)s)', None),
1529 '_ancestors': ('ancestors(%(val)s)', None),
1530 '_fancestors': ('_firstancestors(%(val)s)', None),
1530 '_fancestors': ('_firstancestors(%(val)s)', None),
1531 '_descendants': ('descendants(%(val)s)', None),
1531 '_descendants': ('descendants(%(val)s)', None),
1532 '_fdescendants': ('_firstdescendants(%(val)s)', None),
1532 '_fdescendants': ('_firstdescendants(%(val)s)', None),
1533 '_matchfiles': ('_matchfiles(%(val)s)', None),
1533 '_matchfiles': ('_matchfiles(%(val)s)', None),
1534 'date': ('date(%(val)r)', None),
1534 'date': ('date(%(val)r)', None),
1535 'branch': ('branch(%(val)r)', ' or '),
1535 'branch': ('branch(%(val)r)', ' or '),
1536 '_patslog': ('filelog(%(val)r)', ' or '),
1536 '_patslog': ('filelog(%(val)r)', ' or '),
1537 '_patsfollow': ('follow(%(val)r)', ' or '),
1537 '_patsfollow': ('follow(%(val)r)', ' or '),
1538 '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
1538 '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
1539 'keyword': ('keyword(%(val)r)', ' or '),
1539 'keyword': ('keyword(%(val)r)', ' or '),
1540 'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
1540 'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
1541 'user': ('user(%(val)r)', ' or '),
1541 'user': ('user(%(val)r)', ' or '),
1542 }
1542 }
1543
1543
1544 opts = dict(opts)
1544 opts = dict(opts)
1545 # follow or not follow?
1545 # follow or not follow?
1546 follow = opts.get('follow') or opts.get('follow_first')
1546 follow = opts.get('follow') or opts.get('follow_first')
1547 followfirst = opts.get('follow_first') and 1 or 0
1547 followfirst = opts.get('follow_first') and 1 or 0
1548 # --follow with FILE behaviour depends on revs...
1548 # --follow with FILE behaviour depends on revs...
1549 it = iter(revs)
1549 it = iter(revs)
1550 startrev = it.next()
1550 startrev = it.next()
1551 try:
1551 try:
1552 followdescendants = startrev < it.next()
1552 followdescendants = startrev < it.next()
1553 except StopIteration:
1553 except StopIteration:
1554 followdescendants = False
1554 followdescendants = False
1555
1555
1556 # branch and only_branch are really aliases and must be handled at
1556 # branch and only_branch are really aliases and must be handled at
1557 # the same time
1557 # the same time
1558 opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
1558 opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
1559 opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
1559 opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
1560 # pats/include/exclude are passed to match.match() directly in
1560 # pats/include/exclude are passed to match.match() directly in
1561 # _matchfiles() revset but walkchangerevs() builds its matcher with
1561 # _matchfiles() revset but walkchangerevs() builds its matcher with
1562 # scmutil.match(). The difference is input pats are globbed on
1562 # scmutil.match(). The difference is input pats are globbed on
1563 # platforms without shell expansion (windows).
1563 # platforms without shell expansion (windows).
1564 pctx = repo[None]
1564 pctx = repo[None]
1565 match, pats = scmutil.matchandpats(pctx, pats, opts)
1565 match, pats = scmutil.matchandpats(pctx, pats, opts)
1566 slowpath = match.anypats() or (match.files() and opts.get('removed'))
1566 slowpath = match.anypats() or (match.files() and opts.get('removed'))
1567 if not slowpath:
1567 if not slowpath:
1568 for f in match.files():
1568 for f in match.files():
1569 if follow and f not in pctx:
1569 if follow and f not in pctx:
1570 raise util.Abort(_('cannot follow file not in parent '
1570 raise util.Abort(_('cannot follow file not in parent '
1571 'revision: "%s"') % f)
1571 'revision: "%s"') % f)
1572 filelog = repo.file(f)
1572 filelog = repo.file(f)
1573 if not filelog:
1573 if not filelog:
1574 # A zero count may be a directory or deleted file, so
1574 # A zero count may be a directory or deleted file, so
1575 # try to find matching entries on the slow path.
1575 # try to find matching entries on the slow path.
1576 if follow:
1576 if follow:
1577 raise util.Abort(
1577 raise util.Abort(
1578 _('cannot follow nonexistent file: "%s"') % f)
1578 _('cannot follow nonexistent file: "%s"') % f)
1579 slowpath = True
1579 slowpath = True
1580
1580
1581 # We decided to fall back to the slowpath because at least one
1581 # We decided to fall back to the slowpath because at least one
1582 # of the paths was not a file. Check to see if at least one of them
1582 # of the paths was not a file. Check to see if at least one of them
1583 # existed in history - in that case, we'll continue down the
1583 # existed in history - in that case, we'll continue down the
1584 # slowpath; otherwise, we can turn off the slowpath
1584 # slowpath; otherwise, we can turn off the slowpath
1585 if slowpath:
1585 if slowpath:
1586 for path in match.files():
1586 for path in match.files():
1587 if path == '.' or path in repo.store:
1587 if path == '.' or path in repo.store:
1588 break
1588 break
1589 else:
1589 else:
1590 slowpath = False
1590 slowpath = False
1591
1591
1592 if slowpath:
1592 if slowpath:
1593 # See walkchangerevs() slow path.
1593 # See walkchangerevs() slow path.
1594 #
1594 #
1595 if follow:
1595 if follow:
1596 raise util.Abort(_('can only follow copies/renames for explicit '
1596 raise util.Abort(_('can only follow copies/renames for explicit '
1597 'filenames'))
1597 'filenames'))
1598 # pats/include/exclude cannot be represented as separate
1598 # pats/include/exclude cannot be represented as separate
1599 # revset expressions as their filtering logic applies at file
1599 # revset expressions as their filtering logic applies at file
1600 # level. For instance "-I a -X a" matches a revision touching
1600 # level. For instance "-I a -X a" matches a revision touching
1601 # "a" and "b" while "file(a) and not file(b)" does
1601 # "a" and "b" while "file(a) and not file(b)" does
1602 # not. Besides, filesets are evaluated against the working
1602 # not. Besides, filesets are evaluated against the working
1603 # directory.
1603 # directory.
1604 matchargs = ['r:', 'd:relpath']
1604 matchargs = ['r:', 'd:relpath']
1605 for p in pats:
1605 for p in pats:
1606 matchargs.append('p:' + p)
1606 matchargs.append('p:' + p)
1607 for p in opts.get('include', []):
1607 for p in opts.get('include', []):
1608 matchargs.append('i:' + p)
1608 matchargs.append('i:' + p)
1609 for p in opts.get('exclude', []):
1609 for p in opts.get('exclude', []):
1610 matchargs.append('x:' + p)
1610 matchargs.append('x:' + p)
1611 matchargs = ','.join(('%r' % p) for p in matchargs)
1611 matchargs = ','.join(('%r' % p) for p in matchargs)
1612 opts['_matchfiles'] = matchargs
1612 opts['_matchfiles'] = matchargs
1613 else:
1613 else:
1614 if follow:
1614 if follow:
1615 fpats = ('_patsfollow', '_patsfollowfirst')
1615 fpats = ('_patsfollow', '_patsfollowfirst')
1616 fnopats = (('_ancestors', '_fancestors'),
1616 fnopats = (('_ancestors', '_fancestors'),
1617 ('_descendants', '_fdescendants'))
1617 ('_descendants', '_fdescendants'))
1618 if pats:
1618 if pats:
1619 # follow() revset interprets its file argument as a
1619 # follow() revset interprets its file argument as a
1620 # manifest entry, so use match.files(), not pats.
1620 # manifest entry, so use match.files(), not pats.
1621 opts[fpats[followfirst]] = list(match.files())
1621 opts[fpats[followfirst]] = list(match.files())
1622 else:
1622 else:
1623 opts[fnopats[followdescendants][followfirst]] = str(startrev)
1623 opts[fnopats[followdescendants][followfirst]] = str(startrev)
1624 else:
1624 else:
1625 opts['_patslog'] = list(pats)
1625 opts['_patslog'] = list(pats)
1626
1626
1627 filematcher = None
1627 filematcher = None
1628 if opts.get('patch') or opts.get('stat'):
1628 if opts.get('patch') or opts.get('stat'):
1629 if follow:
1629 if follow:
1630 filematcher = _makelogfilematcher(repo, pats, followfirst)
1630 filematcher = _makelogfilematcher(repo, pats, followfirst)
1631 else:
1631 else:
1632 filematcher = lambda rev: match
1632 filematcher = lambda rev: match
1633
1633
1634 expr = []
1634 expr = []
1635 for op, val in opts.iteritems():
1635 for op, val in opts.iteritems():
1636 if not val:
1636 if not val:
1637 continue
1637 continue
1638 if op not in opt2revset:
1638 if op not in opt2revset:
1639 continue
1639 continue
1640 revop, andor = opt2revset[op]
1640 revop, andor = opt2revset[op]
1641 if '%(val)' not in revop:
1641 if '%(val)' not in revop:
1642 expr.append(revop)
1642 expr.append(revop)
1643 else:
1643 else:
1644 if not isinstance(val, list):
1644 if not isinstance(val, list):
1645 e = revop % {'val': val}
1645 e = revop % {'val': val}
1646 else:
1646 else:
1647 e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
1647 e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
1648 expr.append(e)
1648 expr.append(e)
1649
1649
1650 if expr:
1650 if expr:
1651 expr = '(' + ' and '.join(expr) + ')'
1651 expr = '(' + ' and '.join(expr) + ')'
1652 else:
1652 else:
1653 expr = None
1653 expr = None
1654 return expr, filematcher
1654 return expr, filematcher
1655
1655
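# Illustrative sketch (values are made up): with opts carrying
# keyword=['bug'] and user=['alice', 'bob'], the templates above expand to
# "(keyword('bug'))" and "(user('alice') or user('bob'))", and the per-option
# pieces are and-ed together, giving roughly
#
#     "((keyword('bug')) and (user('alice') or user('bob')))"
#
# (the relative order of the and-ed pieces follows dict iteration order).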
1656 def getgraphlogrevs(repo, pats, opts):
1656 def getgraphlogrevs(repo, pats, opts):
1657 """Return (revs, expr, filematcher) where revs is an iterable of
1657 """Return (revs, expr, filematcher) where revs is an iterable of
1658 revision numbers, expr is a revset string built from log options
1658 revision numbers, expr is a revset string built from log options
1659 and file patterns or None, and used to filter 'revs'. If --stat or
1659 and file patterns or None, and used to filter 'revs'. If --stat or
1660 --patch are not passed, filematcher is None. Otherwise it is a
1660 --patch are not passed, filematcher is None. Otherwise it is a
1661 callable taking a revision number and returning a match object
1661 callable taking a revision number and returning a match object
1662 filtering the files to be detailed when displaying the revision.
1662 filtering the files to be detailed when displaying the revision.
1663 """
1663 """
1664 if not len(repo):
1664 if not len(repo):
1665 return [], None, None
1665 return [], None, None
1666 limit = loglimit(opts)
1666 limit = loglimit(opts)
1667 # Default --rev value depends on --follow but --follow behaviour
1667 # Default --rev value depends on --follow but --follow behaviour
1668 # depends on revisions resolved from --rev...
1668 # depends on revisions resolved from --rev...
1669 follow = opts.get('follow') or opts.get('follow_first')
1669 follow = opts.get('follow') or opts.get('follow_first')
1670 possiblyunsorted = False # whether revs might need sorting
1670 possiblyunsorted = False # whether revs might need sorting
1671 if opts.get('rev'):
1671 if opts.get('rev'):
1672 revs = scmutil.revrange(repo, opts['rev'])
1672 revs = scmutil.revrange(repo, opts['rev'])
1673 # Don't sort here because _makelogrevset might depend on the
1673 # Don't sort here because _makelogrevset might depend on the
1674 # order of revs
1674 # order of revs
1675 possiblyunsorted = True
1675 possiblyunsorted = True
1676 else:
1676 else:
1677 if follow and len(repo) > 0:
1677 if follow and len(repo) > 0:
1678 revs = repo.revs('reverse(:.)')
1678 revs = repo.revs('reverse(:.)')
1679 else:
1679 else:
1680 revs = revset.spanset(repo)
1680 revs = revset.spanset(repo)
1681 revs.reverse()
1681 revs.reverse()
1682 if not revs:
1682 if not revs:
1683 return revset.baseset(), None, None
1683 return revset.baseset(), None, None
1684 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
1684 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
1685 if possiblyunsorted:
1685 if possiblyunsorted:
1686 revs.sort(reverse=True)
1686 revs.sort(reverse=True)
1687 if expr:
1687 if expr:
1688 # Revset matchers often operate faster on revisions in changelog
1688 # Revset matchers often operate faster on revisions in changelog
1689 # order, because most filters deal with the changelog.
1689 # order, because most filters deal with the changelog.
1690 revs.reverse()
1690 revs.reverse()
1691 matcher = revset.match(repo.ui, expr)
1691 matcher = revset.match(repo.ui, expr)
1692 # Revset matches can reorder revisions. "A or B" typically returns
1692 # Revset matches can reorder revisions. "A or B" typically returns
1693 # the revision matching A then the revision matching B. Sort
1693 # the revision matching A then the revision matching B. Sort
1694 # again to fix that.
1694 # again to fix that.
1695 revs = matcher(repo, revs)
1695 revs = matcher(repo, revs)
1696 revs.sort(reverse=True)
1696 revs.sort(reverse=True)
1697 if limit is not None:
1697 if limit is not None:
1698 limitedrevs = revset.baseset()
1698 limitedrevs = revset.baseset()
1699 for idx, rev in enumerate(revs):
1699 for idx, rev in enumerate(revs):
1700 if idx >= limit:
1700 if idx >= limit:
1701 break
1701 break
1702 limitedrevs.append(rev)
1702 limitedrevs.append(rev)
1703 revs = limitedrevs
1703 revs = limitedrevs
1704
1704
1705 return revs, expr, filematcher
1705 return revs, expr, filematcher
1706
1706
1707 def getlogrevs(repo, pats, opts):
1707 def getlogrevs(repo, pats, opts):
1708 """Return (revs, expr, filematcher) where revs is an iterable of
1708 """Return (revs, expr, filematcher) where revs is an iterable of
1709 revision numbers, expr is a revset string built from log options
1709 revision numbers, expr is a revset string built from log options
1710 and file patterns or None, and used to filter 'revs'. If --stat or
1710 and file patterns or None, and used to filter 'revs'. If --stat or
1711 --patch are not passed, filematcher is None. Otherwise it is a
1711 --patch are not passed, filematcher is None. Otherwise it is a
1712 callable taking a revision number and returning a match object
1712 callable taking a revision number and returning a match object
1713 filtering the files to be detailed when displaying the revision.
1713 filtering the files to be detailed when displaying the revision.
1714 """
1714 """
1715 limit = loglimit(opts)
1715 limit = loglimit(opts)
1716 # Default --rev value depends on --follow but --follow behaviour
1716 # Default --rev value depends on --follow but --follow behaviour
1717 # depends on revisions resolved from --rev...
1717 # depends on revisions resolved from --rev...
1718 follow = opts.get('follow') or opts.get('follow_first')
1718 follow = opts.get('follow') or opts.get('follow_first')
1719 if opts.get('rev'):
1719 if opts.get('rev'):
1720 revs = scmutil.revrange(repo, opts['rev'])
1720 revs = scmutil.revrange(repo, opts['rev'])
1721 elif follow:
1721 elif follow:
1722 revs = revset.baseset(repo.revs('reverse(:.)'))
1722 revs = revset.baseset(repo.revs('reverse(:.)'))
1723 else:
1723 else:
1724 revs = revset.spanset(repo)
1724 revs = revset.spanset(repo)
1725 revs.reverse()
1725 revs.reverse()
1726 if not revs:
1726 if not revs:
1727 return revset.baseset([]), None, None
1727 return revset.baseset([]), None, None
1728 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
1728 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
1729 if expr:
1729 if expr:
1730 # Revset matchers often operate faster on revisions in changelog
1730 # Revset matchers often operate faster on revisions in changelog
1731 # order, because most filters deal with the changelog.
1731 # order, because most filters deal with the changelog.
1732 if not opts.get('rev'):
1732 if not opts.get('rev'):
1733 revs.reverse()
1733 revs.reverse()
1734 matcher = revset.match(repo.ui, expr)
1734 matcher = revset.match(repo.ui, expr)
1735 # Revset matches can reorder revisions. "A or B" typically returns
1735 # Revset matches can reorder revisions. "A or B" typically returns
1736 # the revision matching A then the revision matching B. Sort
1736 # the revision matching A then the revision matching B. Sort
1737 # again to fix that.
1737 # again to fix that.
1738 revs = matcher(repo, revs)
1738 revs = matcher(repo, revs)
1739 if not opts.get('rev'):
1739 if not opts.get('rev'):
1740 revs.sort(reverse=True)
1740 revs.sort(reverse=True)
1741 if limit is not None:
1741 if limit is not None:
1742 count = 0
1742 count = 0
1743 limitedrevs = revset.baseset([])
1743 limitedrevs = revset.baseset([])
1744 it = iter(revs)
1744 it = iter(revs)
1745 while count < limit:
1745 while count < limit:
1746 try:
1746 try:
1747 limitedrevs.append(it.next())
1747 limitedrevs.append(it.next())
1748 except StopIteration:
1748 except StopIteration:
1749 break
1749 break
1750 count += 1
1750 count += 1
1751 revs = limitedrevs
1751 revs = limitedrevs
1752
1752
1753 return revs, expr, filematcher
1753 return revs, expr, filematcher
1754
1754
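# Illustrative sketch (hypothetical caller; 'displayer' stands in for a
# changeset printer such as the one returned by show_changeset):
#
#     revs, expr, filematcher = getlogrevs(repo, pats, opts)
#     for rev in revs:
#         ctx = repo[rev]
#         matchfn = filematcher(rev) if filematcher else None
#         displayer.show(ctx, matchfn=matchfn)
#     displayer.close()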
1755 def displaygraph(ui, dag, displayer, showparents, edgefn, getrenamed=None,
1755 def displaygraph(ui, dag, displayer, showparents, edgefn, getrenamed=None,
1756 filematcher=None):
1756 filematcher=None):
1757 seen, state = [], graphmod.asciistate()
1757 seen, state = [], graphmod.asciistate()
1758 for rev, type, ctx, parents in dag:
1758 for rev, type, ctx, parents in dag:
1759 char = 'o'
1759 char = 'o'
1760 if ctx.node() in showparents:
1760 if ctx.node() in showparents:
1761 char = '@'
1761 char = '@'
1762 elif ctx.obsolete():
1762 elif ctx.obsolete():
1763 char = 'x'
1763 char = 'x'
1764 copies = None
1764 copies = None
1765 if getrenamed and ctx.rev():
1765 if getrenamed and ctx.rev():
1766 copies = []
1766 copies = []
1767 for fn in ctx.files():
1767 for fn in ctx.files():
1768 rename = getrenamed(fn, ctx.rev())
1768 rename = getrenamed(fn, ctx.rev())
1769 if rename:
1769 if rename:
1770 copies.append((fn, rename[0]))
1770 copies.append((fn, rename[0]))
1771 revmatchfn = None
1771 revmatchfn = None
1772 if filematcher is not None:
1772 if filematcher is not None:
1773 revmatchfn = filematcher(ctx.rev())
1773 revmatchfn = filematcher(ctx.rev())
1774 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
1774 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
1775 lines = displayer.hunk.pop(rev).split('\n')
1775 lines = displayer.hunk.pop(rev).split('\n')
1776 if not lines[-1]:
1776 if not lines[-1]:
1777 del lines[-1]
1777 del lines[-1]
1778 displayer.flush(rev)
1778 displayer.flush(rev)
1779 edges = edgefn(type, char, lines, seen, rev, parents)
1779 edges = edgefn(type, char, lines, seen, rev, parents)
1780 for type, char, lines, coldata in edges:
1780 for type, char, lines, coldata in edges:
1781 graphmod.ascii(ui, state, type, char, lines, coldata)
1781 graphmod.ascii(ui, state, type, char, lines, coldata)
1782 displayer.close()
1782 displayer.close()
1783
1783
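# Note on the data flow above: displayer.show() buffers the rendering for each
# revision, the buffered hunk is split into lines, edgefn maps
# (type, char, lines, seen, rev, parents) to one or more
# (type, char, lines, coldata) edges, and graphmod.ascii() draws each edge
# using the shared ascii state.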
1784 def graphlog(ui, repo, *pats, **opts):
1784 def graphlog(ui, repo, *pats, **opts):
1785 # Parameters are identical to log command ones
1785 # Parameters are identical to log command ones
1786 revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
1786 revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
1787 revdag = graphmod.dagwalker(repo, revs)
1787 revdag = graphmod.dagwalker(repo, revs)
1788
1788
1789 getrenamed = None
1789 getrenamed = None
1790 if opts.get('copies'):
1790 if opts.get('copies'):
1791 endrev = None
1791 endrev = None
1792 if opts.get('rev'):
1792 if opts.get('rev'):
1793 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
1793 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
1794 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
1794 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
1795 displayer = show_changeset(ui, repo, opts, buffered=True)
1795 displayer = show_changeset(ui, repo, opts, buffered=True)
1796 showparents = [ctx.node() for ctx in repo[None].parents()]
1796 showparents = [ctx.node() for ctx in repo[None].parents()]
1797 displaygraph(ui, revdag, displayer, showparents,
1797 displaygraph(ui, revdag, displayer, showparents,
1798 graphmod.asciiedges, getrenamed, filematcher)
1798 graphmod.asciiedges, getrenamed, filematcher)
1799
1799
1800 def checkunsupportedgraphflags(pats, opts):
1800 def checkunsupportedgraphflags(pats, opts):
1801 for op in ["newest_first"]:
1801 for op in ["newest_first"]:
1802 if op in opts and opts[op]:
1802 if op in opts and opts[op]:
1803 raise util.Abort(_("-G/--graph option is incompatible with --%s")
1803 raise util.Abort(_("-G/--graph option is incompatible with --%s")
1804 % op.replace("_", "-"))
1804 % op.replace("_", "-"))
1805
1805
1806 def graphrevs(repo, nodes, opts):
1806 def graphrevs(repo, nodes, opts):
1807 limit = loglimit(opts)
1807 limit = loglimit(opts)
1808 nodes.reverse()
1808 nodes.reverse()
1809 if limit is not None:
1809 if limit is not None:
1810 nodes = nodes[:limit]
1810 nodes = nodes[:limit]
1811 return graphmod.nodes(repo, nodes)
1811 return graphmod.nodes(repo, nodes)
1812
1812
1813 def add(ui, repo, match, dryrun, listsubrepos, prefix, explicitonly):
1813 def add(ui, repo, match, dryrun, listsubrepos, prefix, explicitonly):
1814 join = lambda f: os.path.join(prefix, f)
1814 join = lambda f: os.path.join(prefix, f)
1815 bad = []
1815 bad = []
1816 oldbad = match.bad
1816 oldbad = match.bad
1817 match.bad = lambda x, y: bad.append(x) or oldbad(x, y)
1817 match.bad = lambda x, y: bad.append(x) or oldbad(x, y)
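# Note: list.append() returns None, so the "or" above always falls through to
# the original bad() callback; every bad path is both recorded in 'bad' and
# still reported the way the matcher normally would.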
1818 names = []
1818 names = []
1819 wctx = repo[None]
1819 wctx = repo[None]
1820 cca = None
1820 cca = None
1821 abort, warn = scmutil.checkportabilityalert(ui)
1821 abort, warn = scmutil.checkportabilityalert(ui)
1822 if abort or warn:
1822 if abort or warn:
1823 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
1823 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
1824 for f in repo.walk(match):
1824 for f in repo.walk(match):
1825 exact = match.exact(f)
1825 exact = match.exact(f)
1826 if exact or not explicitonly and f not in repo.dirstate:
1826 if exact or not explicitonly and f not in repo.dirstate:
1827 if cca:
1827 if cca:
1828 cca(f)
1828 cca(f)
1829 names.append(f)
1829 names.append(f)
1830 if ui.verbose or not exact:
1830 if ui.verbose or not exact:
1831 ui.status(_('adding %s\n') % match.rel(join(f)))
1831 ui.status(_('adding %s\n') % match.rel(join(f)))
1832
1832
1833 for subpath in sorted(wctx.substate):
1833 for subpath in sorted(wctx.substate):
1834 sub = wctx.sub(subpath)
1834 sub = wctx.sub(subpath)
1835 try:
1835 try:
1836 submatch = matchmod.narrowmatcher(subpath, match)
1836 submatch = matchmod.narrowmatcher(subpath, match)
1837 if listsubrepos:
1837 if listsubrepos:
1838 bad.extend(sub.add(ui, submatch, dryrun, listsubrepos, prefix,
1838 bad.extend(sub.add(ui, submatch, dryrun, listsubrepos, prefix,
1839 False))
1839 False))
1840 else:
1840 else:
1841 bad.extend(sub.add(ui, submatch, dryrun, listsubrepos, prefix,
1841 bad.extend(sub.add(ui, submatch, dryrun, listsubrepos, prefix,
1842 True))
1842 True))
1843 except error.LookupError:
1843 except error.LookupError:
1844 ui.status(_("skipping missing subrepository: %s\n")
1844 ui.status(_("skipping missing subrepository: %s\n")
1845 % join(subpath))
1845 % join(subpath))
1846
1846
1847 if not dryrun:
1847 if not dryrun:
1848 rejected = wctx.add(names, prefix)
1848 rejected = wctx.add(names, prefix)
1849 bad.extend(f for f in rejected if f in match.files())
1849 bad.extend(f for f in rejected if f in match.files())
1850 return bad
1850 return bad
1851
1851
1852 def forget(ui, repo, match, prefix, explicitonly):
1852 def forget(ui, repo, match, prefix, explicitonly):
1853 join = lambda f: os.path.join(prefix, f)
1853 join = lambda f: os.path.join(prefix, f)
1854 bad = []
1854 bad = []
1855 oldbad = match.bad
1855 oldbad = match.bad
1856 match.bad = lambda x, y: bad.append(x) or oldbad(x, y)
1856 match.bad = lambda x, y: bad.append(x) or oldbad(x, y)
1857 wctx = repo[None]
1857 wctx = repo[None]
1858 forgot = []
1858 forgot = []
1859 s = repo.status(match=match, clean=True)
1859 s = repo.status(match=match, clean=True)
1860 forget = sorted(s[0] + s[1] + s[3] + s[6])
1860 forget = sorted(s[0] + s[1] + s[3] + s[6])
1861 if explicitonly:
1861 if explicitonly:
1862 forget = [f for f in forget if match.exact(f)]
1862 forget = [f for f in forget if match.exact(f)]
1863
1863
1864 for subpath in sorted(wctx.substate):
1864 for subpath in sorted(wctx.substate):
1865 sub = wctx.sub(subpath)
1865 sub = wctx.sub(subpath)
1866 try:
1866 try:
1867 submatch = matchmod.narrowmatcher(subpath, match)
1867 submatch = matchmod.narrowmatcher(subpath, match)
1868 subbad, subforgot = sub.forget(ui, submatch, prefix)
1868 subbad, subforgot = sub.forget(ui, submatch, prefix)
1869 bad.extend([subpath + '/' + f for f in subbad])
1869 bad.extend([subpath + '/' + f for f in subbad])
1870 forgot.extend([subpath + '/' + f for f in subforgot])
1870 forgot.extend([subpath + '/' + f for f in subforgot])
1871 except error.LookupError:
1871 except error.LookupError:
1872 ui.status(_("skipping missing subrepository: %s\n")
1872 ui.status(_("skipping missing subrepository: %s\n")
1873 % join(subpath))
1873 % join(subpath))
1874
1874
1875 if not explicitonly:
1875 if not explicitonly:
1876 for f in match.files():
1876 for f in match.files():
1877 if f not in repo.dirstate and not os.path.isdir(match.rel(join(f))):
1877 if f not in repo.dirstate and not os.path.isdir(match.rel(join(f))):
1878 if f not in forgot:
1878 if f not in forgot:
1879 if os.path.exists(match.rel(join(f))):
1879 if os.path.exists(match.rel(join(f))):
1880 ui.warn(_('not removing %s: '
1880 ui.warn(_('not removing %s: '
1881 'file is already untracked\n')
1881 'file is already untracked\n')
1882 % match.rel(join(f)))
1882 % match.rel(join(f)))
1883 bad.append(f)
1883 bad.append(f)
1884
1884
1885 for f in forget:
1885 for f in forget:
1886 if ui.verbose or not match.exact(f):
1886 if ui.verbose or not match.exact(f):
1887 ui.status(_('removing %s\n') % match.rel(join(f)))
1887 ui.status(_('removing %s\n') % match.rel(join(f)))
1888
1888
1889 rejected = wctx.forget(forget, prefix)
1889 rejected = wctx.forget(forget, prefix)
1890 bad.extend(f for f in rejected if f in match.files())
1890 bad.extend(f for f in rejected if f in match.files())
1891 forgot.extend(forget)
1891 forgot.extend(forget)
1892 return bad, forgot
1892 return bad, forgot
1893
1893
1894 def cat(ui, repo, ctx, matcher, prefix, **opts):
1894 def cat(ui, repo, ctx, matcher, prefix, **opts):
1895 err = 1
1895 err = 1
1896
1896
1897 def write(path):
1897 def write(path):
1898 fp = makefileobj(repo, opts.get('output'), ctx.node(),
1898 fp = makefileobj(repo, opts.get('output'), ctx.node(),
1899 pathname=os.path.join(prefix, path))
1899 pathname=os.path.join(prefix, path))
1900 data = ctx[path].data()
1900 data = ctx[path].data()
1901 if opts.get('decode'):
1901 if opts.get('decode'):
1902 data = repo.wwritedata(path, data)
1902 data = repo.wwritedata(path, data)
1903 fp.write(data)
1903 fp.write(data)
1904 fp.close()
1904 fp.close()
1905
1905
1906 # Automation often uses hg cat on single files, so special case it
1906 # Automation often uses hg cat on single files, so special case it
1907 # for performance to avoid the cost of parsing the manifest.
1907 # for performance to avoid the cost of parsing the manifest.
1908 if len(matcher.files()) == 1 and not matcher.anypats():
1908 if len(matcher.files()) == 1 and not matcher.anypats():
1909 file = matcher.files()[0]
1909 file = matcher.files()[0]
1910 mf = repo.manifest
1910 mf = repo.manifest
1911 mfnode = ctx._changeset[0]
1911 mfnode = ctx._changeset[0]
1912 if mf.find(mfnode, file)[0]:
1912 if mf.find(mfnode, file)[0]:
1913 write(file)
1913 write(file)
1914 return 0
1914 return 0
1915
1915
1916 # Don't warn about "missing" files that are really in subrepos
1916 # Don't warn about "missing" files that are really in subrepos
1917 bad = matcher.bad
1917 bad = matcher.bad
1918
1918
1919 def badfn(path, msg):
1919 def badfn(path, msg):
1920 for subpath in ctx.substate:
1920 for subpath in ctx.substate:
1921 if path.startswith(subpath):
1921 if path.startswith(subpath):
1922 return
1922 return
1923 bad(path, msg)
1923 bad(path, msg)
1924
1924
1925 matcher.bad = badfn
1925 matcher.bad = badfn
1926
1926
1927 for abs in ctx.walk(matcher):
1927 for abs in ctx.walk(matcher):
1928 write(abs)
1928 write(abs)
1929 err = 0
1929 err = 0
1930
1930
1931 matcher.bad = bad
1931 matcher.bad = bad
1932
1932
1933 for subpath in sorted(ctx.substate):
1933 for subpath in sorted(ctx.substate):
1934 sub = ctx.sub(subpath)
1934 sub = ctx.sub(subpath)
1935 try:
1935 try:
1936 submatch = matchmod.narrowmatcher(subpath, matcher)
1936 submatch = matchmod.narrowmatcher(subpath, matcher)
1937
1937
1938 if not sub.cat(ui, submatch, os.path.join(prefix, sub._path),
1938 if not sub.cat(ui, submatch, os.path.join(prefix, sub._path),
1939 **opts):
1939 **opts):
1940 err = 0
1940 err = 0
1941 except error.RepoLookupError:
1941 except error.RepoLookupError:
1942 ui.status(_("skipping missing subrepository: %s\n")
1942 ui.status(_("skipping missing subrepository: %s\n")
1943 % os.path.join(prefix, subpath))
1943 % os.path.join(prefix, subpath))
1944
1944
1945 return err
1945 return err
1946
1946
1947 def duplicatecopies(repo, rev, fromrev):
1947 def duplicatecopies(repo, rev, fromrev):
1948 '''reproduce copies from fromrev to rev in the dirstate'''
1948 '''reproduce copies from fromrev to rev in the dirstate'''
1949 for dst, src in copies.pathcopies(repo[fromrev], repo[rev]).iteritems():
1949 for dst, src in copies.pathcopies(repo[fromrev], repo[rev]).iteritems():
1950 # copies.pathcopies returns backward renames, so dst might not
1950 # copies.pathcopies returns backward renames, so dst might not
1951 # actually be in the dirstate
1951 # actually be in the dirstate
1952 if repo.dirstate[dst] in "nma":
1952 if repo.dirstate[dst] in "nma":
1953 repo.dirstate.copy(src, dst)
1953 repo.dirstate.copy(src, dst)
1954
1954
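# Note: the "nma" test above keeps a copy record only when dst is actually
# tracked: dirstate states 'n' (normal), 'm' (merged) and 'a' (added) qualify,
# while removed or untracked destinations are skipped.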
1955 def commit(ui, repo, commitfunc, pats, opts):
1955 def commit(ui, repo, commitfunc, pats, opts):
1956 '''commit the specified files or all outstanding changes'''
1956 '''commit the specified files or all outstanding changes'''
1957 date = opts.get('date')
1957 date = opts.get('date')
1958 if date:
1958 if date:
1959 opts['date'] = util.parsedate(date)
1959 opts['date'] = util.parsedate(date)
1960 message = logmessage(ui, opts)
1960 message = logmessage(ui, opts)
1961
1961
1962 # extract addremove carefully -- this function can be called from a command
1962 # extract addremove carefully -- this function can be called from a command
1963 # that doesn't support addremove
1963 # that doesn't support addremove
1964 if opts.get('addremove'):
1964 if opts.get('addremove'):
1965 scmutil.addremove(repo, pats, opts)
1965 scmutil.addremove(repo, pats, opts)
1966
1966
1967 return commitfunc(ui, repo, message,
1967 return commitfunc(ui, repo, message,
1968 scmutil.match(repo[None], pats, opts), opts)
1968 scmutil.match(repo[None], pats, opts), opts)
1969
1969
1970 def amend(ui, repo, commitfunc, old, extra, pats, opts):
1970 def amend(ui, repo, commitfunc, old, extra, pats, opts):
1971 ui.note(_('amending changeset %s\n') % old)
1971 ui.note(_('amending changeset %s\n') % old)
1972 base = old.p1()
1972 base = old.p1()
1973
1973
1974 wlock = lock = newid = None
1974 wlock = lock = newid = None
1975 try:
1975 try:
1976 wlock = repo.wlock()
1976 wlock = repo.wlock()
1977 lock = repo.lock()
1977 lock = repo.lock()
1978 tr = repo.transaction('amend')
1978 tr = repo.transaction('amend')
1979 try:
1979 try:
1980 # See if we got a message from -m or -l, if not, open the editor
1980 # See if we got a message from -m or -l, if not, open the editor
1981 # with the message of the changeset to amend
1981 # with the message of the changeset to amend
1982 message = logmessage(ui, opts)
1982 message = logmessage(ui, opts)
1983 # ensure logfile does not conflict with later enforcement of the
1983 # ensure logfile does not conflict with later enforcement of the
1984 # message. potential logfile content has been processed by
1984 # message. potential logfile content has been processed by
1985 # `logmessage` anyway.
1985 # `logmessage` anyway.
1986 opts.pop('logfile')
1986 opts.pop('logfile')
1987 # First, do a regular commit to record all changes in the working
1987 # First, do a regular commit to record all changes in the working
1988 # directory (if there are any)
1988 # directory (if there are any)
1989 ui.callhooks = False
1989 ui.callhooks = False
1990 currentbookmark = repo._bookmarkcurrent
1990 currentbookmark = repo._bookmarkcurrent
1991 try:
1991 try:
1992 repo._bookmarkcurrent = None
1992 repo._bookmarkcurrent = None
1993 opts['message'] = 'temporary amend commit for %s' % old
1993 opts['message'] = 'temporary amend commit for %s' % old
1994 node = commit(ui, repo, commitfunc, pats, opts)
1994 node = commit(ui, repo, commitfunc, pats, opts)
1995 finally:
1995 finally:
1996 repo._bookmarkcurrent = currentbookmark
1996 repo._bookmarkcurrent = currentbookmark
1997 ui.callhooks = True
1997 ui.callhooks = True
1998 ctx = repo[node]
1998 ctx = repo[node]
1999
1999
2000 # Participating changesets:
2000 # Participating changesets:
2001 #
2001 #
2002 # node/ctx o - new (intermediate) commit that contains changes
2002 # node/ctx o - new (intermediate) commit that contains changes
2003 # | from working dir to go into amending commit
2003 # | from working dir to go into amending commit
2004 # | (or a workingctx if there were no changes)
2004 # | (or a workingctx if there were no changes)
2005 # |
2005 # |
2006 # old o - changeset to amend
2006 # old o - changeset to amend
2007 # |
2007 # |
2008 # base o - parent of amending changeset
2008 # base o - parent of amending changeset
2009
2009
2010 # Update extra dict from amended commit (e.g. to preserve graft
2010 # Update extra dict from amended commit (e.g. to preserve graft
2011 # source)
2011 # source)
2012 extra.update(old.extra())
2012 extra.update(old.extra())
2013
2013
2014 # Also update it from the intermediate commit or from the wctx
2014 # Also update it from the intermediate commit or from the wctx
2015 extra.update(ctx.extra())
2015 extra.update(ctx.extra())
2016
2016
2017 if len(old.parents()) > 1:
2017 if len(old.parents()) > 1:
2018 # ctx.files() isn't reliable for merges, so fall back to the
2018 # ctx.files() isn't reliable for merges, so fall back to the
2019 # slower repo.status() method
2019 # slower repo.status() method
2020 files = set([fn for st in repo.status(base, old)[:3]
2020 files = set([fn for st in repo.status(base, old)[:3]
2021 for fn in st])
2021 for fn in st])
2022 else:
2022 else:
2023 files = set(old.files())
2023 files = set(old.files())
2024
2024
2025 # Second, we use either the commit we just did or, if there were no
2025 # Second, we use either the commit we just did or, if there were no
2026 # changes, the parent of the working directory as the version of the
2026 # changes, the parent of the working directory as the version of the
2027 # files in the final amend commit
2027 # files in the final amend commit
2028 if node:
2028 if node:
2029 ui.note(_('copying changeset %s to %s\n') % (ctx, base))
2029 ui.note(_('copying changeset %s to %s\n') % (ctx, base))
2030
2030
2031 user = ctx.user()
2031 user = ctx.user()
2032 date = ctx.date()
2032 date = ctx.date()
2033 # Recompute copies (avoid recording a -> b -> a)
2033 # Recompute copies (avoid recording a -> b -> a)
2034 copied = copies.pathcopies(base, ctx)
2034 copied = copies.pathcopies(base, ctx)
2035
2035
2036 # Prune files which were reverted by the updates: if old
2036 # Prune files which were reverted by the updates: if old
2037 # introduced file X and our intermediate commit, node,
2037 # introduced file X and our intermediate commit, node,
2038 # renamed that file, then those two files are the same and
2038 # renamed that file, then those two files are the same and
2039 # we can discard X from our list of files. Likewise if X
2039 # we can discard X from our list of files. Likewise if X
2040 # was deleted, it's no longer relevant
2040 # was deleted, it's no longer relevant
2041 files.update(ctx.files())
2041 files.update(ctx.files())
2042
2042
2043 def samefile(f):
2043 def samefile(f):
2044 if f in ctx.manifest():
2044 if f in ctx.manifest():
2045 a = ctx.filectx(f)
2045 a = ctx.filectx(f)
2046 if f in base.manifest():
2046 if f in base.manifest():
2047 b = base.filectx(f)
2047 b = base.filectx(f)
2048 return (not a.cmp(b)
2048 return (not a.cmp(b)
2049 and a.flags() == b.flags())
2049 and a.flags() == b.flags())
2050 else:
2050 else:
2051 return False
2051 return False
2052 else:
2052 else:
2053 return f not in base.manifest()
2053 return f not in base.manifest()
2054 files = [f for f in files if not samefile(f)]
2054 files = [f for f in files if not samefile(f)]
2055
2055
2056 def filectxfn(repo, ctx_, path):
2056 def filectxfn(repo, ctx_, path):
2057 try:
2057 try:
2058 fctx = ctx[path]
2058 fctx = ctx[path]
2059 flags = fctx.flags()
2059 flags = fctx.flags()
2060 mctx = context.memfilectx(fctx.path(), fctx.data(),
2060 mctx = context.memfilectx(repo,
2061 fctx.path(), fctx.data(),
2061 islink='l' in flags,
2062 islink='l' in flags,
2062 isexec='x' in flags,
2063 isexec='x' in flags,
2063 copied=copied.get(path))
2064 copied=copied.get(path))
2064 return mctx
2065 return mctx
2065 except KeyError:
2066 except KeyError:
2066 raise IOError
2067 raise IOError
2067 else:
2068 else:
2068 ui.note(_('copying changeset %s to %s\n') % (old, base))
2069 ui.note(_('copying changeset %s to %s\n') % (old, base))
2069
2070
2070 # Use version of files as in the old cset
2071 # Use version of files as in the old cset
2071 def filectxfn(repo, ctx_, path):
2072 def filectxfn(repo, ctx_, path):
2072 try:
2073 try:
2073 return old.filectx(path)
2074 return old.filectx(path)
2074 except KeyError:
2075 except KeyError:
2075 raise IOError
2076 raise IOError
2076
2077
2077 user = opts.get('user') or old.user()
2078 user = opts.get('user') or old.user()
2078 date = opts.get('date') or old.date()
2079 date = opts.get('date') or old.date()
2079 editor = getcommiteditor(**opts)
2080 editor = getcommiteditor(**opts)
2080 if not message:
2081 if not message:
2081 editor = getcommiteditor(edit=True)
2082 editor = getcommiteditor(edit=True)
2082 message = old.description()
2083 message = old.description()
2083
2084
2084 pureextra = extra.copy()
2085 pureextra = extra.copy()
2085 extra['amend_source'] = old.hex()
2086 extra['amend_source'] = old.hex()
2086
2087
2087 new = context.memctx(repo,
2088 new = context.memctx(repo,
2088 parents=[base.node(), old.p2().node()],
2089 parents=[base.node(), old.p2().node()],
2089 text=message,
2090 text=message,
2090 files=files,
2091 files=files,
2091 filectxfn=filectxfn,
2092 filectxfn=filectxfn,
2092 user=user,
2093 user=user,
2093 date=date,
2094 date=date,
2094 extra=extra,
2095 extra=extra,
2095 editor=editor)
2096 editor=editor)
2096
2097
2097 newdesc = changelog.stripdesc(new.description())
2098 newdesc = changelog.stripdesc(new.description())
2098 if ((not node)
2099 if ((not node)
2099 and newdesc == old.description()
2100 and newdesc == old.description()
2100 and user == old.user()
2101 and user == old.user()
2101 and date == old.date()
2102 and date == old.date()
2102 and pureextra == old.extra()):
2103 and pureextra == old.extra()):
2103 # nothing changed. continuing here would create a new node
2104 # nothing changed. continuing here would create a new node
2104 # anyway because of the amend_source noise.
2105 # anyway because of the amend_source noise.
2105 #
2106 #
2106 # This is not what we expect from amend.
2107 # This is not what we expect from amend.
2107 return old.node()
2108 return old.node()
2108
2109
2109 ph = repo.ui.config('phases', 'new-commit', phases.draft)
2110 ph = repo.ui.config('phases', 'new-commit', phases.draft)
2110 try:
2111 try:
2111 if opts.get('secret'):
2112 if opts.get('secret'):
2112 commitphase = 'secret'
2113 commitphase = 'secret'
2113 else:
2114 else:
2114 commitphase = old.phase()
2115 commitphase = old.phase()
2115 repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
2116 repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
2116 newid = repo.commitctx(new)
2117 newid = repo.commitctx(new)
2117 finally:
2118 finally:
2118 repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
2119 repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
2119 if newid != old.node():
2120 if newid != old.node():
2120 # Reroute the working copy parent to the new changeset
2121 # Reroute the working copy parent to the new changeset
2121 repo.setparents(newid, nullid)
2122 repo.setparents(newid, nullid)
2122
2123
2123 # Move bookmarks from old parent to amend commit
2124 # Move bookmarks from old parent to amend commit
2124 bms = repo.nodebookmarks(old.node())
2125 bms = repo.nodebookmarks(old.node())
2125 if bms:
2126 if bms:
2126 marks = repo._bookmarks
2127 marks = repo._bookmarks
2127 for bm in bms:
2128 for bm in bms:
2128 marks[bm] = newid
2129 marks[bm] = newid
2129 marks.write()
2130 marks.write()
2130 #commit the whole amend process
2131 #commit the whole amend process
2131 if obsolete._enabled and newid != old.node():
2132 if obsolete._enabled and newid != old.node():
2132 # mark the new changeset as successor of the rewritten one
2133 # mark the new changeset as successor of the rewritten one
2133 new = repo[newid]
2134 new = repo[newid]
2134 obs = [(old, (new,))]
2135 obs = [(old, (new,))]
2135 if node:
2136 if node:
2136 obs.append((ctx, ()))
2137 obs.append((ctx, ()))
2137
2138
2138 obsolete.createmarkers(repo, obs)
2139 obsolete.createmarkers(repo, obs)
2139 tr.close()
2140 tr.close()
2140 finally:
2141 finally:
2141 tr.release()
2142 tr.release()
2142 if (not obsolete._enabled) and newid != old.node():
2143 if (not obsolete._enabled) and newid != old.node():
2143 # Strip the intermediate commit (if there was one) and the amended
2144 # Strip the intermediate commit (if there was one) and the amended
2144 # commit
2145 # commit
2145 if node:
2146 if node:
2146 ui.note(_('stripping intermediate changeset %s\n') % ctx)
2147 ui.note(_('stripping intermediate changeset %s\n') % ctx)
2147 ui.note(_('stripping amended changeset %s\n') % old)
2148 ui.note(_('stripping amended changeset %s\n') % old)
2148 repair.strip(ui, repo, old.node(), topic='amend-backup')
2149 repair.strip(ui, repo, old.node(), topic='amend-backup')
2149 finally:
2150 finally:
2150 if newid is None:
2151 if newid is None:
2151 repo.dirstate.invalidate()
2152 repo.dirstate.invalidate()
2152 lockmod.release(lock, wlock)
2153 lockmod.release(lock, wlock)
2153 return newid
2154 return newid
2154
2155
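The filectxfn above is where this changeset's memfilectx change lands: the repository is now passed as the first argument to context.memfilectx, with the remaining arguments keeping their old order. A minimal sketch, not taken from the patch, of building an in-memory commit with that calling convention (here `repo`, the parent contexts `p1`/`p2`, and the file name and contents are all assumed to be supplied by the caller)::

    from mercurial import context

    def filectxfn(repo, memctx, path):
        # hypothetical file data; a real caller would look it up elsewhere
        return context.memfilectx(repo, path, 'new contents\n',
                                  islink=False, isexec=False, copied=None)

    mctx = context.memctx(repo,
                          parents=[p1.node(), p2.node()],
                          text='example commit message',
                          files=['example.txt'],
                          filectxfn=filectxfn,
                          user='someone <someone@example.com>')
    newnode = repo.commitctx(mctx)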
2155 def commiteditor(repo, ctx, subs):
2156 def commiteditor(repo, ctx, subs):
2156 if ctx.description():
2157 if ctx.description():
2157 return ctx.description()
2158 return ctx.description()
2158 return commitforceeditor(repo, ctx, subs)
2159 return commitforceeditor(repo, ctx, subs)
2159
2160
2160 def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None):
2161 def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None):
2161 edittext = []
2162 edittext = []
2162 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
2163 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
2163 if ctx.description():
2164 if ctx.description():
2164 edittext.append(ctx.description())
2165 edittext.append(ctx.description())
2165 edittext.append("")
2166 edittext.append("")
2166 edittext.append("") # Empty line between message and comments.
2167 edittext.append("") # Empty line between message and comments.
2167 edittext.append(_("HG: Enter commit message."
2168 edittext.append(_("HG: Enter commit message."
2168 " Lines beginning with 'HG:' are removed."))
2169 " Lines beginning with 'HG:' are removed."))
2169 if extramsg:
2170 if extramsg:
2170 edittext.append("HG: %s" % extramsg)
2171 edittext.append("HG: %s" % extramsg)
2171 else:
2172 else:
2172 edittext.append(_("HG: Leave message empty to abort commit."))
2173 edittext.append(_("HG: Leave message empty to abort commit."))
2173 edittext.append("HG: --")
2174 edittext.append("HG: --")
2174 edittext.append(_("HG: user: %s") % ctx.user())
2175 edittext.append(_("HG: user: %s") % ctx.user())
2175 if ctx.p2():
2176 if ctx.p2():
2176 edittext.append(_("HG: branch merge"))
2177 edittext.append(_("HG: branch merge"))
2177 if ctx.branch():
2178 if ctx.branch():
2178 edittext.append(_("HG: branch '%s'") % ctx.branch())
2179 edittext.append(_("HG: branch '%s'") % ctx.branch())
2179 if bookmarks.iscurrent(repo):
2180 if bookmarks.iscurrent(repo):
2180 edittext.append(_("HG: bookmark '%s'") % repo._bookmarkcurrent)
2181 edittext.append(_("HG: bookmark '%s'") % repo._bookmarkcurrent)
2181 edittext.extend([_("HG: subrepo %s") % s for s in subs])
2182 edittext.extend([_("HG: subrepo %s") % s for s in subs])
2182 edittext.extend([_("HG: added %s") % f for f in added])
2183 edittext.extend([_("HG: added %s") % f for f in added])
2183 edittext.extend([_("HG: changed %s") % f for f in modified])
2184 edittext.extend([_("HG: changed %s") % f for f in modified])
2184 edittext.extend([_("HG: removed %s") % f for f in removed])
2185 edittext.extend([_("HG: removed %s") % f for f in removed])
2185 if not added and not modified and not removed:
2186 if not added and not modified and not removed:
2186 edittext.append(_("HG: no files changed"))
2187 edittext.append(_("HG: no files changed"))
2187 edittext.append("")
2188 edittext.append("")
2188 # run editor in the repository root
2189 # run editor in the repository root
2189 olddir = os.getcwd()
2190 olddir = os.getcwd()
2190 os.chdir(repo.root)
2191 os.chdir(repo.root)
2191 text = repo.ui.edit("\n".join(edittext), ctx.user(), ctx.extra())
2192 text = repo.ui.edit("\n".join(edittext), ctx.user(), ctx.extra())
2192 text = re.sub("(?m)^HG:.*(\n|$)", "", text)
2193 text = re.sub("(?m)^HG:.*(\n|$)", "", text)
2193 os.chdir(olddir)
2194 os.chdir(olddir)
2194
2195
2195 if finishdesc:
2196 if finishdesc:
2196 text = finishdesc(text)
2197 text = finishdesc(text)
2197 if not text.strip():
2198 if not text.strip():
2198 raise util.Abort(_("empty commit message"))
2199 raise util.Abort(_("empty commit message"))
2199
2200
2200 return text
2201 return text
2201
2202
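commitforceeditor builds the comment template shown to the user and then strips every line beginning with 'HG:' from whatever the editor returns, using the re.sub call above. A standalone illustration of that stripping step, with made-up editor output::

    import re

    edited = "Fix frob handling\n\nHG: Enter commit message.\nHG: user: alice\n"
    message = re.sub("(?m)^HG:.*(\n|$)", "", edited)
    print repr(message)   # 'Fix frob handling\n\n'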
2202 def commitstatus(repo, node, branch, bheads=None, opts={}):
2203 def commitstatus(repo, node, branch, bheads=None, opts={}):
2203 ctx = repo[node]
2204 ctx = repo[node]
2204 parents = ctx.parents()
2205 parents = ctx.parents()
2205
2206
2206 if (not opts.get('amend') and bheads and node not in bheads and not
2207 if (not opts.get('amend') and bheads and node not in bheads and not
2207 [x for x in parents if x.node() in bheads and x.branch() == branch]):
2208 [x for x in parents if x.node() in bheads and x.branch() == branch]):
2208 repo.ui.status(_('created new head\n'))
2209 repo.ui.status(_('created new head\n'))
2209 # The message is not printed for initial roots. For the other
2210 # The message is not printed for initial roots. For the other
2210 # changesets, it is printed in the following situations:
2211 # changesets, it is printed in the following situations:
2211 #
2212 #
2212 # Par column: for the 2 parents with ...
2213 # Par column: for the 2 parents with ...
2213 # N: null or no parent
2214 # N: null or no parent
2214 # B: parent is on another named branch
2215 # B: parent is on another named branch
2215 # C: parent is a regular non head changeset
2216 # C: parent is a regular non head changeset
2216 # H: parent was a branch head of the current branch
2217 # H: parent was a branch head of the current branch
2217 # Msg column: whether we print "created new head" message
2218 # Msg column: whether we print "created new head" message
2218 # In the following, it is assumed that there already exists some
2219 # In the following, it is assumed that there already exists some
2219 # initial branch heads of the current branch, otherwise nothing is
2220 # initial branch heads of the current branch, otherwise nothing is
2220 # printed anyway.
2221 # printed anyway.
2221 #
2222 #
2222 # Par Msg Comment
2223 # Par Msg Comment
2223 # N N y additional topo root
2224 # N N y additional topo root
2224 #
2225 #
2225 # B N y additional branch root
2226 # B N y additional branch root
2226 # C N y additional topo head
2227 # C N y additional topo head
2227 # H N n usual case
2228 # H N n usual case
2228 #
2229 #
2229 # B B y weird additional branch root
2230 # B B y weird additional branch root
2230 # C B y branch merge
2231 # C B y branch merge
2231 # H B n merge with named branch
2232 # H B n merge with named branch
2232 #
2233 #
2233 # C C y additional head from merge
2234 # C C y additional head from merge
2234 # C H n merge with a head
2235 # C H n merge with a head
2235 #
2236 #
2236 # H H n head merge: head count decreases
2237 # H H n head merge: head count decreases
2237
2238
2238 if not opts.get('close_branch'):
2239 if not opts.get('close_branch'):
2239 for r in parents:
2240 for r in parents:
2240 if r.closesbranch() and r.branch() == branch:
2241 if r.closesbranch() and r.branch() == branch:
2241 repo.ui.status(_('reopening closed branch head %d\n') % r)
2242 repo.ui.status(_('reopening closed branch head %d\n') % r)
2242
2243
2243 if repo.ui.debugflag:
2244 if repo.ui.debugflag:
2244 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
2245 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
2245 elif repo.ui.verbose:
2246 elif repo.ui.verbose:
2246 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
2247 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
2247
2248
2248 def revert(ui, repo, ctx, parents, *pats, **opts):
2249 def revert(ui, repo, ctx, parents, *pats, **opts):
2249 parent, p2 = parents
2250 parent, p2 = parents
2250 node = ctx.node()
2251 node = ctx.node()
2251
2252
2252 mf = ctx.manifest()
2253 mf = ctx.manifest()
2253 if node == p2:
2254 if node == p2:
2254 parent = p2
2255 parent = p2
2255 if node == parent:
2256 if node == parent:
2256 pmf = mf
2257 pmf = mf
2257 else:
2258 else:
2258 pmf = None
2259 pmf = None
2259
2260
2260 # need all matching names in dirstate and manifest of target rev,
2261 # need all matching names in dirstate and manifest of target rev,
2261 # so have to walk both. do not print errors if files exist in one
2262 # so have to walk both. do not print errors if files exist in one
2262 # but not the other.
2263 # but not the other.
2263
2264
2264 # `names` is a mapping for all elements in working copy and target revision
2265 # `names` is a mapping for all elements in working copy and target revision
2265 # The mapping is in the form:
2266 # The mapping is in the form:
2266 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
2267 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
2267 names = {}
2268 names = {}
2268
2269
2269 wlock = repo.wlock()
2270 wlock = repo.wlock()
2270 try:
2271 try:
2271 ## filling of the `names` mapping
2272 ## filling of the `names` mapping
2272 # walk dirstate to fill `names`
2273 # walk dirstate to fill `names`
2273
2274
2274 m = scmutil.match(repo[None], pats, opts)
2275 m = scmutil.match(repo[None], pats, opts)
2275 m.bad = lambda x, y: False
2276 m.bad = lambda x, y: False
2276 for abs in repo.walk(m):
2277 for abs in repo.walk(m):
2277 names[abs] = m.rel(abs), m.exact(abs)
2278 names[abs] = m.rel(abs), m.exact(abs)
2278
2279
2279 # walk target manifest to fill `names`
2280 # walk target manifest to fill `names`
2280
2281
2281 def badfn(path, msg):
2282 def badfn(path, msg):
2282 if path in names:
2283 if path in names:
2283 return
2284 return
2284 if path in ctx.substate:
2285 if path in ctx.substate:
2285 return
2286 return
2286 path_ = path + '/'
2287 path_ = path + '/'
2287 for f in names:
2288 for f in names:
2288 if f.startswith(path_):
2289 if f.startswith(path_):
2289 return
2290 return
2290 ui.warn("%s: %s\n" % (m.rel(path), msg))
2291 ui.warn("%s: %s\n" % (m.rel(path), msg))
2291
2292
2292 m = scmutil.match(ctx, pats, opts)
2293 m = scmutil.match(ctx, pats, opts)
2293 m.bad = badfn
2294 m.bad = badfn
2294 for abs in ctx.walk(m):
2295 for abs in ctx.walk(m):
2295 if abs not in names:
2296 if abs not in names:
2296 names[abs] = m.rel(abs), m.exact(abs)
2297 names[abs] = m.rel(abs), m.exact(abs)
2297
2298
2298 # get the list of subrepos that must be reverted
2299 # get the list of subrepos that must be reverted
2299 targetsubs = sorted(s for s in ctx.substate if m(s))
2300 targetsubs = sorted(s for s in ctx.substate if m(s))
2300
2301
2301 # Find status of all files in `names`. (Against working directory parent)
2302 # Find status of all files in `names`. (Against working directory parent)
2302 m = scmutil.matchfiles(repo, names)
2303 m = scmutil.matchfiles(repo, names)
2303 changes = repo.status(node1=parent, match=m)[:4]
2304 changes = repo.status(node1=parent, match=m)[:4]
2304 modified, added, removed, deleted = map(set, changes)
2305 modified, added, removed, deleted = map(set, changes)
2305
2306
2306 # if f is a rename, update `names` to also revert the source
2307 # if f is a rename, update `names` to also revert the source
2307 cwd = repo.getcwd()
2308 cwd = repo.getcwd()
2308 for f in added:
2309 for f in added:
2309 src = repo.dirstate.copied(f)
2310 src = repo.dirstate.copied(f)
2310 if src and src not in names and repo.dirstate[src] == 'r':
2311 if src and src not in names and repo.dirstate[src] == 'r':
2311 removed.add(src)
2312 removed.add(src)
2312 names[src] = (repo.pathto(src, cwd), True)
2313 names[src] = (repo.pathto(src, cwd), True)
2313
2314
2314 ## computation of the actions to perform on `names` content.
2315 ## computation of the actions to perform on `names` content.
2315
2316
2316 def removeforget(abs):
2317 def removeforget(abs):
2317 if repo.dirstate[abs] == 'a':
2318 if repo.dirstate[abs] == 'a':
2318 return _('forgetting %s\n')
2319 return _('forgetting %s\n')
2319 return _('removing %s\n')
2320 return _('removing %s\n')
2320
2321
2321 # action to be actually performed by revert
2322 # action to be actually performed by revert
2322 # (<list of files>, <message>) tuple
2323 # (<list of files>, <message>) tuple
2323 actions = {'revert': ([], _('reverting %s\n')),
2324 actions = {'revert': ([], _('reverting %s\n')),
2324 'add': ([], _('adding %s\n')),
2325 'add': ([], _('adding %s\n')),
2325 'remove': ([], removeforget),
2326 'remove': ([], removeforget),
2326 'undelete': ([], _('undeleting %s\n'))}
2327 'undelete': ([], _('undeleting %s\n'))}
2327
2328
2328 disptable = (
2329 disptable = (
2329 # dispatch table:
2330 # dispatch table:
2330 # file state
2331 # file state
2331 # action if in target manifest
2332 # action if in target manifest
2332 # action if not in target manifest
2333 # action if not in target manifest
2333 # make backup if in target manifest
2334 # make backup if in target manifest
2334 # make backup if not in target manifest
2335 # make backup if not in target manifest
2335 (modified, (actions['revert'], True),
2336 (modified, (actions['revert'], True),
2336 (actions['remove'], True)),
2337 (actions['remove'], True)),
2337 (added, (actions['revert'], True),
2338 (added, (actions['revert'], True),
2338 (actions['remove'], False)),
2339 (actions['remove'], False)),
2339 (removed, (actions['undelete'], True),
2340 (removed, (actions['undelete'], True),
2340 (None, False)),
2341 (None, False)),
2341 (deleted, (actions['revert'], False),
2342 (deleted, (actions['revert'], False),
2342 (actions['remove'], False)),
2343 (actions['remove'], False)),
2343 )
2344 )
2344
2345
2345 for abs, (rel, exact) in sorted(names.items()):
2346 for abs, (rel, exact) in sorted(names.items()):
2346 # hash on file in target manifest (or None if missing from target)
2347 # hash on file in target manifest (or None if missing from target)
2347 mfentry = mf.get(abs)
2348 mfentry = mf.get(abs)
2348 # target file to be touched on disk (relative to cwd)
2349 # target file to be touched on disk (relative to cwd)
2349 target = repo.wjoin(abs)
2350 target = repo.wjoin(abs)
2350 def handle(xlist, dobackup):
2351 def handle(xlist, dobackup):
2351 xlist[0].append(abs)
2352 xlist[0].append(abs)
2352 if (dobackup and not opts.get('no_backup') and
2353 if (dobackup and not opts.get('no_backup') and
2353 os.path.lexists(target) and
2354 os.path.lexists(target) and
2354 abs in ctx and repo[None][abs].cmp(ctx[abs])):
2355 abs in ctx and repo[None][abs].cmp(ctx[abs])):
2355 bakname = "%s.orig" % rel
2356 bakname = "%s.orig" % rel
2356 ui.note(_('saving current version of %s as %s\n') %
2357 ui.note(_('saving current version of %s as %s\n') %
2357 (rel, bakname))
2358 (rel, bakname))
2358 if not opts.get('dry_run'):
2359 if not opts.get('dry_run'):
2359 util.rename(target, bakname)
2360 util.rename(target, bakname)
2360 if ui.verbose or not exact:
2361 if ui.verbose or not exact:
2361 msg = xlist[1]
2362 msg = xlist[1]
2362 if not isinstance(msg, basestring):
2363 if not isinstance(msg, basestring):
2363 msg = msg(abs)
2364 msg = msg(abs)
2364 ui.status(msg % rel)
2365 ui.status(msg % rel)
2365 # search the entry in the dispatch table.
2366 # search the entry in the dispatch table.
2366 # if the file is in any of these sets, it was touched in the working
2367 # if the file is in any of these sets, it was touched in the working
2367 # directory parent and we are sure it needs to be reverted.
2368 # directory parent and we are sure it needs to be reverted.
2368 for table, hit, miss in disptable:
2369 for table, hit, miss in disptable:
2369 if abs not in table:
2370 if abs not in table:
2370 continue
2371 continue
2371 # file has changed in dirstate
2372 # file has changed in dirstate
2372 if mfentry:
2373 if mfentry:
2373 handle(*hit)
2374 handle(*hit)
2374 elif miss[0] is not None:
2375 elif miss[0] is not None:
2375 handle(*miss)
2376 handle(*miss)
2376 break
2377 break
2377 else:
2378 else:
2378 # Not touched in current dirstate.
2379 # Not touched in current dirstate.
2379
2380
2380 # file is unknown in parent, restore older version or ignore.
2381 # file is unknown in parent, restore older version or ignore.
2381 if abs not in repo.dirstate:
2382 if abs not in repo.dirstate:
2382 if mfentry:
2383 if mfentry:
2383 handle(actions['add'], True)
2384 handle(actions['add'], True)
2384 elif exact:
2385 elif exact:
2385 ui.warn(_('file not managed: %s\n') % rel)
2386 ui.warn(_('file not managed: %s\n') % rel)
2386 continue
2387 continue
2387
2388
2388 # parent is target, so an untouched file needs no revert
2389 # parent is target, so an untouched file needs no revert
2389 if node == parent:
2390 if node == parent:
2390 if exact:
2391 if exact:
2391 ui.warn(_('no changes needed to %s\n') % rel)
2392 ui.warn(_('no changes needed to %s\n') % rel)
2392 continue
2393 continue
2393 # no change in dirstate but parent and target may differ
2394 # no change in dirstate but parent and target may differ
2394 if pmf is None:
2395 if pmf is None:
2395 # only need parent manifest in this unlikely case,
2396 # only need parent manifest in this unlikely case,
2396 # so do not read by default
2397 # so do not read by default
2397 pmf = repo[parent].manifest()
2398 pmf = repo[parent].manifest()
2398 if abs in pmf and mfentry:
2399 if abs in pmf and mfentry:
2399 # if version of file is same in parent and target
2400 # if version of file is same in parent and target
2400 # manifests, do nothing
2401 # manifests, do nothing
2401 if (pmf[abs] != mfentry or
2402 if (pmf[abs] != mfentry or
2402 pmf.flags(abs) != mf.flags(abs)):
2403 pmf.flags(abs) != mf.flags(abs)):
2403 handle(actions['revert'], False)
2404 handle(actions['revert'], False)
2404 else:
2405 else:
2405 handle(actions['remove'], False)
2406 handle(actions['remove'], False)
2406
2407
2407 if not opts.get('dry_run'):
2408 if not opts.get('dry_run'):
2408 _performrevert(repo, parents, ctx, actions)
2409 _performrevert(repo, parents, ctx, actions)
2409
2410
2410 if targetsubs:
2411 if targetsubs:
2411 # Revert the subrepos on the revert list
2412 # Revert the subrepos on the revert list
2412 for sub in targetsubs:
2413 for sub in targetsubs:
2413 ctx.sub(sub).revert(ui, ctx.substate[sub], *pats, **opts)
2414 ctx.sub(sub).revert(ui, ctx.substate[sub], *pats, **opts)
2414 finally:
2415 finally:
2415 wlock.release()
2416 wlock.release()
2416
2417
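By the time _performrevert (below) runs, revert has reduced everything to the `actions` mapping built above: each key names an operation, and each value pairs the list of files to operate on with the status message template. The snippet below only illustrates that shape; the file names are invented, and in the real code the 'remove' message is the removeforget function rather than a plain string::

    import sys

    actions = {
        'revert':   (['modified.txt'], 'reverting %s\n'),
        'add':      (['target-only.txt'], 'adding %s\n'),
        'remove':   (['locally-added.txt'], 'removing %s\n'),
        'undelete': (['locally-removed.txt'], 'undeleting %s\n'),
    }
    files, msg = actions['revert']
    for f in files:
        sys.stdout.write(msg % f)   # "reverting modified.txt"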
2417 def _performrevert(repo, parents, ctx, actions):
2418 def _performrevert(repo, parents, ctx, actions):
2418 """function that actually perform all the actions computed for revert
2419 """function that actually perform all the actions computed for revert
2419
2420
2420 This is an independent function to let extensions plug in and react to
2421 This is an independent function to let extensions plug in and react to
2421 the imminent revert.
2422 the imminent revert.
2422
2423
2423 Make sure you have the working directory locked when calling this function.
2424 Make sure you have the working directory locked when calling this function.
2424 """
2425 """
2425 parent, p2 = parents
2426 parent, p2 = parents
2426 node = ctx.node()
2427 node = ctx.node()
2427 def checkout(f):
2428 def checkout(f):
2428 fc = ctx[f]
2429 fc = ctx[f]
2429 repo.wwrite(f, fc.data(), fc.flags())
2430 repo.wwrite(f, fc.data(), fc.flags())
2430
2431
2431 audit_path = pathutil.pathauditor(repo.root)
2432 audit_path = pathutil.pathauditor(repo.root)
2432 for f in actions['remove'][0]:
2433 for f in actions['remove'][0]:
2433 if repo.dirstate[f] == 'a':
2434 if repo.dirstate[f] == 'a':
2434 repo.dirstate.drop(f)
2435 repo.dirstate.drop(f)
2435 continue
2436 continue
2436 audit_path(f)
2437 audit_path(f)
2437 try:
2438 try:
2438 util.unlinkpath(repo.wjoin(f))
2439 util.unlinkpath(repo.wjoin(f))
2439 except OSError:
2440 except OSError:
2440 pass
2441 pass
2441 repo.dirstate.remove(f)
2442 repo.dirstate.remove(f)
2442
2443
2443 normal = None
2444 normal = None
2444 if node == parent:
2445 if node == parent:
2445 # We're reverting to our parent. If possible, we'd like status
2446 # We're reverting to our parent. If possible, we'd like status
2446 # to report the file as clean. We have to use normallookup for
2447 # to report the file as clean. We have to use normallookup for
2447 # merges to avoid losing information about merged/dirty files.
2448 # merges to avoid losing information about merged/dirty files.
2448 if p2 != nullid:
2449 if p2 != nullid:
2449 normal = repo.dirstate.normallookup
2450 normal = repo.dirstate.normallookup
2450 else:
2451 else:
2451 normal = repo.dirstate.normal
2452 normal = repo.dirstate.normal
2452 for f in actions['revert'][0]:
2453 for f in actions['revert'][0]:
2453 checkout(f)
2454 checkout(f)
2454 if normal:
2455 if normal:
2455 normal(f)
2456 normal(f)
2456
2457
2457 for f in actions['add'][0]:
2458 for f in actions['add'][0]:
2458 checkout(f)
2459 checkout(f)
2459 repo.dirstate.add(f)
2460 repo.dirstate.add(f)
2460
2461
2461 normal = repo.dirstate.normallookup
2462 normal = repo.dirstate.normallookup
2462 if node == parent and p2 == nullid:
2463 if node == parent and p2 == nullid:
2463 normal = repo.dirstate.normal
2464 normal = repo.dirstate.normal
2464 for f in actions['undelete'][0]:
2465 for f in actions['undelete'][0]:
2465 checkout(f)
2466 checkout(f)
2466 normal(f)
2467 normal(f)
2467
2468
2468 copied = copies.pathcopies(repo[parent], ctx)
2469 copied = copies.pathcopies(repo[parent], ctx)
2469
2470
2470 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
2471 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
2471 if f in copied:
2472 if f in copied:
2472 repo.dirstate.copy(copied[f], f)
2473 repo.dirstate.copy(copied[f], f)
2473
2474
2474 def command(table):
2475 def command(table):
2475 '''returns a function object bound to table which can be used as
2476 '''returns a function object bound to table which can be used as
2476 a decorator for populating table as a command table'''
2477 a decorator for populating table as a command table'''
2477
2478
2478 def cmd(name, options=(), synopsis=None):
2479 def cmd(name, options=(), synopsis=None):
2479 def decorator(func):
2480 def decorator(func):
2480 if synopsis:
2481 if synopsis:
2481 table[name] = func, list(options), synopsis
2482 table[name] = func, list(options), synopsis
2482 else:
2483 else:
2483 table[name] = func, list(options)
2484 table[name] = func, list(options)
2484 return func
2485 return func
2485 return decorator
2486 return decorator
2486
2487
2487 return cmd
2488 return cmd
2488
2489
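The command() factory above is what commands.py uses just below (`command = cmdutil.command(table)`) and what extensions use to register their own commands. A hedged usage sketch with a hypothetical 'hello' command::

    cmdtable = {}
    command = cmdutil.command(cmdtable)

    @command('hello',
             [('g', 'greeting', 'Hello', 'greeting to use')],
             'hg hello [-g GREETING]')
    def hello(ui, repo, **opts):
        ui.write('%s, %s\n' % (opts['greeting'], repo.root))

    # cmdtable now maps 'hello' -> (hello, [option tuple], 'hg hello [-g GREETING]')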
2489 # a list of (ui, repo, otherpeer, opts, missing) functions called by
2490 # a list of (ui, repo, otherpeer, opts, missing) functions called by
2490 # commands.outgoing. "missing" is "missing" of the result of
2491 # commands.outgoing. "missing" is "missing" of the result of
2491 # "findcommonoutgoing()"
2492 # "findcommonoutgoing()"
2492 outgoinghooks = util.hooks()
2493 outgoinghooks = util.hooks()
2493
2494
2494 # a list of (ui, repo) functions called by commands.summary
2495 # a list of (ui, repo) functions called by commands.summary
2495 summaryhooks = util.hooks()
2496 summaryhooks = util.hooks()
2496
2497
2497 # a list of (ui, repo, opts, changes) functions called by commands.summary.
2498 # a list of (ui, repo, opts, changes) functions called by commands.summary.
2498 #
2499 #
2499 # functions should return tuple of booleans below, if 'changes' is None:
2500 # functions should return tuple of booleans below, if 'changes' is None:
2500 # (whether-incomings-are-needed, whether-outgoings-are-needed)
2501 # (whether-incomings-are-needed, whether-outgoings-are-needed)
2501 #
2502 #
2502 # otherwise, 'changes' is a tuple of tuples below:
2503 # otherwise, 'changes' is a tuple of tuples below:
2503 # - (sourceurl, sourcebranch, sourcepeer, incoming)
2504 # - (sourceurl, sourcebranch, sourcepeer, incoming)
2504 # - (desturl, destbranch, destpeer, outgoing)
2505 # - (desturl, destbranch, destpeer, outgoing)
2505 summaryremotehooks = util.hooks()
2506 summaryremotehooks = util.hooks()
2506
2507
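These util.hooks() instances let extensions append output to `hg summary` and `hg outgoing` without monkeypatching the commands. A minimal, hypothetical registration sketch (the extension name and message are made up)::

    def mysummary(ui, repo):
        ui.status('myext: %d changesets in this repository\n' % len(repo))

    def extsetup(ui):
        cmdutil.summaryhooks.add('myext', mysummary)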
2507 # A list of state files kept by multistep operations like graft.
2508 # A list of state files kept by multistep operations like graft.
2508 # Since graft cannot be aborted, it is considered 'clearable' by update.
2509 # Since graft cannot be aborted, it is considered 'clearable' by update.
2509 # note: bisect is intentionally excluded
2510 # note: bisect is intentionally excluded
2510 # (state file, clearable, allowcommit, error, hint)
2511 # (state file, clearable, allowcommit, error, hint)
2511 unfinishedstates = [
2512 unfinishedstates = [
2512 ('graftstate', True, False, _('graft in progress'),
2513 ('graftstate', True, False, _('graft in progress'),
2513 _("use 'hg graft --continue' or 'hg update' to abort")),
2514 _("use 'hg graft --continue' or 'hg update' to abort")),
2514 ('updatestate', True, False, _('last update was interrupted'),
2515 ('updatestate', True, False, _('last update was interrupted'),
2515 _("use 'hg update' to get a consistent checkout"))
2516 _("use 'hg update' to get a consistent checkout"))
2516 ]
2517 ]
2517
2518
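unfinishedstates is a plain list, so an extension that introduces its own multistep operation can append an entry using the same (state file, clearable, allowcommit, error, hint) layout documented above. A hedged example with an invented state file and messages::

    cmdutil.unfinishedstates.append(
        ('mystate', False, False, _('my operation in progress'),
         _("use 'hg myop --continue' or 'hg myop --abort'")))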
2518 def checkunfinished(repo, commit=False):
2519 def checkunfinished(repo, commit=False):
2519 '''Look for an unfinished multistep operation, like graft, and abort
2520 '''Look for an unfinished multistep operation, like graft, and abort
2520 if found. It's probably good to check this right before
2521 if found. It's probably good to check this right before
2521 bailifchanged().
2522 bailifchanged().
2522 '''
2523 '''
2523 for f, clearable, allowcommit, msg, hint in unfinishedstates:
2524 for f, clearable, allowcommit, msg, hint in unfinishedstates:
2524 if commit and allowcommit:
2525 if commit and allowcommit:
2525 continue
2526 continue
2526 if repo.vfs.exists(f):
2527 if repo.vfs.exists(f):
2527 raise util.Abort(msg, hint=hint)
2528 raise util.Abort(msg, hint=hint)
2528
2529
2529 def clearunfinished(repo):
2530 def clearunfinished(repo):
2530 '''Check for unfinished operations (as above), and clear the ones
2531 '''Check for unfinished operations (as above), and clear the ones
2531 that are clearable.
2532 that are clearable.
2532 '''
2533 '''
2533 for f, clearable, allowcommit, msg, hint in unfinishedstates:
2534 for f, clearable, allowcommit, msg, hint in unfinishedstates:
2534 if not clearable and repo.vfs.exists(f):
2535 if not clearable and repo.vfs.exists(f):
2535 raise util.Abort(msg, hint=hint)
2536 raise util.Abort(msg, hint=hint)
2536 for f, clearable, allowcommit, msg, hint in unfinishedstates:
2537 for f, clearable, allowcommit, msg, hint in unfinishedstates:
2537 if clearable and repo.vfs.exists(f):
2538 if clearable and repo.vfs.exists(f):
2538 util.unlink(repo.join(f))
2539 util.unlink(repo.join(f))
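A command that must not run on top of an interrupted graft or update calls checkunfinished() up front, which is the pattern backout uses later in commands.py. A hedged sketch with a hypothetical command body::

    def mycommand(ui, repo, **opts):
        cmdutil.checkunfinished(repo)   # abort if e.g. a graft is in progress
        cmdutil.bailifchanged(repo)     # abort on uncommitted local changes
        # ... the real work would go here ...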
@@ -1,5966 +1,5966
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import hex, bin, nullid, nullrev, short
8 from node import hex, bin, nullid, nullrev, short
9 from lock import release
9 from lock import release
10 from i18n import _
10 from i18n import _
11 import os, re, difflib, time, tempfile, errno
11 import os, re, difflib, time, tempfile, errno
12 import sys
12 import sys
13 import hg, scmutil, util, revlog, copies, error, bookmarks
13 import hg, scmutil, util, revlog, copies, error, bookmarks
14 import patch, help, encoding, templatekw, discovery
14 import patch, help, encoding, templatekw, discovery
15 import archival, changegroup, cmdutil, hbisect
15 import archival, changegroup, cmdutil, hbisect
16 import sshserver, hgweb, commandserver
16 import sshserver, hgweb, commandserver
17 from hgweb import server as hgweb_server
17 from hgweb import server as hgweb_server
18 import merge as mergemod
18 import merge as mergemod
19 import minirst, revset, fileset
19 import minirst, revset, fileset
20 import dagparser, context, simplemerge, graphmod
20 import dagparser, context, simplemerge, graphmod
21 import random
21 import random
22 import setdiscovery, treediscovery, dagutil, pvec, localrepo
22 import setdiscovery, treediscovery, dagutil, pvec, localrepo
23 import phases, obsolete, exchange
23 import phases, obsolete, exchange
24
24
25 table = {}
25 table = {}
26
26
27 command = cmdutil.command(table)
27 command = cmdutil.command(table)
28
28
29 # common command options
29 # common command options
30
30
31 globalopts = [
31 globalopts = [
32 ('R', 'repository', '',
32 ('R', 'repository', '',
33 _('repository root directory or name of overlay bundle file'),
33 _('repository root directory or name of overlay bundle file'),
34 _('REPO')),
34 _('REPO')),
35 ('', 'cwd', '',
35 ('', 'cwd', '',
36 _('change working directory'), _('DIR')),
36 _('change working directory'), _('DIR')),
37 ('y', 'noninteractive', None,
37 ('y', 'noninteractive', None,
38 _('do not prompt, automatically pick the first choice for all prompts')),
38 _('do not prompt, automatically pick the first choice for all prompts')),
39 ('q', 'quiet', None, _('suppress output')),
39 ('q', 'quiet', None, _('suppress output')),
40 ('v', 'verbose', None, _('enable additional output')),
40 ('v', 'verbose', None, _('enable additional output')),
41 ('', 'config', [],
41 ('', 'config', [],
42 _('set/override config option (use \'section.name=value\')'),
42 _('set/override config option (use \'section.name=value\')'),
43 _('CONFIG')),
43 _('CONFIG')),
44 ('', 'debug', None, _('enable debugging output')),
44 ('', 'debug', None, _('enable debugging output')),
45 ('', 'debugger', None, _('start debugger')),
45 ('', 'debugger', None, _('start debugger')),
46 ('', 'encoding', encoding.encoding, _('set the charset encoding'),
46 ('', 'encoding', encoding.encoding, _('set the charset encoding'),
47 _('ENCODE')),
47 _('ENCODE')),
48 ('', 'encodingmode', encoding.encodingmode,
48 ('', 'encodingmode', encoding.encodingmode,
49 _('set the charset encoding mode'), _('MODE')),
49 _('set the charset encoding mode'), _('MODE')),
50 ('', 'traceback', None, _('always print a traceback on exception')),
50 ('', 'traceback', None, _('always print a traceback on exception')),
51 ('', 'time', None, _('time how long the command takes')),
51 ('', 'time', None, _('time how long the command takes')),
52 ('', 'profile', None, _('print command execution profile')),
52 ('', 'profile', None, _('print command execution profile')),
53 ('', 'version', None, _('output version information and exit')),
53 ('', 'version', None, _('output version information and exit')),
54 ('h', 'help', None, _('display help and exit')),
54 ('h', 'help', None, _('display help and exit')),
55 ('', 'hidden', False, _('consider hidden changesets')),
55 ('', 'hidden', False, _('consider hidden changesets')),
56 ]
56 ]
57
57
58 dryrunopts = [('n', 'dry-run', None,
58 dryrunopts = [('n', 'dry-run', None,
59 _('do not perform actions, just print output'))]
59 _('do not perform actions, just print output'))]
60
60
61 remoteopts = [
61 remoteopts = [
62 ('e', 'ssh', '',
62 ('e', 'ssh', '',
63 _('specify ssh command to use'), _('CMD')),
63 _('specify ssh command to use'), _('CMD')),
64 ('', 'remotecmd', '',
64 ('', 'remotecmd', '',
65 _('specify hg command to run on the remote side'), _('CMD')),
65 _('specify hg command to run on the remote side'), _('CMD')),
66 ('', 'insecure', None,
66 ('', 'insecure', None,
67 _('do not verify server certificate (ignoring web.cacerts config)')),
67 _('do not verify server certificate (ignoring web.cacerts config)')),
68 ]
68 ]
69
69
70 walkopts = [
70 walkopts = [
71 ('I', 'include', [],
71 ('I', 'include', [],
72 _('include names matching the given patterns'), _('PATTERN')),
72 _('include names matching the given patterns'), _('PATTERN')),
73 ('X', 'exclude', [],
73 ('X', 'exclude', [],
74 _('exclude names matching the given patterns'), _('PATTERN')),
74 _('exclude names matching the given patterns'), _('PATTERN')),
75 ]
75 ]
76
76
77 commitopts = [
77 commitopts = [
78 ('m', 'message', '',
78 ('m', 'message', '',
79 _('use text as commit message'), _('TEXT')),
79 _('use text as commit message'), _('TEXT')),
80 ('l', 'logfile', '',
80 ('l', 'logfile', '',
81 _('read commit message from file'), _('FILE')),
81 _('read commit message from file'), _('FILE')),
82 ]
82 ]
83
83
84 commitopts2 = [
84 commitopts2 = [
85 ('d', 'date', '',
85 ('d', 'date', '',
86 _('record the specified date as commit date'), _('DATE')),
86 _('record the specified date as commit date'), _('DATE')),
87 ('u', 'user', '',
87 ('u', 'user', '',
88 _('record the specified user as committer'), _('USER')),
88 _('record the specified user as committer'), _('USER')),
89 ]
89 ]
90
90
91 templateopts = [
91 templateopts = [
92 ('', 'style', '',
92 ('', 'style', '',
93 _('display using template map file (DEPRECATED)'), _('STYLE')),
93 _('display using template map file (DEPRECATED)'), _('STYLE')),
94 ('T', 'template', '',
94 ('T', 'template', '',
95 _('display with template'), _('TEMPLATE')),
95 _('display with template'), _('TEMPLATE')),
96 ]
96 ]
97
97
98 logopts = [
98 logopts = [
99 ('p', 'patch', None, _('show patch')),
99 ('p', 'patch', None, _('show patch')),
100 ('g', 'git', None, _('use git extended diff format')),
100 ('g', 'git', None, _('use git extended diff format')),
101 ('l', 'limit', '',
101 ('l', 'limit', '',
102 _('limit number of changes displayed'), _('NUM')),
102 _('limit number of changes displayed'), _('NUM')),
103 ('M', 'no-merges', None, _('do not show merges')),
103 ('M', 'no-merges', None, _('do not show merges')),
104 ('', 'stat', None, _('output diffstat-style summary of changes')),
104 ('', 'stat', None, _('output diffstat-style summary of changes')),
105 ('G', 'graph', None, _("show the revision DAG")),
105 ('G', 'graph', None, _("show the revision DAG")),
106 ] + templateopts
106 ] + templateopts
107
107
108 diffopts = [
108 diffopts = [
109 ('a', 'text', None, _('treat all files as text')),
109 ('a', 'text', None, _('treat all files as text')),
110 ('g', 'git', None, _('use git extended diff format')),
110 ('g', 'git', None, _('use git extended diff format')),
111 ('', 'nodates', None, _('omit dates from diff headers'))
111 ('', 'nodates', None, _('omit dates from diff headers'))
112 ]
112 ]
113
113
114 diffwsopts = [
114 diffwsopts = [
115 ('w', 'ignore-all-space', None,
115 ('w', 'ignore-all-space', None,
116 _('ignore white space when comparing lines')),
116 _('ignore white space when comparing lines')),
117 ('b', 'ignore-space-change', None,
117 ('b', 'ignore-space-change', None,
118 _('ignore changes in the amount of white space')),
118 _('ignore changes in the amount of white space')),
119 ('B', 'ignore-blank-lines', None,
119 ('B', 'ignore-blank-lines', None,
120 _('ignore changes whose lines are all blank')),
120 _('ignore changes whose lines are all blank')),
121 ]
121 ]
122
122
123 diffopts2 = [
123 diffopts2 = [
124 ('p', 'show-function', None, _('show which function each change is in')),
124 ('p', 'show-function', None, _('show which function each change is in')),
125 ('', 'reverse', None, _('produce a diff that undoes the changes')),
125 ('', 'reverse', None, _('produce a diff that undoes the changes')),
126 ] + diffwsopts + [
126 ] + diffwsopts + [
127 ('U', 'unified', '',
127 ('U', 'unified', '',
128 _('number of lines of context to show'), _('NUM')),
128 _('number of lines of context to show'), _('NUM')),
129 ('', 'stat', None, _('output diffstat-style summary of changes')),
129 ('', 'stat', None, _('output diffstat-style summary of changes')),
130 ]
130 ]
131
131
132 mergetoolopts = [
132 mergetoolopts = [
133 ('t', 'tool', '', _('specify merge tool')),
133 ('t', 'tool', '', _('specify merge tool')),
134 ]
134 ]
135
135
136 similarityopts = [
136 similarityopts = [
137 ('s', 'similarity', '',
137 ('s', 'similarity', '',
138 _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
138 _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
139 ]
139 ]
140
140
141 subrepoopts = [
141 subrepoopts = [
142 ('S', 'subrepos', None,
142 ('S', 'subrepos', None,
143 _('recurse into subrepositories'))
143 _('recurse into subrepositories'))
144 ]
144 ]
145
145
146 # Commands start here, listed alphabetically
146 # Commands start here, listed alphabetically
147
147
148 @command('^add',
148 @command('^add',
149 walkopts + subrepoopts + dryrunopts,
149 walkopts + subrepoopts + dryrunopts,
150 _('[OPTION]... [FILE]...'))
150 _('[OPTION]... [FILE]...'))
151 def add(ui, repo, *pats, **opts):
151 def add(ui, repo, *pats, **opts):
152 """add the specified files on the next commit
152 """add the specified files on the next commit
153
153
154 Schedule files to be version controlled and added to the
154 Schedule files to be version controlled and added to the
155 repository.
155 repository.
156
156
157 The files will be added to the repository at the next commit. To
157 The files will be added to the repository at the next commit. To
158 undo an add before that, see :hg:`forget`.
158 undo an add before that, see :hg:`forget`.
159
159
160 If no names are given, add all files to the repository.
160 If no names are given, add all files to the repository.
161
161
162 .. container:: verbose
162 .. container:: verbose
163
163
164 An example showing how new (unknown) files are added
164 An example showing how new (unknown) files are added
165 automatically by :hg:`add`::
165 automatically by :hg:`add`::
166
166
167 $ ls
167 $ ls
168 foo.c
168 foo.c
169 $ hg status
169 $ hg status
170 ? foo.c
170 ? foo.c
171 $ hg add
171 $ hg add
172 adding foo.c
172 adding foo.c
173 $ hg status
173 $ hg status
174 A foo.c
174 A foo.c
175
175
176 Returns 0 if all files are successfully added.
176 Returns 0 if all files are successfully added.
177 """
177 """
178
178
179 m = scmutil.match(repo[None], pats, opts)
179 m = scmutil.match(repo[None], pats, opts)
180 rejected = cmdutil.add(ui, repo, m, opts.get('dry_run'),
180 rejected = cmdutil.add(ui, repo, m, opts.get('dry_run'),
181 opts.get('subrepos'), prefix="", explicitonly=False)
181 opts.get('subrepos'), prefix="", explicitonly=False)
182 return rejected and 1 or 0
182 return rejected and 1 or 0
183
183
184 @command('addremove',
184 @command('addremove',
185 similarityopts + walkopts + dryrunopts,
185 similarityopts + walkopts + dryrunopts,
186 _('[OPTION]... [FILE]...'))
186 _('[OPTION]... [FILE]...'))
187 def addremove(ui, repo, *pats, **opts):
187 def addremove(ui, repo, *pats, **opts):
188 """add all new files, delete all missing files
188 """add all new files, delete all missing files
189
189
190 Add all new files and remove all missing files from the
190 Add all new files and remove all missing files from the
191 repository.
191 repository.
192
192
193 New files are ignored if they match any of the patterns in
193 New files are ignored if they match any of the patterns in
194 ``.hgignore``. As with add, these changes take effect at the next
194 ``.hgignore``. As with add, these changes take effect at the next
195 commit.
195 commit.
196
196
197 Use the -s/--similarity option to detect renamed files. This
197 Use the -s/--similarity option to detect renamed files. This
198 option takes a percentage between 0 (disabled) and 100 (files must
198 option takes a percentage between 0 (disabled) and 100 (files must
199 be identical) as its parameter. With a parameter greater than 0,
199 be identical) as its parameter. With a parameter greater than 0,
200 this compares every removed file with every added file and records
200 this compares every removed file with every added file and records
201 those similar enough as renames. Detecting renamed files this way
201 those similar enough as renames. Detecting renamed files this way
202 can be expensive. After using this option, :hg:`status -C` can be
202 can be expensive. After using this option, :hg:`status -C` can be
203 used to check which files were identified as moved or renamed. If
203 used to check which files were identified as moved or renamed. If
204 not specified, -s/--similarity defaults to 100 and only renames of
204 not specified, -s/--similarity defaults to 100 and only renames of
205 identical files are detected.
205 identical files are detected.
206
206
207 Returns 0 if all files are successfully added.
207 Returns 0 if all files are successfully added.
208 """
208 """
209 try:
209 try:
210 sim = float(opts.get('similarity') or 100)
210 sim = float(opts.get('similarity') or 100)
211 except ValueError:
211 except ValueError:
212 raise util.Abort(_('similarity must be a number'))
212 raise util.Abort(_('similarity must be a number'))
213 if sim < 0 or sim > 100:
213 if sim < 0 or sim > 100:
214 raise util.Abort(_('similarity must be between 0 and 100'))
214 raise util.Abort(_('similarity must be between 0 and 100'))
215 return scmutil.addremove(repo, pats, opts, similarity=sim / 100.0)
215 return scmutil.addremove(repo, pats, opts, similarity=sim / 100.0)
216
216
217 @command('^annotate|blame',
217 @command('^annotate|blame',
218 [('r', 'rev', '', _('annotate the specified revision'), _('REV')),
218 [('r', 'rev', '', _('annotate the specified revision'), _('REV')),
219 ('', 'follow', None,
219 ('', 'follow', None,
220 _('follow copies/renames and list the filename (DEPRECATED)')),
220 _('follow copies/renames and list the filename (DEPRECATED)')),
221 ('', 'no-follow', None, _("don't follow copies and renames")),
221 ('', 'no-follow', None, _("don't follow copies and renames")),
222 ('a', 'text', None, _('treat all files as text')),
222 ('a', 'text', None, _('treat all files as text')),
223 ('u', 'user', None, _('list the author (long with -v)')),
223 ('u', 'user', None, _('list the author (long with -v)')),
224 ('f', 'file', None, _('list the filename')),
224 ('f', 'file', None, _('list the filename')),
225 ('d', 'date', None, _('list the date (short with -q)')),
225 ('d', 'date', None, _('list the date (short with -q)')),
226 ('n', 'number', None, _('list the revision number (default)')),
226 ('n', 'number', None, _('list the revision number (default)')),
227 ('c', 'changeset', None, _('list the changeset')),
227 ('c', 'changeset', None, _('list the changeset')),
228 ('l', 'line-number', None, _('show line number at the first appearance'))
228 ('l', 'line-number', None, _('show line number at the first appearance'))
229 ] + diffwsopts + walkopts,
229 ] + diffwsopts + walkopts,
230 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...'))
230 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...'))
231 def annotate(ui, repo, *pats, **opts):
231 def annotate(ui, repo, *pats, **opts):
232 """show changeset information by line for each file
232 """show changeset information by line for each file
233
233
234 List changes in files, showing the revision id responsible for
234 List changes in files, showing the revision id responsible for
235 each line
235 each line
236
236
237 This command is useful for discovering when a change was made and
237 This command is useful for discovering when a change was made and
238 by whom.
238 by whom.
239
239
240 Without the -a/--text option, annotate will avoid processing files
240 Without the -a/--text option, annotate will avoid processing files
241 it detects as binary. With -a, annotate will annotate the file
241 it detects as binary. With -a, annotate will annotate the file
242 anyway, although the results will probably be neither useful
242 anyway, although the results will probably be neither useful
243 nor desirable.
243 nor desirable.
244
244
245 Returns 0 on success.
245 Returns 0 on success.
246 """
246 """
247 if opts.get('follow'):
247 if opts.get('follow'):
248 # --follow is deprecated and now just an alias for -f/--file
248 # --follow is deprecated and now just an alias for -f/--file
249 # to mimic the behavior of Mercurial before version 1.5
249 # to mimic the behavior of Mercurial before version 1.5
250 opts['file'] = True
250 opts['file'] = True
251
251
252 datefunc = ui.quiet and util.shortdate or util.datestr
252 datefunc = ui.quiet and util.shortdate or util.datestr
253 getdate = util.cachefunc(lambda x: datefunc(x[0].date()))
253 getdate = util.cachefunc(lambda x: datefunc(x[0].date()))
254
254
255 if not pats:
255 if not pats:
256 raise util.Abort(_('at least one filename or pattern is required'))
256 raise util.Abort(_('at least one filename or pattern is required'))
257
257
258 hexfn = ui.debugflag and hex or short
258 hexfn = ui.debugflag and hex or short
259
259
260 opmap = [('user', ' ', lambda x: ui.shortuser(x[0].user())),
260 opmap = [('user', ' ', lambda x: ui.shortuser(x[0].user())),
261 ('number', ' ', lambda x: str(x[0].rev())),
261 ('number', ' ', lambda x: str(x[0].rev())),
262 ('changeset', ' ', lambda x: hexfn(x[0].node())),
262 ('changeset', ' ', lambda x: hexfn(x[0].node())),
263 ('date', ' ', getdate),
263 ('date', ' ', getdate),
264 ('file', ' ', lambda x: x[0].path()),
264 ('file', ' ', lambda x: x[0].path()),
265 ('line_number', ':', lambda x: str(x[1])),
265 ('line_number', ':', lambda x: str(x[1])),
266 ]
266 ]
267
267
268 if (not opts.get('user') and not opts.get('changeset')
268 if (not opts.get('user') and not opts.get('changeset')
269 and not opts.get('date') and not opts.get('file')):
269 and not opts.get('date') and not opts.get('file')):
270 opts['number'] = True
270 opts['number'] = True
271
271
272 linenumber = opts.get('line_number') is not None
272 linenumber = opts.get('line_number') is not None
273 if linenumber and (not opts.get('changeset')) and (not opts.get('number')):
273 if linenumber and (not opts.get('changeset')) and (not opts.get('number')):
274 raise util.Abort(_('at least one of -n/-c is required for -l'))
274 raise util.Abort(_('at least one of -n/-c is required for -l'))
275
275
276 funcmap = [(func, sep) for op, sep, func in opmap if opts.get(op)]
276 funcmap = [(func, sep) for op, sep, func in opmap if opts.get(op)]
277 funcmap[0] = (funcmap[0][0], '') # no separator in front of first column
277 funcmap[0] = (funcmap[0][0], '') # no separator in front of first column
278
278
279 def bad(x, y):
279 def bad(x, y):
280 raise util.Abort("%s: %s" % (x, y))
280 raise util.Abort("%s: %s" % (x, y))
281
281
282 ctx = scmutil.revsingle(repo, opts.get('rev'))
282 ctx = scmutil.revsingle(repo, opts.get('rev'))
283 m = scmutil.match(ctx, pats, opts)
283 m = scmutil.match(ctx, pats, opts)
284 m.bad = bad
284 m.bad = bad
285 follow = not opts.get('no_follow')
285 follow = not opts.get('no_follow')
286 diffopts = patch.diffopts(ui, opts, section='annotate')
286 diffopts = patch.diffopts(ui, opts, section='annotate')
287 for abs in ctx.walk(m):
287 for abs in ctx.walk(m):
288 fctx = ctx[abs]
288 fctx = ctx[abs]
289 if not opts.get('text') and util.binary(fctx.data()):
289 if not opts.get('text') and util.binary(fctx.data()):
290 ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
290 ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
291 continue
291 continue
292
292
293 lines = fctx.annotate(follow=follow, linenumber=linenumber,
293 lines = fctx.annotate(follow=follow, linenumber=linenumber,
294 diffopts=diffopts)
294 diffopts=diffopts)
295 pieces = []
295 pieces = []
296
296
297 for f, sep in funcmap:
297 for f, sep in funcmap:
298 l = [f(n) for n, dummy in lines]
298 l = [f(n) for n, dummy in lines]
299 if l:
299 if l:
300 sized = [(x, encoding.colwidth(x)) for x in l]
300 sized = [(x, encoding.colwidth(x)) for x in l]
301 ml = max([w for x, w in sized])
301 ml = max([w for x, w in sized])
302 pieces.append(["%s%s%s" % (sep, ' ' * (ml - w), x)
302 pieces.append(["%s%s%s" % (sep, ' ' * (ml - w), x)
303 for x, w in sized])
303 for x, w in sized])
304
304
305 if pieces:
305 if pieces:
306 for p, l in zip(zip(*pieces), lines):
306 for p, l in zip(zip(*pieces), lines):
307 ui.write("%s: %s" % ("".join(p), l[1]))
307 ui.write("%s: %s" % ("".join(p), l[1]))
308
308
309 if lines and not lines[-1][1].endswith('\n'):
309 if lines and not lines[-1][1].endswith('\n'):
310 ui.write('\n')
310 ui.write('\n')
311
311
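annotate assembles its output column by column: for each enabled opmap entry it computes the values for every line, right-aligns them to the column width, and joins the per-line pieces in front of the file text. A self-contained illustration of that assembly with made-up metadata (plain len() stands in for encoding.colwidth())::

    import sys

    meta = [('alice', '12'), ('bob', '7')]   # (user, rev) for two lines
    text = ['first line\n', 'second line\n']
    cols = []
    for idx, sep in ((0, ''), (1, ' ')):     # plays the role of funcmap
        vals = [m[idx] for m in meta]
        width = max(len(v) for v in vals)
        cols.append(['%s%s%s' % (sep, ' ' * (width - len(v)), v) for v in vals])
    for parts, line in zip(zip(*cols), text):
        sys.stdout.write('%s: %s' % (''.join(parts), line))
    # alice 12: first line
    #   bob  7: second line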
312 @command('archive',
312 @command('archive',
313 [('', 'no-decode', None, _('do not pass files through decoders')),
313 [('', 'no-decode', None, _('do not pass files through decoders')),
314 ('p', 'prefix', '', _('directory prefix for files in archive'),
314 ('p', 'prefix', '', _('directory prefix for files in archive'),
315 _('PREFIX')),
315 _('PREFIX')),
316 ('r', 'rev', '', _('revision to distribute'), _('REV')),
316 ('r', 'rev', '', _('revision to distribute'), _('REV')),
317 ('t', 'type', '', _('type of distribution to create'), _('TYPE')),
317 ('t', 'type', '', _('type of distribution to create'), _('TYPE')),
318 ] + subrepoopts + walkopts,
318 ] + subrepoopts + walkopts,
319 _('[OPTION]... DEST'))
319 _('[OPTION]... DEST'))
320 def archive(ui, repo, dest, **opts):
320 def archive(ui, repo, dest, **opts):
321 '''create an unversioned archive of a repository revision
321 '''create an unversioned archive of a repository revision
322
322
323 By default, the revision used is the parent of the working
323 By default, the revision used is the parent of the working
324 directory; use -r/--rev to specify a different revision.
324 directory; use -r/--rev to specify a different revision.
325
325
326 The archive type is automatically detected based on file
326 The archive type is automatically detected based on file
327 extension (or override using -t/--type).
327 extension (or override using -t/--type).
328
328
329 .. container:: verbose
329 .. container:: verbose
330
330
331 Examples:
331 Examples:
332
332
333 - create a zip file containing the 1.0 release::
333 - create a zip file containing the 1.0 release::
334
334
335 hg archive -r 1.0 project-1.0.zip
335 hg archive -r 1.0 project-1.0.zip
336
336
337 - create a tarball excluding .hg files::
337 - create a tarball excluding .hg files::
338
338
339 hg archive project.tar.gz -X ".hg*"
339 hg archive project.tar.gz -X ".hg*"
340
340
341 Valid types are:
341 Valid types are:
342
342
343 :``files``: a directory full of files (default)
343 :``files``: a directory full of files (default)
344 :``tar``: tar archive, uncompressed
344 :``tar``: tar archive, uncompressed
345 :``tbz2``: tar archive, compressed using bzip2
345 :``tbz2``: tar archive, compressed using bzip2
346 :``tgz``: tar archive, compressed using gzip
346 :``tgz``: tar archive, compressed using gzip
347 :``uzip``: zip archive, uncompressed
347 :``uzip``: zip archive, uncompressed
348 :``zip``: zip archive, compressed using deflate
348 :``zip``: zip archive, compressed using deflate
349
349
350 The exact name of the destination archive or directory is given
350 The exact name of the destination archive or directory is given
351 using a format string; see :hg:`help export` for details.
351 using a format string; see :hg:`help export` for details.
352
352
353 Each member added to an archive file has a directory prefix
353 Each member added to an archive file has a directory prefix
354 prepended. Use -p/--prefix to specify a format string for the
354 prepended. Use -p/--prefix to specify a format string for the
355 prefix. The default is the basename of the archive, with suffixes
355 prefix. The default is the basename of the archive, with suffixes
356 removed.
356 removed.
357
357
358 Returns 0 on success.
358 Returns 0 on success.
359 '''
359 '''
360
360
361 ctx = scmutil.revsingle(repo, opts.get('rev'))
361 ctx = scmutil.revsingle(repo, opts.get('rev'))
362 if not ctx:
362 if not ctx:
363 raise util.Abort(_('no working directory: please specify a revision'))
363 raise util.Abort(_('no working directory: please specify a revision'))
364 node = ctx.node()
364 node = ctx.node()
365 dest = cmdutil.makefilename(repo, dest, node)
365 dest = cmdutil.makefilename(repo, dest, node)
366 if os.path.realpath(dest) == repo.root:
366 if os.path.realpath(dest) == repo.root:
367 raise util.Abort(_('repository root cannot be destination'))
367 raise util.Abort(_('repository root cannot be destination'))
368
368
369 kind = opts.get('type') or archival.guesskind(dest) or 'files'
369 kind = opts.get('type') or archival.guesskind(dest) or 'files'
370 prefix = opts.get('prefix')
370 prefix = opts.get('prefix')
371
371
372 if dest == '-':
372 if dest == '-':
373 if kind == 'files':
373 if kind == 'files':
374 raise util.Abort(_('cannot archive plain files to stdout'))
374 raise util.Abort(_('cannot archive plain files to stdout'))
375 dest = cmdutil.makefileobj(repo, dest)
375 dest = cmdutil.makefileobj(repo, dest)
376 if not prefix:
376 if not prefix:
377 prefix = os.path.basename(repo.root) + '-%h'
377 prefix = os.path.basename(repo.root) + '-%h'
378
378
379 prefix = cmdutil.makefilename(repo, prefix, node)
379 prefix = cmdutil.makefilename(repo, prefix, node)
380 matchfn = scmutil.match(ctx, [], opts)
380 matchfn = scmutil.match(ctx, [], opts)
381 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
381 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
382 matchfn, prefix, subrepos=opts.get('subrepos'))
382 matchfn, prefix, subrepos=opts.get('subrepos'))
383
383
@command('backout',
    [('', 'merge', None, _('merge with old dirstate parent after backout')),
    ('', 'parent', '',
     _('parent to choose when backing out merge (DEPRECATED)'), _('REV')),
    ('r', 'rev', '', _('revision to backout'), _('REV')),
    ] + mergetoolopts + walkopts + commitopts + commitopts2,
    _('[OPTION]... [-r] REV'))
def backout(ui, repo, node=None, rev=None, **opts):
    '''reverse effect of earlier changeset

    Prepare a new changeset with the effect of REV undone in the
    current working directory.

    If REV is the parent of the working directory, then this new changeset
    is committed automatically. Otherwise, hg needs to merge the
    changes and the merged result is left uncommitted.

    .. note::

      backout cannot be used to fix either an unwanted or
      incorrect merge.

    .. container:: verbose

      By default, the pending changeset will have one parent,
      maintaining a linear history. With --merge, the pending
      changeset will instead have two parents: the old parent of the
      working directory and a new child of REV that simply undoes REV.

      Before version 1.7, the behavior without --merge was equivalent
      to specifying --merge followed by :hg:`update --clean .` to
      cancel the merge and leave the child of REV as a head to be
      merged separately.
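
      For example, undoing a changeset other than the working directory
      parent and then committing the merged result might look like
      (revision number chosen for illustration)::

        hg backout -r 21
        hg commit -m "back out changeset 21"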

    See :hg:`help dates` for a list of formats valid for -d/--date.

    Returns 0 on success, 1 if nothing to backout or there are unresolved
    files.
    '''
    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    if not rev:
        rev = node

    if not rev:
        raise util.Abort(_("please specify a revision to backout"))

    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)

    cmdutil.checkunfinished(repo)
    cmdutil.bailifchanged(repo)
    node = scmutil.revsingle(repo, rev).node()

    op1, op2 = repo.dirstate.parents()
    if node not in repo.changelog.commonancestorsheads(op1, node):
        raise util.Abort(_('cannot backout change that is not an ancestor'))

    p1, p2 = repo.changelog.parents(node)
    if p1 == nullid:
        raise util.Abort(_('cannot backout a change with no parents'))
    if p2 != nullid:
        if not opts.get('parent'):
            raise util.Abort(_('cannot backout a merge changeset'))
        p = repo.lookup(opts['parent'])
        if p not in (p1, p2):
            raise util.Abort(_('%s is not a parent of %s') %
                             (short(p), short(node)))
        parent = p
    else:
        if opts.get('parent'):
            raise util.Abort(_('cannot use --parent on non-merge changeset'))
        parent = p1

    # the backout should appear on the same branch
    wlock = repo.wlock()
    try:
        branch = repo.dirstate.branch()
        bheads = repo.branchheads(branch)
        rctx = scmutil.revsingle(repo, hex(parent))
        if not opts.get('merge') and op1 != node:
            try:
                ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
                             'backout')
                stats = mergemod.update(repo, parent, True, True, False,
                                        node, False)
                repo.setparents(op1, op2)
                hg._showstats(repo, stats)
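                # stats[3] counts the files left unresolved by the merge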
                if stats[3]:
                    repo.ui.status(_("use 'hg resolve' to retry unresolved "
                                     "file merges\n"))
                else:
                    msg = _("changeset %s backed out, "
                            "don't forget to commit.\n")
                    ui.status(msg % short(node))
                return stats[3] > 0
            finally:
                ui.setconfig('ui', 'forcemerge', '', '')
        else:
            hg.clean(repo, node, show_stats=False)
            repo.dirstate.setbranch(branch)
            cmdutil.revert(ui, repo, rctx, repo.dirstate.parents())


        def commitfunc(ui, repo, message, match, opts):
            e = cmdutil.getcommiteditor()
            if not message:
                # we don't translate commit messages
                message = "Backed out changeset %s" % short(node)
                e = cmdutil.getcommiteditor(edit=True)
            return repo.commit(message, opts.get('user'), opts.get('date'),
                               match, editor=e)
        newnode = cmdutil.commit(ui, repo, commitfunc, [], opts)
        if not newnode:
            ui.status(_("nothing changed\n"))
            return 1
        cmdutil.commitstatus(repo, newnode, branch, bheads)

        def nice(node):
            return '%d:%s' % (repo.changelog.rev(node), short(node))
        ui.status(_('changeset %s backs out changeset %s\n') %
                  (nice(repo.changelog.tip()), nice(node)))
        if opts.get('merge') and op1 != node:
            hg.clean(repo, op1, show_stats=False)
            ui.status(_('merging with changeset %s\n')
                      % nice(repo.changelog.tip()))
            try:
                ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
                             'backout')
                return hg.merge(repo, hex(repo.changelog.tip()))
            finally:
                ui.setconfig('ui', 'forcemerge', '', '')
    finally:
        wlock.release()
    return 0

@command('bisect',
    [('r', 'reset', False, _('reset bisect state')),
    ('g', 'good', False, _('mark changeset good')),
    ('b', 'bad', False, _('mark changeset bad')),
    ('s', 'skip', False, _('skip testing changeset')),
    ('e', 'extend', False, _('extend the bisect range')),
    ('c', 'command', '', _('use command to check changeset state'), _('CMD')),
    ('U', 'noupdate', False, _('do not update to target'))],
    _("[-gbsr] [-U] [-c CMD] [REV]"))
def bisect(ui, repo, rev=None, extra=None, command=None,
           reset=None, good=None, bad=None, skip=None, extend=None,
           noupdate=None):
    """subdivision search of changesets

    This command helps to find changesets which introduce problems. To
    use, mark the earliest changeset you know exhibits the problem as
    bad, then mark the latest changeset which is free from the problem
    as good. Bisect will update your working directory to a revision
    for testing (unless the -U/--noupdate option is specified). Once
    you have performed tests, mark the working directory as good or
    bad, and bisect will either update to another candidate changeset
    or announce that it has found the bad revision.

    As a shortcut, you can also use the revision argument to mark a
    revision as good or bad without checking it out first.

    If you supply a command, it will be used for automatic bisection.
    The environment variable HG_NODE will contain the ID of the
    changeset being tested. The exit status of the command will be
    used to mark revisions as good or bad: status 0 means good, 125
    means to skip the revision, 127 (command not found) will abort the
    bisection, and any other non-zero exit status means the revision
    is bad.

    .. container:: verbose

      Some examples:

      - start a bisection with known bad revision 34, and good revision 12::

          hg bisect --bad 34
          hg bisect --good 12

      - advance the current bisection by marking current revision as good or
        bad::

          hg bisect --good
          hg bisect --bad

      - mark the current revision, or a known revision, to be skipped (e.g. if
        that revision is not usable because of another issue)::

          hg bisect --skip
          hg bisect --skip 23

      - skip all revisions that do not touch directories ``foo`` or ``bar``::

          hg bisect --skip "!( file('path:foo') & file('path:bar') )"

      - forget the current bisection::

          hg bisect --reset

      - use 'make && make tests' to automatically find the first broken
        revision::

          hg bisect --reset
          hg bisect --bad 34
          hg bisect --good 12
          hg bisect --command "make && make tests"

      - see all changesets whose states are already known in the current
        bisection::

          hg log -r "bisect(pruned)"

      - see the changeset currently being bisected (especially useful
        if running with -U/--noupdate)::

          hg log -r "bisect(current)"

      - see all changesets that took part in the current bisection::

          hg log -r "bisect(range)"

      - you can even get a nice graph::

          hg log --graph -r "bisect(range)"

      See :hg:`help revsets` for more about the `bisect()` keyword.

    Returns 0 on success.
    """
    def extendbisectrange(nodes, good):
        # bisect is incomplete when it ends on a merge node and
        # one of the parent was not checked.
        parents = repo[nodes[0]].parents()
        if len(parents) > 1:
            side = good and state['bad'] or state['good']
            num = len(set(i.node() for i in parents) & set(side))
            if num == 1:
                return parents[0].ancestor(parents[1])
        return None

    def print_result(nodes, good):
        displayer = cmdutil.show_changeset(ui, repo, {})
        if len(nodes) == 1:
            # narrowed it down to a single revision
            if good:
                ui.write(_("The first good revision is:\n"))
            else:
                ui.write(_("The first bad revision is:\n"))
            displayer.show(repo[nodes[0]])
            extendnode = extendbisectrange(nodes, good)
            if extendnode is not None:
                ui.write(_('Not all ancestors of this changeset have been'
                           ' checked.\nUse bisect --extend to continue the '
                           'bisection from\nthe common ancestor, %s.\n')
                         % extendnode)
        else:
            # multiple possible revisions
            if good:
                ui.write(_("Due to skipped revisions, the first "
                           "good revision could be any of:\n"))
            else:
                ui.write(_("Due to skipped revisions, the first "
                           "bad revision could be any of:\n"))
            for n in nodes:
                displayer.show(repo[n])
        displayer.close()

    def check_state(state, interactive=True):
        if not state['good'] or not state['bad']:
            if (good or bad or skip or reset) and interactive:
                return
            if not state['good']:
                raise util.Abort(_('cannot bisect (no known good revisions)'))
            else:
                raise util.Abort(_('cannot bisect (no known bad revisions)'))
        return True

    # backward compatibility
    if rev in "good bad reset init".split():
        ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
        cmd, rev, extra = rev, extra, None
        if cmd == "good":
            good = True
        elif cmd == "bad":
            bad = True
        else:
            reset = True
    elif extra or good + bad + skip + reset + extend + bool(command) > 1:
        raise util.Abort(_('incompatible arguments'))

    cmdutil.checkunfinished(repo)

    if reset:
        p = repo.join("bisect.state")
        if os.path.exists(p):
            os.unlink(p)
        return

    state = hbisect.load_state(repo)

    if command:
        changesets = 1
        if noupdate:
            try:
                node = state['current'][0]
            except LookupError:
                raise util.Abort(_('current bisect revision is unknown - '
                                   'start a new bisect to fix'))
        else:
            node, p2 = repo.dirstate.parents()
            if p2 != nullid:
                raise util.Abort(_('current bisect revision is a merge'))
        try:
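            # automatic bisection: run the command on each candidate,
            # translate its exit status into good/bad/skip, and let
            # hbisect pick the next revision until none remain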
            while changesets:
                # update state
                state['current'] = [node]
                hbisect.save_state(repo, state)
                status = util.system(command,
                                     environ={'HG_NODE': hex(node)},
                                     out=ui.fout)
                if status == 125:
                    transition = "skip"
                elif status == 0:
                    transition = "good"
                # status < 0 means process was killed
                elif status == 127:
                    raise util.Abort(_("failed to execute %s") % command)
                elif status < 0:
                    raise util.Abort(_("%s killed") % command)
                else:
                    transition = "bad"
                ctx = scmutil.revsingle(repo, rev, node)
                rev = None # clear for future iterations
                state[transition].append(ctx.node())
                ui.status(_('changeset %d:%s: %s\n') % (ctx, ctx, transition))
                check_state(state, interactive=False)
                # bisect
                nodes, changesets, bgood = hbisect.bisect(repo.changelog, state)
                # update to next check
                node = nodes[0]
                if not noupdate:
                    cmdutil.bailifchanged(repo)
                    hg.clean(repo, node, show_stats=False)
        finally:
            state['current'] = [node]
            hbisect.save_state(repo, state)
        print_result(nodes, bgood)
        return

    # update state

    if rev:
        nodes = [repo.lookup(i) for i in scmutil.revrange(repo, [rev])]
    else:
        nodes = [repo.lookup('.')]

    if good or bad or skip:
        if good:
            state['good'] += nodes
        elif bad:
            state['bad'] += nodes
        elif skip:
            state['skip'] += nodes
        hbisect.save_state(repo, state)

    if not check_state(state):
        return

    # actually bisect
    nodes, changesets, good = hbisect.bisect(repo.changelog, state)
    if extend:
        if not changesets:
            extendnode = extendbisectrange(nodes, good)
            if extendnode is not None:
                ui.write(_("Extending search to changeset %d:%s\n")
                         % (extendnode.rev(), extendnode))
                state['current'] = [extendnode.node()]
                hbisect.save_state(repo, state)
                if noupdate:
                    return
                cmdutil.bailifchanged(repo)
                return hg.clean(repo, extendnode.node())
        raise util.Abort(_("nothing to extend"))

    if changesets == 0:
        print_result(nodes, good)
    else:
        assert len(nodes) == 1 # only a single node can be tested next
        node = nodes[0]
        # compute the approximate number of remaining tests
        tests, size = 0, 2
        while size <= changesets:
            tests, size = tests + 1, size * 2
        rev = repo.changelog.rev(node)
        ui.write(_("Testing changeset %d:%s "
                   "(%d changesets remaining, ~%d tests)\n")
                 % (rev, short(node), changesets, tests))
        state['current'] = [node]
        hbisect.save_state(repo, state)
        if not noupdate:
            cmdutil.bailifchanged(repo)
            return hg.clean(repo, node)

@command('bookmarks|bookmark',
    [('f', 'force', False, _('force')),
    ('r', 'rev', '', _('revision'), _('REV')),
    ('d', 'delete', False, _('delete a given bookmark')),
    ('m', 'rename', '', _('rename a given bookmark'), _('NAME')),
    ('i', 'inactive', False, _('mark a bookmark inactive'))],
    _('hg bookmarks [OPTIONS]... [NAME]...'))
def bookmark(ui, repo, *names, **opts):
    '''track a line of development with movable markers

    Bookmarks are pointers to certain commits that move when committing.
    Bookmarks are local. They can be renamed, copied and deleted. It is
    possible to use :hg:`merge NAME` to merge from a given bookmark, and
    :hg:`update NAME` to update to a given bookmark.

    You can use :hg:`bookmark NAME` to set a bookmark on the working
    directory's parent revision with the given name. If you specify
    a revision using -r REV (where REV may be an existing bookmark),
    the bookmark is assigned to that revision.

    Bookmarks can be pushed and pulled between repositories (see :hg:`help
    push` and :hg:`help pull`). This requires both the local and remote
    repositories to support bookmarks. For versions prior to 1.8, this means
    the bookmarks extension must be enabled.

    If you set a bookmark called '@', new clones of the repository will
    have that revision checked out (and the bookmark made active) by
    default.

    With -i/--inactive, the new bookmark will not be made the active
    bookmark. If -r/--rev is given, the new bookmark will not be made
    active even if -i/--inactive is not given. If no NAME is given, the
    current active bookmark will be marked inactive.
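
    For example, a bookmark might be placed on an older revision without
    making it active (name and revision chosen for illustration)::

      hg bookmark -i -r 13 hypothetical-feature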
    '''
    force = opts.get('force')
    rev = opts.get('rev')
    delete = opts.get('delete')
    rename = opts.get('rename')
    inactive = opts.get('inactive')

    def checkformat(mark):
        mark = mark.strip()
        if not mark:
            raise util.Abort(_("bookmark names cannot consist entirely of "
                               "whitespace"))
        scmutil.checknewlabel(repo, mark, 'bookmark')
        return mark

    def checkconflict(repo, mark, cur, force=False, target=None):
        if mark in marks and not force:
            if target:
                if marks[mark] == target and target == cur:
                    # re-activating a bookmark
                    return
                anc = repo.changelog.ancestors([repo[target].rev()])
                bmctx = repo[marks[mark]]
                divs = [repo[b].node() for b in marks
                        if b.split('@', 1)[0] == mark.split('@', 1)[0]]
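                # divs: nodes of every bookmark that shares this name up to
                # an optional '@' suffix, i.e. its divergent variants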

                # allow resolving a single divergent bookmark even if moving
                # the bookmark across branches when a revision is specified
                # that contains a divergent bookmark
                if bmctx.rev() not in anc and target in divs:
                    bookmarks.deletedivergent(repo, [target], mark)
                    return

                deletefrom = [b for b in divs
                              if repo[b].rev() in anc or b == target]
                bookmarks.deletedivergent(repo, deletefrom, mark)
                if bookmarks.validdest(repo, bmctx, repo[target]):
                    ui.status(_("moving bookmark '%s' forward from %s\n") %
                              (mark, short(bmctx.node())))
                    return
            raise util.Abort(_("bookmark '%s' already exists "
                               "(use -f to force)") % mark)
        if ((mark in repo.branchmap() or mark == repo.dirstate.branch())
            and not force):
            raise util.Abort(
                _("a bookmark cannot have the name of an existing branch"))

    if delete and rename:
        raise util.Abort(_("--delete and --rename are incompatible"))
    if delete and rev:
        raise util.Abort(_("--rev is incompatible with --delete"))
    if rename and rev:
        raise util.Abort(_("--rev is incompatible with --rename"))
    if not names and (delete or rev):
        raise util.Abort(_("bookmark name required"))

    if delete or rename or names or inactive:
        wlock = repo.wlock()
        try:
            cur = repo.changectx('.').node()
            marks = repo._bookmarks
            if delete:
                for mark in names:
                    if mark not in marks:
                        raise util.Abort(_("bookmark '%s' does not exist") %
                                         mark)
                    if mark == repo._bookmarkcurrent:
                        bookmarks.unsetcurrent(repo)
                    del marks[mark]
                marks.write()

            elif rename:
                if not names:
                    raise util.Abort(_("new bookmark name required"))
                elif len(names) > 1:
                    raise util.Abort(_("only one new bookmark name allowed"))
                mark = checkformat(names[0])
                if rename not in marks:
                    raise util.Abort(_("bookmark '%s' does not exist") % rename)
                checkconflict(repo, mark, cur, force)
                marks[mark] = marks[rename]
                if repo._bookmarkcurrent == rename and not inactive:
                    bookmarks.setcurrent(repo, mark)
                del marks[rename]
                marks.write()

            elif names:
                newact = None
                for mark in names:
                    mark = checkformat(mark)
                    if newact is None:
                        newact = mark
                    if inactive and mark == repo._bookmarkcurrent:
                        bookmarks.unsetcurrent(repo)
                        return
                    tgt = cur
                    if rev:
                        tgt = scmutil.revsingle(repo, rev).node()
                    checkconflict(repo, mark, cur, force, tgt)
                    marks[mark] = tgt
                if not inactive and cur == marks[newact] and not rev:
                    bookmarks.setcurrent(repo, newact)
                elif cur != tgt and newact == repo._bookmarkcurrent:
                    bookmarks.unsetcurrent(repo)
                marks.write()

            elif inactive:
                if len(marks) == 0:
                    ui.status(_("no bookmarks set\n"))
                elif not repo._bookmarkcurrent:
                    ui.status(_("no active bookmark\n"))
                else:
                    bookmarks.unsetcurrent(repo)
        finally:
            wlock.release()
    else: # show bookmarks
        hexfn = ui.debugflag and hex or short
        marks = repo._bookmarks
        if len(marks) == 0:
            ui.status(_("no bookmarks set\n"))
        else:
            for bmark, n in sorted(marks.iteritems()):
                current = repo._bookmarkcurrent
                if bmark == current:
                    prefix, label = '*', 'bookmarks.current'
                else:
                    prefix, label = ' ', ''

                if ui.quiet:
                    ui.write("%s\n" % bmark, label=label)
                else:
                    pad = " " * (25 - encoding.colwidth(bmark))
                    ui.write(" %s %s%s %d:%s\n" % (
                        prefix, bmark, pad, repo.changelog.rev(n), hexfn(n)),
                        label=label)

@command('branch',
    [('f', 'force', None,
     _('set branch name even if it shadows an existing branch')),
    ('C', 'clean', None, _('reset branch name to parent branch name'))],
    _('[-fC] [NAME]'))
def branch(ui, repo, label=None, **opts):
    """set or show the current branch name

    .. note::

      Branch names are permanent and global. Use :hg:`bookmark` to create a
      light-weight bookmark instead. See :hg:`help glossary` for more
      information about named branches and bookmarks.

    With no argument, show the current branch name. With one argument,
    set the working directory branch name (the branch will not exist
    in the repository until the next commit). Standard practice
    recommends that primary development take place on the 'default'
    branch.

    Unless -f/--force is specified, branch will not let you set a
    branch name that already exists, even if it's inactive.

    Use -C/--clean to reset the working directory branch to that of
    the parent of the working directory, negating a previous branch
    change.

    Use the command :hg:`update` to switch to an existing branch. Use
    :hg:`commit --close-branch` to mark this branch as closed.

    Returns 0 on success.
    """
    if label:
        label = label.strip()

    if not opts.get('clean') and not label:
        ui.write("%s\n" % repo.dirstate.branch())
        return

    wlock = repo.wlock()
    try:
        if opts.get('clean'):
            label = repo[None].p1().branch()
            repo.dirstate.setbranch(label)
            ui.status(_('reset working directory to branch %s\n') % label)
        elif label:
            if not opts.get('force') and label in repo.branchmap():
                if label not in [p.branch() for p in repo.parents()]:
                    raise util.Abort(_('a branch of the same name already'
                                       ' exists'),
                                     # i18n: "it" refers to an existing branch
                                     hint=_("use 'hg update' to switch to it"))
            scmutil.checknewlabel(repo, label, 'branch')
            repo.dirstate.setbranch(label)
            ui.status(_('marked working directory as branch %s\n') % label)
            ui.status(_('(branches are permanent and global, '
                        'did you want a bookmark?)\n'))
    finally:
        wlock.release()

@command('branches',
    [('a', 'active', False, _('show only branches that have unmerged heads')),
    ('c', 'closed', False, _('show normal and closed branches'))],
    _('[-ac]'))
def branches(ui, repo, active=False, closed=False):
    """list repository named branches

    List the repository's named branches, indicating which ones are
    inactive. If -c/--closed is specified, also list branches which have
    been marked closed (see :hg:`commit --close-branch`).

    If -a/--active is specified, only show active branches. A branch
    is considered active if it contains repository heads.

    Use the command :hg:`update` to switch to an existing branch.

    Returns 0.
    """

    hexfunc = ui.debugflag and hex or short

    allheads = set(repo.heads())
    branches = []
    for tag, heads, tip, isclosed in repo.branchmap().iterbranches():
        isactive = not isclosed and bool(set(heads) & allheads)
        branches.append((tag, repo[tip], isactive, not isclosed))
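    # sort with reverse=True so active branches come first, followed by
    # branches with more recent tips (i[2] is the active flag, i[1] the
    # tip changectx, i[0] the branch name)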
    branches.sort(key=lambda i: (i[2], i[1].rev(), i[0], i[3]),
                  reverse=True)

    for tag, ctx, isactive, isopen in branches:
        if (not active) or isactive:
            if isactive:
                label = 'branches.active'
                notice = ''
            elif not isopen:
                if not closed:
                    continue
                label = 'branches.closed'
                notice = _(' (closed)')
            else:
                label = 'branches.inactive'
                notice = _(' (inactive)')
            if tag == repo.dirstate.branch():
                label = 'branches.current'
            rev = str(ctx.rev()).rjust(31 - encoding.colwidth(tag))
            rev = ui.label('%s:%s' % (rev, hexfunc(ctx.node())),
                           'log.changeset changeset.%s' % ctx.phasestr())
            labeledtag = ui.label(tag, label)
            if ui.quiet:
                ui.write("%s\n" % labeledtag)
            else:
                ui.write("%s %s%s\n" % (labeledtag, rev, notice))

@command('bundle',
    [('f', 'force', None, _('run even when the destination is unrelated')),
    ('r', 'rev', [], _('a changeset intended to be added to the destination'),
     _('REV')),
    ('b', 'branch', [], _('a specific branch you would like to bundle'),
     _('BRANCH')),
    ('', 'base', [],
     _('a base changeset assumed to be available at the destination'),
     _('REV')),
    ('a', 'all', None, _('bundle all changesets in the repository')),
    ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE')),
    ] + remoteopts,
    _('[-f] [-t TYPE] [-a] [-r REV]... [--base REV]... FILE [DEST]'))
def bundle(ui, repo, fname, dest=None, **opts):
    """create a changegroup file

    Generate a compressed changegroup file collecting changesets not
    known to be in another repository.

    If you omit the destination repository, then hg assumes the
    destination will have all the nodes you specify with --base
    parameters. To create a bundle containing all changesets, use
    -a/--all (or --base null).

    You can change compression method with the -t/--type option.
    The available compression methods are: none, bzip2, and
    gzip (by default, bundles are compressed using bzip2).

    The bundle file can then be transferred using conventional means
    and applied to another repository with the unbundle or pull
    command. This is useful when direct push and pull are not
    available or when exporting an entire repository is undesirable.

    Applying bundles preserves all changeset contents including
    permissions, copy/rename information, and revision history.
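
    For example, a bundle holding every changeset in the repository might
    be written with (file name chosen for illustration)::

      hg bundle --all everything.hg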

    Returns 0 on success, 1 if no changes found.
    """
    revs = None
    if 'rev' in opts:
        revs = scmutil.revrange(repo, opts['rev'])

    bundletype = opts.get('type', 'bzip2').lower()
    btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
    bundletype = btypes.get(bundletype)
    if bundletype not in changegroup.bundletypes:
        raise util.Abort(_('unknown bundle type specified with --type'))

    if opts.get('all'):
        base = ['null']
    else:
        base = scmutil.revrange(repo, opts.get('base'))
    # TODO: get desired bundlecaps from command line.
    bundlecaps = None
    if base:
        if dest:
            raise util.Abort(_("--base is incompatible with specifying "
                               "a destination"))
        common = [repo.lookup(rev) for rev in base]
        heads = revs and map(repo.lookup, revs) or revs
        cg = changegroup.getbundle(repo, 'bundle', heads=heads, common=common,
                                   bundlecaps=bundlecaps)
        outgoing = None
    else:
        dest = ui.expandpath(dest or 'default-push', dest or 'default')
        dest, branches = hg.parseurl(dest, opts.get('branch'))
        other = hg.peer(repo, opts, dest)
        revs, checkout = hg.addbranchrevs(repo, repo, branches, revs)
        heads = revs and map(repo.lookup, revs) or revs
        outgoing = discovery.findcommonoutgoing(repo, other,
                                                onlyheads=heads,
                                                force=opts.get('force'),
                                                portable=True)
        cg = changegroup.getlocalbundle(repo, 'bundle', outgoing, bundlecaps)
    if not cg:
        scmutil.nochangesfound(ui, repo, outgoing and outgoing.excluded)
        return 1

    changegroup.writebundle(cg, fname, bundletype)

@command('cat',
    [('o', 'output', '',
     _('print output to file with formatted name'), _('FORMAT')),
    ('r', 'rev', '', _('print the given revision'), _('REV')),
    ('', 'decode', None, _('apply any matching decode filter')),
    ] + walkopts,
    _('[OPTION]... FILE...'))
def cat(ui, repo, file1, *pats, **opts):
    """output the current or given revision of files

    Print the specified files as they were at the given revision. If
    no revision is given, the parent of the working directory is used.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are as follows:

    :``%%``: literal "%" character
    :``%s``: basename of file being printed
    :``%d``: dirname of file being printed, or '.' if in repository root
    :``%p``: root-relative path name of file being printed
    :``%H``: changeset hash (40 hexadecimal digits)
    :``%R``: changeset revision number
    :``%h``: short-form changeset hash (12 hexadecimal digits)
    :``%r``: zero-padded changeset revision number
    :``%b``: basename of the exporting repository
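
    For example, revision 2 of a file might be written to a name built
    from its basename and revision number (file name chosen for
    illustration)::

      hg cat -r 2 -o '%s.%R' foo.c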

    Returns 0 on success.
    """
    ctx = scmutil.revsingle(repo, opts.get('rev'))
    m = scmutil.match(ctx, (file1,) + pats, opts)

    return cmdutil.cat(ui, repo, ctx, m, '', **opts)

1184 @command('^clone',
1184 @command('^clone',
1185 [('U', 'noupdate', None,
1185 [('U', 'noupdate', None,
1186 _('the clone will include an empty working copy (only a repository)')),
1186 _('the clone will include an empty working copy (only a repository)')),
1187 ('u', 'updaterev', '', _('revision, tag or branch to check out'), _('REV')),
1187 ('u', 'updaterev', '', _('revision, tag or branch to check out'), _('REV')),
1188 ('r', 'rev', [], _('include the specified changeset'), _('REV')),
1188 ('r', 'rev', [], _('include the specified changeset'), _('REV')),
1189 ('b', 'branch', [], _('clone only the specified branch'), _('BRANCH')),
1189 ('b', 'branch', [], _('clone only the specified branch'), _('BRANCH')),
1190 ('', 'pull', None, _('use pull protocol to copy metadata')),
1190 ('', 'pull', None, _('use pull protocol to copy metadata')),
1191 ('', 'uncompressed', None, _('use uncompressed transfer (fast over LAN)')),
1191 ('', 'uncompressed', None, _('use uncompressed transfer (fast over LAN)')),
1192 ] + remoteopts,
1192 ] + remoteopts,
1193 _('[OPTION]... SOURCE [DEST]'))
1193 _('[OPTION]... SOURCE [DEST]'))
1194 def clone(ui, source, dest=None, **opts):
1194 def clone(ui, source, dest=None, **opts):
1195 """make a copy of an existing repository
1195 """make a copy of an existing repository
1196
1196
1197 Create a copy of an existing repository in a new directory.
1197 Create a copy of an existing repository in a new directory.
1198
1198
1199 If no destination directory name is specified, it defaults to the
1199 If no destination directory name is specified, it defaults to the
1200 basename of the source.
1200 basename of the source.
1201
1201
1202 The location of the source is added to the new repository's
1202 The location of the source is added to the new repository's
1203 ``.hg/hgrc`` file, as the default to be used for future pulls.
1203 ``.hg/hgrc`` file, as the default to be used for future pulls.
1204
1204
1205 Only local paths and ``ssh://`` URLs are supported as
1205 Only local paths and ``ssh://`` URLs are supported as
1206 destinations. For ``ssh://`` destinations, no working directory or
1206 destinations. For ``ssh://`` destinations, no working directory or
1207 ``.hg/hgrc`` will be created on the remote side.
1207 ``.hg/hgrc`` will be created on the remote side.
1208
1208
1209 To pull only a subset of changesets, specify one or more revision
1209 To pull only a subset of changesets, specify one or more revision
1210 identifiers with -r/--rev or branches with -b/--branch. The
1210 identifiers with -r/--rev or branches with -b/--branch. The
1211 resulting clone will contain only the specified changesets and
1211 resulting clone will contain only the specified changesets and
1212 their ancestors. These options (or 'clone src#rev dest') imply
1212 their ancestors. These options (or 'clone src#rev dest') imply
1213 --pull, even for local source repositories. Note that specifying a
1213 --pull, even for local source repositories. Note that specifying a
1214 tag will include the tagged changeset but not the changeset
1214 tag will include the tagged changeset but not the changeset
1215 containing the tag.
1215 containing the tag.
1216
1216
1217 If the source repository has a bookmark called '@' set, that
1217 If the source repository has a bookmark called '@' set, that
1218 revision will be checked out in the new repository by default.
1218 revision will be checked out in the new repository by default.
1219
1219
1220 To check out a particular version, use -u/--update, or
1220 To check out a particular version, use -u/--update, or
1221 -U/--noupdate to create a clone with no working directory.
1221 -U/--noupdate to create a clone with no working directory.
1222
1222
1223 .. container:: verbose
1223 .. container:: verbose
1224
1224
1225 For efficiency, hardlinks are used for cloning whenever the
1225 For efficiency, hardlinks are used for cloning whenever the
1226 source and destination are on the same filesystem (note this
1226 source and destination are on the same filesystem (note this
1227 applies only to the repository data, not to the working
1227 applies only to the repository data, not to the working
1228 directory). Some filesystems, such as AFS, implement hardlinking
1228 directory). Some filesystems, such as AFS, implement hardlinking
1229 incorrectly, but do not report errors. In these cases, use the
1229 incorrectly, but do not report errors. In these cases, use the
1230 --pull option to avoid hardlinking.
1230 --pull option to avoid hardlinking.
1231
1231
1232 In some cases, you can clone repositories and the working
1232 In some cases, you can clone repositories and the working
1233 directory using full hardlinks with ::
1233 directory using full hardlinks with ::
1234
1234
1235 $ cp -al REPO REPOCLONE
1235 $ cp -al REPO REPOCLONE
1236
1236
1237 This is the fastest way to clone, but it is not always safe. The
1237 This is the fastest way to clone, but it is not always safe. The
1238 operation is not atomic (making sure REPO is not modified during
1238 operation is not atomic (making sure REPO is not modified during
1239 the operation is up to you) and you have to make sure your
1239 the operation is up to you) and you have to make sure your
1240 editor breaks hardlinks (Emacs and most Linux Kernel tools do
1240 editor breaks hardlinks (Emacs and most Linux Kernel tools do
1241 so). Also, this is not compatible with certain extensions that
1241 so). Also, this is not compatible with certain extensions that
1242 place their metadata under the .hg directory, such as mq.
1242 place their metadata under the .hg directory, such as mq.
1243
1243
1244 Mercurial will update the working directory to the first applicable
1244 Mercurial will update the working directory to the first applicable
1245 revision from this list:
1245 revision from this list:
1246
1246
1247 a) null if -U or the source repository has no changesets
1247 a) null if -U or the source repository has no changesets
1248 b) if -u . and the source repository is local, the first parent of
1248 b) if -u . and the source repository is local, the first parent of
1249 the source repository's working directory
1249 the source repository's working directory
1250 c) the changeset specified with -u (if a branch name, this means the
1250 c) the changeset specified with -u (if a branch name, this means the
1251 latest head of that branch)
1251 latest head of that branch)
1252 d) the changeset specified with -r
1252 d) the changeset specified with -r
1253 e) the tipmost head specified with -b
1253 e) the tipmost head specified with -b
1254 f) the tipmost head specified with the url#branch source syntax
1254 f) the tipmost head specified with the url#branch source syntax
1255 g) the revision marked with the '@' bookmark, if present
1255 g) the revision marked with the '@' bookmark, if present
1256 h) the tipmost head of the default branch
1256 h) the tipmost head of the default branch
1257 i) tip
1257 i) tip
1258
1258
1259 Examples:
1259 Examples:
1260
1260
1261 - clone a remote repository to a new directory named hg/::
1261 - clone a remote repository to a new directory named hg/::
1262
1262
1263 hg clone http://selenic.com/hg
1263 hg clone http://selenic.com/hg
1264
1264
1265 - create a lightweight local clone::
1265 - create a lightweight local clone::
1266
1266
1267 hg clone project/ project-feature/
1267 hg clone project/ project-feature/
1268
1268
1269 - clone from an absolute path on an ssh server (note double-slash)::
1269 - clone from an absolute path on an ssh server (note double-slash)::
1270
1270
1271 hg clone ssh://user@server//home/projects/alpha/
1271 hg clone ssh://user@server//home/projects/alpha/
1272
1272
1273 - do a high-speed clone over a LAN while checking out a
1273 - do a high-speed clone over a LAN while checking out a
1274 specified version::
1274 specified version::
1275
1275
1276 hg clone --uncompressed http://server/repo -u 1.5
1276 hg clone --uncompressed http://server/repo -u 1.5
1277
1277
1278 - create a repository without changesets after a particular revision::
1278 - create a repository without changesets after a particular revision::
1279
1279
1280 hg clone -r 04e544 experimental/ good/
1280 hg clone -r 04e544 experimental/ good/
1281
1281
1282 - clone (and track) a particular named branch::
1282 - clone (and track) a particular named branch::
1283
1283
1284 hg clone http://selenic.com/hg#stable
1284 hg clone http://selenic.com/hg#stable
1285
1285
1286 See :hg:`help urls` for details on specifying URLs.
1286 See :hg:`help urls` for details on specifying URLs.
1287
1287
1288 Returns 0 on success.
1288 Returns 0 on success.
1289 """
1289 """
1290 if opts.get('noupdate') and opts.get('updaterev'):
1290 if opts.get('noupdate') and opts.get('updaterev'):
1291 raise util.Abort(_("cannot specify both --noupdate and --updaterev"))
1291 raise util.Abort(_("cannot specify both --noupdate and --updaterev"))
1292
1292
1293 r = hg.clone(ui, opts, source, dest,
1293 r = hg.clone(ui, opts, source, dest,
1294 pull=opts.get('pull'),
1294 pull=opts.get('pull'),
1295 stream=opts.get('uncompressed'),
1295 stream=opts.get('uncompressed'),
1296 rev=opts.get('rev'),
1296 rev=opts.get('rev'),
1297 update=opts.get('updaterev') or not opts.get('noupdate'),
1297 update=opts.get('updaterev') or not opts.get('noupdate'),
1298 branch=opts.get('branch'))
1298 branch=opts.get('branch'))
1299
1299
1300 return r is None
1300 return r is None
1301
1301
1302 @command('^commit|ci',
1302 @command('^commit|ci',
1303 [('A', 'addremove', None,
1303 [('A', 'addremove', None,
1304 _('mark new/missing files as added/removed before committing')),
1304 _('mark new/missing files as added/removed before committing')),
1305 ('', 'close-branch', None,
1305 ('', 'close-branch', None,
1306 _('mark a branch as closed, hiding it from the branch list')),
1306 _('mark a branch as closed, hiding it from the branch list')),
1307 ('', 'amend', None, _('amend the parent of the working dir')),
1307 ('', 'amend', None, _('amend the parent of the working dir')),
1308 ('s', 'secret', None, _('use the secret phase for committing')),
1308 ('s', 'secret', None, _('use the secret phase for committing')),
1309 ('e', 'edit', None,
1309 ('e', 'edit', None,
1310 _('further edit commit message already specified')),
1310 _('further edit commit message already specified')),
1311 ] + walkopts + commitopts + commitopts2 + subrepoopts,
1311 ] + walkopts + commitopts + commitopts2 + subrepoopts,
1312 _('[OPTION]... [FILE]...'))
1312 _('[OPTION]... [FILE]...'))
1313 def commit(ui, repo, *pats, **opts):
1313 def commit(ui, repo, *pats, **opts):
1314 """commit the specified files or all outstanding changes
1314 """commit the specified files or all outstanding changes
1315
1315
1316 Commit changes to the given files into the repository. Unlike a
1316 Commit changes to the given files into the repository. Unlike a
1317 centralized SCM, this is a local operation. See
1317 centralized SCM, this is a local operation. See
1318 :hg:`push` for a way to actively distribute your changes.
1318 :hg:`push` for a way to actively distribute your changes.
1319
1319
1320 If a list of files is omitted, all changes reported by :hg:`status`
1320 If a list of files is omitted, all changes reported by :hg:`status`
1321 will be committed.
1321 will be committed.
1322
1322
1323 If you are committing the result of a merge, do not provide any
1323 If you are committing the result of a merge, do not provide any
1324 filenames or -I/-X filters.
1324 filenames or -I/-X filters.
1325
1325
1326 If no commit message is specified, Mercurial starts your
1326 If no commit message is specified, Mercurial starts your
1327 configured editor where you can enter a message. In case your
1327 configured editor where you can enter a message. In case your
1328 commit fails, you will find a backup of your message in
1328 commit fails, you will find a backup of your message in
1329 ``.hg/last-message.txt``.
1329 ``.hg/last-message.txt``.
1330
1330
1331 The --amend flag can be used to amend the parent of the
1331 The --amend flag can be used to amend the parent of the
1332 working directory with a new commit that contains the changes
1332 working directory with a new commit that contains the changes
1333 in the parent in addition to those currently reported by :hg:`status`,
1333 in the parent in addition to those currently reported by :hg:`status`,
1334 if there are any. The old commit is stored in a backup bundle in
1334 if there are any. The old commit is stored in a backup bundle in
1335 ``.hg/strip-backup`` (see :hg:`help bundle` and :hg:`help unbundle`
1335 ``.hg/strip-backup`` (see :hg:`help bundle` and :hg:`help unbundle`
1336 on how to restore it).
1336 on how to restore it).
1337
1337
1338 Message, user and date are taken from the amended commit unless
1338 Message, user and date are taken from the amended commit unless
1339 specified. When a message isn't specified on the command line,
1339 specified. When a message isn't specified on the command line,
1340 the editor will open with the message of the amended commit.
1340 the editor will open with the message of the amended commit.
1341
1341
1342 It is not possible to amend public changesets (see :hg:`help phases`)
1342 It is not possible to amend public changesets (see :hg:`help phases`)
1343 or changesets that have children.
1343 or changesets that have children.
1344
1344
1345 See :hg:`help dates` for a list of formats valid for -d/--date.
1345 See :hg:`help dates` for a list of formats valid for -d/--date.
1346
1346
1347 Returns 0 on success, 1 if nothing changed.
1347 Returns 0 on success, 1 if nothing changed.
1348 """
1348 """
1349 if opts.get('subrepos'):
1349 if opts.get('subrepos'):
1350 if opts.get('amend'):
1350 if opts.get('amend'):
1351 raise util.Abort(_('cannot amend with --subrepos'))
1351 raise util.Abort(_('cannot amend with --subrepos'))
1352 # Let --subrepos on the command line override config setting.
1352 # Let --subrepos on the command line override config setting.
1353 ui.setconfig('ui', 'commitsubrepos', True, 'commit')
1353 ui.setconfig('ui', 'commitsubrepos', True, 'commit')
1354
1354
1355 # Save this for restoring it later
1355 # Save this for restoring it later
1356 oldcommitphase = ui.config('phases', 'new-commit')
1356 oldcommitphase = ui.config('phases', 'new-commit')
1357
1357
1358 cmdutil.checkunfinished(repo, commit=True)
1358 cmdutil.checkunfinished(repo, commit=True)
1359
1359
1360 branch = repo[None].branch()
1360 branch = repo[None].branch()
1361 bheads = repo.branchheads(branch)
1361 bheads = repo.branchheads(branch)
1362
1362
1363 extra = {}
1363 extra = {}
1364 if opts.get('close_branch'):
1364 if opts.get('close_branch'):
1365 extra['close'] = 1
1365 extra['close'] = 1
1366
1366
1367 if not bheads:
1367 if not bheads:
1368 raise util.Abort(_('can only close branch heads'))
1368 raise util.Abort(_('can only close branch heads'))
1369 elif opts.get('amend'):
1369 elif opts.get('amend'):
1370 if repo.parents()[0].p1().branch() != branch and \
1370 if repo.parents()[0].p1().branch() != branch and \
1371 repo.parents()[0].p2().branch() != branch:
1371 repo.parents()[0].p2().branch() != branch:
1372 raise util.Abort(_('can only close branch heads'))
1372 raise util.Abort(_('can only close branch heads'))
1373
1373
1374 if opts.get('amend'):
1374 if opts.get('amend'):
1375 if ui.configbool('ui', 'commitsubrepos'):
1375 if ui.configbool('ui', 'commitsubrepos'):
1376 raise util.Abort(_('cannot amend with ui.commitsubrepos enabled'))
1376 raise util.Abort(_('cannot amend with ui.commitsubrepos enabled'))
1377
1377
1378 old = repo['.']
1378 old = repo['.']
1379 if old.phase() == phases.public:
1379 if old.phase() == phases.public:
1380 raise util.Abort(_('cannot amend public changesets'))
1380 raise util.Abort(_('cannot amend public changesets'))
1381 if len(repo[None].parents()) > 1:
1381 if len(repo[None].parents()) > 1:
1382 raise util.Abort(_('cannot amend while merging'))
1382 raise util.Abort(_('cannot amend while merging'))
1383 if (not obsolete._enabled) and old.children():
1383 if (not obsolete._enabled) and old.children():
1384 raise util.Abort(_('cannot amend changeset with children'))
1384 raise util.Abort(_('cannot amend changeset with children'))
1385
1385
1386 # commitfunc is used only for temporary amend commit by cmdutil.amend
1386 # commitfunc is used only for temporary amend commit by cmdutil.amend
1387 def commitfunc(ui, repo, message, match, opts):
1387 def commitfunc(ui, repo, message, match, opts):
1388 return repo.commit(message,
1388 return repo.commit(message,
1389 opts.get('user') or old.user(),
1389 opts.get('user') or old.user(),
1390 opts.get('date') or old.date(),
1390 opts.get('date') or old.date(),
1391 match,
1391 match,
1392 extra=extra)
1392 extra=extra)
1393
1393
1394 current = repo._bookmarkcurrent
1394 current = repo._bookmarkcurrent
1395 marks = old.bookmarks()
1395 marks = old.bookmarks()
1396 node = cmdutil.amend(ui, repo, commitfunc, old, extra, pats, opts)
1396 node = cmdutil.amend(ui, repo, commitfunc, old, extra, pats, opts)
1397 if node == old.node():
1397 if node == old.node():
1398 ui.status(_("nothing changed\n"))
1398 ui.status(_("nothing changed\n"))
1399 return 1
1399 return 1
1400 elif marks:
1400 elif marks:
1401 ui.debug('moving bookmarks %r from %s to %s\n' %
1401 ui.debug('moving bookmarks %r from %s to %s\n' %
1402 (marks, old.hex(), hex(node)))
1402 (marks, old.hex(), hex(node)))
1403 newmarks = repo._bookmarks
1403 newmarks = repo._bookmarks
1404 for bm in marks:
1404 for bm in marks:
1405 newmarks[bm] = node
1405 newmarks[bm] = node
1406 if bm == current:
1406 if bm == current:
1407 bookmarks.setcurrent(repo, bm)
1407 bookmarks.setcurrent(repo, bm)
1408 newmarks.write()
1408 newmarks.write()
1409 else:
1409 else:
1410 def commitfunc(ui, repo, message, match, opts):
1410 def commitfunc(ui, repo, message, match, opts):
1411 try:
1411 try:
1412 if opts.get('secret'):
1412 if opts.get('secret'):
1413 ui.setconfig('phases', 'new-commit', 'secret', 'commit')
1413 ui.setconfig('phases', 'new-commit', 'secret', 'commit')
1414 # Propagate to subrepos
1414 # Propagate to subrepos
1415 repo.baseui.setconfig('phases', 'new-commit', 'secret',
1415 repo.baseui.setconfig('phases', 'new-commit', 'secret',
1416 'commit')
1416 'commit')
1417
1417
1418 return repo.commit(message, opts.get('user'), opts.get('date'),
1418 return repo.commit(message, opts.get('user'), opts.get('date'),
1419 match,
1419 match,
1420 editor=cmdutil.getcommiteditor(**opts),
1420 editor=cmdutil.getcommiteditor(**opts),
1421 extra=extra)
1421 extra=extra)
1422 finally:
1422 finally:
1423 ui.setconfig('phases', 'new-commit', oldcommitphase, 'commit')
1423 ui.setconfig('phases', 'new-commit', oldcommitphase, 'commit')
1424 repo.baseui.setconfig('phases', 'new-commit', oldcommitphase,
1424 repo.baseui.setconfig('phases', 'new-commit', oldcommitphase,
1425 'commit')
1425 'commit')
1426
1426
1427
1427
1428 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
1428 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
1429
1429
1430 if not node:
1430 if not node:
1431 stat = repo.status(match=scmutil.match(repo[None], pats, opts))
1431 stat = repo.status(match=scmutil.match(repo[None], pats, opts))
1432 if stat[3]:
1432 if stat[3]:
1433 ui.status(_("nothing changed (%d missing files, see "
1433 ui.status(_("nothing changed (%d missing files, see "
1434 "'hg status')\n") % len(stat[3]))
1434 "'hg status')\n") % len(stat[3]))
1435 else:
1435 else:
1436 ui.status(_("nothing changed\n"))
1436 ui.status(_("nothing changed\n"))
1437 return 1
1437 return 1
1438
1438
1439 cmdutil.commitstatus(repo, node, branch, bheads, opts)
1439 cmdutil.commitstatus(repo, node, branch, bheads, opts)
1440
1440
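
The secret-commit path above follows a save/override/restore pattern: the current phases.new-commit value is read with ui.config(), temporarily replaced via ui.setconfig(), and restored in a finally block so that an exception cannot leave the override in place. Below is a standalone sketch of that pattern, using the same four-argument setconfig signature seen above; the 'example' source tag is only a label chosen for illustration.

    def run_with_secret_phase(ui, dothing):
        # Remember the current value so it can be restored afterwards.
        oldphase = ui.config('phases', 'new-commit')
        try:
            # The fourth argument records where the override came from.
            ui.setconfig('phases', 'new-commit', 'secret', 'example')
            return dothing()
        finally:
            # Always restore, even if dothing() raised.
            ui.setconfig('phases', 'new-commit', oldphase, 'example')
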
1441 @command('config|showconfig|debugconfig',
1441 @command('config|showconfig|debugconfig',
1442 [('u', 'untrusted', None, _('show untrusted configuration options')),
1442 [('u', 'untrusted', None, _('show untrusted configuration options')),
1443 ('e', 'edit', None, _('edit user config')),
1443 ('e', 'edit', None, _('edit user config')),
1444 ('l', 'local', None, _('edit repository config')),
1444 ('l', 'local', None, _('edit repository config')),
1445 ('g', 'global', None, _('edit global config'))],
1445 ('g', 'global', None, _('edit global config'))],
1446 _('[-u] [NAME]...'))
1446 _('[-u] [NAME]...'))
1447 def config(ui, repo, *values, **opts):
1447 def config(ui, repo, *values, **opts):
1448 """show combined config settings from all hgrc files
1448 """show combined config settings from all hgrc files
1449
1449
1450 With no arguments, print names and values of all config items.
1450 With no arguments, print names and values of all config items.
1451
1451
1452 With one argument of the form section.name, print just the value
1452 With one argument of the form section.name, print just the value
1453 of that config item.
1453 of that config item.
1454
1454
1455 With multiple arguments, print names and values of all config
1455 With multiple arguments, print names and values of all config
1456 items with matching section names.
1456 items with matching section names.
1457
1457
1458 With --edit, start an editor on the user-level config file. With
1458 With --edit, start an editor on the user-level config file. With
1459 --global, edit the system-wide config file. With --local, edit the
1459 --global, edit the system-wide config file. With --local, edit the
1460 repository-level config file.
1460 repository-level config file.
1461
1461
1462 With --debug, the source (filename and line number) is printed
1462 With --debug, the source (filename and line number) is printed
1463 for each config item.
1463 for each config item.
1464
1464
1465 See :hg:`help config` for more information about config files.
1465 See :hg:`help config` for more information about config files.
1466
1466
1467 Returns 0 on success.
1467 Returns 0 on success.
1468
1468
1469 """
1469 """
1470
1470
1471 if opts.get('edit') or opts.get('local') or opts.get('global'):
1471 if opts.get('edit') or opts.get('local') or opts.get('global'):
1472 if opts.get('local') and opts.get('global'):
1472 if opts.get('local') and opts.get('global'):
1473 raise util.Abort(_("can't use --local and --global together"))
1473 raise util.Abort(_("can't use --local and --global together"))
1474
1474
1475 if opts.get('local'):
1475 if opts.get('local'):
1476 if not repo:
1476 if not repo:
1477 raise util.Abort(_("can't use --local outside a repository"))
1477 raise util.Abort(_("can't use --local outside a repository"))
1478 paths = [repo.join('hgrc')]
1478 paths = [repo.join('hgrc')]
1479 elif opts.get('global'):
1479 elif opts.get('global'):
1480 paths = scmutil.systemrcpath()
1480 paths = scmutil.systemrcpath()
1481 else:
1481 else:
1482 paths = scmutil.userrcpath()
1482 paths = scmutil.userrcpath()
1483
1483
1484 for f in paths:
1484 for f in paths:
1485 if os.path.exists(f):
1485 if os.path.exists(f):
1486 break
1486 break
1487 else:
1487 else:
1488 f = paths[0]
1488 f = paths[0]
1489 fp = open(f, "w")
1489 fp = open(f, "w")
1490 fp.write(
1490 fp.write(
1491 '# example config (see "hg help config" for more info)\n'
1491 '# example config (see "hg help config" for more info)\n'
1492 '\n'
1492 '\n'
1493 '[ui]\n'
1493 '[ui]\n'
1494 '# name and email, e.g.\n'
1494 '# name and email, e.g.\n'
1495 '# username = Jane Doe <jdoe@example.com>\n'
1495 '# username = Jane Doe <jdoe@example.com>\n'
1496 'username =\n'
1496 'username =\n'
1497 '\n'
1497 '\n'
1498 '[extensions]\n'
1498 '[extensions]\n'
1499 '# uncomment these lines to enable some popular extensions\n'
1499 '# uncomment these lines to enable some popular extensions\n'
1500 '# (see "hg help extensions" for more info)\n'
1500 '# (see "hg help extensions" for more info)\n'
1501 '# pager =\n'
1501 '# pager =\n'
1502 '# progress =\n'
1502 '# progress =\n'
1503 '# color =\n')
1503 '# color =\n')
1504 fp.close()
1504 fp.close()
1505
1505
1506 editor = ui.geteditor()
1506 editor = ui.geteditor()
1507 util.system("%s \"%s\"" % (editor, f),
1507 util.system("%s \"%s\"" % (editor, f),
1508 onerr=util.Abort, errprefix=_("edit failed"),
1508 onerr=util.Abort, errprefix=_("edit failed"),
1509 out=ui.fout)
1509 out=ui.fout)
1510 return
1510 return
1511
1511
1512 for f in scmutil.rcpath():
1512 for f in scmutil.rcpath():
1513 ui.debug('read config from: %s\n' % f)
1513 ui.debug('read config from: %s\n' % f)
1514 untrusted = bool(opts.get('untrusted'))
1514 untrusted = bool(opts.get('untrusted'))
1515 if values:
1515 if values:
1516 sections = [v for v in values if '.' not in v]
1516 sections = [v for v in values if '.' not in v]
1517 items = [v for v in values if '.' in v]
1517 items = [v for v in values if '.' in v]
1518 if len(items) > 1 or items and sections:
1518 if len(items) > 1 or items and sections:
1519 raise util.Abort(_('only one config item permitted'))
1519 raise util.Abort(_('only one config item permitted'))
1520 for section, name, value in ui.walkconfig(untrusted=untrusted):
1520 for section, name, value in ui.walkconfig(untrusted=untrusted):
1521 value = str(value).replace('\n', '\\n')
1521 value = str(value).replace('\n', '\\n')
1522 sectname = section + '.' + name
1522 sectname = section + '.' + name
1523 if values:
1523 if values:
1524 for v in values:
1524 for v in values:
1525 if v == section:
1525 if v == section:
1526 ui.debug('%s: ' %
1526 ui.debug('%s: ' %
1527 ui.configsource(section, name, untrusted))
1527 ui.configsource(section, name, untrusted))
1528 ui.write('%s=%s\n' % (sectname, value))
1528 ui.write('%s=%s\n' % (sectname, value))
1529 elif v == sectname:
1529 elif v == sectname:
1530 ui.debug('%s: ' %
1530 ui.debug('%s: ' %
1531 ui.configsource(section, name, untrusted))
1531 ui.configsource(section, name, untrusted))
1532 ui.write(value, '\n')
1532 ui.write(value, '\n')
1533 else:
1533 else:
1534 ui.debug('%s: ' %
1534 ui.debug('%s: ' %
1535 ui.configsource(section, name, untrusted))
1535 ui.configsource(section, name, untrusted))
1536 ui.write('%s=%s\n' % (sectname, value))
1536 ui.write('%s=%s\n' % (sectname, value))
1537
1537
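
The editor branch above picks its target file with Python's for/else: the else clause runs only when the loop finishes without break, which here means none of the candidate rc files exist yet and the first path is used as the file to create. A small self-contained sketch of the same idiom follows; the example paths are hypothetical.

    import os

    def pick_rc_path(paths):
        # Return the first existing path; fall back to paths[0] if none exist.
        for f in paths:
            if os.path.exists(f):
                break
        else:
            # No break happened: nothing existed, so a new file will be created.
            f = paths[0]
        return f

    # e.g. pick_rc_path(['/etc/mercurial/hgrc', os.path.expanduser('~/.hgrc')])
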
1538 @command('copy|cp',
1538 @command('copy|cp',
1539 [('A', 'after', None, _('record a copy that has already occurred')),
1539 [('A', 'after', None, _('record a copy that has already occurred')),
1540 ('f', 'force', None, _('forcibly copy over an existing managed file')),
1540 ('f', 'force', None, _('forcibly copy over an existing managed file')),
1541 ] + walkopts + dryrunopts,
1541 ] + walkopts + dryrunopts,
1542 _('[OPTION]... [SOURCE]... DEST'))
1542 _('[OPTION]... [SOURCE]... DEST'))
1543 def copy(ui, repo, *pats, **opts):
1543 def copy(ui, repo, *pats, **opts):
1544 """mark files as copied for the next commit
1544 """mark files as copied for the next commit
1545
1545
1546 Mark dest as having copies of source files. If dest is a
1546 Mark dest as having copies of source files. If dest is a
1547 directory, copies are put in that directory. If dest is a file,
1547 directory, copies are put in that directory. If dest is a file,
1548 the source must be a single file.
1548 the source must be a single file.
1549
1549
1550 By default, this command copies the contents of files as they
1550 By default, this command copies the contents of files as they
1551 exist in the working directory. If invoked with -A/--after, the
1551 exist in the working directory. If invoked with -A/--after, the
1552 operation is recorded, but no copying is performed.
1552 operation is recorded, but no copying is performed.
1553
1553
1554 This command takes effect with the next commit. To undo a copy
1554 This command takes effect with the next commit. To undo a copy
1555 before that, see :hg:`revert`.
1555 before that, see :hg:`revert`.
1556
1556
1557 Returns 0 on success, 1 if errors are encountered.
1557 Returns 0 on success, 1 if errors are encountered.
1558 """
1558 """
1559 wlock = repo.wlock(False)
1559 wlock = repo.wlock(False)
1560 try:
1560 try:
1561 return cmdutil.copy(ui, repo, pats, opts)
1561 return cmdutil.copy(ui, repo, pats, opts)
1562 finally:
1562 finally:
1563 wlock.release()
1563 wlock.release()
1564
1564
1565 @command('debugancestor', [], _('[INDEX] REV1 REV2'))
1565 @command('debugancestor', [], _('[INDEX] REV1 REV2'))
1566 def debugancestor(ui, repo, *args):
1566 def debugancestor(ui, repo, *args):
1567 """find the ancestor revision of two revisions in a given index"""
1567 """find the ancestor revision of two revisions in a given index"""
1568 if len(args) == 3:
1568 if len(args) == 3:
1569 index, rev1, rev2 = args
1569 index, rev1, rev2 = args
1570 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), index)
1570 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), index)
1571 lookup = r.lookup
1571 lookup = r.lookup
1572 elif len(args) == 2:
1572 elif len(args) == 2:
1573 if not repo:
1573 if not repo:
1574 raise util.Abort(_("there is no Mercurial repository here "
1574 raise util.Abort(_("there is no Mercurial repository here "
1575 "(.hg not found)"))
1575 "(.hg not found)"))
1576 rev1, rev2 = args
1576 rev1, rev2 = args
1577 r = repo.changelog
1577 r = repo.changelog
1578 lookup = repo.lookup
1578 lookup = repo.lookup
1579 else:
1579 else:
1580 raise util.Abort(_('either two or three arguments required'))
1580 raise util.Abort(_('either two or three arguments required'))
1581 a = r.ancestor(lookup(rev1), lookup(rev2))
1581 a = r.ancestor(lookup(rev1), lookup(rev2))
1582 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1582 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1583
1583
1584 @command('debugbuilddag',
1584 @command('debugbuilddag',
1585 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
1585 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
1586 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
1586 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
1587 ('n', 'new-file', None, _('add new file at each rev'))],
1587 ('n', 'new-file', None, _('add new file at each rev'))],
1588 _('[OPTION]... [TEXT]'))
1588 _('[OPTION]... [TEXT]'))
1589 def debugbuilddag(ui, repo, text=None,
1589 def debugbuilddag(ui, repo, text=None,
1590 mergeable_file=False,
1590 mergeable_file=False,
1591 overwritten_file=False,
1591 overwritten_file=False,
1592 new_file=False):
1592 new_file=False):
1593 """builds a repo with a given DAG from scratch in the current empty repo
1593 """builds a repo with a given DAG from scratch in the current empty repo
1594
1594
1595 The description of the DAG is read from stdin if not given on the
1595 The description of the DAG is read from stdin if not given on the
1596 command line.
1596 command line.
1597
1597
1598 Elements:
1598 Elements:
1599
1599
1600 - "+n" is a linear run of n nodes based on the current default parent
1600 - "+n" is a linear run of n nodes based on the current default parent
1601 - "." is a single node based on the current default parent
1601 - "." is a single node based on the current default parent
1602 - "$" resets the default parent to null (implied at the start);
1602 - "$" resets the default parent to null (implied at the start);
1603 otherwise the default parent is always the last node created
1603 otherwise the default parent is always the last node created
1604 - "<p" sets the default parent to the backref p
1604 - "<p" sets the default parent to the backref p
1605 - "*p" is a fork at parent p, which is a backref
1605 - "*p" is a fork at parent p, which is a backref
1606 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
1606 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
1607 - "/p2" is a merge of the preceding node and p2
1607 - "/p2" is a merge of the preceding node and p2
1608 - ":tag" defines a local tag for the preceding node
1608 - ":tag" defines a local tag for the preceding node
1609 - "@branch" sets the named branch for subsequent nodes
1609 - "@branch" sets the named branch for subsequent nodes
1610 - "#...\\n" is a comment up to the end of the line
1610 - "#...\\n" is a comment up to the end of the line
1611
1611
1612 Whitespace between the above elements is ignored.
1612 Whitespace between the above elements is ignored.
1613
1613
1614 A backref is either
1614 A backref is either
1615
1615
1616 - a number n, which references the node curr-n, where curr is the current
1616 - a number n, which references the node curr-n, where curr is the current
1617 node, or
1617 node, or
1618 - the name of a local tag you placed earlier using ":tag", or
1618 - the name of a local tag you placed earlier using ":tag", or
1619 - empty to denote the default parent.
1619 - empty to denote the default parent.
1620
1620
1621 All string-valued elements are either strictly alphanumeric, or must
1621 All string-valued elements are either strictly alphanumeric, or must
1622 be enclosed in double quotes ("..."), with "\\" as escape character.
1622 be enclosed in double quotes ("..."), with "\\" as escape character.
1623 """
1623 """
1624
1624
1625 if text is None:
1625 if text is None:
1626 ui.status(_("reading DAG from stdin\n"))
1626 ui.status(_("reading DAG from stdin\n"))
1627 text = ui.fin.read()
1627 text = ui.fin.read()
1628
1628
1629 cl = repo.changelog
1629 cl = repo.changelog
1630 if len(cl) > 0:
1630 if len(cl) > 0:
1631 raise util.Abort(_('repository is not empty'))
1631 raise util.Abort(_('repository is not empty'))
1632
1632
1633 # determine number of revs in DAG
1633 # determine number of revs in DAG
1634 total = 0
1634 total = 0
1635 for type, data in dagparser.parsedag(text):
1635 for type, data in dagparser.parsedag(text):
1636 if type == 'n':
1636 if type == 'n':
1637 total += 1
1637 total += 1
1638
1638
1639 if mergeable_file:
1639 if mergeable_file:
1640 linesperrev = 2
1640 linesperrev = 2
1641 # make a file with k lines per rev
1641 # make a file with k lines per rev
1642 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
1642 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
1643 initialmergedlines.append("")
1643 initialmergedlines.append("")
1644
1644
1645 tags = []
1645 tags = []
1646
1646
1647 lock = tr = None
1647 lock = tr = None
1648 try:
1648 try:
1649 lock = repo.lock()
1649 lock = repo.lock()
1650 tr = repo.transaction("builddag")
1650 tr = repo.transaction("builddag")
1651
1651
1652 at = -1
1652 at = -1
1653 atbranch = 'default'
1653 atbranch = 'default'
1654 nodeids = []
1654 nodeids = []
1655 id = 0
1655 id = 0
1656 ui.progress(_('building'), id, unit=_('revisions'), total=total)
1656 ui.progress(_('building'), id, unit=_('revisions'), total=total)
1657 for type, data in dagparser.parsedag(text):
1657 for type, data in dagparser.parsedag(text):
1658 if type == 'n':
1658 if type == 'n':
1659 ui.note(('node %s\n' % str(data)))
1659 ui.note(('node %s\n' % str(data)))
1660 id, ps = data
1660 id, ps = data
1661
1661
1662 files = []
1662 files = []
1663 fctxs = {}
1663 fctxs = {}
1664
1664
1665 p2 = None
1665 p2 = None
1666 if mergeable_file:
1666 if mergeable_file:
1667 fn = "mf"
1667 fn = "mf"
1668 p1 = repo[ps[0]]
1668 p1 = repo[ps[0]]
1669 if len(ps) > 1:
1669 if len(ps) > 1:
1670 p2 = repo[ps[1]]
1670 p2 = repo[ps[1]]
1671 pa = p1.ancestor(p2)
1671 pa = p1.ancestor(p2)
1672 base, local, other = [x[fn].data() for x in (pa, p1,
1672 base, local, other = [x[fn].data() for x in (pa, p1,
1673 p2)]
1673 p2)]
1674 m3 = simplemerge.Merge3Text(base, local, other)
1674 m3 = simplemerge.Merge3Text(base, local, other)
1675 ml = [l.strip() for l in m3.merge_lines()]
1675 ml = [l.strip() for l in m3.merge_lines()]
1676 ml.append("")
1676 ml.append("")
1677 elif at > 0:
1677 elif at > 0:
1678 ml = p1[fn].data().split("\n")
1678 ml = p1[fn].data().split("\n")
1679 else:
1679 else:
1680 ml = initialmergedlines
1680 ml = initialmergedlines
1681 ml[id * linesperrev] += " r%i" % id
1681 ml[id * linesperrev] += " r%i" % id
1682 mergedtext = "\n".join(ml)
1682 mergedtext = "\n".join(ml)
1683 files.append(fn)
1683 files.append(fn)
1684 fctxs[fn] = context.memfilectx(fn, mergedtext)
1684 fctxs[fn] = context.memfilectx(repo, fn, mergedtext)
1685
1685
1686 if overwritten_file:
1686 if overwritten_file:
1687 fn = "of"
1687 fn = "of"
1688 files.append(fn)
1688 files.append(fn)
1689 fctxs[fn] = context.memfilectx(fn, "r%i\n" % id)
1689 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
1690
1690
1691 if new_file:
1691 if new_file:
1692 fn = "nf%i" % id
1692 fn = "nf%i" % id
1693 files.append(fn)
1693 files.append(fn)
1694 fctxs[fn] = context.memfilectx(fn, "r%i\n" % id)
1694 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
1695 if len(ps) > 1:
1695 if len(ps) > 1:
1696 if not p2:
1696 if not p2:
1697 p2 = repo[ps[1]]
1697 p2 = repo[ps[1]]
1698 for fn in p2:
1698 for fn in p2:
1699 if fn.startswith("nf"):
1699 if fn.startswith("nf"):
1700 files.append(fn)
1700 files.append(fn)
1701 fctxs[fn] = p2[fn]
1701 fctxs[fn] = p2[fn]
1702
1702
1703 def fctxfn(repo, cx, path):
1703 def fctxfn(repo, cx, path):
1704 return fctxs.get(path)
1704 return fctxs.get(path)
1705
1705
1706 if len(ps) == 0 or ps[0] < 0:
1706 if len(ps) == 0 or ps[0] < 0:
1707 pars = [None, None]
1707 pars = [None, None]
1708 elif len(ps) == 1:
1708 elif len(ps) == 1:
1709 pars = [nodeids[ps[0]], None]
1709 pars = [nodeids[ps[0]], None]
1710 else:
1710 else:
1711 pars = [nodeids[p] for p in ps]
1711 pars = [nodeids[p] for p in ps]
1712 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
1712 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
1713 date=(id, 0),
1713 date=(id, 0),
1714 user="debugbuilddag",
1714 user="debugbuilddag",
1715 extra={'branch': atbranch})
1715 extra={'branch': atbranch})
1716 nodeid = repo.commitctx(cx)
1716 nodeid = repo.commitctx(cx)
1717 nodeids.append(nodeid)
1717 nodeids.append(nodeid)
1718 at = id
1718 at = id
1719 elif type == 'l':
1719 elif type == 'l':
1720 id, name = data
1720 id, name = data
1721 ui.note(('tag %s\n' % name))
1721 ui.note(('tag %s\n' % name))
1722 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
1722 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
1723 elif type == 'a':
1723 elif type == 'a':
1724 ui.note(('branch %s\n' % data))
1724 ui.note(('branch %s\n' % data))
1725 atbranch = data
1725 atbranch = data
1726 ui.progress(_('building'), id, unit=_('revisions'), total=total)
1726 ui.progress(_('building'), id, unit=_('revisions'), total=total)
1727 tr.close()
1727 tr.close()
1728
1728
1729 if tags:
1729 if tags:
1730 repo.opener.write("localtags", "".join(tags))
1730 repo.opener.write("localtags", "".join(tags))
1731 finally:
1731 finally:
1732 ui.progress(_('building'), None)
1732 ui.progress(_('building'), None)
1733 release(tr, lock)
1733 release(tr, lock)
1734
1734
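
The core of the loop above is building each changeset entirely in memory: memfilectx objects (note that, after this changeset, the repo is passed as their first argument) are collected in a dict, a callback hands them back per path, and a memctx ties files, parents and metadata together before repo.commitctx() writes the commit. A condensed sketch of that flow, assuming an existing repo object, a two-element parents list of parent nodes (None for a missing parent, as in the pars list above), and made-up file contents:

    from mercurial import context

    def commit_in_memory(repo, parents, files_and_data, message, user):
        # One in-memory file context per path, using the signature seen above.
        fctxs = dict((path, context.memfilectx(repo, path, data))
                     for path, data in files_and_data.items())

        def fctxfn(repo, memctx, path):
            # memctx calls back here for every path listed in `files`.
            return fctxs.get(path)

        cx = context.memctx(repo, parents, message, sorted(fctxs), fctxfn,
                            user=user, date=(0, 0),
                            extra={'branch': 'default'})
        # commitctx() writes the changeset and returns its node id.
        return repo.commitctx(cx)
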
1735 @command('debugbundle', [('a', 'all', None, _('show all details'))], _('FILE'))
1735 @command('debugbundle', [('a', 'all', None, _('show all details'))], _('FILE'))
1736 def debugbundle(ui, bundlepath, all=None, **opts):
1736 def debugbundle(ui, bundlepath, all=None, **opts):
1737 """lists the contents of a bundle"""
1737 """lists the contents of a bundle"""
1738 f = hg.openpath(ui, bundlepath)
1738 f = hg.openpath(ui, bundlepath)
1739 try:
1739 try:
1740 gen = exchange.readbundle(ui, f, bundlepath)
1740 gen = exchange.readbundle(ui, f, bundlepath)
1741 if all:
1741 if all:
1742 ui.write(("format: id, p1, p2, cset, delta base, len(delta)\n"))
1742 ui.write(("format: id, p1, p2, cset, delta base, len(delta)\n"))
1743
1743
1744 def showchunks(named):
1744 def showchunks(named):
1745 ui.write("\n%s\n" % named)
1745 ui.write("\n%s\n" % named)
1746 chain = None
1746 chain = None
1747 while True:
1747 while True:
1748 chunkdata = gen.deltachunk(chain)
1748 chunkdata = gen.deltachunk(chain)
1749 if not chunkdata:
1749 if not chunkdata:
1750 break
1750 break
1751 node = chunkdata['node']
1751 node = chunkdata['node']
1752 p1 = chunkdata['p1']
1752 p1 = chunkdata['p1']
1753 p2 = chunkdata['p2']
1753 p2 = chunkdata['p2']
1754 cs = chunkdata['cs']
1754 cs = chunkdata['cs']
1755 deltabase = chunkdata['deltabase']
1755 deltabase = chunkdata['deltabase']
1756 delta = chunkdata['delta']
1756 delta = chunkdata['delta']
1757 ui.write("%s %s %s %s %s %s\n" %
1757 ui.write("%s %s %s %s %s %s\n" %
1758 (hex(node), hex(p1), hex(p2),
1758 (hex(node), hex(p1), hex(p2),
1759 hex(cs), hex(deltabase), len(delta)))
1759 hex(cs), hex(deltabase), len(delta)))
1760 chain = node
1760 chain = node
1761
1761
1762 chunkdata = gen.changelogheader()
1762 chunkdata = gen.changelogheader()
1763 showchunks("changelog")
1763 showchunks("changelog")
1764 chunkdata = gen.manifestheader()
1764 chunkdata = gen.manifestheader()
1765 showchunks("manifest")
1765 showchunks("manifest")
1766 while True:
1766 while True:
1767 chunkdata = gen.filelogheader()
1767 chunkdata = gen.filelogheader()
1768 if not chunkdata:
1768 if not chunkdata:
1769 break
1769 break
1770 fname = chunkdata['filename']
1770 fname = chunkdata['filename']
1771 showchunks(fname)
1771 showchunks(fname)
1772 else:
1772 else:
1773 chunkdata = gen.changelogheader()
1773 chunkdata = gen.changelogheader()
1774 chain = None
1774 chain = None
1775 while True:
1775 while True:
1776 chunkdata = gen.deltachunk(chain)
1776 chunkdata = gen.deltachunk(chain)
1777 if not chunkdata:
1777 if not chunkdata:
1778 break
1778 break
1779 node = chunkdata['node']
1779 node = chunkdata['node']
1780 ui.write("%s\n" % hex(node))
1780 ui.write("%s\n" % hex(node))
1781 chain = node
1781 chain = node
1782 finally:
1782 finally:
1783 f.close()
1783 f.close()
1784
1784
1785 @command('debugcheckstate', [], '')
1785 @command('debugcheckstate', [], '')
1786 def debugcheckstate(ui, repo):
1786 def debugcheckstate(ui, repo):
1787 """validate the correctness of the current dirstate"""
1787 """validate the correctness of the current dirstate"""
1788 parent1, parent2 = repo.dirstate.parents()
1788 parent1, parent2 = repo.dirstate.parents()
1789 m1 = repo[parent1].manifest()
1789 m1 = repo[parent1].manifest()
1790 m2 = repo[parent2].manifest()
1790 m2 = repo[parent2].manifest()
1791 errors = 0
1791 errors = 0
1792 for f in repo.dirstate:
1792 for f in repo.dirstate:
1793 state = repo.dirstate[f]
1793 state = repo.dirstate[f]
1794 if state in "nr" and f not in m1:
1794 if state in "nr" and f not in m1:
1795 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1795 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1796 errors += 1
1796 errors += 1
1797 if state in "a" and f in m1:
1797 if state in "a" and f in m1:
1798 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1798 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1799 errors += 1
1799 errors += 1
1800 if state in "m" and f not in m1 and f not in m2:
1800 if state in "m" and f not in m1 and f not in m2:
1801 ui.warn(_("%s in state %s, but not in either manifest\n") %
1801 ui.warn(_("%s in state %s, but not in either manifest\n") %
1802 (f, state))
1802 (f, state))
1803 errors += 1
1803 errors += 1
1804 for f in m1:
1804 for f in m1:
1805 state = repo.dirstate[f]
1805 state = repo.dirstate[f]
1806 if state not in "nrm":
1806 if state not in "nrm":
1807 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
1807 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
1808 errors += 1
1808 errors += 1
1809 if errors:
1809 if errors:
1810 error = _(".hg/dirstate inconsistent with current parent's manifest")
1810 error = _(".hg/dirstate inconsistent with current parent's manifest")
1811 raise util.Abort(error)
1811 raise util.Abort(error)
1812
1812
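
For readers unfamiliar with the single-letter dirstate states tested above, a short legend; the dict below is purely descriptive and is not part of any API.

    # Dirstate entry states consulted by the checks above.
    DIRSTATE_STATES = {
        'n': 'normal  - tracked, possibly modified',
        'a': 'added   - scheduled for addition',
        'r': 'removed - scheduled for removal',
        'm': 'merged  - resolved from a merge, awaiting commit',
    }
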
1813 @command('debugcommands', [], _('[COMMAND]'))
1813 @command('debugcommands', [], _('[COMMAND]'))
1814 def debugcommands(ui, cmd='', *args):
1814 def debugcommands(ui, cmd='', *args):
1815 """list all available commands and options"""
1815 """list all available commands and options"""
1816 for cmd, vals in sorted(table.iteritems()):
1816 for cmd, vals in sorted(table.iteritems()):
1817 cmd = cmd.split('|')[0].strip('^')
1817 cmd = cmd.split('|')[0].strip('^')
1818 opts = ', '.join([i[1] for i in vals[1]])
1818 opts = ', '.join([i[1] for i in vals[1]])
1819 ui.write('%s: %s\n' % (cmd, opts))
1819 ui.write('%s: %s\n' % (cmd, opts))
1820
1820
1821 @command('debugcomplete',
1821 @command('debugcomplete',
1822 [('o', 'options', None, _('show the command options'))],
1822 [('o', 'options', None, _('show the command options'))],
1823 _('[-o] CMD'))
1823 _('[-o] CMD'))
1824 def debugcomplete(ui, cmd='', **opts):
1824 def debugcomplete(ui, cmd='', **opts):
1825 """returns the completion list associated with the given command"""
1825 """returns the completion list associated with the given command"""
1826
1826
1827 if opts.get('options'):
1827 if opts.get('options'):
1828 options = []
1828 options = []
1829 otables = [globalopts]
1829 otables = [globalopts]
1830 if cmd:
1830 if cmd:
1831 aliases, entry = cmdutil.findcmd(cmd, table, False)
1831 aliases, entry = cmdutil.findcmd(cmd, table, False)
1832 otables.append(entry[1])
1832 otables.append(entry[1])
1833 for t in otables:
1833 for t in otables:
1834 for o in t:
1834 for o in t:
1835 if "(DEPRECATED)" in o[3]:
1835 if "(DEPRECATED)" in o[3]:
1836 continue
1836 continue
1837 if o[0]:
1837 if o[0]:
1838 options.append('-%s' % o[0])
1838 options.append('-%s' % o[0])
1839 options.append('--%s' % o[1])
1839 options.append('--%s' % o[1])
1840 ui.write("%s\n" % "\n".join(options))
1840 ui.write("%s\n" % "\n".join(options))
1841 return
1841 return
1842
1842
1843 cmdlist = cmdutil.findpossible(cmd, table)
1843 cmdlist = cmdutil.findpossible(cmd, table)
1844 if ui.verbose:
1844 if ui.verbose:
1845 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
1845 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
1846 ui.write("%s\n" % "\n".join(sorted(cmdlist)))
1846 ui.write("%s\n" % "\n".join(sorted(cmdlist)))
1847
1847
1848 @command('debugdag',
1848 @command('debugdag',
1849 [('t', 'tags', None, _('use tags as labels')),
1849 [('t', 'tags', None, _('use tags as labels')),
1850 ('b', 'branches', None, _('annotate with branch names')),
1850 ('b', 'branches', None, _('annotate with branch names')),
1851 ('', 'dots', None, _('use dots for runs')),
1851 ('', 'dots', None, _('use dots for runs')),
1852 ('s', 'spaces', None, _('separate elements by spaces'))],
1852 ('s', 'spaces', None, _('separate elements by spaces'))],
1853 _('[OPTION]... [FILE [REV]...]'))
1853 _('[OPTION]... [FILE [REV]...]'))
1854 def debugdag(ui, repo, file_=None, *revs, **opts):
1854 def debugdag(ui, repo, file_=None, *revs, **opts):
1855 """format the changelog or an index DAG as a concise textual description
1855 """format the changelog or an index DAG as a concise textual description
1856
1856
1857 If you pass a revlog index, the revlog's DAG is emitted. If you list
1857 If you pass a revlog index, the revlog's DAG is emitted. If you list
1858 revision numbers, they get labeled in the output as rN.
1858 revision numbers, they get labeled in the output as rN.
1859
1859
1860 Otherwise, the changelog DAG of the current repo is emitted.
1860 Otherwise, the changelog DAG of the current repo is emitted.
1861 """
1861 """
1862 spaces = opts.get('spaces')
1862 spaces = opts.get('spaces')
1863 dots = opts.get('dots')
1863 dots = opts.get('dots')
1864 if file_:
1864 if file_:
1865 rlog = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), file_)
1865 rlog = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), file_)
1866 revs = set((int(r) for r in revs))
1866 revs = set((int(r) for r in revs))
1867 def events():
1867 def events():
1868 for r in rlog:
1868 for r in rlog:
1869 yield 'n', (r, list(set(p for p in rlog.parentrevs(r)
1869 yield 'n', (r, list(set(p for p in rlog.parentrevs(r)
1870 if p != -1)))
1870 if p != -1)))
1871 if r in revs:
1871 if r in revs:
1872 yield 'l', (r, "r%i" % r)
1872 yield 'l', (r, "r%i" % r)
1873 elif repo:
1873 elif repo:
1874 cl = repo.changelog
1874 cl = repo.changelog
1875 tags = opts.get('tags')
1875 tags = opts.get('tags')
1876 branches = opts.get('branches')
1876 branches = opts.get('branches')
1877 if tags:
1877 if tags:
1878 labels = {}
1878 labels = {}
1879 for l, n in repo.tags().items():
1879 for l, n in repo.tags().items():
1880 labels.setdefault(cl.rev(n), []).append(l)
1880 labels.setdefault(cl.rev(n), []).append(l)
1881 def events():
1881 def events():
1882 b = "default"
1882 b = "default"
1883 for r in cl:
1883 for r in cl:
1884 if branches:
1884 if branches:
1885 newb = cl.read(cl.node(r))[5]['branch']
1885 newb = cl.read(cl.node(r))[5]['branch']
1886 if newb != b:
1886 if newb != b:
1887 yield 'a', newb
1887 yield 'a', newb
1888 b = newb
1888 b = newb
1889 yield 'n', (r, list(set(p for p in cl.parentrevs(r)
1889 yield 'n', (r, list(set(p for p in cl.parentrevs(r)
1890 if p != -1)))
1890 if p != -1)))
1891 if tags:
1891 if tags:
1892 ls = labels.get(r)
1892 ls = labels.get(r)
1893 if ls:
1893 if ls:
1894 for l in ls:
1894 for l in ls:
1895 yield 'l', (r, l)
1895 yield 'l', (r, l)
1896 else:
1896 else:
1897 raise util.Abort(_('need repo for changelog dag'))
1897 raise util.Abort(_('need repo for changelog dag'))
1898
1898
1899 for line in dagparser.dagtextlines(events(),
1899 for line in dagparser.dagtextlines(events(),
1900 addspaces=spaces,
1900 addspaces=spaces,
1901 wraplabels=True,
1901 wraplabels=True,
1902 wrapannotations=True,
1902 wrapannotations=True,
1903 wrapnonlinear=dots,
1903 wrapnonlinear=dots,
1904 usedots=dots,
1904 usedots=dots,
1905 maxlinewidth=70):
1905 maxlinewidth=70):
1906 ui.write(line)
1906 ui.write(line)
1907 ui.write("\n")
1907 ui.write("\n")
1908
1908
1909 @command('debugdata',
1909 @command('debugdata',
1910 [('c', 'changelog', False, _('open changelog')),
1910 [('c', 'changelog', False, _('open changelog')),
1911 ('m', 'manifest', False, _('open manifest'))],
1911 ('m', 'manifest', False, _('open manifest'))],
1912 _('-c|-m|FILE REV'))
1912 _('-c|-m|FILE REV'))
1913 def debugdata(ui, repo, file_, rev=None, **opts):
1913 def debugdata(ui, repo, file_, rev=None, **opts):
1914 """dump the contents of a data file revision"""
1914 """dump the contents of a data file revision"""
1915 if opts.get('changelog') or opts.get('manifest'):
1915 if opts.get('changelog') or opts.get('manifest'):
1916 file_, rev = None, file_
1916 file_, rev = None, file_
1917 elif rev is None:
1917 elif rev is None:
1918 raise error.CommandError('debugdata', _('invalid arguments'))
1918 raise error.CommandError('debugdata', _('invalid arguments'))
1919 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
1919 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
1920 try:
1920 try:
1921 ui.write(r.revision(r.lookup(rev)))
1921 ui.write(r.revision(r.lookup(rev)))
1922 except KeyError:
1922 except KeyError:
1923 raise util.Abort(_('invalid revision identifier %s') % rev)
1923 raise util.Abort(_('invalid revision identifier %s') % rev)
1924
1924
1925 @command('debugdate',
1925 @command('debugdate',
1926 [('e', 'extended', None, _('try extended date formats'))],
1926 [('e', 'extended', None, _('try extended date formats'))],
1927 _('[-e] DATE [RANGE]'))
1927 _('[-e] DATE [RANGE]'))
1928 def debugdate(ui, date, range=None, **opts):
1928 def debugdate(ui, date, range=None, **opts):
1929 """parse and display a date"""
1929 """parse and display a date"""
1930 if opts["extended"]:
1930 if opts["extended"]:
1931 d = util.parsedate(date, util.extendeddateformats)
1931 d = util.parsedate(date, util.extendeddateformats)
1932 else:
1932 else:
1933 d = util.parsedate(date)
1933 d = util.parsedate(date)
1934 ui.write(("internal: %s %s\n") % d)
1934 ui.write(("internal: %s %s\n") % d)
1935 ui.write(("standard: %s\n") % util.datestr(d))
1935 ui.write(("standard: %s\n") % util.datestr(d))
1936 if range:
1936 if range:
1937 m = util.matchdate(range)
1937 m = util.matchdate(range)
1938 ui.write(("match: %s\n") % m(d[0]))
1938 ui.write(("match: %s\n") % m(d[0]))
1939
1939
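
util.parsedate() returns the (unixtime, timezone offset) pair printed as "internal:" above, and util.datestr() renders it back in the standard form. A tiny sketch of that round trip; the date literal is arbitrary and assumes one of the built-in date formats accepts it.

    from mercurial import util

    when = util.parsedate('2014-05-01 12:00')   # -> (unixtime, tzoffset)
    print('internal: %s %s' % when)
    print('standard: %s' % util.datestr(when))
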
1940 @command('debugdiscovery',
1940 @command('debugdiscovery',
1941 [('', 'old', None, _('use old-style discovery')),
1941 [('', 'old', None, _('use old-style discovery')),
1942 ('', 'nonheads', None,
1942 ('', 'nonheads', None,
1943 _('use old-style discovery with non-heads included')),
1943 _('use old-style discovery with non-heads included')),
1944 ] + remoteopts,
1944 ] + remoteopts,
1945 _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
1945 _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
1946 def debugdiscovery(ui, repo, remoteurl="default", **opts):
1946 def debugdiscovery(ui, repo, remoteurl="default", **opts):
1947 """runs the changeset discovery protocol in isolation"""
1947 """runs the changeset discovery protocol in isolation"""
1948 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
1948 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
1949 opts.get('branch'))
1949 opts.get('branch'))
1950 remote = hg.peer(repo, opts, remoteurl)
1950 remote = hg.peer(repo, opts, remoteurl)
1951 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
1951 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
1952
1952
1953 # make sure tests are repeatable
1953 # make sure tests are repeatable
1954 random.seed(12323)
1954 random.seed(12323)
1955
1955
1956 def doit(localheads, remoteheads, remote=remote):
1956 def doit(localheads, remoteheads, remote=remote):
1957 if opts.get('old'):
1957 if opts.get('old'):
1958 if localheads:
1958 if localheads:
1959 raise util.Abort('cannot use localheads with old style '
1959 raise util.Abort('cannot use localheads with old style '
1960 'discovery')
1960 'discovery')
1961 if not util.safehasattr(remote, 'branches'):
1961 if not util.safehasattr(remote, 'branches'):
1962 # enable in-client legacy support
1962 # enable in-client legacy support
1963 remote = localrepo.locallegacypeer(remote.local())
1963 remote = localrepo.locallegacypeer(remote.local())
1964 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
1964 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
1965 force=True)
1965 force=True)
1966 common = set(common)
1966 common = set(common)
1967 if not opts.get('nonheads'):
1967 if not opts.get('nonheads'):
1968 ui.write(("unpruned common: %s\n") %
1968 ui.write(("unpruned common: %s\n") %
1969 " ".join(sorted(short(n) for n in common)))
1969 " ".join(sorted(short(n) for n in common)))
1970 dag = dagutil.revlogdag(repo.changelog)
1970 dag = dagutil.revlogdag(repo.changelog)
1971 all = dag.ancestorset(dag.internalizeall(common))
1971 all = dag.ancestorset(dag.internalizeall(common))
1972 common = dag.externalizeall(dag.headsetofconnecteds(all))
1972 common = dag.externalizeall(dag.headsetofconnecteds(all))
1973 else:
1973 else:
1974 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
1974 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
1975 common = set(common)
1975 common = set(common)
1976 rheads = set(hds)
1976 rheads = set(hds)
1977 lheads = set(repo.heads())
1977 lheads = set(repo.heads())
1978 ui.write(("common heads: %s\n") %
1978 ui.write(("common heads: %s\n") %
1979 " ".join(sorted(short(n) for n in common)))
1979 " ".join(sorted(short(n) for n in common)))
1980 if lheads <= common:
1980 if lheads <= common:
1981 ui.write(("local is subset\n"))
1981 ui.write(("local is subset\n"))
1982 elif rheads <= common:
1982 elif rheads <= common:
1983 ui.write(("remote is subset\n"))
1983 ui.write(("remote is subset\n"))
1984
1984
1985 serverlogs = opts.get('serverlog')
1985 serverlogs = opts.get('serverlog')
1986 if serverlogs:
1986 if serverlogs:
1987 for filename in serverlogs:
1987 for filename in serverlogs:
1988 logfile = open(filename, 'r')
1988 logfile = open(filename, 'r')
1989 try:
1989 try:
1990 line = logfile.readline()
1990 line = logfile.readline()
1991 while line:
1991 while line:
1992 parts = line.strip().split(';')
1992 parts = line.strip().split(';')
1993 op = parts[1]
1993 op = parts[1]
1994 if op == 'cg':
1994 if op == 'cg':
1995 pass
1995 pass
1996 elif op == 'cgss':
1996 elif op == 'cgss':
1997 doit(parts[2].split(' '), parts[3].split(' '))
1997 doit(parts[2].split(' '), parts[3].split(' '))
1998 elif op == 'unb':
1998 elif op == 'unb':
1999 doit(parts[3].split(' '), parts[2].split(' '))
1999 doit(parts[3].split(' '), parts[2].split(' '))
2000 line = logfile.readline()
2000 line = logfile.readline()
2001 finally:
2001 finally:
2002 logfile.close()
2002 logfile.close()
2003
2003
2004 else:
2004 else:
2005 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
2005 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
2006 opts.get('remote_head'))
2006 opts.get('remote_head'))
2007 localrevs = opts.get('local_head')
2007 localrevs = opts.get('local_head')
2008 doit(localrevs, remoterevs)
2008 doit(localrevs, remoterevs)
2009
2009
2010 @command('debugfileset',
2010 @command('debugfileset',
2011 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
2011 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
2012 _('[-r REV] FILESPEC'))
2012 _('[-r REV] FILESPEC'))
2013 def debugfileset(ui, repo, expr, **opts):
2013 def debugfileset(ui, repo, expr, **opts):
2014 '''parse and apply a fileset specification'''
2014 '''parse and apply a fileset specification'''
2015 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
2015 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
2016 if ui.verbose:
2016 if ui.verbose:
2017 tree = fileset.parse(expr)[0]
2017 tree = fileset.parse(expr)[0]
2018 ui.note(tree, "\n")
2018 ui.note(tree, "\n")
2019
2019
2020 for f in ctx.getfileset(expr):
2020 for f in ctx.getfileset(expr):
2021 ui.write("%s\n" % f)
2021 ui.write("%s\n" % f)
2022
2022
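
ctx.getfileset() above evaluates a fileset expression against a context and yields the matching paths; with no --rev the context is the working directory. A minimal sketch mirroring that loop, where '*.py' is just an illustrative pattern rather than anything mandated by the command, and ui and repo are assumed to be the usual objects:

    ctx = repo[None]                    # working-directory context
    for f in ctx.getfileset('*.py'):    # files in the context matching the pattern
        ui.write('%s\n' % f)
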
2023 @command('debugfsinfo', [], _('[PATH]'))
2023 @command('debugfsinfo', [], _('[PATH]'))
2024 def debugfsinfo(ui, path="."):
2024 def debugfsinfo(ui, path="."):
2025 """show information detected about current filesystem"""
2025 """show information detected about current filesystem"""
2026 util.writefile('.debugfsinfo', '')
2026 util.writefile('.debugfsinfo', '')
2027 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
2027 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
2028 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
2028 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
2029 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
2029 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
2030 ui.write(('case-sensitive: %s\n') % (util.checkcase('.debugfsinfo')
2030 ui.write(('case-sensitive: %s\n') % (util.checkcase('.debugfsinfo')
2031 and 'yes' or 'no'))
2031 and 'yes' or 'no'))
2032 os.unlink('.debugfsinfo')
2032 os.unlink('.debugfsinfo')
2033
2033
2034 @command('debuggetbundle',
2034 @command('debuggetbundle',
2035 [('H', 'head', [], _('id of head node'), _('ID')),
2035 [('H', 'head', [], _('id of head node'), _('ID')),
2036 ('C', 'common', [], _('id of common node'), _('ID')),
2036 ('C', 'common', [], _('id of common node'), _('ID')),
2037 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
2037 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
2038 _('REPO FILE [-H|-C ID]...'))
2038 _('REPO FILE [-H|-C ID]...'))
2039 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
2039 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
2040 """retrieves a bundle from a repo
2040 """retrieves a bundle from a repo
2041
2041
2042 Every ID must be a full-length hex node id string. Saves the bundle to the
2042 Every ID must be a full-length hex node id string. Saves the bundle to the
2043 given file.
2043 given file.
2044 """
2044 """
2045 repo = hg.peer(ui, opts, repopath)
2045 repo = hg.peer(ui, opts, repopath)
2046 if not repo.capable('getbundle'):
2046 if not repo.capable('getbundle'):
2047 raise util.Abort("getbundle() not supported by target repository")
2047 raise util.Abort("getbundle() not supported by target repository")
2048 args = {}
2048 args = {}
2049 if common:
2049 if common:
2050 args['common'] = [bin(s) for s in common]
2050 args['common'] = [bin(s) for s in common]
2051 if head:
2051 if head:
2052 args['heads'] = [bin(s) for s in head]
2052 args['heads'] = [bin(s) for s in head]
2053 # TODO: get desired bundlecaps from command line.
2053 # TODO: get desired bundlecaps from command line.
2054 args['bundlecaps'] = None
2054 args['bundlecaps'] = None
2055 bundle = repo.getbundle('debug', **args)
2055 bundle = repo.getbundle('debug', **args)
2056
2056
2057 bundletype = opts.get('type', 'bzip2').lower()
2057 bundletype = opts.get('type', 'bzip2').lower()
2058 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
2058 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
2059 bundletype = btypes.get(bundletype)
2059 bundletype = btypes.get(bundletype)
2060 if bundletype not in changegroup.bundletypes:
2060 if bundletype not in changegroup.bundletypes:
2061 raise util.Abort(_('unknown bundle type specified with --type'))
2061 raise util.Abort(_('unknown bundle type specified with --type'))
2062 changegroup.writebundle(bundle, bundlepath, bundletype)
2062 changegroup.writebundle(bundle, bundlepath, bundletype)
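# Illustrative sketch, not part of the upstream file: the --type lookup
# performed above, isolated as a tiny helper. The header strings are the ones
# already used in this function; the helper name is hypothetical.
def _bundleheader(usertype):
    '''translate a user-facing compression name into a bundle header, or
    None when the name is unknown'''
    btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
    return btypes.get(usertype.lower())

# e.g. _bundleheader('GZIP') == 'HG10GZ', while _bundleheader('xz') is None,
# which the command turns into an "unknown bundle type" abort.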
2063
2063
2064 @command('debugignore', [], '')
2064 @command('debugignore', [], '')
2065 def debugignore(ui, repo, *values, **opts):
2065 def debugignore(ui, repo, *values, **opts):
2066 """display the combined ignore pattern"""
2066 """display the combined ignore pattern"""
2067 ignore = repo.dirstate._ignore
2067 ignore = repo.dirstate._ignore
2068 includepat = getattr(ignore, 'includepat', None)
2068 includepat = getattr(ignore, 'includepat', None)
2069 if includepat is not None:
2069 if includepat is not None:
2070 ui.write("%s\n" % includepat)
2070 ui.write("%s\n" % includepat)
2071 else:
2071 else:
2072 raise util.Abort(_("no ignore patterns found"))
2072 raise util.Abort(_("no ignore patterns found"))
2073
2073
2074 @command('debugindex',
2074 @command('debugindex',
2075 [('c', 'changelog', False, _('open changelog')),
2075 [('c', 'changelog', False, _('open changelog')),
2076 ('m', 'manifest', False, _('open manifest')),
2076 ('m', 'manifest', False, _('open manifest')),
2077 ('f', 'format', 0, _('revlog format'), _('FORMAT'))],
2077 ('f', 'format', 0, _('revlog format'), _('FORMAT'))],
2078 _('[-f FORMAT] -c|-m|FILE'))
2078 _('[-f FORMAT] -c|-m|FILE'))
2079 def debugindex(ui, repo, file_=None, **opts):
2079 def debugindex(ui, repo, file_=None, **opts):
2080 """dump the contents of an index file"""
2080 """dump the contents of an index file"""
2081 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
2081 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
2082 format = opts.get('format', 0)
2082 format = opts.get('format', 0)
2083 if format not in (0, 1):
2083 if format not in (0, 1):
2084 raise util.Abort(_("unknown format %d") % format)
2084 raise util.Abort(_("unknown format %d") % format)
2085
2085
2086 generaldelta = r.version & revlog.REVLOGGENERALDELTA
2086 generaldelta = r.version & revlog.REVLOGGENERALDELTA
2087 if generaldelta:
2087 if generaldelta:
2088 basehdr = ' delta'
2088 basehdr = ' delta'
2089 else:
2089 else:
2090 basehdr = ' base'
2090 basehdr = ' base'
2091
2091
2092 if format == 0:
2092 if format == 0:
2093 ui.write(" rev offset length " + basehdr + " linkrev"
2093 ui.write(" rev offset length " + basehdr + " linkrev"
2094 " nodeid p1 p2\n")
2094 " nodeid p1 p2\n")
2095 elif format == 1:
2095 elif format == 1:
2096 ui.write(" rev flag offset length"
2096 ui.write(" rev flag offset length"
2097 " size " + basehdr + " link p1 p2"
2097 " size " + basehdr + " link p1 p2"
2098 " nodeid\n")
2098 " nodeid\n")
2099
2099
2100 for i in r:
2100 for i in r:
2101 node = r.node(i)
2101 node = r.node(i)
2102 if generaldelta:
2102 if generaldelta:
2103 base = r.deltaparent(i)
2103 base = r.deltaparent(i)
2104 else:
2104 else:
2105 base = r.chainbase(i)
2105 base = r.chainbase(i)
2106 if format == 0:
2106 if format == 0:
2107 try:
2107 try:
2108 pp = r.parents(node)
2108 pp = r.parents(node)
2109 except Exception:
2109 except Exception:
2110 pp = [nullid, nullid]
2110 pp = [nullid, nullid]
2111 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
2111 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
2112 i, r.start(i), r.length(i), base, r.linkrev(i),
2112 i, r.start(i), r.length(i), base, r.linkrev(i),
2113 short(node), short(pp[0]), short(pp[1])))
2113 short(node), short(pp[0]), short(pp[1])))
2114 elif format == 1:
2114 elif format == 1:
2115 pr = r.parentrevs(i)
2115 pr = r.parentrevs(i)
2116 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
2116 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
2117 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
2117 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
2118 base, r.linkrev(i), pr[0], pr[1], short(node)))
2118 base, r.linkrev(i), pr[0], pr[1], short(node)))
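# Illustrative sketch, not part of the upstream file: the format-0 row layout
# used above, filled in with made-up offsets and short hashes to show how the
# columns are padded. The helper name is hypothetical.
def _example_indexrow():
    '''render one format-0 index row from sample values'''
    return "% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
        2, 154, 61, 0, 2, '28c83c3b0e5c', '3a1bd1b9e3dd', '000000000000')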
2119
2119
2120 @command('debugindexdot', [], _('FILE'))
2120 @command('debugindexdot', [], _('FILE'))
2121 def debugindexdot(ui, repo, file_):
2121 def debugindexdot(ui, repo, file_):
2122 """dump an index DAG as a graphviz dot file"""
2122 """dump an index DAG as a graphviz dot file"""
2123 r = None
2123 r = None
2124 if repo:
2124 if repo:
2125 filelog = repo.file(file_)
2125 filelog = repo.file(file_)
2126 if len(filelog):
2126 if len(filelog):
2127 r = filelog
2127 r = filelog
2128 if not r:
2128 if not r:
2129 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), file_)
2129 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), file_)
2130 ui.write(("digraph G {\n"))
2130 ui.write(("digraph G {\n"))
2131 for i in r:
2131 for i in r:
2132 node = r.node(i)
2132 node = r.node(i)
2133 pp = r.parents(node)
2133 pp = r.parents(node)
2134 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
2134 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
2135 if pp[1] != nullid:
2135 if pp[1] != nullid:
2136 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
2136 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
2137 ui.write("}\n")
2137 ui.write("}\n")
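# Illustrative sketch, not part of the upstream file: the DOT emission above
# applied to plain data, with -1 standing in for the null parent
# (r.rev(nullid)). Names are hypothetical.
def _dagtodot(parentrevs):
    '''render {rev: (p1rev, p2rev)} as a graphviz digraph string'''
    lines = ["digraph G {"]
    for rev in sorted(parentrevs):
        p1, p2 = parentrevs[rev]
        lines.append("\t%d -> %d" % (p1, rev))
        if p2 != -1:
            lines.append("\t%d -> %d" % (p2, rev))
    lines.append("}")
    return "\n".join(lines)

# e.g. _dagtodot({0: (-1, -1), 1: (0, -1), 2: (1, 0)}) shows rev 2 as a merge
# with edges coming in from both rev 1 and rev 0.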
2138
2138
2139 @command('debuginstall', [], '')
2139 @command('debuginstall', [], '')
2140 def debuginstall(ui):
2140 def debuginstall(ui):
2141 '''test Mercurial installation
2141 '''test Mercurial installation
2142
2142
2143 Returns 0 on success.
2143 Returns 0 on success.
2144 '''
2144 '''
2145
2145
2146 def writetemp(contents):
2146 def writetemp(contents):
2147 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
2147 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
2148 f = os.fdopen(fd, "wb")
2148 f = os.fdopen(fd, "wb")
2149 f.write(contents)
2149 f.write(contents)
2150 f.close()
2150 f.close()
2151 return name
2151 return name
2152
2152
2153 problems = 0
2153 problems = 0
2154
2154
2155 # encoding
2155 # encoding
2156 ui.status(_("checking encoding (%s)...\n") % encoding.encoding)
2156 ui.status(_("checking encoding (%s)...\n") % encoding.encoding)
2157 try:
2157 try:
2158 encoding.fromlocal("test")
2158 encoding.fromlocal("test")
2159 except util.Abort, inst:
2159 except util.Abort, inst:
2160 ui.write(" %s\n" % inst)
2160 ui.write(" %s\n" % inst)
2161 ui.write(_(" (check that your locale is properly set)\n"))
2161 ui.write(_(" (check that your locale is properly set)\n"))
2162 problems += 1
2162 problems += 1
2163
2163
2164 # Python
2164 # Python
2165 ui.status(_("checking Python executable (%s)\n") % sys.executable)
2165 ui.status(_("checking Python executable (%s)\n") % sys.executable)
2166 ui.status(_("checking Python version (%s)\n")
2166 ui.status(_("checking Python version (%s)\n")
2167 % ("%s.%s.%s" % sys.version_info[:3]))
2167 % ("%s.%s.%s" % sys.version_info[:3]))
2168 ui.status(_("checking Python lib (%s)...\n")
2168 ui.status(_("checking Python lib (%s)...\n")
2169 % os.path.dirname(os.__file__))
2169 % os.path.dirname(os.__file__))
2170
2170
2171 # compiled modules
2171 # compiled modules
2172 ui.status(_("checking installed modules (%s)...\n")
2172 ui.status(_("checking installed modules (%s)...\n")
2173 % os.path.dirname(__file__))
2173 % os.path.dirname(__file__))
2174 try:
2174 try:
2175 import bdiff, mpatch, base85, osutil
2175 import bdiff, mpatch, base85, osutil
2176 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
2176 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
2177 except Exception, inst:
2177 except Exception, inst:
2178 ui.write(" %s\n" % inst)
2178 ui.write(" %s\n" % inst)
2179 ui.write(_(" One or more extensions could not be found"))
2179 ui.write(_(" One or more extensions could not be found"))
2180 ui.write(_(" (check that you compiled the extensions)\n"))
2180 ui.write(_(" (check that you compiled the extensions)\n"))
2181 problems += 1
2181 problems += 1
2182
2182
2183 # templates
2183 # templates
2184 import templater
2184 import templater
2185 p = templater.templatepath()
2185 p = templater.templatepath()
2186 ui.status(_("checking templates (%s)...\n") % ' '.join(p))
2186 ui.status(_("checking templates (%s)...\n") % ' '.join(p))
2187 if p:
2187 if p:
2188 m = templater.templatepath("map-cmdline.default")
2188 m = templater.templatepath("map-cmdline.default")
2189 if m:
2189 if m:
2190 # template found, check if it is working
2190 # template found, check if it is working
2191 try:
2191 try:
2192 templater.templater(m)
2192 templater.templater(m)
2193 except Exception, inst:
2193 except Exception, inst:
2194 ui.write(" %s\n" % inst)
2194 ui.write(" %s\n" % inst)
2195 p = None
2195 p = None
2196 else:
2196 else:
2197 ui.write(_(" template 'default' not found\n"))
2197 ui.write(_(" template 'default' not found\n"))
2198 p = None
2198 p = None
2199 else:
2199 else:
2200 ui.write(_(" no template directories found\n"))
2200 ui.write(_(" no template directories found\n"))
2201 if not p:
2201 if not p:
2202 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
2202 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
2203 problems += 1
2203 problems += 1
2204
2204
2205 # editor
2205 # editor
2206 ui.status(_("checking commit editor...\n"))
2206 ui.status(_("checking commit editor...\n"))
2207 editor = ui.geteditor()
2207 editor = ui.geteditor()
2208 cmdpath = util.findexe(editor) or util.findexe(editor.split()[0])
2208 cmdpath = util.findexe(editor) or util.findexe(editor.split()[0])
2209 if not cmdpath:
2209 if not cmdpath:
2210 if editor == 'vi':
2210 if editor == 'vi':
2211 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
2211 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
2212 ui.write(_(" (specify a commit editor in your configuration"
2212 ui.write(_(" (specify a commit editor in your configuration"
2213 " file)\n"))
2213 " file)\n"))
2214 else:
2214 else:
2215 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
2215 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
2216 ui.write(_(" (specify a commit editor in your configuration"
2216 ui.write(_(" (specify a commit editor in your configuration"
2217 " file)\n"))
2217 " file)\n"))
2218 problems += 1
2218 problems += 1
2219
2219
2220 # check username
2220 # check username
2221 ui.status(_("checking username...\n"))
2221 ui.status(_("checking username...\n"))
2222 try:
2222 try:
2223 ui.username()
2223 ui.username()
2224 except util.Abort, e:
2224 except util.Abort, e:
2225 ui.write(" %s\n" % e)
2225 ui.write(" %s\n" % e)
2226 ui.write(_(" (specify a username in your configuration file)\n"))
2226 ui.write(_(" (specify a username in your configuration file)\n"))
2227 problems += 1
2227 problems += 1
2228
2228
2229 if not problems:
2229 if not problems:
2230 ui.status(_("no problems detected\n"))
2230 ui.status(_("no problems detected\n"))
2231 else:
2231 else:
2232 ui.write(_("%s problems detected,"
2232 ui.write(_("%s problems detected,"
2233 " please check your install!\n") % problems)
2233 " please check your install!\n") % problems)
2234
2234
2235 return problems
2235 return problems
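# Illustrative sketch, not part of the upstream file: the probe-and-count
# pattern used throughout debuginstall -- each check either passes quietly or
# prints a hint and bumps the counter, and the final count doubles as the
# command's exit status. The helper name and the checks passed in are
# hypothetical.
def _runchecks(ui, checks):
    '''run zero-argument callables; report and count the ones that fail'''
    problems = 0
    for name, check in checks:
        try:
            check()
        except Exception, inst:
            ui.write(" %s: %s\n" % (name, inst))
            problems += 1
    return problems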
2236
2236
2237 @command('debugknown', [], _('REPO ID...'))
2237 @command('debugknown', [], _('REPO ID...'))
2238 def debugknown(ui, repopath, *ids, **opts):
2238 def debugknown(ui, repopath, *ids, **opts):
2239 """test whether node ids are known to a repo
2239 """test whether node ids are known to a repo
2240
2240
2241 Every ID must be a full-length hex node id string. Returns a list of 0s
2241 Every ID must be a full-length hex node id string. Returns a list of 0s
2242 and 1s indicating unknown/known.
2242 and 1s indicating unknown/known.
2243 """
2243 """
2244 repo = hg.peer(ui, opts, repopath)
2244 repo = hg.peer(ui, opts, repopath)
2245 if not repo.capable('known'):
2245 if not repo.capable('known'):
2246 raise util.Abort("known() not supported by target repository")
2246 raise util.Abort("known() not supported by target repository")
2247 flags = repo.known([bin(s) for s in ids])
2247 flags = repo.known([bin(s) for s in ids])
2248 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
2248 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
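# Illustrative sketch, not part of the upstream file: how the booleans
# returned by peer.known() become the "0s and 1s" string described in the
# docstring above. The helper name is hypothetical.
def _renderknownflags(flags):
    '''map an iterable of booleans to a string of "0"/"1" characters'''
    return "".join([f and "1" or "0" for f in flags])

# e.g. _renderknownflags([True, False, True]) == "101"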
2249
2249
2250 @command('debuglabelcomplete', [], _('LABEL...'))
2250 @command('debuglabelcomplete', [], _('LABEL...'))
2251 def debuglabelcomplete(ui, repo, *args):
2251 def debuglabelcomplete(ui, repo, *args):
2252 '''complete "labels" - tags, open branch names, bookmark names'''
2252 '''complete "labels" - tags, open branch names, bookmark names'''
2253
2253
2254 labels = set()
2254 labels = set()
2255 labels.update(t[0] for t in repo.tagslist())
2255 labels.update(t[0] for t in repo.tagslist())
2256 labels.update(repo._bookmarks.keys())
2256 labels.update(repo._bookmarks.keys())
2257 labels.update(tag for (tag, heads, tip, closed)
2257 labels.update(tag for (tag, heads, tip, closed)
2258 in repo.branchmap().iterbranches() if not closed)
2258 in repo.branchmap().iterbranches() if not closed)
2259 completions = set()
2259 completions = set()
2260 if not args:
2260 if not args:
2261 args = ['']
2261 args = ['']
2262 for a in args:
2262 for a in args:
2263 completions.update(l for l in labels if l.startswith(a))
2263 completions.update(l for l in labels if l.startswith(a))
2264 ui.write('\n'.join(sorted(completions)))
2264 ui.write('\n'.join(sorted(completions)))
2265 ui.write('\n')
2265 ui.write('\n')
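# Illustrative sketch, not part of the upstream file: the prefix-matching step
# used above, shown on plain data. The helper name is hypothetical.
def _completelabels(labels, prefixes):
    '''return the sorted labels that start with any of the given prefixes'''
    prefixes = prefixes or ['']        # no argument means "complete everything"
    completions = set()
    for prefix in prefixes:
        completions.update(l for l in labels if l.startswith(prefix))
    return sorted(completions)

# e.g. _completelabels(['default', 'stable', 'tip'], ['s']) == ['stable']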
2266
2266
2267 @command('debugobsolete',
2267 @command('debugobsolete',
2268 [('', 'flags', 0, _('markers flag')),
2268 [('', 'flags', 0, _('markers flag')),
2269 ] + commitopts2,
2269 ] + commitopts2,
2270 _('[OBSOLETED [REPLACEMENT] [REPL... ]'))
2270 _('[OBSOLETED [REPLACEMENT] [REPL... ]'))
2271 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2271 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2272 """create arbitrary obsolete marker
2272 """create arbitrary obsolete marker
2273
2273
2274 With no arguments, displays the list of obsolescence markers."""
2274 With no arguments, displays the list of obsolescence markers."""
2275 def parsenodeid(s):
2275 def parsenodeid(s):
2276 try:
2276 try:
2277 # We do not use revsingle/revrange functions here to accept
2277 # We do not use revsingle/revrange functions here to accept
2278 # arbitrary node identifiers, possibly not present in the
2278 # arbitrary node identifiers, possibly not present in the
2279 # local repository.
2279 # local repository.
2280 n = bin(s)
2280 n = bin(s)
2281 if len(n) != len(nullid):
2281 if len(n) != len(nullid):
2282 raise TypeError()
2282 raise TypeError()
2283 return n
2283 return n
2284 except TypeError:
2284 except TypeError:
2285 raise util.Abort('changeset references must be full hexadecimal '
2285 raise util.Abort('changeset references must be full hexadecimal '
2286 'node identifiers')
2286 'node identifiers')
2287
2287
2288 if precursor is not None:
2288 if precursor is not None:
2289 metadata = {}
2289 metadata = {}
2290 if 'date' in opts:
2290 if 'date' in opts:
2291 metadata['date'] = opts['date']
2291 metadata['date'] = opts['date']
2292 metadata['user'] = opts['user'] or ui.username()
2292 metadata['user'] = opts['user'] or ui.username()
2293 succs = tuple(parsenodeid(succ) for succ in successors)
2293 succs = tuple(parsenodeid(succ) for succ in successors)
2294 l = repo.lock()
2294 l = repo.lock()
2295 try:
2295 try:
2296 tr = repo.transaction('debugobsolete')
2296 tr = repo.transaction('debugobsolete')
2297 try:
2297 try:
2298 repo.obsstore.create(tr, parsenodeid(precursor), succs,
2298 repo.obsstore.create(tr, parsenodeid(precursor), succs,
2299 opts['flags'], metadata)
2299 opts['flags'], metadata)
2300 tr.close()
2300 tr.close()
2301 finally:
2301 finally:
2302 tr.release()
2302 tr.release()
2303 finally:
2303 finally:
2304 l.release()
2304 l.release()
2305 else:
2305 else:
2306 for m in obsolete.allmarkers(repo):
2306 for m in obsolete.allmarkers(repo):
2307 cmdutil.showmarker(ui, m)
2307 cmdutil.showmarker(ui, m)
2308
2308
2309 @command('debugpathcomplete',
2309 @command('debugpathcomplete',
2310 [('f', 'full', None, _('complete an entire path')),
2310 [('f', 'full', None, _('complete an entire path')),
2311 ('n', 'normal', None, _('show only normal files')),
2311 ('n', 'normal', None, _('show only normal files')),
2312 ('a', 'added', None, _('show only added files')),
2312 ('a', 'added', None, _('show only added files')),
2313 ('r', 'removed', None, _('show only removed files'))],
2313 ('r', 'removed', None, _('show only removed files'))],
2314 _('FILESPEC...'))
2314 _('FILESPEC...'))
2315 def debugpathcomplete(ui, repo, *specs, **opts):
2315 def debugpathcomplete(ui, repo, *specs, **opts):
'''complete part or all of a tracked path

This command supports shells that offer path name completion. It
currently completes only files already known to the dirstate.

Completion extends only to the next path segment unless
--full is specified, in which case entire paths are used.'''
2323
2323
2324 def complete(path, acceptable):
2324 def complete(path, acceptable):
2325 dirstate = repo.dirstate
2325 dirstate = repo.dirstate
2326 spec = os.path.normpath(os.path.join(os.getcwd(), path))
2326 spec = os.path.normpath(os.path.join(os.getcwd(), path))
2327 rootdir = repo.root + os.sep
2327 rootdir = repo.root + os.sep
2328 if spec != repo.root and not spec.startswith(rootdir):
2328 if spec != repo.root and not spec.startswith(rootdir):
2329 return [], []
2329 return [], []
2330 if os.path.isdir(spec):
2330 if os.path.isdir(spec):
2331 spec += '/'
2331 spec += '/'
2332 spec = spec[len(rootdir):]
2332 spec = spec[len(rootdir):]
2333 fixpaths = os.sep != '/'
2333 fixpaths = os.sep != '/'
2334 if fixpaths:
2334 if fixpaths:
2335 spec = spec.replace(os.sep, '/')
2335 spec = spec.replace(os.sep, '/')
2336 speclen = len(spec)
2336 speclen = len(spec)
2337 fullpaths = opts['full']
2337 fullpaths = opts['full']
2338 files, dirs = set(), set()
2338 files, dirs = set(), set()
2339 adddir, addfile = dirs.add, files.add
2339 adddir, addfile = dirs.add, files.add
2340 for f, st in dirstate.iteritems():
2340 for f, st in dirstate.iteritems():
2341 if f.startswith(spec) and st[0] in acceptable:
2341 if f.startswith(spec) and st[0] in acceptable:
2342 if fixpaths:
2342 if fixpaths:
2343 f = f.replace('/', os.sep)
2343 f = f.replace('/', os.sep)
2344 if fullpaths:
2344 if fullpaths:
2345 addfile(f)
2345 addfile(f)
2346 continue
2346 continue
2347 s = f.find(os.sep, speclen)
2347 s = f.find(os.sep, speclen)
2348 if s >= 0:
2348 if s >= 0:
2349 adddir(f[:s])
2349 adddir(f[:s])
2350 else:
2350 else:
2351 addfile(f)
2351 addfile(f)
2352 return files, dirs
2352 return files, dirs
2353
2353
2354 acceptable = ''
2354 acceptable = ''
2355 if opts['normal']:
2355 if opts['normal']:
2356 acceptable += 'nm'
2356 acceptable += 'nm'
2357 if opts['added']:
2357 if opts['added']:
2358 acceptable += 'a'
2358 acceptable += 'a'
2359 if opts['removed']:
2359 if opts['removed']:
2360 acceptable += 'r'
2360 acceptable += 'r'
2361 cwd = repo.getcwd()
2361 cwd = repo.getcwd()
2362 if not specs:
2362 if not specs:
2363 specs = ['.']
2363 specs = ['.']
2364
2364
2365 files, dirs = set(), set()
2365 files, dirs = set(), set()
2366 for spec in specs:
2366 for spec in specs:
2367 f, d = complete(spec, acceptable or 'nmar')
2367 f, d = complete(spec, acceptable or 'nmar')
2368 files.update(f)
2368 files.update(f)
2369 dirs.update(d)
2369 dirs.update(d)
2370 files.update(dirs)
2370 files.update(dirs)
2371 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2371 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2372 ui.write('\n')
2372 ui.write('\n')
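# Illustrative sketch, not part of the upstream file: the "complete only to
# the next path segment unless --full is given" rule from the docstring above,
# applied to plain '/'-separated strings. Names are hypothetical.
def _nextsegment(path, spec, full=False):
    '''return the completion offered for a tracked path given the typed spec'''
    if full:
        return path
    cut = path.find('/', len(spec))
    return path if cut < 0 else path[:cut]

# e.g. _nextsegment('lib/util/io.py', 'lib/') == 'lib/util'
#      _nextsegment('lib/util/io.py', 'lib/', full=True) == 'lib/util/io.py'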
2373
2373
2374 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'))
2374 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'))
2375 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2375 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2376 '''access the pushkey key/value protocol
2376 '''access the pushkey key/value protocol
2377
2377
2378 With two args, list the keys in the given namespace.
2378 With two args, list the keys in the given namespace.
2379
2379
2380 With five args, set a key to new if it currently is set to old.
2380 With five args, set a key to new if it currently is set to old.
2381 Reports success or failure.
2381 Reports success or failure.
2382 '''
2382 '''
2383
2383
2384 target = hg.peer(ui, {}, repopath)
2384 target = hg.peer(ui, {}, repopath)
2385 if keyinfo:
2385 if keyinfo:
2386 key, old, new = keyinfo
2386 key, old, new = keyinfo
2387 r = target.pushkey(namespace, key, old, new)
2387 r = target.pushkey(namespace, key, old, new)
2388 ui.status(str(r) + '\n')
2388 ui.status(str(r) + '\n')
2389 return not r
2389 return not r
2390 else:
2390 else:
2391 for k, v in sorted(target.listkeys(namespace).iteritems()):
2391 for k, v in sorted(target.listkeys(namespace).iteritems()):
2392 ui.write("%s\t%s\n" % (k.encode('string-escape'),
2392 ui.write("%s\t%s\n" % (k.encode('string-escape'),
2393 v.encode('string-escape')))
2393 v.encode('string-escape')))
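# Illustrative sketch, not part of the upstream file: the two calling forms
# described in the docstring above, with a plain dict standing in for a
# pushkey namespace. Names are hypothetical.
def _pushkey(store, keyinfo=()):
    '''list the store when no key is given, else do a compare-and-set'''
    if not keyinfo:
        return sorted(store.items())
    key, old, new = keyinfo
    if store.get(key, '') != old:
        return False                    # the old value did not match
    store[key] = new
    return True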
2394
2394
2395 @command('debugpvec', [], _('A B'))
2395 @command('debugpvec', [], _('A B'))
2396 def debugpvec(ui, repo, a, b=None):
2396 def debugpvec(ui, repo, a, b=None):
2397 ca = scmutil.revsingle(repo, a)
2397 ca = scmutil.revsingle(repo, a)
2398 cb = scmutil.revsingle(repo, b)
2398 cb = scmutil.revsingle(repo, b)
2399 pa = pvec.ctxpvec(ca)
2399 pa = pvec.ctxpvec(ca)
2400 pb = pvec.ctxpvec(cb)
2400 pb = pvec.ctxpvec(cb)
2401 if pa == pb:
2401 if pa == pb:
2402 rel = "="
2402 rel = "="
2403 elif pa > pb:
2403 elif pa > pb:
2404 rel = ">"
2404 rel = ">"
2405 elif pa < pb:
2405 elif pa < pb:
2406 rel = "<"
2406 rel = "<"
2407 elif pa | pb:
2407 elif pa | pb:
2408 rel = "|"
2408 rel = "|"
2409 ui.write(_("a: %s\n") % pa)
2409 ui.write(_("a: %s\n") % pa)
2410 ui.write(_("b: %s\n") % pb)
2410 ui.write(_("b: %s\n") % pb)
2411 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2411 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2412 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
2412 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
2413 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
2413 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
2414 pa.distance(pb), rel))
2414 pa.distance(pb), rel))
2415
2415
2416 @command('debugrebuilddirstate|debugrebuildstate',
2416 @command('debugrebuilddirstate|debugrebuildstate',
2417 [('r', 'rev', '', _('revision to rebuild to'), _('REV'))],
2417 [('r', 'rev', '', _('revision to rebuild to'), _('REV'))],
2418 _('[-r REV]'))
2418 _('[-r REV]'))
2419 def debugrebuilddirstate(ui, repo, rev):
2419 def debugrebuilddirstate(ui, repo, rev):
2420 """rebuild the dirstate as it would look like for the given revision
2420 """rebuild the dirstate as it would look like for the given revision
2421
2421
2422 If no revision is specified the first current parent will be used.
2422 If no revision is specified the first current parent will be used.
2423
2423
2424 The dirstate will be set to the files of the given revision.
2424 The dirstate will be set to the files of the given revision.
2425 The actual working directory content or existing dirstate
2425 The actual working directory content or existing dirstate
2426 information such as adds or removes is not considered.
2426 information such as adds or removes is not considered.
2427
2427
2428 One use of this command is to make the next :hg:`status` invocation
2428 One use of this command is to make the next :hg:`status` invocation
2429 check the actual file content.
2429 check the actual file content.
2430 """
2430 """
2431 ctx = scmutil.revsingle(repo, rev)
2431 ctx = scmutil.revsingle(repo, rev)
2432 wlock = repo.wlock()
2432 wlock = repo.wlock()
2433 try:
2433 try:
2434 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
2434 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
2435 finally:
2435 finally:
2436 wlock.release()
2436 wlock.release()
2437
2437
2438 @command('debugrename',
2438 @command('debugrename',
2439 [('r', 'rev', '', _('revision to debug'), _('REV'))],
2439 [('r', 'rev', '', _('revision to debug'), _('REV'))],
2440 _('[-r REV] FILE'))
2440 _('[-r REV] FILE'))
2441 def debugrename(ui, repo, file1, *pats, **opts):
2441 def debugrename(ui, repo, file1, *pats, **opts):
2442 """dump rename information"""
2442 """dump rename information"""
2443
2443
2444 ctx = scmutil.revsingle(repo, opts.get('rev'))
2444 ctx = scmutil.revsingle(repo, opts.get('rev'))
2445 m = scmutil.match(ctx, (file1,) + pats, opts)
2445 m = scmutil.match(ctx, (file1,) + pats, opts)
2446 for abs in ctx.walk(m):
2446 for abs in ctx.walk(m):
2447 fctx = ctx[abs]
2447 fctx = ctx[abs]
2448 o = fctx.filelog().renamed(fctx.filenode())
2448 o = fctx.filelog().renamed(fctx.filenode())
2449 rel = m.rel(abs)
2449 rel = m.rel(abs)
2450 if o:
2450 if o:
2451 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2451 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2452 else:
2452 else:
2453 ui.write(_("%s not renamed\n") % rel)
2453 ui.write(_("%s not renamed\n") % rel)
2454
2454
2455 @command('debugrevlog',
2455 @command('debugrevlog',
2456 [('c', 'changelog', False, _('open changelog')),
2456 [('c', 'changelog', False, _('open changelog')),
2457 ('m', 'manifest', False, _('open manifest')),
2457 ('m', 'manifest', False, _('open manifest')),
2458 ('d', 'dump', False, _('dump index data'))],
2458 ('d', 'dump', False, _('dump index data'))],
2459 _('-c|-m|FILE'))
2459 _('-c|-m|FILE'))
2460 def debugrevlog(ui, repo, file_=None, **opts):
2460 def debugrevlog(ui, repo, file_=None, **opts):
2461 """show data and statistics about a revlog"""
2461 """show data and statistics about a revlog"""
2462 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
2462 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
2463
2463
2464 if opts.get("dump"):
2464 if opts.get("dump"):
2465 numrevs = len(r)
2465 numrevs = len(r)
2466 ui.write("# rev p1rev p2rev start end deltastart base p1 p2"
2466 ui.write("# rev p1rev p2rev start end deltastart base p1 p2"
2467 " rawsize totalsize compression heads\n")
2467 " rawsize totalsize compression heads\n")
2468 ts = 0
2468 ts = 0
2469 heads = set()
2469 heads = set()
2470 for rev in xrange(numrevs):
2470 for rev in xrange(numrevs):
2471 dbase = r.deltaparent(rev)
2471 dbase = r.deltaparent(rev)
2472 if dbase == -1:
2472 if dbase == -1:
2473 dbase = rev
2473 dbase = rev
2474 cbase = r.chainbase(rev)
2474 cbase = r.chainbase(rev)
2475 p1, p2 = r.parentrevs(rev)
2475 p1, p2 = r.parentrevs(rev)
2476 rs = r.rawsize(rev)
2476 rs = r.rawsize(rev)
2477 ts = ts + rs
2477 ts = ts + rs
2478 heads -= set(r.parentrevs(rev))
2478 heads -= set(r.parentrevs(rev))
2479 heads.add(rev)
2479 heads.add(rev)
2480 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d %11d %5d\n" %
2480 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d %11d %5d\n" %
2481 (rev, p1, p2, r.start(rev), r.end(rev),
2481 (rev, p1, p2, r.start(rev), r.end(rev),
2482 r.start(dbase), r.start(cbase),
2482 r.start(dbase), r.start(cbase),
2483 r.start(p1), r.start(p2),
2483 r.start(p1), r.start(p2),
2484 rs, ts, ts / r.end(rev), len(heads)))
2484 rs, ts, ts / r.end(rev), len(heads)))
2485 return 0
2485 return 0
2486
2486
2487 v = r.version
2487 v = r.version
2488 format = v & 0xFFFF
2488 format = v & 0xFFFF
2489 flags = []
2489 flags = []
2490 gdelta = False
2490 gdelta = False
2491 if v & revlog.REVLOGNGINLINEDATA:
2491 if v & revlog.REVLOGNGINLINEDATA:
2492 flags.append('inline')
2492 flags.append('inline')
2493 if v & revlog.REVLOGGENERALDELTA:
2493 if v & revlog.REVLOGGENERALDELTA:
2494 gdelta = True
2494 gdelta = True
2495 flags.append('generaldelta')
2495 flags.append('generaldelta')
2496 if not flags:
2496 if not flags:
2497 flags = ['(none)']
2497 flags = ['(none)']
2498
2498
2499 nummerges = 0
2499 nummerges = 0
2500 numfull = 0
2500 numfull = 0
2501 numprev = 0
2501 numprev = 0
2502 nump1 = 0
2502 nump1 = 0
2503 nump2 = 0
2503 nump2 = 0
2504 numother = 0
2504 numother = 0
2505 nump1prev = 0
2505 nump1prev = 0
2506 nump2prev = 0
2506 nump2prev = 0
2507 chainlengths = []
2507 chainlengths = []
2508
2508
2509 datasize = [None, 0, 0L]
2509 datasize = [None, 0, 0L]
2510 fullsize = [None, 0, 0L]
2510 fullsize = [None, 0, 0L]
2511 deltasize = [None, 0, 0L]
2511 deltasize = [None, 0, 0L]
2512
2512
2513 def addsize(size, l):
2513 def addsize(size, l):
2514 if l[0] is None or size < l[0]:
2514 if l[0] is None or size < l[0]:
2515 l[0] = size
2515 l[0] = size
2516 if size > l[1]:
2516 if size > l[1]:
2517 l[1] = size
2517 l[1] = size
2518 l[2] += size
2518 l[2] += size
2519
2519
2520 numrevs = len(r)
2520 numrevs = len(r)
2521 for rev in xrange(numrevs):
2521 for rev in xrange(numrevs):
2522 p1, p2 = r.parentrevs(rev)
2522 p1, p2 = r.parentrevs(rev)
2523 delta = r.deltaparent(rev)
2523 delta = r.deltaparent(rev)
2524 if format > 0:
2524 if format > 0:
2525 addsize(r.rawsize(rev), datasize)
2525 addsize(r.rawsize(rev), datasize)
2526 if p2 != nullrev:
2526 if p2 != nullrev:
2527 nummerges += 1
2527 nummerges += 1
2528 size = r.length(rev)
2528 size = r.length(rev)
2529 if delta == nullrev:
2529 if delta == nullrev:
2530 chainlengths.append(0)
2530 chainlengths.append(0)
2531 numfull += 1
2531 numfull += 1
2532 addsize(size, fullsize)
2532 addsize(size, fullsize)
2533 else:
2533 else:
2534 chainlengths.append(chainlengths[delta] + 1)
2534 chainlengths.append(chainlengths[delta] + 1)
2535 addsize(size, deltasize)
2535 addsize(size, deltasize)
2536 if delta == rev - 1:
2536 if delta == rev - 1:
2537 numprev += 1
2537 numprev += 1
2538 if delta == p1:
2538 if delta == p1:
2539 nump1prev += 1
2539 nump1prev += 1
2540 elif delta == p2:
2540 elif delta == p2:
2541 nump2prev += 1
2541 nump2prev += 1
2542 elif delta == p1:
2542 elif delta == p1:
2543 nump1 += 1
2543 nump1 += 1
2544 elif delta == p2:
2544 elif delta == p2:
2545 nump2 += 1
2545 nump2 += 1
2546 elif delta != nullrev:
2546 elif delta != nullrev:
2547 numother += 1
2547 numother += 1
2548
2548
2549 # Adjust size min value for empty cases
2549 # Adjust size min value for empty cases
2550 for size in (datasize, fullsize, deltasize):
2550 for size in (datasize, fullsize, deltasize):
2551 if size[0] is None:
2551 if size[0] is None:
2552 size[0] = 0
2552 size[0] = 0
2553
2553
2554 numdeltas = numrevs - numfull
2554 numdeltas = numrevs - numfull
2555 numoprev = numprev - nump1prev - nump2prev
2555 numoprev = numprev - nump1prev - nump2prev
2556 totalrawsize = datasize[2]
2556 totalrawsize = datasize[2]
2557 datasize[2] /= numrevs
2557 datasize[2] /= numrevs
2558 fulltotal = fullsize[2]
2558 fulltotal = fullsize[2]
2559 fullsize[2] /= numfull
2559 fullsize[2] /= numfull
2560 deltatotal = deltasize[2]
2560 deltatotal = deltasize[2]
2561 if numrevs - numfull > 0:
2561 if numrevs - numfull > 0:
2562 deltasize[2] /= numrevs - numfull
2562 deltasize[2] /= numrevs - numfull
2563 totalsize = fulltotal + deltatotal
2563 totalsize = fulltotal + deltatotal
2564 avgchainlen = sum(chainlengths) / numrevs
2564 avgchainlen = sum(chainlengths) / numrevs
2565 compratio = totalrawsize / totalsize
2565 compratio = totalrawsize / totalsize
2566
2566
2567 basedfmtstr = '%%%dd\n'
2567 basedfmtstr = '%%%dd\n'
2568 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2568 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2569
2569
2570 def dfmtstr(max):
2570 def dfmtstr(max):
2571 return basedfmtstr % len(str(max))
2571 return basedfmtstr % len(str(max))
2572 def pcfmtstr(max, padding=0):
2572 def pcfmtstr(max, padding=0):
2573 return basepcfmtstr % (len(str(max)), ' ' * padding)
2573 return basepcfmtstr % (len(str(max)), ' ' * padding)
2574
2574
2575 def pcfmt(value, total):
2575 def pcfmt(value, total):
2576 return (value, 100 * float(value) / total)
2576 return (value, 100 * float(value) / total)
2577
2577
2578 ui.write(('format : %d\n') % format)
2578 ui.write(('format : %d\n') % format)
2579 ui.write(('flags : %s\n') % ', '.join(flags))
2579 ui.write(('flags : %s\n') % ', '.join(flags))
2580
2580
2581 ui.write('\n')
2581 ui.write('\n')
2582 fmt = pcfmtstr(totalsize)
2582 fmt = pcfmtstr(totalsize)
2583 fmt2 = dfmtstr(totalsize)
2583 fmt2 = dfmtstr(totalsize)
2584 ui.write(('revisions : ') + fmt2 % numrevs)
2584 ui.write(('revisions : ') + fmt2 % numrevs)
2585 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2585 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2586 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2586 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2587 ui.write(('revisions : ') + fmt2 % numrevs)
2587 ui.write(('revisions : ') + fmt2 % numrevs)
2588 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
2588 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
2589 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2589 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2590 ui.write(('revision size : ') + fmt2 % totalsize)
2590 ui.write(('revision size : ') + fmt2 % totalsize)
2591 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
2591 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
2592 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2592 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2593
2593
2594 ui.write('\n')
2594 ui.write('\n')
2595 fmt = dfmtstr(max(avgchainlen, compratio))
2595 fmt = dfmtstr(max(avgchainlen, compratio))
2596 ui.write(('avg chain length : ') + fmt % avgchainlen)
2596 ui.write(('avg chain length : ') + fmt % avgchainlen)
2597 ui.write(('compression ratio : ') + fmt % compratio)
2597 ui.write(('compression ratio : ') + fmt % compratio)
2598
2598
2599 if format > 0:
2599 if format > 0:
2600 ui.write('\n')
2600 ui.write('\n')
2601 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2601 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2602 % tuple(datasize))
2602 % tuple(datasize))
2603 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2603 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2604 % tuple(fullsize))
2604 % tuple(fullsize))
2605 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2605 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2606 % tuple(deltasize))
2606 % tuple(deltasize))
2607
2607
2608 if numdeltas > 0:
2608 if numdeltas > 0:
2609 ui.write('\n')
2609 ui.write('\n')
2610 fmt = pcfmtstr(numdeltas)
2610 fmt = pcfmtstr(numdeltas)
2611 fmt2 = pcfmtstr(numdeltas, 4)
2611 fmt2 = pcfmtstr(numdeltas, 4)
2612 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2612 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2613 if numprev > 0:
2613 if numprev > 0:
2614 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2614 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2615 numprev))
2615 numprev))
2616 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2616 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2617 numprev))
2617 numprev))
2618 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2618 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2619 numprev))
2619 numprev))
2620 if gdelta:
2620 if gdelta:
2621 ui.write(('deltas against p1 : ')
2621 ui.write(('deltas against p1 : ')
2622 + fmt % pcfmt(nump1, numdeltas))
2622 + fmt % pcfmt(nump1, numdeltas))
2623 ui.write(('deltas against p2 : ')
2623 ui.write(('deltas against p2 : ')
2624 + fmt % pcfmt(nump2, numdeltas))
2624 + fmt % pcfmt(nump2, numdeltas))
2625 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2625 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2626 numdeltas))
2626 numdeltas))
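# Illustrative sketch, not part of the upstream file: how pcfmtstr() and
# pcfmt() above cooperate to print an aligned "count (percent%)" column.
# The numbers are made up.
def _example_pcfmt():
    def pcfmt(value, total):
        return (value, 100 * float(value) / total)
    numdeltas, numprev = 120, 90
    fmt = '%%%dd %s(%%5.2f%%%%)\n' % (len(str(numdeltas)), '')
    return fmt % pcfmt(numprev, numdeltas)    # ' 90 (75.00%)\n'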
2627
2627
2628 @command('debugrevspec',
2628 @command('debugrevspec',
2629 [('', 'optimize', None, _('print parsed tree after optimizing'))],
2629 [('', 'optimize', None, _('print parsed tree after optimizing'))],
2630 ('REVSPEC'))
2630 ('REVSPEC'))
2631 def debugrevspec(ui, repo, expr, **opts):
2631 def debugrevspec(ui, repo, expr, **opts):
2632 """parse and apply a revision specification
2632 """parse and apply a revision specification
2633
2633
2634 Use --verbose to print the parsed tree before and after aliases
2634 Use --verbose to print the parsed tree before and after aliases
2635 expansion.
2635 expansion.
2636 """
2636 """
2637 if ui.verbose:
2637 if ui.verbose:
2638 tree = revset.parse(expr)[0]
2638 tree = revset.parse(expr)[0]
2639 ui.note(revset.prettyformat(tree), "\n")
2639 ui.note(revset.prettyformat(tree), "\n")
2640 newtree = revset.findaliases(ui, tree)
2640 newtree = revset.findaliases(ui, tree)
2641 if newtree != tree:
2641 if newtree != tree:
2642 ui.note(revset.prettyformat(newtree), "\n")
2642 ui.note(revset.prettyformat(newtree), "\n")
2643 if opts["optimize"]:
2643 if opts["optimize"]:
2644 weight, optimizedtree = revset.optimize(newtree, True)
2644 weight, optimizedtree = revset.optimize(newtree, True)
2645 ui.note("* optimized:\n", revset.prettyformat(optimizedtree), "\n")
2645 ui.note("* optimized:\n", revset.prettyformat(optimizedtree), "\n")
2646 func = revset.match(ui, expr)
2646 func = revset.match(ui, expr)
2647 for c in func(repo, revset.spanset(repo)):
2647 for c in func(repo, revset.spanset(repo)):
2648 ui.write("%s\n" % c)
2648 ui.write("%s\n" % c)
2649
2649
2650 @command('debugsetparents', [], _('REV1 [REV2]'))
2650 @command('debugsetparents', [], _('REV1 [REV2]'))
2651 def debugsetparents(ui, repo, rev1, rev2=None):
2651 def debugsetparents(ui, repo, rev1, rev2=None):
2652 """manually set the parents of the current working directory
2652 """manually set the parents of the current working directory
2653
2653
2654 This is useful for writing repository conversion tools, but should
2654 This is useful for writing repository conversion tools, but should
2655 be used with care.
2655 be used with care.
2656
2656
2657 Returns 0 on success.
2657 Returns 0 on success.
2658 """
2658 """
2659
2659
2660 r1 = scmutil.revsingle(repo, rev1).node()
2660 r1 = scmutil.revsingle(repo, rev1).node()
2661 r2 = scmutil.revsingle(repo, rev2, 'null').node()
2661 r2 = scmutil.revsingle(repo, rev2, 'null').node()
2662
2662
2663 wlock = repo.wlock()
2663 wlock = repo.wlock()
2664 try:
2664 try:
2665 repo.setparents(r1, r2)
2665 repo.setparents(r1, r2)
2666 finally:
2666 finally:
2667 wlock.release()
2667 wlock.release()
2668
2668
2669 @command('debugdirstate|debugstate',
2669 @command('debugdirstate|debugstate',
2670 [('', 'nodates', None, _('do not display the saved mtime')),
2670 [('', 'nodates', None, _('do not display the saved mtime')),
2671 ('', 'datesort', None, _('sort by saved mtime'))],
2671 ('', 'datesort', None, _('sort by saved mtime'))],
2672 _('[OPTION]...'))
2672 _('[OPTION]...'))
2673 def debugstate(ui, repo, nodates=None, datesort=None):
2673 def debugstate(ui, repo, nodates=None, datesort=None):
2674 """show the contents of the current dirstate"""
2674 """show the contents of the current dirstate"""
2675 timestr = ""
2675 timestr = ""
2676 showdate = not nodates
2676 showdate = not nodates
2677 if datesort:
2677 if datesort:
2678 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
2678 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
2679 else:
2679 else:
2680 keyfunc = None # sort by filename
2680 keyfunc = None # sort by filename
2681 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
2681 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
2682 if showdate:
2682 if showdate:
2683 if ent[3] == -1:
2683 if ent[3] == -1:
2684 # Pad or slice to locale representation
2684 # Pad or slice to locale representation
2685 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ",
2685 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ",
2686 time.localtime(0)))
2686 time.localtime(0)))
2687 timestr = 'unset'
2687 timestr = 'unset'
2688 timestr = (timestr[:locale_len] +
2688 timestr = (timestr[:locale_len] +
2689 ' ' * (locale_len - len(timestr)))
2689 ' ' * (locale_len - len(timestr)))
2690 else:
2690 else:
2691 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
2691 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
2692 time.localtime(ent[3]))
2692 time.localtime(ent[3]))
2693 if ent[1] & 020000:
2693 if ent[1] & 020000:
2694 mode = 'lnk'
2694 mode = 'lnk'
2695 else:
2695 else:
2696 mode = '%3o' % (ent[1] & 0777 & ~util.umask)
2696 mode = '%3o' % (ent[1] & 0777 & ~util.umask)
2697 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
2697 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
2698 for f in repo.dirstate.copies():
2698 for f in repo.dirstate.copies():
2699 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
2699 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
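# Illustrative sketch, not part of the upstream file: the mode column logic
# above, written with 0o-style octal literals so it also parses on Python 3.
# The helper name and the sample umask are hypothetical.
def _modecolumn(st_mode, umask=0o022):
    '''return 'lnk' for symlinks, else the permission bits minus the umask'''
    if st_mode & 0o20000:
        return 'lnk'
    return '%3o' % (st_mode & 0o777 & ~umask)

# e.g. _modecolumn(0o100644) == '644' and _modecolumn(0o120777) == 'lnk'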
2700
2700
2701 @command('debugsub',
2701 @command('debugsub',
2702 [('r', 'rev', '',
2702 [('r', 'rev', '',
2703 _('revision to check'), _('REV'))],
2703 _('revision to check'), _('REV'))],
2704 _('[-r REV] [REV]'))
2704 _('[-r REV] [REV]'))
2705 def debugsub(ui, repo, rev=None):
2705 def debugsub(ui, repo, rev=None):
2706 ctx = scmutil.revsingle(repo, rev, None)
2706 ctx = scmutil.revsingle(repo, rev, None)
2707 for k, v in sorted(ctx.substate.items()):
2707 for k, v in sorted(ctx.substate.items()):
2708 ui.write(('path %s\n') % k)
2708 ui.write(('path %s\n') % k)
2709 ui.write((' source %s\n') % v[0])
2709 ui.write((' source %s\n') % v[0])
2710 ui.write((' revision %s\n') % v[1])
2710 ui.write((' revision %s\n') % v[1])
2711
2711
2712 @command('debugsuccessorssets',
2712 @command('debugsuccessorssets',
2713 [],
2713 [],
2714 _('[REV]'))
2714 _('[REV]'))
2715 def debugsuccessorssets(ui, repo, *revs):
2715 def debugsuccessorssets(ui, repo, *revs):
2716 """show set of successors for revision
2716 """show set of successors for revision
2717
2717
2718 A successors set of changeset A is a consistent group of revisions that
2718 A successors set of changeset A is a consistent group of revisions that
2719 succeed A. It contains non-obsolete changesets only.
2719 succeed A. It contains non-obsolete changesets only.
2720
2720
2721 In most cases a changeset A has a single successors set containing a single
2721 In most cases a changeset A has a single successors set containing a single
2722 successor (changeset A replaced by A').
2722 successor (changeset A replaced by A').
2723
2723
2724 A changeset that is made obsolete with no successors are called "pruned".
2724 A changeset that is made obsolete with no successors are called "pruned".
2725 Such changesets have no successors sets at all.
2725 Such changesets have no successors sets at all.
2726
2726
2727 A changeset that has been "split" will have a successors set containing
2727 A changeset that has been "split" will have a successors set containing
2728 more than one successor.
2728 more than one successor.
2729
2729
2730 A changeset that has been rewritten in multiple different ways is called
2730 A changeset that has been rewritten in multiple different ways is called
2731 "divergent". Such changesets have multiple successor sets (each of which
2731 "divergent". Such changesets have multiple successor sets (each of which
2732 may also be split, i.e. have multiple successors).
2732 may also be split, i.e. have multiple successors).
2733
2733
2734 Results are displayed as follows::
2734 Results are displayed as follows::
2735
2735
2736 <rev1>
2736 <rev1>
2737 <successors-1A>
2737 <successors-1A>
2738 <rev2>
2738 <rev2>
2739 <successors-2A>
2739 <successors-2A>
2740 <successors-2B1> <successors-2B2> <successors-2B3>
2740 <successors-2B1> <successors-2B2> <successors-2B3>
2741
2741
2742 Here rev2 has two possible (i.e. divergent) successors sets. The first
2742 Here rev2 has two possible (i.e. divergent) successors sets. The first
2743 holds one element, whereas the second holds three (i.e. the changeset has
2743 holds one element, whereas the second holds three (i.e. the changeset has
2744 been split).
2744 been split).
2745 """
2745 """
2746 # passed to successorssets caching computation from one call to another
2746 # passed to successorssets caching computation from one call to another
2747 cache = {}
2747 cache = {}
2748 ctx2str = str
2748 ctx2str = str
2749 node2str = short
2749 node2str = short
2750 if ui.debug():
2750 if ui.debug():
2751 def ctx2str(ctx):
2751 def ctx2str(ctx):
2752 return ctx.hex()
2752 return ctx.hex()
2753 node2str = hex
2753 node2str = hex
2754 for rev in scmutil.revrange(repo, revs):
2754 for rev in scmutil.revrange(repo, revs):
2755 ctx = repo[rev]
2755 ctx = repo[rev]
2756 ui.write('%s\n'% ctx2str(ctx))
2756 ui.write('%s\n'% ctx2str(ctx))
2757 for succsset in obsolete.successorssets(repo, ctx.node(), cache):
2757 for succsset in obsolete.successorssets(repo, ctx.node(), cache):
2758 if succsset:
2758 if succsset:
2759 ui.write(' ')
2759 ui.write(' ')
2760 ui.write(node2str(succsset[0]))
2760 ui.write(node2str(succsset[0]))
2761 for node in succsset[1:]:
2761 for node in succsset[1:]:
2762 ui.write(' ')
2762 ui.write(' ')
2763 ui.write(node2str(node))
2763 ui.write(node2str(node))
2764 ui.write('\n')
2764 ui.write('\n')
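# Illustrative sketch, not part of the upstream file: producing the layout
# documented above (the revision on its own line, then one indented line per
# successors set) from plain data. Names are hypothetical.
def _formatsuccessorssets(rev, succssets):
    '''render a revision and its successors sets as the command prints them'''
    lines = ['%s' % rev]
    for succsset in succssets:
        lines.append('    ' + ' '.join(succsset))
    return '\n'.join(lines) + '\n'

# e.g. _formatsuccessorssets('5c095ad7e90f', [['6a0148d4e132', '9a0c1d34f24a']])
# shows a split changeset: one successors set holding two successors.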
2765
2765
2766 @command('debugwalk', walkopts, _('[OPTION]... [FILE]...'))
2766 @command('debugwalk', walkopts, _('[OPTION]... [FILE]...'))
2767 def debugwalk(ui, repo, *pats, **opts):
2767 def debugwalk(ui, repo, *pats, **opts):
2768 """show how files match on given patterns"""
2768 """show how files match on given patterns"""
2769 m = scmutil.match(repo[None], pats, opts)
2769 m = scmutil.match(repo[None], pats, opts)
2770 items = list(repo.walk(m))
2770 items = list(repo.walk(m))
2771 if not items:
2771 if not items:
2772 return
2772 return
2773 f = lambda fn: fn
2773 f = lambda fn: fn
2774 if ui.configbool('ui', 'slash') and os.sep != '/':
2774 if ui.configbool('ui', 'slash') and os.sep != '/':
2775 f = lambda fn: util.normpath(fn)
2775 f = lambda fn: util.normpath(fn)
2776 fmt = 'f %%-%ds %%-%ds %%s' % (
2776 fmt = 'f %%-%ds %%-%ds %%s' % (
2777 max([len(abs) for abs in items]),
2777 max([len(abs) for abs in items]),
2778 max([len(m.rel(abs)) for abs in items]))
2778 max([len(m.rel(abs)) for abs in items]))
2779 for abs in items:
2779 for abs in items:
2780 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2780 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2781 ui.write("%s\n" % line.rstrip())
2781 ui.write("%s\n" % line.rstrip())
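# Illustrative sketch, not part of the upstream file: the column-width
# computation above, which sizes the absolute and relative path columns to
# the longest entry. The helper name and sample paths are hypothetical.
def _walkfmt(abspaths, relpaths):
    '''build an aligned 'f <abs> <rel> <flag>' format string'''
    return 'f %%-%ds %%-%ds %%s' % (max(len(a) for a in abspaths),
                                    max(len(r) for r in relpaths))

# e.g. _walkfmt(['lib/io.py'], ['io.py']) % ('lib/io.py', 'io.py', 'exact')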
2782
2782
2783 @command('debugwireargs',
2783 @command('debugwireargs',
2784 [('', 'three', '', 'three'),
2784 [('', 'three', '', 'three'),
2785 ('', 'four', '', 'four'),
2785 ('', 'four', '', 'four'),
2786 ('', 'five', '', 'five'),
2786 ('', 'five', '', 'five'),
2787 ] + remoteopts,
2787 ] + remoteopts,
2788 _('REPO [OPTIONS]... [ONE [TWO]]'))
2788 _('REPO [OPTIONS]... [ONE [TWO]]'))
2789 def debugwireargs(ui, repopath, *vals, **opts):
2789 def debugwireargs(ui, repopath, *vals, **opts):
2790 repo = hg.peer(ui, opts, repopath)
2790 repo = hg.peer(ui, opts, repopath)
2791 for opt in remoteopts:
2791 for opt in remoteopts:
2792 del opts[opt[1]]
2792 del opts[opt[1]]
2793 args = {}
2793 args = {}
2794 for k, v in opts.iteritems():
2794 for k, v in opts.iteritems():
2795 if v:
2795 if v:
2796 args[k] = v
2796 args[k] = v
2797 # run twice to check that we don't mess up the stream for the next command
2797 # run twice to check that we don't mess up the stream for the next command
2798 res1 = repo.debugwireargs(*vals, **args)
2798 res1 = repo.debugwireargs(*vals, **args)
2799 res2 = repo.debugwireargs(*vals, **args)
2799 res2 = repo.debugwireargs(*vals, **args)
2800 ui.write("%s\n" % res1)
2800 ui.write("%s\n" % res1)
2801 if res1 != res2:
2801 if res1 != res2:
2802 ui.warn("%s\n" % res2)
2802 ui.warn("%s\n" % res2)
2803
2803
2804 @command('^diff',
2804 @command('^diff',
2805 [('r', 'rev', [], _('revision'), _('REV')),
2805 [('r', 'rev', [], _('revision'), _('REV')),
2806 ('c', 'change', '', _('change made by revision'), _('REV'))
2806 ('c', 'change', '', _('change made by revision'), _('REV'))
2807 ] + diffopts + diffopts2 + walkopts + subrepoopts,
2807 ] + diffopts + diffopts2 + walkopts + subrepoopts,
2808 _('[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...'))
2808 _('[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...'))
2809 def diff(ui, repo, *pats, **opts):
2809 def diff(ui, repo, *pats, **opts):
2810 """diff repository (or selected files)
2810 """diff repository (or selected files)
2811
2811
2812 Show differences between revisions for the specified files.
2812 Show differences between revisions for the specified files.
2813
2813
2814 Differences between files are shown using the unified diff format.
2814 Differences between files are shown using the unified diff format.
2815
2815
2816 .. note::
2816 .. note::
2817
2817
2818 diff may generate unexpected results for merges, as it will
2818 diff may generate unexpected results for merges, as it will
2819 default to comparing against the working directory's first
2819 default to comparing against the working directory's first
2820 parent changeset if no revisions are specified.
2820 parent changeset if no revisions are specified.
2821
2821
2822 When two revision arguments are given, then changes are shown
2822 When two revision arguments are given, then changes are shown
2823 between those revisions. If only one revision is specified then
2823 between those revisions. If only one revision is specified then
2824 that revision is compared to the working directory, and, when no
2824 that revision is compared to the working directory, and, when no
2825 revisions are specified, the working directory files are compared
2825 revisions are specified, the working directory files are compared
2826 to its parent.
2826 to its parent.
2827
2827
2828 Alternatively you can specify -c/--change with a revision to see
2828 Alternatively you can specify -c/--change with a revision to see
2829 the changes in that changeset relative to its first parent.
2829 the changes in that changeset relative to its first parent.
2830
2830
2831 Without the -a/--text option, diff will avoid generating diffs of
2831 Without the -a/--text option, diff will avoid generating diffs of
2832 files it detects as binary. With -a, diff will generate a diff
2832 files it detects as binary. With -a, diff will generate a diff
2833 anyway, probably with undesirable results.
2833 anyway, probably with undesirable results.
2834
2834
2835 Use the -g/--git option to generate diffs in the git extended diff
2835 Use the -g/--git option to generate diffs in the git extended diff
2836 format. For more information, read :hg:`help diffs`.
2836 format. For more information, read :hg:`help diffs`.
2837
2837
2838 .. container:: verbose
2838 .. container:: verbose
2839
2839
2840 Examples:
2840 Examples:
2841
2841
2842 - compare a file in the current working directory to its parent::
2842 - compare a file in the current working directory to its parent::
2843
2843
2844 hg diff foo.c
2844 hg diff foo.c
2845
2845
2846 - compare two historical versions of a directory, with rename info::
2846 - compare two historical versions of a directory, with rename info::
2847
2847
2848 hg diff --git -r 1.0:1.2 lib/
2848 hg diff --git -r 1.0:1.2 lib/
2849
2849
2850 - get change stats relative to the last change on some date::
2850 - get change stats relative to the last change on some date::
2851
2851
2852 hg diff --stat -r "date('may 2')"
2852 hg diff --stat -r "date('may 2')"
2853
2853
2854 - diff all newly-added files that contain a keyword::
2854 - diff all newly-added files that contain a keyword::
2855
2855
2856 hg diff "set:added() and grep(GNU)"
2856 hg diff "set:added() and grep(GNU)"
2857
2857
2858 - compare a revision and its parents::
2858 - compare a revision and its parents::
2859
2859
2860 hg diff -c 9353 # compare against first parent
2860 hg diff -c 9353 # compare against first parent
2861 hg diff -r 9353^:9353 # same using revset syntax
2861 hg diff -r 9353^:9353 # same using revset syntax
2862 hg diff -r 9353^2:9353 # compare against the second parent
2862 hg diff -r 9353^2:9353 # compare against the second parent
2863
2863
2864 Returns 0 on success.
2864 Returns 0 on success.
2865 """
2865 """
2866
2866
2867 revs = opts.get('rev')
2867 revs = opts.get('rev')
2868 change = opts.get('change')
2868 change = opts.get('change')
2869 stat = opts.get('stat')
2869 stat = opts.get('stat')
2870 reverse = opts.get('reverse')
2870 reverse = opts.get('reverse')
2871
2871
2872 if revs and change:
2872 if revs and change:
2873 msg = _('cannot specify --rev and --change at the same time')
2873 msg = _('cannot specify --rev and --change at the same time')
2874 raise util.Abort(msg)
2874 raise util.Abort(msg)
2875 elif change:
2875 elif change:
2876 node2 = scmutil.revsingle(repo, change, None).node()
2876 node2 = scmutil.revsingle(repo, change, None).node()
2877 node1 = repo[node2].p1().node()
2877 node1 = repo[node2].p1().node()
2878 else:
2878 else:
2879 node1, node2 = scmutil.revpair(repo, revs)
2879 node1, node2 = scmutil.revpair(repo, revs)
2880
2880
2881 if reverse:
2881 if reverse:
2882 node1, node2 = node2, node1
2882 node1, node2 = node2, node1
2883
2883
2884 diffopts = patch.diffopts(ui, opts)
2884 diffopts = patch.diffopts(ui, opts)
2885 m = scmutil.match(repo[node2], pats, opts)
2885 m = scmutil.match(repo[node2], pats, opts)
2886 cmdutil.diffordiffstat(ui, repo, diffopts, node1, node2, m, stat=stat,
2886 cmdutil.diffordiffstat(ui, repo, diffopts, node1, node2, m, stat=stat,
2887 listsubrepos=opts.get('subrepos'))
2887 listsubrepos=opts.get('subrepos'))
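# Illustrative sketch, not part of the upstream file: the endpoint selection
# rules the help text above describes, reduced to plain values. The strings
# 'wdir' and 'wdir-parent' stand in for the working directory and its first
# parent; the helper name is hypothetical.
def _diffendpoints(revs=None, change=None, reverse=False):
    '''return the (old, new) pair that the diff will be computed between'''
    if revs and change:
        raise ValueError('cannot specify --rev and --change at the same time')
    elif change:
        node1, node2 = '%s^' % change, change   # a changeset against its parent
    elif revs and len(revs) == 2:
        node1, node2 = revs[0], revs[1]         # two explicit revisions
    elif revs:
        node1, node2 = revs[0], 'wdir'          # one revision vs. working dir
    else:
        node1, node2 = 'wdir-parent', 'wdir'    # default: working dir vs. parent
    if reverse:
        node1, node2 = node2, node1
    return node1, node2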
2888
2888
2889 @command('^export',
2889 @command('^export',
2890 [('o', 'output', '',
2890 [('o', 'output', '',
2891 _('print output to file with formatted name'), _('FORMAT')),
2891 _('print output to file with formatted name'), _('FORMAT')),
2892 ('', 'switch-parent', None, _('diff against the second parent')),
2892 ('', 'switch-parent', None, _('diff against the second parent')),
2893 ('r', 'rev', [], _('revisions to export'), _('REV')),
2893 ('r', 'rev', [], _('revisions to export'), _('REV')),
2894 ] + diffopts,
2894 ] + diffopts,
2895 _('[OPTION]... [-o OUTFILESPEC] [-r] [REV]...'))
2895 _('[OPTION]... [-o OUTFILESPEC] [-r] [REV]...'))
2896 def export(ui, repo, *changesets, **opts):
2896 def export(ui, repo, *changesets, **opts):
2897 """dump the header and diffs for one or more changesets
2897 """dump the header and diffs for one or more changesets
2898
2898
2899 Print the changeset header and diffs for one or more revisions.
2899 Print the changeset header and diffs for one or more revisions.
2900 If no revision is given, the parent of the working directory is used.
2900 If no revision is given, the parent of the working directory is used.
2901
2901
2902 The information shown in the changeset header is: author, date,
2902 The information shown in the changeset header is: author, date,
2903 branch name (if non-default), changeset hash, parent(s) and commit
2903 branch name (if non-default), changeset hash, parent(s) and commit
2904 comment.
2904 comment.
2905
2905
2906 .. note::
2906 .. note::
2907
2907
2908 export may generate unexpected diff output for merge
2908 export may generate unexpected diff output for merge
2909 changesets, as it will compare the merge changeset against its
2909 changesets, as it will compare the merge changeset against its
2910 first parent only.
2910 first parent only.
2911
2911
2912 Output may be to a file, in which case the name of the file is
2912 Output may be to a file, in which case the name of the file is
2913 given using a format string. The formatting rules are as follows:
2913 given using a format string. The formatting rules are as follows:
2914
2914
2915 :``%%``: literal "%" character
2915 :``%%``: literal "%" character
2916 :``%H``: changeset hash (40 hexadecimal digits)
2916 :``%H``: changeset hash (40 hexadecimal digits)
2917 :``%N``: number of patches being generated
2917 :``%N``: number of patches being generated
2918 :``%R``: changeset revision number
2918 :``%R``: changeset revision number
2919 :``%b``: basename of the exporting repository
2919 :``%b``: basename of the exporting repository
2920 :``%h``: short-form changeset hash (12 hexadecimal digits)
2920 :``%h``: short-form changeset hash (12 hexadecimal digits)
2921 :``%m``: first line of the commit message (only alphanumeric characters)
2921 :``%m``: first line of the commit message (only alphanumeric characters)
2922 :``%n``: zero-padded sequence number, starting at 1
2922 :``%n``: zero-padded sequence number, starting at 1
2923 :``%r``: zero-padded changeset revision number
2923 :``%r``: zero-padded changeset revision number
2924
2924
2925 Without the -a/--text option, export will avoid generating diffs
2925 Without the -a/--text option, export will avoid generating diffs
2926 of files it detects as binary. With -a, export will generate a
2926 of files it detects as binary. With -a, export will generate a
2927 diff anyway, probably with undesirable results.
2927 diff anyway, probably with undesirable results.
2928
2928
2929 Use the -g/--git option to generate diffs in the git extended diff
2929 Use the -g/--git option to generate diffs in the git extended diff
2930 format. See :hg:`help diffs` for more information.
2930 format. See :hg:`help diffs` for more information.
2931
2931
2932 With the --switch-parent option, the diff will be against the
2932 With the --switch-parent option, the diff will be against the
2933 second parent. This can be useful for reviewing a merge.
2933 second parent. This can be useful for reviewing a merge.
2934
2934
2935 .. container:: verbose
2935 .. container:: verbose
2936
2936
2937 Examples:
2937 Examples:
2938
2938
2939 - use export and import to transplant a bugfix to the current
2939 - use export and import to transplant a bugfix to the current
2940 branch::
2940 branch::
2941
2941
2942 hg export -r 9353 | hg import -
2942 hg export -r 9353 | hg import -
2943
2943
2944 - export all the changesets between two revisions to a file with
2944 - export all the changesets between two revisions to a file with
2945 rename information::
2945 rename information::
2946
2946
2947 hg export --git -r 123:150 > changes.txt
2947 hg export --git -r 123:150 > changes.txt
2948
2948
2949 - split outgoing changes into a series of patches with
2949 - split outgoing changes into a series of patches with
2950 descriptive names::
2950 descriptive names::
2951
2951
2952 hg export -r "outgoing()" -o "%n-%m.patch"
2952 hg export -r "outgoing()" -o "%n-%m.patch"
2953
2953
2954 Returns 0 on success.
2954 Returns 0 on success.
2955 """
2955 """
2956 changesets += tuple(opts.get('rev', []))
2956 changesets += tuple(opts.get('rev', []))
2957 if not changesets:
2957 if not changesets:
2958 changesets = ['.']
2958 changesets = ['.']
2959 revs = scmutil.revrange(repo, changesets)
2959 revs = scmutil.revrange(repo, changesets)
2960 if not revs:
2960 if not revs:
2961 raise util.Abort(_("export requires at least one changeset"))
2961 raise util.Abort(_("export requires at least one changeset"))
2962 if len(revs) > 1:
2962 if len(revs) > 1:
2963 ui.note(_('exporting patches:\n'))
2963 ui.note(_('exporting patches:\n'))
2964 else:
2964 else:
2965 ui.note(_('exporting patch:\n'))
2965 ui.note(_('exporting patch:\n'))
2966 cmdutil.export(repo, revs, template=opts.get('output'),
2966 cmdutil.export(repo, revs, template=opts.get('output'),
2967 switch_parent=opts.get('switch_parent'),
2967 switch_parent=opts.get('switch_parent'),
2968 opts=patch.diffopts(ui, opts))
2968 opts=patch.diffopts(ui, opts))
2969
2969
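Editor's note: the %-specifiers in the table above drive the -o/--output file names. A rough, standalone illustration of how a template such as "%n-%m.patch" could expand for one patch in a series — this is only a sketch of the documented behaviour, not Mercurial's actual template code, and the exact message-sanitising rule here is an assumption::

    import re

    def expand(template, seq, total, message):
        # %n -> zero-padded sequence number, %N -> number of patches,
        # %m -> first message line with non-alphanumerics collapsed
        # (approximation of the rule above), %% -> literal '%'
        width = len(str(total))
        first = message.splitlines()[0] if message else ''
        subs = {
            '%': '%',
            'n': str(seq).zfill(width),
            'N': str(total),
            'm': re.sub(r'\W+', '_', first).strip('_'),
        }
        # single pass, so "%%" stays a literal percent sign
        return re.sub(r'%(.)', lambda m: subs.get(m.group(1), m.group(0)), template)

    # expand('%n-%m.patch', 2, 12, 'fix: handle empty revsets')
    # -> '02-fix_handle_empty_revsets.patch'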
2970 @command('^forget', walkopts, _('[OPTION]... FILE...'))
2970 @command('^forget', walkopts, _('[OPTION]... FILE...'))
2971 def forget(ui, repo, *pats, **opts):
2971 def forget(ui, repo, *pats, **opts):
2972 """forget the specified files on the next commit
2972 """forget the specified files on the next commit
2973
2973
2974 Mark the specified files so they will no longer be tracked
2974 Mark the specified files so they will no longer be tracked
2975 after the next commit.
2975 after the next commit.
2976
2976
2977 This only removes files from the current branch, not from the
2977 This only removes files from the current branch, not from the
2978 entire project history, and it does not delete them from the
2978 entire project history, and it does not delete them from the
2979 working directory.
2979 working directory.
2980
2980
2981 To undo a forget before the next commit, see :hg:`add`.
2981 To undo a forget before the next commit, see :hg:`add`.
2982
2982
2983 .. container:: verbose
2983 .. container:: verbose
2984
2984
2985 Examples:
2985 Examples:
2986
2986
2987 - forget newly-added binary files::
2987 - forget newly-added binary files::
2988
2988
2989 hg forget "set:added() and binary()"
2989 hg forget "set:added() and binary()"
2990
2990
2991 - forget files that would be excluded by .hgignore::
2991 - forget files that would be excluded by .hgignore::
2992
2992
2993 hg forget "set:hgignore()"
2993 hg forget "set:hgignore()"
2994
2994
2995 Returns 0 on success.
2995 Returns 0 on success.
2996 """
2996 """
2997
2997
2998 if not pats:
2998 if not pats:
2999 raise util.Abort(_('no files specified'))
2999 raise util.Abort(_('no files specified'))
3000
3000
3001 m = scmutil.match(repo[None], pats, opts)
3001 m = scmutil.match(repo[None], pats, opts)
3002 rejected = cmdutil.forget(ui, repo, m, prefix="", explicitonly=False)[0]
3002 rejected = cmdutil.forget(ui, repo, m, prefix="", explicitonly=False)[0]
3003 return rejected and 1 or 0
3003 return rejected and 1 or 0
3004
3004
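Editor's note: the final statement of forget() uses the old "x and 1 or 0" idiom, which predates Python's conditional expression; it maps a non-empty list of rejected files to exit status 1 and an empty one to 0. A tiny standalone check of that equivalence (plain Python, no Mercurial objects involved)::

    def exit_status(rejected):
        # same truth table as "rejected and 1 or 0" in forget() above
        return 1 if rejected else 0

    assert exit_status([]) == ([] and 1 or 0) == 0
    assert exit_status(['a.bin']) == (['a.bin'] and 1 or 0) == 1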
3005 @command(
3005 @command(
3006 'graft',
3006 'graft',
3007 [('r', 'rev', [], _('revisions to graft'), _('REV')),
3007 [('r', 'rev', [], _('revisions to graft'), _('REV')),
3008 ('c', 'continue', False, _('resume interrupted graft')),
3008 ('c', 'continue', False, _('resume interrupted graft')),
3009 ('e', 'edit', False, _('invoke editor on commit messages')),
3009 ('e', 'edit', False, _('invoke editor on commit messages')),
3010 ('', 'log', None, _('append graft info to log message')),
3010 ('', 'log', None, _('append graft info to log message')),
3011 ('D', 'currentdate', False,
3011 ('D', 'currentdate', False,
3012 _('record the current date as commit date')),
3012 _('record the current date as commit date')),
3013 ('U', 'currentuser', False,
3013 ('U', 'currentuser', False,
3014 _('record the current user as committer'))]
3014 _('record the current user as committer'))]
3015 + commitopts2 + mergetoolopts + dryrunopts,
3015 + commitopts2 + mergetoolopts + dryrunopts,
3016 _('[OPTION]... [-r] REV...'))
3016 _('[OPTION]... [-r] REV...'))
3017 def graft(ui, repo, *revs, **opts):
3017 def graft(ui, repo, *revs, **opts):
3018 '''copy changes from other branches onto the current branch
3018 '''copy changes from other branches onto the current branch
3019
3019
3020 This command uses Mercurial's merge logic to copy individual
3020 This command uses Mercurial's merge logic to copy individual
3021 changes from other branches without merging branches in the
3021 changes from other branches without merging branches in the
3022 history graph. This is sometimes known as 'backporting' or
3022 history graph. This is sometimes known as 'backporting' or
3023 'cherry-picking'. By default, graft will copy user, date, and
3023 'cherry-picking'. By default, graft will copy user, date, and
3024 description from the source changesets.
3024 description from the source changesets.
3025
3025
3026 Changesets that are ancestors of the current revision, that have
3026 Changesets that are ancestors of the current revision, that have
3027 already been grafted, or that are merges will be skipped.
3027 already been grafted, or that are merges will be skipped.
3028
3028
3029 If --log is specified, log messages will have a comment appended
3029 If --log is specified, log messages will have a comment appended
3030 of the form::
3030 of the form::
3031
3031
3032 (grafted from CHANGESETHASH)
3032 (grafted from CHANGESETHASH)
3033
3033
3034 If a graft merge results in conflicts, the graft process is
3034 If a graft merge results in conflicts, the graft process is
3035 interrupted so that the current merge can be manually resolved.
3035 interrupted so that the current merge can be manually resolved.
3036 Once all conflicts are addressed, the graft process can be
3036 Once all conflicts are addressed, the graft process can be
3037 continued with the -c/--continue option.
3037 continued with the -c/--continue option.
3038
3038
3039 .. note::
3039 .. note::
3040
3040
3041 The -c/--continue option does not reapply earlier options.
3041 The -c/--continue option does not reapply earlier options.
3042
3042
3043 .. container:: verbose
3043 .. container:: verbose
3044
3044
3045 Examples:
3045 Examples:
3046
3046
3047 - copy a single change to the stable branch and edit its description::
3047 - copy a single change to the stable branch and edit its description::
3048
3048
3049 hg update stable
3049 hg update stable
3050 hg graft --edit 9393
3050 hg graft --edit 9393
3051
3051
3052 - graft a range of changesets with one exception, updating dates::
3052 - graft a range of changesets with one exception, updating dates::
3053
3053
3054 hg graft -D "2085::2093 and not 2091"
3054 hg graft -D "2085::2093 and not 2091"
3055
3055
3056 - continue a graft after resolving conflicts::
3056 - continue a graft after resolving conflicts::
3057
3057
3058 hg graft -c
3058 hg graft -c
3059
3059
3060 - show the source of a grafted changeset::
3060 - show the source of a grafted changeset::
3061
3061
3062 hg log --debug -r .
3062 hg log --debug -r .
3063
3063
3064 Returns 0 on successful completion.
3064 Returns 0 on successful completion.
3065 '''
3065 '''
3066
3066
3067 revs = list(revs)
3067 revs = list(revs)
3068 revs.extend(opts['rev'])
3068 revs.extend(opts['rev'])
3069
3069
3070 if not opts.get('user') and opts.get('currentuser'):
3070 if not opts.get('user') and opts.get('currentuser'):
3071 opts['user'] = ui.username()
3071 opts['user'] = ui.username()
3072 if not opts.get('date') and opts.get('currentdate'):
3072 if not opts.get('date') and opts.get('currentdate'):
3073 opts['date'] = "%d %d" % util.makedate()
3073 opts['date'] = "%d %d" % util.makedate()
3074
3074
3075 editor = cmdutil.getcommiteditor(**opts)
3075 editor = cmdutil.getcommiteditor(**opts)
3076
3076
3077 cont = False
3077 cont = False
3078 if opts['continue']:
3078 if opts['continue']:
3079 cont = True
3079 cont = True
3080 if revs:
3080 if revs:
3081 raise util.Abort(_("can't specify --continue and revisions"))
3081 raise util.Abort(_("can't specify --continue and revisions"))
3082 # read in unfinished revisions
3082 # read in unfinished revisions
3083 try:
3083 try:
3084 nodes = repo.opener.read('graftstate').splitlines()
3084 nodes = repo.opener.read('graftstate').splitlines()
3085 revs = [repo[node].rev() for node in nodes]
3085 revs = [repo[node].rev() for node in nodes]
3086 except IOError, inst:
3086 except IOError, inst:
3087 if inst.errno != errno.ENOENT:
3087 if inst.errno != errno.ENOENT:
3088 raise
3088 raise
3089 raise util.Abort(_("no graft state found, can't continue"))
3089 raise util.Abort(_("no graft state found, can't continue"))
3090 else:
3090 else:
3091 cmdutil.checkunfinished(repo)
3091 cmdutil.checkunfinished(repo)
3092 cmdutil.bailifchanged(repo)
3092 cmdutil.bailifchanged(repo)
3093 if not revs:
3093 if not revs:
3094 raise util.Abort(_('no revisions specified'))
3094 raise util.Abort(_('no revisions specified'))
3095 revs = scmutil.revrange(repo, revs)
3095 revs = scmutil.revrange(repo, revs)
3096
3096
3097 # check for merges
3097 # check for merges
3098 for rev in repo.revs('%ld and merge()', revs):
3098 for rev in repo.revs('%ld and merge()', revs):
3099 ui.warn(_('skipping ungraftable merge revision %s\n') % rev)
3099 ui.warn(_('skipping ungraftable merge revision %s\n') % rev)
3100 revs.remove(rev)
3100 revs.remove(rev)
3101 if not revs:
3101 if not revs:
3102 return -1
3102 return -1
3103
3103
3104 # check for ancestors of dest branch
3104 # check for ancestors of dest branch
3105 crev = repo['.'].rev()
3105 crev = repo['.'].rev()
3106 ancestors = repo.changelog.ancestors([crev], inclusive=True)
3106 ancestors = repo.changelog.ancestors([crev], inclusive=True)
3107 # Cannot use x.remove(y) on smart set, this has to be a list.
3107 # Cannot use x.remove(y) on smart set, this has to be a list.
3108 # XXX make this lazy in the future
3108 # XXX make this lazy in the future
3109 revs = list(revs)
3109 revs = list(revs)
3110 # don't mutate while iterating, create a copy
3110 # don't mutate while iterating, create a copy
3111 for rev in list(revs):
3111 for rev in list(revs):
3112 if rev in ancestors:
3112 if rev in ancestors:
3113 ui.warn(_('skipping ancestor revision %s\n') % rev)
3113 ui.warn(_('skipping ancestor revision %s\n') % rev)
3114 # XXX remove on list is slow
3114 # XXX remove on list is slow
3115 revs.remove(rev)
3115 revs.remove(rev)
3116 if not revs:
3116 if not revs:
3117 return -1
3117 return -1
3118
3118
3119 # analyze revs for earlier grafts
3119 # analyze revs for earlier grafts
3120 ids = {}
3120 ids = {}
3121 for ctx in repo.set("%ld", revs):
3121 for ctx in repo.set("%ld", revs):
3122 ids[ctx.hex()] = ctx.rev()
3122 ids[ctx.hex()] = ctx.rev()
3123 n = ctx.extra().get('source')
3123 n = ctx.extra().get('source')
3124 if n:
3124 if n:
3125 ids[n] = ctx.rev()
3125 ids[n] = ctx.rev()
3126
3126
3127 # check ancestors for earlier grafts
3127 # check ancestors for earlier grafts
3128 ui.debug('scanning for duplicate grafts\n')
3128 ui.debug('scanning for duplicate grafts\n')
3129
3129
3130 for rev in repo.changelog.findmissingrevs(revs, [crev]):
3130 for rev in repo.changelog.findmissingrevs(revs, [crev]):
3131 ctx = repo[rev]
3131 ctx = repo[rev]
3132 n = ctx.extra().get('source')
3132 n = ctx.extra().get('source')
3133 if n in ids:
3133 if n in ids:
3134 r = repo[n].rev()
3134 r = repo[n].rev()
3135 if r in revs:
3135 if r in revs:
3136 ui.warn(_('skipping revision %s (already grafted to %s)\n')
3136 ui.warn(_('skipping revision %s (already grafted to %s)\n')
3137 % (r, rev))
3137 % (r, rev))
3138 revs.remove(r)
3138 revs.remove(r)
3139 elif ids[n] in revs:
3139 elif ids[n] in revs:
3140 ui.warn(_('skipping already grafted revision %s '
3140 ui.warn(_('skipping already grafted revision %s '
3141 '(%s also has origin %d)\n') % (ids[n], rev, r))
3141 '(%s also has origin %d)\n') % (ids[n], rev, r))
3142 revs.remove(ids[n])
3142 revs.remove(ids[n])
3143 elif ctx.hex() in ids:
3143 elif ctx.hex() in ids:
3144 r = ids[ctx.hex()]
3144 r = ids[ctx.hex()]
3145 ui.warn(_('skipping already grafted revision %s '
3145 ui.warn(_('skipping already grafted revision %s '
3146 '(was grafted from %d)\n') % (r, rev))
3146 '(was grafted from %d)\n') % (r, rev))
3147 revs.remove(r)
3147 revs.remove(r)
3148 if not revs:
3148 if not revs:
3149 return -1
3149 return -1
3150
3150
3151 wlock = repo.wlock()
3151 wlock = repo.wlock()
3152 try:
3152 try:
3153 current = repo['.']
3153 current = repo['.']
3154 for pos, ctx in enumerate(repo.set("%ld", revs)):
3154 for pos, ctx in enumerate(repo.set("%ld", revs)):
3155
3155
3156 ui.status(_('grafting revision %s\n') % ctx.rev())
3156 ui.status(_('grafting revision %s\n') % ctx.rev())
3157 if opts.get('dry_run'):
3157 if opts.get('dry_run'):
3158 continue
3158 continue
3159
3159
3160 source = ctx.extra().get('source')
3160 source = ctx.extra().get('source')
3161 if not source:
3161 if not source:
3162 source = ctx.hex()
3162 source = ctx.hex()
3163 extra = {'source': source}
3163 extra = {'source': source}
3164 user = ctx.user()
3164 user = ctx.user()
3165 if opts.get('user'):
3165 if opts.get('user'):
3166 user = opts['user']
3166 user = opts['user']
3167 date = ctx.date()
3167 date = ctx.date()
3168 if opts.get('date'):
3168 if opts.get('date'):
3169 date = opts['date']
3169 date = opts['date']
3170 message = ctx.description()
3170 message = ctx.description()
3171 if opts.get('log'):
3171 if opts.get('log'):
3172 message += '\n(grafted from %s)' % ctx.hex()
3172 message += '\n(grafted from %s)' % ctx.hex()
3173
3173
3174 # we don't merge the first commit when continuing
3174 # we don't merge the first commit when continuing
3175 if not cont:
3175 if not cont:
3176 # perform the graft merge with p1(rev) as 'ancestor'
3176 # perform the graft merge with p1(rev) as 'ancestor'
3177 try:
3177 try:
3178 # ui.forcemerge is an internal variable, do not document
3178 # ui.forcemerge is an internal variable, do not document
3179 repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
3179 repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
3180 'graft')
3180 'graft')
3181 stats = mergemod.update(repo, ctx.node(), True, True, False,
3181 stats = mergemod.update(repo, ctx.node(), True, True, False,
3182 ctx.p1().node(),
3182 ctx.p1().node(),
3183 labels=['local', 'graft'])
3183 labels=['local', 'graft'])
3184 finally:
3184 finally:
3185 repo.ui.setconfig('ui', 'forcemerge', '', 'graft')
3185 repo.ui.setconfig('ui', 'forcemerge', '', 'graft')
3186 # report any conflicts
3186 # report any conflicts
3187 if stats and stats[3] > 0:
3187 if stats and stats[3] > 0:
3188 # write out state for --continue
3188 # write out state for --continue
3189 nodelines = [repo[rev].hex() + "\n" for rev in revs[pos:]]
3189 nodelines = [repo[rev].hex() + "\n" for rev in revs[pos:]]
3190 repo.opener.write('graftstate', ''.join(nodelines))
3190 repo.opener.write('graftstate', ''.join(nodelines))
3191 raise util.Abort(
3191 raise util.Abort(
3192 _("unresolved conflicts, can't continue"),
3192 _("unresolved conflicts, can't continue"),
3193 hint=_('use hg resolve and hg graft --continue'))
3193 hint=_('use hg resolve and hg graft --continue'))
3194 else:
3194 else:
3195 cont = False
3195 cont = False
3196
3196
3197 # drop the second merge parent
3197 # drop the second merge parent
3198 repo.setparents(current.node(), nullid)
3198 repo.setparents(current.node(), nullid)
3199 repo.dirstate.write()
3199 repo.dirstate.write()
3200 # fix up dirstate for copies and renames
3200 # fix up dirstate for copies and renames
3201 cmdutil.duplicatecopies(repo, ctx.rev(), ctx.p1().rev())
3201 cmdutil.duplicatecopies(repo, ctx.rev(), ctx.p1().rev())
3202
3202
3203 # commit
3203 # commit
3204 node = repo.commit(text=message, user=user,
3204 node = repo.commit(text=message, user=user,
3205 date=date, extra=extra, editor=editor)
3205 date=date, extra=extra, editor=editor)
3206 if node is None:
3206 if node is None:
3207 ui.status(_('graft for revision %s is empty\n') % ctx.rev())
3207 ui.status(_('graft for revision %s is empty\n') % ctx.rev())
3208 else:
3208 else:
3209 current = repo[node]
3209 current = repo[node]
3210 finally:
3210 finally:
3211 wlock.release()
3211 wlock.release()
3212
3212
3213 # remove state when we complete successfully
3213 # remove state when we complete successfully
3214 if not opts.get('dry_run'):
3214 if not opts.get('dry_run'):
3215 util.unlinkpath(repo.join('graftstate'), ignoremissing=True)
3215 util.unlinkpath(repo.join('graftstate'), ignoremissing=True)
3216
3216
3217 return 0
3217 return 0
3218
3218
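Editor's note: when a graft merge hits conflicts, the remaining node hashes are written one per line to a `graftstate` file, and `--continue` reads them back with splitlines(). A minimal sketch of that round trip with ordinary file I/O (the path and hashes are placeholders; the real code goes through repo.opener)::

    def write_state(path, nodes):
        # one 40-character hex node per line, mirroring the format written above
        with open(path, 'w') as f:
            f.write(''.join(n + '\n' for n in nodes))

    def read_state(path):
        with open(path) as f:
            return f.read().splitlines()

    # pending = ['a' * 40, 'b' * 40]           # placeholder hashes
    # write_state('graftstate', pending)
    # read_state('graftstate') == pending      # True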
3219 @command('grep',
3219 @command('grep',
3220 [('0', 'print0', None, _('end fields with NUL')),
3220 [('0', 'print0', None, _('end fields with NUL')),
3221 ('', 'all', None, _('print all revisions that match')),
3221 ('', 'all', None, _('print all revisions that match')),
3222 ('a', 'text', None, _('treat all files as text')),
3222 ('a', 'text', None, _('treat all files as text')),
3223 ('f', 'follow', None,
3223 ('f', 'follow', None,
3224 _('follow changeset history,'
3224 _('follow changeset history,'
3225 ' or file history across copies and renames')),
3225 ' or file history across copies and renames')),
3226 ('i', 'ignore-case', None, _('ignore case when matching')),
3226 ('i', 'ignore-case', None, _('ignore case when matching')),
3227 ('l', 'files-with-matches', None,
3227 ('l', 'files-with-matches', None,
3228 _('print only filenames and revisions that match')),
3228 _('print only filenames and revisions that match')),
3229 ('n', 'line-number', None, _('print matching line numbers')),
3229 ('n', 'line-number', None, _('print matching line numbers')),
3230 ('r', 'rev', [],
3230 ('r', 'rev', [],
3231 _('only search files changed within revision range'), _('REV')),
3231 _('only search files changed within revision range'), _('REV')),
3232 ('u', 'user', None, _('list the author (long with -v)')),
3232 ('u', 'user', None, _('list the author (long with -v)')),
3233 ('d', 'date', None, _('list the date (short with -q)')),
3233 ('d', 'date', None, _('list the date (short with -q)')),
3234 ] + walkopts,
3234 ] + walkopts,
3235 _('[OPTION]... PATTERN [FILE]...'))
3235 _('[OPTION]... PATTERN [FILE]...'))
3236 def grep(ui, repo, pattern, *pats, **opts):
3236 def grep(ui, repo, pattern, *pats, **opts):
3237 """search for a pattern in specified files and revisions
3237 """search for a pattern in specified files and revisions
3238
3238
3239 Search revisions of files for a regular expression.
3239 Search revisions of files for a regular expression.
3240
3240
3241 This command behaves differently than Unix grep. It only accepts
3241 This command behaves differently than Unix grep. It only accepts
3242 Python/Perl regexps. It searches repository history, not the
3242 Python/Perl regexps. It searches repository history, not the
3243 working directory. It always prints the revision number in which a
3243 working directory. It always prints the revision number in which a
3244 match appears.
3244 match appears.
3245
3245
3246 By default, grep only prints output for the first revision of a
3246 By default, grep only prints output for the first revision of a
3247 file in which it finds a match. To get it to print every revision
3247 file in which it finds a match. To get it to print every revision
3248 that contains a change in match status ("-" for a match that
3248 that contains a change in match status ("-" for a match that
3249 becomes a non-match, or "+" for a non-match that becomes a match),
3249 becomes a non-match, or "+" for a non-match that becomes a match),
3250 use the --all flag.
3250 use the --all flag.
3251
3251
3252 Returns 0 if a match is found, 1 otherwise.
3252 Returns 0 if a match is found, 1 otherwise.
3253 """
3253 """
3254 reflags = re.M
3254 reflags = re.M
3255 if opts.get('ignore_case'):
3255 if opts.get('ignore_case'):
3256 reflags |= re.I
3256 reflags |= re.I
3257 try:
3257 try:
3258 regexp = util.compilere(pattern, reflags)
3258 regexp = util.compilere(pattern, reflags)
3259 except re.error, inst:
3259 except re.error, inst:
3260 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
3260 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
3261 return 1
3261 return 1
3262 sep, eol = ':', '\n'
3262 sep, eol = ':', '\n'
3263 if opts.get('print0'):
3263 if opts.get('print0'):
3264 sep = eol = '\0'
3264 sep = eol = '\0'
3265
3265
3266 getfile = util.lrucachefunc(repo.file)
3266 getfile = util.lrucachefunc(repo.file)
3267
3267
3268 def matchlines(body):
3268 def matchlines(body):
3269 begin = 0
3269 begin = 0
3270 linenum = 0
3270 linenum = 0
3271 while begin < len(body):
3271 while begin < len(body):
3272 match = regexp.search(body, begin)
3272 match = regexp.search(body, begin)
3273 if not match:
3273 if not match:
3274 break
3274 break
3275 mstart, mend = match.span()
3275 mstart, mend = match.span()
3276 linenum += body.count('\n', begin, mstart) + 1
3276 linenum += body.count('\n', begin, mstart) + 1
3277 lstart = body.rfind('\n', begin, mstart) + 1 or begin
3277 lstart = body.rfind('\n', begin, mstart) + 1 or begin
3278 begin = body.find('\n', mend) + 1 or len(body) + 1
3278 begin = body.find('\n', mend) + 1 or len(body) + 1
3279 lend = begin - 1
3279 lend = begin - 1
3280 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
3280 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
3281
3281
3282 class linestate(object):
3282 class linestate(object):
3283 def __init__(self, line, linenum, colstart, colend):
3283 def __init__(self, line, linenum, colstart, colend):
3284 self.line = line
3284 self.line = line
3285 self.linenum = linenum
3285 self.linenum = linenum
3286 self.colstart = colstart
3286 self.colstart = colstart
3287 self.colend = colend
3287 self.colend = colend
3288
3288
3289 def __hash__(self):
3289 def __hash__(self):
3290 return hash((self.linenum, self.line))
3290 return hash((self.linenum, self.line))
3291
3291
3292 def __eq__(self, other):
3292 def __eq__(self, other):
3293 return self.line == other.line
3293 return self.line == other.line
3294
3294
3295 def __iter__(self):
3295 def __iter__(self):
3296 yield (self.line[:self.colstart], '')
3296 yield (self.line[:self.colstart], '')
3297 yield (self.line[self.colstart:self.colend], 'grep.match')
3297 yield (self.line[self.colstart:self.colend], 'grep.match')
3298 rest = self.line[self.colend:]
3298 rest = self.line[self.colend:]
3299 while rest != '':
3299 while rest != '':
3300 match = regexp.search(rest)
3300 match = regexp.search(rest)
3301 if not match:
3301 if not match:
3302 yield (rest, '')
3302 yield (rest, '')
3303 break
3303 break
3304 mstart, mend = match.span()
3304 mstart, mend = match.span()
3305 yield (rest[:mstart], '')
3305 yield (rest[:mstart], '')
3306 yield (rest[mstart:mend], 'grep.match')
3306 yield (rest[mstart:mend], 'grep.match')
3307 rest = rest[mend:]
3307 rest = rest[mend:]
3308
3308
3309 matches = {}
3309 matches = {}
3310 copies = {}
3310 copies = {}
3311 def grepbody(fn, rev, body):
3311 def grepbody(fn, rev, body):
3312 matches[rev].setdefault(fn, [])
3312 matches[rev].setdefault(fn, [])
3313 m = matches[rev][fn]
3313 m = matches[rev][fn]
3314 for lnum, cstart, cend, line in matchlines(body):
3314 for lnum, cstart, cend, line in matchlines(body):
3315 s = linestate(line, lnum, cstart, cend)
3315 s = linestate(line, lnum, cstart, cend)
3316 m.append(s)
3316 m.append(s)
3317
3317
3318 def difflinestates(a, b):
3318 def difflinestates(a, b):
3319 sm = difflib.SequenceMatcher(None, a, b)
3319 sm = difflib.SequenceMatcher(None, a, b)
3320 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3320 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3321 if tag == 'insert':
3321 if tag == 'insert':
3322 for i in xrange(blo, bhi):
3322 for i in xrange(blo, bhi):
3323 yield ('+', b[i])
3323 yield ('+', b[i])
3324 elif tag == 'delete':
3324 elif tag == 'delete':
3325 for i in xrange(alo, ahi):
3325 for i in xrange(alo, ahi):
3326 yield ('-', a[i])
3326 yield ('-', a[i])
3327 elif tag == 'replace':
3327 elif tag == 'replace':
3328 for i in xrange(alo, ahi):
3328 for i in xrange(alo, ahi):
3329 yield ('-', a[i])
3329 yield ('-', a[i])
3330 for i in xrange(blo, bhi):
3330 for i in xrange(blo, bhi):
3331 yield ('+', b[i])
3331 yield ('+', b[i])
3332
3332
3333 def display(fn, ctx, pstates, states):
3333 def display(fn, ctx, pstates, states):
3334 rev = ctx.rev()
3334 rev = ctx.rev()
3335 datefunc = ui.quiet and util.shortdate or util.datestr
3335 datefunc = ui.quiet and util.shortdate or util.datestr
3336 found = False
3336 found = False
3337 @util.cachefunc
3337 @util.cachefunc
3338 def binary():
3338 def binary():
3339 flog = getfile(fn)
3339 flog = getfile(fn)
3340 return util.binary(flog.read(ctx.filenode(fn)))
3340 return util.binary(flog.read(ctx.filenode(fn)))
3341
3341
3342 if opts.get('all'):
3342 if opts.get('all'):
3343 iter = difflinestates(pstates, states)
3343 iter = difflinestates(pstates, states)
3344 else:
3344 else:
3345 iter = [('', l) for l in states]
3345 iter = [('', l) for l in states]
3346 for change, l in iter:
3346 for change, l in iter:
3347 cols = [(fn, 'grep.filename'), (str(rev), 'grep.rev')]
3347 cols = [(fn, 'grep.filename'), (str(rev), 'grep.rev')]
3348
3348
3349 if opts.get('line_number'):
3349 if opts.get('line_number'):
3350 cols.append((str(l.linenum), 'grep.linenumber'))
3350 cols.append((str(l.linenum), 'grep.linenumber'))
3351 if opts.get('all'):
3351 if opts.get('all'):
3352 cols.append((change, 'grep.change'))
3352 cols.append((change, 'grep.change'))
3353 if opts.get('user'):
3353 if opts.get('user'):
3354 cols.append((ui.shortuser(ctx.user()), 'grep.user'))
3354 cols.append((ui.shortuser(ctx.user()), 'grep.user'))
3355 if opts.get('date'):
3355 if opts.get('date'):
3356 cols.append((datefunc(ctx.date()), 'grep.date'))
3356 cols.append((datefunc(ctx.date()), 'grep.date'))
3357 for col, label in cols[:-1]:
3357 for col, label in cols[:-1]:
3358 ui.write(col, label=label)
3358 ui.write(col, label=label)
3359 ui.write(sep, label='grep.sep')
3359 ui.write(sep, label='grep.sep')
3360 ui.write(cols[-1][0], label=cols[-1][1])
3360 ui.write(cols[-1][0], label=cols[-1][1])
3361 if not opts.get('files_with_matches'):
3361 if not opts.get('files_with_matches'):
3362 ui.write(sep, label='grep.sep')
3362 ui.write(sep, label='grep.sep')
3363 if not opts.get('text') and binary():
3363 if not opts.get('text') and binary():
3364 ui.write(" Binary file matches")
3364 ui.write(" Binary file matches")
3365 else:
3365 else:
3366 for s, label in l:
3366 for s, label in l:
3367 ui.write(s, label=label)
3367 ui.write(s, label=label)
3368 ui.write(eol)
3368 ui.write(eol)
3369 found = True
3369 found = True
3370 if opts.get('files_with_matches'):
3370 if opts.get('files_with_matches'):
3371 break
3371 break
3372 return found
3372 return found
3373
3373
3374 skip = {}
3374 skip = {}
3375 revfiles = {}
3375 revfiles = {}
3376 matchfn = scmutil.match(repo[None], pats, opts)
3376 matchfn = scmutil.match(repo[None], pats, opts)
3377 found = False
3377 found = False
3378 follow = opts.get('follow')
3378 follow = opts.get('follow')
3379
3379
3380 def prep(ctx, fns):
3380 def prep(ctx, fns):
3381 rev = ctx.rev()
3381 rev = ctx.rev()
3382 pctx = ctx.p1()
3382 pctx = ctx.p1()
3383 parent = pctx.rev()
3383 parent = pctx.rev()
3384 matches.setdefault(rev, {})
3384 matches.setdefault(rev, {})
3385 matches.setdefault(parent, {})
3385 matches.setdefault(parent, {})
3386 files = revfiles.setdefault(rev, [])
3386 files = revfiles.setdefault(rev, [])
3387 for fn in fns:
3387 for fn in fns:
3388 flog = getfile(fn)
3388 flog = getfile(fn)
3389 try:
3389 try:
3390 fnode = ctx.filenode(fn)
3390 fnode = ctx.filenode(fn)
3391 except error.LookupError:
3391 except error.LookupError:
3392 continue
3392 continue
3393
3393
3394 copied = flog.renamed(fnode)
3394 copied = flog.renamed(fnode)
3395 copy = follow and copied and copied[0]
3395 copy = follow and copied and copied[0]
3396 if copy:
3396 if copy:
3397 copies.setdefault(rev, {})[fn] = copy
3397 copies.setdefault(rev, {})[fn] = copy
3398 if fn in skip:
3398 if fn in skip:
3399 if copy:
3399 if copy:
3400 skip[copy] = True
3400 skip[copy] = True
3401 continue
3401 continue
3402 files.append(fn)
3402 files.append(fn)
3403
3403
3404 if fn not in matches[rev]:
3404 if fn not in matches[rev]:
3405 grepbody(fn, rev, flog.read(fnode))
3405 grepbody(fn, rev, flog.read(fnode))
3406
3406
3407 pfn = copy or fn
3407 pfn = copy or fn
3408 if pfn not in matches[parent]:
3408 if pfn not in matches[parent]:
3409 try:
3409 try:
3410 fnode = pctx.filenode(pfn)
3410 fnode = pctx.filenode(pfn)
3411 grepbody(pfn, parent, flog.read(fnode))
3411 grepbody(pfn, parent, flog.read(fnode))
3412 except error.LookupError:
3412 except error.LookupError:
3413 pass
3413 pass
3414
3414
3415 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
3415 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
3416 rev = ctx.rev()
3416 rev = ctx.rev()
3417 parent = ctx.p1().rev()
3417 parent = ctx.p1().rev()
3418 for fn in sorted(revfiles.get(rev, [])):
3418 for fn in sorted(revfiles.get(rev, [])):
3419 states = matches[rev][fn]
3419 states = matches[rev][fn]
3420 copy = copies.get(rev, {}).get(fn)
3420 copy = copies.get(rev, {}).get(fn)
3421 if fn in skip:
3421 if fn in skip:
3422 if copy:
3422 if copy:
3423 skip[copy] = True
3423 skip[copy] = True
3424 continue
3424 continue
3425 pstates = matches.get(parent, {}).get(copy or fn, [])
3425 pstates = matches.get(parent, {}).get(copy or fn, [])
3426 if pstates or states:
3426 if pstates or states:
3427 r = display(fn, ctx, pstates, states)
3427 r = display(fn, ctx, pstates, states)
3428 found = found or r
3428 found = found or r
3429 if r and not opts.get('all'):
3429 if r and not opts.get('all'):
3430 skip[fn] = True
3430 skip[fn] = True
3431 if copy:
3431 if copy:
3432 skip[copy] = True
3432 skip[copy] = True
3433 del matches[rev]
3433 del matches[rev]
3434 del revfiles[rev]
3434 del revfiles[rev]
3435
3435
3436 return not found
3436 return not found
3437
3437
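Editor's note: with --all, difflinestates() above turns two lists of matching lines into '+'/'-' events via difflib.SequenceMatcher opcodes. The same pattern works on plain strings, which makes the opcode handling easy to check in isolation (standard library only; the sample data is made up)::

    import difflib

    def linestates(a, b):
        # '-' for a match line that disappears, '+' for one that appears,
        # following the insert/delete/replace handling in difflinestates above
        sm = difflib.SequenceMatcher(None, a, b)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for line in a[alo:ahi]:
                    yield '-', line
            if tag in ('insert', 'replace'):
                for line in b[blo:bhi]:
                    yield '+', line

    old = ['foo = 1', 'bar = 2']
    new = ['foo = 1', 'bar = 3', 'baz = 4']
    # list(linestates(old, new))
    # -> [('-', 'bar = 2'), ('+', 'bar = 3'), ('+', 'baz = 4')]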
3438 @command('heads',
3438 @command('heads',
3439 [('r', 'rev', '',
3439 [('r', 'rev', '',
3440 _('show only heads which are descendants of STARTREV'), _('STARTREV')),
3440 _('show only heads which are descendants of STARTREV'), _('STARTREV')),
3441 ('t', 'topo', False, _('show topological heads only')),
3441 ('t', 'topo', False, _('show topological heads only')),
3442 ('a', 'active', False, _('show active branchheads only (DEPRECATED)')),
3442 ('a', 'active', False, _('show active branchheads only (DEPRECATED)')),
3443 ('c', 'closed', False, _('show normal and closed branch heads')),
3443 ('c', 'closed', False, _('show normal and closed branch heads')),
3444 ] + templateopts,
3444 ] + templateopts,
3445 _('[-ct] [-r STARTREV] [REV]...'))
3445 _('[-ct] [-r STARTREV] [REV]...'))
3446 def heads(ui, repo, *branchrevs, **opts):
3446 def heads(ui, repo, *branchrevs, **opts):
3447 """show branch heads
3447 """show branch heads
3448
3448
3449 With no arguments, show all open branch heads in the repository.
3449 With no arguments, show all open branch heads in the repository.
3450 Branch heads are changesets that have no descendants on the
3450 Branch heads are changesets that have no descendants on the
3451 same branch. They are where development generally takes place and
3451 same branch. They are where development generally takes place and
3452 are the usual targets for update and merge operations.
3452 are the usual targets for update and merge operations.
3453
3453
3454 If one or more REVs are given, only open branch heads on the
3454 If one or more REVs are given, only open branch heads on the
3455 branches associated with the specified changesets are shown. This
3455 branches associated with the specified changesets are shown. This
3456 means that you can use :hg:`heads .` to see the heads on the
3456 means that you can use :hg:`heads .` to see the heads on the
3457 currently checked-out branch.
3457 currently checked-out branch.
3458
3458
3459 If -c/--closed is specified, also show branch heads marked closed
3459 If -c/--closed is specified, also show branch heads marked closed
3460 (see :hg:`commit --close-branch`).
3460 (see :hg:`commit --close-branch`).
3461
3461
3462 If STARTREV is specified, only those heads that are descendants of
3462 If STARTREV is specified, only those heads that are descendants of
3463 STARTREV will be displayed.
3463 STARTREV will be displayed.
3464
3464
3465 If -t/--topo is specified, named branch mechanics will be ignored and only
3465 If -t/--topo is specified, named branch mechanics will be ignored and only
3466 topological heads (changesets with no children) will be shown.
3466 topological heads (changesets with no children) will be shown.
3467
3467
3468 Returns 0 if matching heads are found, 1 if not.
3468 Returns 0 if matching heads are found, 1 if not.
3469 """
3469 """
3470
3470
3471 start = None
3471 start = None
3472 if 'rev' in opts:
3472 if 'rev' in opts:
3473 start = scmutil.revsingle(repo, opts['rev'], None).node()
3473 start = scmutil.revsingle(repo, opts['rev'], None).node()
3474
3474
3475 if opts.get('topo'):
3475 if opts.get('topo'):
3476 heads = [repo[h] for h in repo.heads(start)]
3476 heads = [repo[h] for h in repo.heads(start)]
3477 else:
3477 else:
3478 heads = []
3478 heads = []
3479 for branch in repo.branchmap():
3479 for branch in repo.branchmap():
3480 heads += repo.branchheads(branch, start, opts.get('closed'))
3480 heads += repo.branchheads(branch, start, opts.get('closed'))
3481 heads = [repo[h] for h in heads]
3481 heads = [repo[h] for h in heads]
3482
3482
3483 if branchrevs:
3483 if branchrevs:
3484 branches = set(repo[br].branch() for br in branchrevs)
3484 branches = set(repo[br].branch() for br in branchrevs)
3485 heads = [h for h in heads if h.branch() in branches]
3485 heads = [h for h in heads if h.branch() in branches]
3486
3486
3487 if opts.get('active') and branchrevs:
3487 if opts.get('active') and branchrevs:
3488 dagheads = repo.heads(start)
3488 dagheads = repo.heads(start)
3489 heads = [h for h in heads if h.node() in dagheads]
3489 heads = [h for h in heads if h.node() in dagheads]
3490
3490
3491 if branchrevs:
3491 if branchrevs:
3492 haveheads = set(h.branch() for h in heads)
3492 haveheads = set(h.branch() for h in heads)
3493 if branches - haveheads:
3493 if branches - haveheads:
3494 headless = ', '.join(b for b in branches - haveheads)
3494 headless = ', '.join(b for b in branches - haveheads)
3495 msg = _('no open branch heads found on branches %s')
3495 msg = _('no open branch heads found on branches %s')
3496 if opts.get('rev'):
3496 if opts.get('rev'):
3497 msg += _(' (started at %s)') % opts['rev']
3497 msg += _(' (started at %s)') % opts['rev']
3498 ui.warn((msg + '\n') % headless)
3498 ui.warn((msg + '\n') % headless)
3499
3499
3500 if not heads:
3500 if not heads:
3501 return 1
3501 return 1
3502
3502
3503 heads = sorted(heads, key=lambda x: -x.rev())
3503 heads = sorted(heads, key=lambda x: -x.rev())
3504 displayer = cmdutil.show_changeset(ui, repo, opts)
3504 displayer = cmdutil.show_changeset(ui, repo, opts)
3505 for ctx in heads:
3505 for ctx in heads:
3506 displayer.show(ctx)
3506 displayer.show(ctx)
3507 displayer.close()
3507 displayer.close()
3508
3508
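Editor's note: the warning near the end of heads() is plain set arithmetic — the branches requested via REV arguments minus the branches that still have a surviving head. A small self-contained sketch of that check with made-up branch names::

    def missing_heads(requested_branches, heads_by_branch):
        # mirrors "branches - haveheads" above: branches the user asked about
        # for which no matching branch head was found
        requested = set(requested_branches)
        have = set(heads_by_branch)
        return sorted(requested - have)

    # missing_heads(['default', 'stable', 'legacy'], ['default', 'stable'])
    # -> ['legacy']   (would trigger the "no open branch heads found" warning)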
3509 @command('help',
3509 @command('help',
3510 [('e', 'extension', None, _('show only help for extensions')),
3510 [('e', 'extension', None, _('show only help for extensions')),
3511 ('c', 'command', None, _('show only help for commands')),
3511 ('c', 'command', None, _('show only help for commands')),
3512 ('k', 'keyword', '', _('show topics matching keyword')),
3512 ('k', 'keyword', '', _('show topics matching keyword')),
3513 ],
3513 ],
3514 _('[-ec] [TOPIC]'))
3514 _('[-ec] [TOPIC]'))
3515 def help_(ui, name=None, **opts):
3515 def help_(ui, name=None, **opts):
3516 """show help for a given topic or a help overview
3516 """show help for a given topic or a help overview
3517
3517
3518 With no arguments, print a list of commands with short help messages.
3518 With no arguments, print a list of commands with short help messages.
3519
3519
3520 Given a topic, extension, or command name, print help for that
3520 Given a topic, extension, or command name, print help for that
3521 topic.
3521 topic.
3522
3522
3523 Returns 0 if successful.
3523 Returns 0 if successful.
3524 """
3524 """
3525
3525
3526 textwidth = min(ui.termwidth(), 80) - 2
3526 textwidth = min(ui.termwidth(), 80) - 2
3527
3527
3528 keep = ui.verbose and ['verbose'] or []
3528 keep = ui.verbose and ['verbose'] or []
3529 text = help.help_(ui, name, **opts)
3529 text = help.help_(ui, name, **opts)
3530
3530
3531 formatted, pruned = minirst.format(text, textwidth, keep=keep)
3531 formatted, pruned = minirst.format(text, textwidth, keep=keep)
3532 if 'verbose' in pruned:
3532 if 'verbose' in pruned:
3533 keep.append('omitted')
3533 keep.append('omitted')
3534 else:
3534 else:
3535 keep.append('notomitted')
3535 keep.append('notomitted')
3536 formatted, pruned = minirst.format(text, textwidth, keep=keep)
3536 formatted, pruned = minirst.format(text, textwidth, keep=keep)
3537 ui.write(formatted)
3537 ui.write(formatted)
3538
3538
3539
3539
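Editor's note: help_() clamps its output width to min(terminal width, 80) - 2 before formatting. The same clamp expressed with the standard textwrap module, using a hard-coded terminal width in place of ui.termwidth() (purely illustrative, not minirst)::

    import textwrap

    def wrap_help(text, termwidth):
        # same width rule as above: never wider than 80 columns, minus a margin
        width = min(termwidth, 80) - 2
        return textwrap.fill(' '.join(text.split()), width)

    # wrap_help("show help for a given topic or a help overview ...", 120)
    # wraps at 78 columns even on a 120-column terminal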
3540 @command('identify|id',
3540 @command('identify|id',
3541 [('r', 'rev', '',
3541 [('r', 'rev', '',
3542 _('identify the specified revision'), _('REV')),
3542 _('identify the specified revision'), _('REV')),
3543 ('n', 'num', None, _('show local revision number')),
3543 ('n', 'num', None, _('show local revision number')),
3544 ('i', 'id', None, _('show global revision id')),
3544 ('i', 'id', None, _('show global revision id')),
3545 ('b', 'branch', None, _('show branch')),
3545 ('b', 'branch', None, _('show branch')),
3546 ('t', 'tags', None, _('show tags')),
3546 ('t', 'tags', None, _('show tags')),
3547 ('B', 'bookmarks', None, _('show bookmarks')),
3547 ('B', 'bookmarks', None, _('show bookmarks')),
3548 ] + remoteopts,
3548 ] + remoteopts,
3549 _('[-nibtB] [-r REV] [SOURCE]'))
3549 _('[-nibtB] [-r REV] [SOURCE]'))
3550 def identify(ui, repo, source=None, rev=None,
3550 def identify(ui, repo, source=None, rev=None,
3551 num=None, id=None, branch=None, tags=None, bookmarks=None, **opts):
3551 num=None, id=None, branch=None, tags=None, bookmarks=None, **opts):
3552 """identify the working copy or specified revision
3552 """identify the working copy or specified revision
3553
3553
3554 Print a summary identifying the repository state at REV using one or
3554 Print a summary identifying the repository state at REV using one or
3555 two parent hash identifiers, followed by a "+" if the working
3555 two parent hash identifiers, followed by a "+" if the working
3556 directory has uncommitted changes, the branch name (if not default),
3556 directory has uncommitted changes, the branch name (if not default),
3557 a list of tags, and a list of bookmarks.
3557 a list of tags, and a list of bookmarks.
3558
3558
3559 When REV is not given, print a summary of the current state of the
3559 When REV is not given, print a summary of the current state of the
3560 repository.
3560 repository.
3561
3561
3562 Specifying a path to a repository root or Mercurial bundle will
3562 Specifying a path to a repository root or Mercurial bundle will
3563 cause lookup to operate on that repository/bundle.
3563 cause lookup to operate on that repository/bundle.
3564
3564
3565 .. container:: verbose
3565 .. container:: verbose
3566
3566
3567 Examples:
3567 Examples:
3568
3568
3569 - generate a build identifier for the working directory::
3569 - generate a build identifier for the working directory::
3570
3570
3571 hg id --id > build-id.dat
3571 hg id --id > build-id.dat
3572
3572
3573 - find the revision corresponding to a tag::
3573 - find the revision corresponding to a tag::
3574
3574
3575 hg id -n -r 1.3
3575 hg id -n -r 1.3
3576
3576
3577 - check the most recent revision of a remote repository::
3577 - check the most recent revision of a remote repository::
3578
3578
3579 hg id -r tip http://selenic.com/hg/
3579 hg id -r tip http://selenic.com/hg/
3580
3580
3581 Returns 0 if successful.
3581 Returns 0 if successful.
3582 """
3582 """
3583
3583
3584 if not repo and not source:
3584 if not repo and not source:
3585 raise util.Abort(_("there is no Mercurial repository here "
3585 raise util.Abort(_("there is no Mercurial repository here "
3586 "(.hg not found)"))
3586 "(.hg not found)"))
3587
3587
3588 hexfunc = ui.debugflag and hex or short
3588 hexfunc = ui.debugflag and hex or short
3589 default = not (num or id or branch or tags or bookmarks)
3589 default = not (num or id or branch or tags or bookmarks)
3590 output = []
3590 output = []
3591 revs = []
3591 revs = []
3592
3592
3593 if source:
3593 if source:
3594 source, branches = hg.parseurl(ui.expandpath(source))
3594 source, branches = hg.parseurl(ui.expandpath(source))
3595 peer = hg.peer(repo or ui, opts, source) # only pass ui when no repo
3595 peer = hg.peer(repo or ui, opts, source) # only pass ui when no repo
3596 repo = peer.local()
3596 repo = peer.local()
3597 revs, checkout = hg.addbranchrevs(repo, peer, branches, None)
3597 revs, checkout = hg.addbranchrevs(repo, peer, branches, None)
3598
3598
3599 if not repo:
3599 if not repo:
3600 if num or branch or tags:
3600 if num or branch or tags:
3601 raise util.Abort(
3601 raise util.Abort(
3602 _("can't query remote revision number, branch, or tags"))
3602 _("can't query remote revision number, branch, or tags"))
3603 if not rev and revs:
3603 if not rev and revs:
3604 rev = revs[0]
3604 rev = revs[0]
3605 if not rev:
3605 if not rev:
3606 rev = "tip"
3606 rev = "tip"
3607
3607
3608 remoterev = peer.lookup(rev)
3608 remoterev = peer.lookup(rev)
3609 if default or id:
3609 if default or id:
3610 output = [hexfunc(remoterev)]
3610 output = [hexfunc(remoterev)]
3611
3611
3612 def getbms():
3612 def getbms():
3613 bms = []
3613 bms = []
3614
3614
3615 if 'bookmarks' in peer.listkeys('namespaces'):
3615 if 'bookmarks' in peer.listkeys('namespaces'):
3616 hexremoterev = hex(remoterev)
3616 hexremoterev = hex(remoterev)
3617 bms = [bm for bm, bmr in peer.listkeys('bookmarks').iteritems()
3617 bms = [bm for bm, bmr in peer.listkeys('bookmarks').iteritems()
3618 if bmr == hexremoterev]
3618 if bmr == hexremoterev]
3619
3619
3620 return sorted(bms)
3620 return sorted(bms)
3621
3621
3622 if bookmarks:
3622 if bookmarks:
3623 output.extend(getbms())
3623 output.extend(getbms())
3624 elif default and not ui.quiet:
3624 elif default and not ui.quiet:
3625 # multiple bookmarks for a single parent separated by '/'
3625 # multiple bookmarks for a single parent separated by '/'
3626 bm = '/'.join(getbms())
3626 bm = '/'.join(getbms())
3627 if bm:
3627 if bm:
3628 output.append(bm)
3628 output.append(bm)
3629 else:
3629 else:
3630 if not rev:
3630 if not rev:
3631 ctx = repo[None]
3631 ctx = repo[None]
3632 parents = ctx.parents()
3632 parents = ctx.parents()
3633 changed = ""
3633 changed = ""
3634 if default or id or num:
3634 if default or id or num:
3635 if (util.any(repo.status())
3635 if (util.any(repo.status())
3636 or util.any(ctx.sub(s).dirty() for s in ctx.substate)):
3636 or util.any(ctx.sub(s).dirty() for s in ctx.substate)):
3637 changed = '+'
3637 changed = '+'
3638 if default or id:
3638 if default or id:
3639 output = ["%s%s" %
3639 output = ["%s%s" %
3640 ('+'.join([hexfunc(p.node()) for p in parents]), changed)]
3640 ('+'.join([hexfunc(p.node()) for p in parents]), changed)]
3641 if num:
3641 if num:
3642 output.append("%s%s" %
3642 output.append("%s%s" %
3643 ('+'.join([str(p.rev()) for p in parents]), changed))
3643 ('+'.join([str(p.rev()) for p in parents]), changed))
3644 else:
3644 else:
3645 ctx = scmutil.revsingle(repo, rev)
3645 ctx = scmutil.revsingle(repo, rev)
3646 if default or id:
3646 if default or id:
3647 output = [hexfunc(ctx.node())]
3647 output = [hexfunc(ctx.node())]
3648 if num:
3648 if num:
3649 output.append(str(ctx.rev()))
3649 output.append(str(ctx.rev()))
3650
3650
3651 if default and not ui.quiet:
3651 if default and not ui.quiet:
3652 b = ctx.branch()
3652 b = ctx.branch()
3653 if b != 'default':
3653 if b != 'default':
3654 output.append("(%s)" % b)
3654 output.append("(%s)" % b)
3655
3655
3656 # multiple tags for a single parent separated by '/'
3656 # multiple tags for a single parent separated by '/'
3657 t = '/'.join(ctx.tags())
3657 t = '/'.join(ctx.tags())
3658 if t:
3658 if t:
3659 output.append(t)
3659 output.append(t)
3660
3660
3661 # multiple bookmarks for a single parent separated by '/'
3661 # multiple bookmarks for a single parent separated by '/'
3662 bm = '/'.join(ctx.bookmarks())
3662 bm = '/'.join(ctx.bookmarks())
3663 if bm:
3663 if bm:
3664 output.append(bm)
3664 output.append(bm)
3665 else:
3665 else:
3666 if branch:
3666 if branch:
3667 output.append(ctx.branch())
3667 output.append(ctx.branch())
3668
3668
3669 if tags:
3669 if tags:
3670 output.extend(ctx.tags())
3670 output.extend(ctx.tags())
3671
3671
3672 if bookmarks:
3672 if bookmarks:
3673 output.extend(ctx.bookmarks())
3673 output.extend(ctx.bookmarks())
3674
3674
3675 ui.write("%s\n" % ' '.join(output))
3675 ui.write("%s\n" % ' '.join(output))
3676
3676
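Editor's note: identify() assembles its one-line summary from the pieces selected by -n/-i/-b/-t/-B. A quick way to see the "+" dirty marker described in the docstring is to call the command itself; a sketch using subprocess (assumes an hg executable on PATH and a working copy at `path`)::

    import subprocess

    def working_copy_id(path='.'):
        # "hg identify --id" prints the parent hash, with a trailing "+"
        # when the working directory has uncommitted changes (per the docstring)
        out = subprocess.check_output(['hg', 'identify', '--id'], cwd=path)
        node = out.decode('ascii').strip()
        return node.rstrip('+'), node.endswith('+')

    # node, dirty = working_copy_id()
    # dirty is True when a commit would record local modifications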
3677 @command('import|patch',
3677 @command('import|patch',
3678 [('p', 'strip', 1,
3678 [('p', 'strip', 1,
3679 _('directory strip option for patch. This has the same '
3679 _('directory strip option for patch. This has the same '
3680 'meaning as the corresponding patch option'), _('NUM')),
3680 'meaning as the corresponding patch option'), _('NUM')),
3681 ('b', 'base', '', _('base path (DEPRECATED)'), _('PATH')),
3681 ('b', 'base', '', _('base path (DEPRECATED)'), _('PATH')),
3682 ('e', 'edit', False, _('invoke editor on commit messages')),
3682 ('e', 'edit', False, _('invoke editor on commit messages')),
3683 ('f', 'force', None,
3683 ('f', 'force', None,
3684 _('skip check for outstanding uncommitted changes (DEPRECATED)')),
3684 _('skip check for outstanding uncommitted changes (DEPRECATED)')),
3685 ('', 'no-commit', None,
3685 ('', 'no-commit', None,
3686 _("don't commit, just update the working directory")),
3686 _("don't commit, just update the working directory")),
3687 ('', 'bypass', None,
3687 ('', 'bypass', None,
3688 _("apply patch without touching the working directory")),
3688 _("apply patch without touching the working directory")),
3689 ('', 'partial', None,
3689 ('', 'partial', None,
3690 _('commit even if some hunks fail')),
3690 _('commit even if some hunks fail')),
3691 ('', 'exact', None,
3691 ('', 'exact', None,
3692 _('apply patch to the nodes from which it was generated')),
3692 _('apply patch to the nodes from which it was generated')),
3693 ('', 'import-branch', None,
3693 ('', 'import-branch', None,
3694 _('use any branch information in patch (implied by --exact)'))] +
3694 _('use any branch information in patch (implied by --exact)'))] +
3695 commitopts + commitopts2 + similarityopts,
3695 commitopts + commitopts2 + similarityopts,
3696 _('[OPTION]... PATCH...'))
3696 _('[OPTION]... PATCH...'))
3697 def import_(ui, repo, patch1=None, *patches, **opts):
3697 def import_(ui, repo, patch1=None, *patches, **opts):
3698 """import an ordered set of patches
3698 """import an ordered set of patches
3699
3699
3700 Import a list of patches and commit them individually (unless
3700 Import a list of patches and commit them individually (unless
3701 --no-commit is specified).
3701 --no-commit is specified).
3702
3702
3703 Because import first applies changes to the working directory,
3703 Because import first applies changes to the working directory,
3704 import will abort if there are outstanding changes.
3704 import will abort if there are outstanding changes.
3705
3705
3706 You can import a patch straight from a mail message. Even patches
3706 You can import a patch straight from a mail message. Even patches
3707 as attachments work (to use the body part, it must have type
3707 as attachments work (to use the body part, it must have type
3708 text/plain or text/x-patch). The From and Subject headers of the email
3708 text/plain or text/x-patch). The From and Subject headers of the email
3709 message are used as the default committer and commit message. All
3709 message are used as the default committer and commit message. All
3710 text/plain body parts before the first diff are added to the commit
3710 text/plain body parts before the first diff are added to the commit
3711 message.
3711 message.
3712
3712
3713 If the imported patch was generated by :hg:`export`, user and
3713 If the imported patch was generated by :hg:`export`, user and
3714 description from patch override values from message headers and
3714 description from patch override values from message headers and
3715 body. Values given on command line with -m/--message and -u/--user
3715 body. Values given on command line with -m/--message and -u/--user
3716 override these.
3716 override these.
3717
3717
3718 If --exact is specified, import will set the working directory to
3718 If --exact is specified, import will set the working directory to
3719 the parent of each patch before applying it, and will abort if the
3719 the parent of each patch before applying it, and will abort if the
3720 resulting changeset has a different ID than the one recorded in
3720 resulting changeset has a different ID than the one recorded in
3721 the patch. This may happen due to character set problems or other
3721 the patch. This may happen due to character set problems or other
3722 deficiencies in the text patch format.
3722 deficiencies in the text patch format.
3723
3723
3724 Use --bypass to apply and commit patches directly to the
3724 Use --bypass to apply and commit patches directly to the
3725 repository, not touching the working directory. Without --exact,
3725 repository, not touching the working directory. Without --exact,
3726 patches will be applied on top of the working directory parent
3726 patches will be applied on top of the working directory parent
3727 revision.
3727 revision.
3728
3728
3729 With -s/--similarity, hg will attempt to discover renames and
3729 With -s/--similarity, hg will attempt to discover renames and
3730 copies in the patch in the same way as :hg:`addremove`.
3730 copies in the patch in the same way as :hg:`addremove`.
3731
3731
3732 Use --partial to ensure a changeset will be created from the patch
3732 Use --partial to ensure a changeset will be created from the patch
3733 even if some hunks fail to apply. Hunks that fail to apply will be
3733 even if some hunks fail to apply. Hunks that fail to apply will be
3734 written to a <target-file>.rej file. Conflicts can then be resolved
3734 written to a <target-file>.rej file. Conflicts can then be resolved
3735 by hand before :hg:`commit --amend` is run to update the created
3735 by hand before :hg:`commit --amend` is run to update the created
3736 changeset. This flag exists to let people import patches that
3736 changeset. This flag exists to let people import patches that
3737 partially apply without losing the associated metadata (author,
3737 partially apply without losing the associated metadata (author,
3738 date, description, ...). Note that when none of the hunks apply
3738 date, description, ...). Note that when none of the hunks apply
3739 cleanly, :hg:`import --partial` will create an empty changeset,
3739 cleanly, :hg:`import --partial` will create an empty changeset,
3740 importing only the patch metadata.
3740 importing only the patch metadata.
3741
3741
3742 To read a patch from standard input, use "-" as the patch name. If
3742 To read a patch from standard input, use "-" as the patch name. If
3743 a URL is specified, the patch will be downloaded from it.
3743 a URL is specified, the patch will be downloaded from it.
3744 See :hg:`help dates` for a list of formats valid for -d/--date.
3744 See :hg:`help dates` for a list of formats valid for -d/--date.
3745
3745
3746 .. container:: verbose
3746 .. container:: verbose
3747
3747
3748 Examples:
3748 Examples:
3749
3749
3750 - import a traditional patch from a website and detect renames::
3750 - import a traditional patch from a website and detect renames::
3751
3751
3752 hg import -s 80 http://example.com/bugfix.patch
3752 hg import -s 80 http://example.com/bugfix.patch
3753
3753
3754 - import a changeset from an hgweb server::
3754 - import a changeset from an hgweb server::
3755
3755
3756 hg import http://www.selenic.com/hg/rev/5ca8c111e9aa
3756 hg import http://www.selenic.com/hg/rev/5ca8c111e9aa
3757
3757
3758 - import all the patches in a Unix-style mbox::
3758 - import all the patches in a Unix-style mbox::
3759
3759
3760 hg import incoming-patches.mbox
3760 hg import incoming-patches.mbox
3761
3761
3762 - attempt to exactly restore an exported changeset (not always
3762 - attempt to exactly restore an exported changeset (not always
3763 possible)::
3763 possible)::
3764
3764
3765 hg import --exact proposed-fix.patch
3765 hg import --exact proposed-fix.patch
3766
3766
3767 Returns 0 on success, 1 on partial success (see --partial).
3767 Returns 0 on success, 1 on partial success (see --partial).
3768 """
3768 """
3769
3769
3770 if not patch1:
3770 if not patch1:
3771 raise util.Abort(_('need at least one patch to import'))
3771 raise util.Abort(_('need at least one patch to import'))
3772
3772
3773 patches = (patch1,) + patches
3773 patches = (patch1,) + patches
3774
3774
3775 date = opts.get('date')
3775 date = opts.get('date')
3776 if date:
3776 if date:
3777 opts['date'] = util.parsedate(date)
3777 opts['date'] = util.parsedate(date)
3778
3778
3779 update = not opts.get('bypass')
3779 update = not opts.get('bypass')
3780 if not update and opts.get('no_commit'):
3780 if not update and opts.get('no_commit'):
3781 raise util.Abort(_('cannot use --no-commit with --bypass'))
3781 raise util.Abort(_('cannot use --no-commit with --bypass'))
3782 try:
3782 try:
3783 sim = float(opts.get('similarity') or 0)
3783 sim = float(opts.get('similarity') or 0)
3784 except ValueError:
3784 except ValueError:
3785 raise util.Abort(_('similarity must be a number'))
3785 raise util.Abort(_('similarity must be a number'))
3786 if sim < 0 or sim > 100:
3786 if sim < 0 or sim > 100:
3787 raise util.Abort(_('similarity must be between 0 and 100'))
3787 raise util.Abort(_('similarity must be between 0 and 100'))
3788 if sim and not update:
3788 if sim and not update:
3789 raise util.Abort(_('cannot use --similarity with --bypass'))
3789 raise util.Abort(_('cannot use --similarity with --bypass'))
3790
3790
3791 if update:
3791 if update:
3792 cmdutil.checkunfinished(repo)
3792 cmdutil.checkunfinished(repo)
3793 if (opts.get('exact') or not opts.get('force')) and update:
3793 if (opts.get('exact') or not opts.get('force')) and update:
3794 cmdutil.bailifchanged(repo)
3794 cmdutil.bailifchanged(repo)
3795
3795
3796 base = opts["base"]
3796 base = opts["base"]
3797 wlock = lock = tr = None
3797 wlock = lock = tr = None
3798 msgs = []
3798 msgs = []
3799 ret = 0
3799 ret = 0
3800
3800
3801
3801
3802 try:
3802 try:
3803 try:
3803 try:
3804 wlock = repo.wlock()
3804 wlock = repo.wlock()
3805 if not opts.get('no_commit'):
3805 if not opts.get('no_commit'):
3806 lock = repo.lock()
3806 lock = repo.lock()
3807 tr = repo.transaction('import')
3807 tr = repo.transaction('import')
3808 parents = repo.parents()
3808 parents = repo.parents()
3809 for patchurl in patches:
3809 for patchurl in patches:
3810 if patchurl == '-':
3810 if patchurl == '-':
3811 ui.status(_('applying patch from stdin\n'))
3811 ui.status(_('applying patch from stdin\n'))
3812 patchfile = ui.fin
3812 patchfile = ui.fin
3813 patchurl = 'stdin' # for error message
3813 patchurl = 'stdin' # for error message
3814 else:
3814 else:
3815 patchurl = os.path.join(base, patchurl)
3815 patchurl = os.path.join(base, patchurl)
3816 ui.status(_('applying %s\n') % patchurl)
3816 ui.status(_('applying %s\n') % patchurl)
3817 patchfile = hg.openpath(ui, patchurl)
3817 patchfile = hg.openpath(ui, patchurl)
3818
3818
3819 haspatch = False
3819 haspatch = False
3820 for hunk in patch.split(patchfile):
3820 for hunk in patch.split(patchfile):
3821 (msg, node, rej) = cmdutil.tryimportone(ui, repo, hunk,
3821 (msg, node, rej) = cmdutil.tryimportone(ui, repo, hunk,
3822 parents, opts,
3822 parents, opts,
3823 msgs, hg.clean)
3823 msgs, hg.clean)
3824 if msg:
3824 if msg:
3825 haspatch = True
3825 haspatch = True
3826 ui.note(msg + '\n')
3826 ui.note(msg + '\n')
3827 if update or opts.get('exact'):
3827 if update or opts.get('exact'):
3828 parents = repo.parents()
3828 parents = repo.parents()
3829 else:
3829 else:
3830 parents = [repo[node]]
3830 parents = [repo[node]]
3831 if rej:
3831 if rej:
3832 ui.write_err(_("patch applied partially\n"))
3832 ui.write_err(_("patch applied partially\n"))
3833 ui.write_err(("(fix the .rej files and run "
3833 ui.write_err(("(fix the .rej files and run "
3834 "`hg commit --amend`)\n"))
3834 "`hg commit --amend`)\n"))
3835 ret = 1
3835 ret = 1
3836 break
3836 break
3837
3837
3838 if not haspatch:
3838 if not haspatch:
3839 raise util.Abort(_('%s: no diffs found') % patchurl)
3839 raise util.Abort(_('%s: no diffs found') % patchurl)
3840
3840
3841 if tr:
3841 if tr:
3842 tr.close()
3842 tr.close()
3843 if msgs:
3843 if msgs:
3844 repo.savecommitmessage('\n* * *\n'.join(msgs))
3844 repo.savecommitmessage('\n* * *\n'.join(msgs))
3845 return ret
3845 return ret
3846 except: # re-raises
3846 except: # re-raises
3847 # wlock.release() indirectly calls dirstate.write(): since
3847 # wlock.release() indirectly calls dirstate.write(): since
3848 # we're crashing, we do not want to change the working dir
3848 # we're crashing, we do not want to change the working dir
3849 # parent after all, so make sure it writes nothing
3849 # parent after all, so make sure it writes nothing
3850 repo.dirstate.invalidate()
3850 repo.dirstate.invalidate()
3851 raise
3851 raise
3852 finally:
3852 finally:
3853 if tr:
3853 if tr:
3854 tr.release()
3854 tr.release()
3855 release(lock, wlock)
3855 release(lock, wlock)
3856
3856
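# Illustrative sketch (assumption, not part of commands.py): driving the
# import command above from a script via the hg CLI.  --similarity enables
# the addremove-style rename detection and --partial the .rej workflow
# described in the docstring; the patch name and threshold are example values.
import subprocess
ret = subprocess.call(['hg', 'import', '--partial', '--similarity', '80',
                       'bugfix.patch'])
if ret == 1:
    # partial success: fix the *.rej files, then run `hg commit --amend`
    print 'patch applied partially; resolve rejects and amend the changeset'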
3857 @command('incoming|in',
3857 @command('incoming|in',
3858 [('f', 'force', None,
3858 [('f', 'force', None,
3859 _('run even if remote repository is unrelated')),
3859 _('run even if remote repository is unrelated')),
3860 ('n', 'newest-first', None, _('show newest record first')),
3860 ('n', 'newest-first', None, _('show newest record first')),
3861 ('', 'bundle', '',
3861 ('', 'bundle', '',
3862 _('file to store the bundles into'), _('FILE')),
3862 _('file to store the bundles into'), _('FILE')),
3863 ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
3863 ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
3864 ('B', 'bookmarks', False, _("compare bookmarks")),
3864 ('B', 'bookmarks', False, _("compare bookmarks")),
3865 ('b', 'branch', [],
3865 ('b', 'branch', [],
3866 _('a specific branch you would like to pull'), _('BRANCH')),
3866 _('a specific branch you would like to pull'), _('BRANCH')),
3867 ] + logopts + remoteopts + subrepoopts,
3867 ] + logopts + remoteopts + subrepoopts,
3868 _('[-p] [-n] [-M] [-f] [-r REV]... [--bundle FILENAME] [SOURCE]'))
3868 _('[-p] [-n] [-M] [-f] [-r REV]... [--bundle FILENAME] [SOURCE]'))
3869 def incoming(ui, repo, source="default", **opts):
3869 def incoming(ui, repo, source="default", **opts):
3870 """show new changesets found in source
3870 """show new changesets found in source
3871
3871
3872 Show new changesets found in the specified path/URL or the default
3872 Show new changesets found in the specified path/URL or the default
3873 pull location. These are the changesets that would have been pulled
3873 pull location. These are the changesets that would have been pulled
3874 if a pull had been requested at the time you issued this command.
3874 if a pull had been requested at the time you issued this command.
3875
3875
3876 For a remote repository, using --bundle avoids downloading the
3876 For a remote repository, using --bundle avoids downloading the
3877 changesets twice if the incoming command is followed by a pull.
3877 changesets twice if the incoming command is followed by a pull.
3878
3878
3879 See pull for valid source format details.
3879 See pull for valid source format details.
3880
3880
3881 .. container:: verbose
3881 .. container:: verbose
3882
3882
3883 Examples:
3883 Examples:
3884
3884
3885 - show incoming changes with patches and full description::
3885 - show incoming changes with patches and full description::
3886
3886
3887 hg incoming -vp
3887 hg incoming -vp
3888
3888
3889 - show incoming changes excluding merges, store a bundle::
3889 - show incoming changes excluding merges, store a bundle::
3890
3890
3891 hg in -vpM --bundle incoming.hg
3891 hg in -vpM --bundle incoming.hg
3892 hg pull incoming.hg
3892 hg pull incoming.hg
3893
3893
3894 - briefly list changes inside a bundle::
3894 - briefly list changes inside a bundle::
3895
3895
3896 hg in changes.hg -T "{desc|firstline}\\n"
3896 hg in changes.hg -T "{desc|firstline}\\n"
3897
3897
3898 Returns 0 if there are incoming changes, 1 otherwise.
3898 Returns 0 if there are incoming changes, 1 otherwise.
3899 """
3899 """
3900 if opts.get('graph'):
3900 if opts.get('graph'):
3901 cmdutil.checkunsupportedgraphflags([], opts)
3901 cmdutil.checkunsupportedgraphflags([], opts)
3902 def display(other, chlist, displayer):
3902 def display(other, chlist, displayer):
3903 revdag = cmdutil.graphrevs(other, chlist, opts)
3903 revdag = cmdutil.graphrevs(other, chlist, opts)
3904 showparents = [ctx.node() for ctx in repo[None].parents()]
3904 showparents = [ctx.node() for ctx in repo[None].parents()]
3905 cmdutil.displaygraph(ui, revdag, displayer, showparents,
3905 cmdutil.displaygraph(ui, revdag, displayer, showparents,
3906 graphmod.asciiedges)
3906 graphmod.asciiedges)
3907
3907
3908 hg._incoming(display, lambda: 1, ui, repo, source, opts, buffered=True)
3908 hg._incoming(display, lambda: 1, ui, repo, source, opts, buffered=True)
3909 return 0
3909 return 0
3910
3910
3911 if opts.get('bundle') and opts.get('subrepos'):
3911 if opts.get('bundle') and opts.get('subrepos'):
3912 raise util.Abort(_('cannot combine --bundle and --subrepos'))
3912 raise util.Abort(_('cannot combine --bundle and --subrepos'))
3913
3913
3914 if opts.get('bookmarks'):
3914 if opts.get('bookmarks'):
3915 source, branches = hg.parseurl(ui.expandpath(source),
3915 source, branches = hg.parseurl(ui.expandpath(source),
3916 opts.get('branch'))
3916 opts.get('branch'))
3917 other = hg.peer(repo, opts, source)
3917 other = hg.peer(repo, opts, source)
3918 if 'bookmarks' not in other.listkeys('namespaces'):
3918 if 'bookmarks' not in other.listkeys('namespaces'):
3919 ui.warn(_("remote doesn't support bookmarks\n"))
3919 ui.warn(_("remote doesn't support bookmarks\n"))
3920 return 0
3920 return 0
3921 ui.status(_('comparing with %s\n') % util.hidepassword(source))
3921 ui.status(_('comparing with %s\n') % util.hidepassword(source))
3922 return bookmarks.diff(ui, repo, other)
3922 return bookmarks.diff(ui, repo, other)
3923
3923
3924 repo._subtoppath = ui.expandpath(source)
3924 repo._subtoppath = ui.expandpath(source)
3925 try:
3925 try:
3926 return hg.incoming(ui, repo, source, opts)
3926 return hg.incoming(ui, repo, source, opts)
3927 finally:
3927 finally:
3928 del repo._subtoppath
3928 del repo._subtoppath
3929
3929
3930
3930
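# Illustrative sketch (assumption, not part of commands.py): the --bundle
# workflow from the incoming docstring, scripted with the hg CLI.  Incoming
# changesets are saved to a bundle so the following pull does not download
# them a second time; the bundle filename is an example value.
import subprocess
if subprocess.call(['hg', 'incoming', '--bundle', 'incoming.hg']) == 0:
    # exit status 0 means there are incoming changes and the bundle was written
    subprocess.check_call(['hg', 'pull', 'incoming.hg'])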
3931 @command('^init', remoteopts, _('[-e CMD] [--remotecmd CMD] [DEST]'))
3931 @command('^init', remoteopts, _('[-e CMD] [--remotecmd CMD] [DEST]'))
3932 def init(ui, dest=".", **opts):
3932 def init(ui, dest=".", **opts):
3933 """create a new repository in the given directory
3933 """create a new repository in the given directory
3934
3934
3935 Initialize a new repository in the given directory. If the given
3935 Initialize a new repository in the given directory. If the given
3936 directory does not exist, it will be created.
3936 directory does not exist, it will be created.
3937
3937
3938 If no directory is given, the current directory is used.
3938 If no directory is given, the current directory is used.
3939
3939
3940 It is possible to specify an ``ssh://`` URL as the destination.
3940 It is possible to specify an ``ssh://`` URL as the destination.
3941 See :hg:`help urls` for more information.
3941 See :hg:`help urls` for more information.
3942
3942
3943 Returns 0 on success.
3943 Returns 0 on success.
3944 """
3944 """
3945 hg.peer(ui, opts, ui.expandpath(dest), create=True)
3945 hg.peer(ui, opts, ui.expandpath(dest), create=True)
3946
3946
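# Illustrative sketch (assumption, not part of commands.py): creating a new
# repository on a remote host over ssh, which the init docstring above
# permits; the URL is a made-up example.
import subprocess
subprocess.check_call(['hg', 'init', 'ssh://user@example.com/repos/project'])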
3947 @command('locate',
3947 @command('locate',
3948 [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
3948 [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
3949 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
3949 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
3950 ('f', 'fullpath', None, _('print complete paths from the filesystem root')),
3950 ('f', 'fullpath', None, _('print complete paths from the filesystem root')),
3951 ] + walkopts,
3951 ] + walkopts,
3952 _('[OPTION]... [PATTERN]...'))
3952 _('[OPTION]... [PATTERN]...'))
3953 def locate(ui, repo, *pats, **opts):
3953 def locate(ui, repo, *pats, **opts):
3954 """locate files matching specific patterns
3954 """locate files matching specific patterns
3955
3955
3956 Print files under Mercurial control in the working directory whose
3956 Print files under Mercurial control in the working directory whose
3957 names match the given patterns.
3957 names match the given patterns.
3958
3958
3959 By default, this command searches all directories in the working
3959 By default, this command searches all directories in the working
3960 directory. To search just the current directory and its
3960 directory. To search just the current directory and its
3961 subdirectories, use "--include .".
3961 subdirectories, use "--include .".
3962
3962
3963 If no patterns are given to match, this command prints the names
3963 If no patterns are given to match, this command prints the names
3964 of all files under Mercurial control in the working directory.
3964 of all files under Mercurial control in the working directory.
3965
3965
3966 If you want to feed the output of this command into the "xargs"
3966 If you want to feed the output of this command into the "xargs"
3967 command, use the -0 option to both this command and "xargs". This
3967 command, use the -0 option to both this command and "xargs". This
3968 will avoid the problem of "xargs" treating single filenames that
3968 will avoid the problem of "xargs" treating single filenames that
3969 contain whitespace as multiple filenames.
3969 contain whitespace as multiple filenames.
3970
3970
3971 Returns 0 if a match is found, 1 otherwise.
3971 Returns 0 if a match is found, 1 otherwise.
3972 """
3972 """
3973 end = opts.get('print0') and '\0' or '\n'
3973 end = opts.get('print0') and '\0' or '\n'
3974 rev = scmutil.revsingle(repo, opts.get('rev'), None).node()
3974 rev = scmutil.revsingle(repo, opts.get('rev'), None).node()
3975
3975
3976 ret = 1
3976 ret = 1
3977 m = scmutil.match(repo[rev], pats, opts, default='relglob')
3977 m = scmutil.match(repo[rev], pats, opts, default='relglob')
3978 m.bad = lambda x, y: False
3978 m.bad = lambda x, y: False
3979 for abs in repo[rev].walk(m):
3979 for abs in repo[rev].walk(m):
3980 if not rev and abs not in repo.dirstate:
3980 if not rev and abs not in repo.dirstate:
3981 continue
3981 continue
3982 if opts.get('fullpath'):
3982 if opts.get('fullpath'):
3983 ui.write(repo.wjoin(abs), end)
3983 ui.write(repo.wjoin(abs), end)
3984 else:
3984 else:
3985 ui.write(((pats and m.rel(abs)) or abs), end)
3985 ui.write(((pats and m.rel(abs)) or abs), end)
3986 ret = 0
3986 ret = 0
3987
3987
3988 return ret
3988 return ret
3989
3989
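# Illustrative sketch (assumption, not part of commands.py): consuming
# NUL-terminated `hg locate` output from Python.  As the docstring notes for
# xargs, -0/--print0 keeps filenames containing whitespace intact; the
# pattern is an example value.
import subprocess
out = subprocess.check_output(['hg', 'locate', '--print0', 'glob:**.py'])
tracked = [name for name in out.split('\0') if name]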
3990 @command('^log|history',
3990 @command('^log|history',
3991 [('f', 'follow', None,
3991 [('f', 'follow', None,
3992 _('follow changeset history, or file history across copies and renames')),
3992 _('follow changeset history, or file history across copies and renames')),
3993 ('', 'follow-first', None,
3993 ('', 'follow-first', None,
3994 _('only follow the first parent of merge changesets (DEPRECATED)')),
3994 _('only follow the first parent of merge changesets (DEPRECATED)')),
3995 ('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
3995 ('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
3996 ('C', 'copies', None, _('show copied files')),
3996 ('C', 'copies', None, _('show copied files')),
3997 ('k', 'keyword', [],
3997 ('k', 'keyword', [],
3998 _('do case-insensitive search for a given text'), _('TEXT')),
3998 _('do case-insensitive search for a given text'), _('TEXT')),
3999 ('r', 'rev', [], _('show the specified revision or range'), _('REV')),
3999 ('r', 'rev', [], _('show the specified revision or range'), _('REV')),
4000 ('', 'removed', None, _('include revisions where files were removed')),
4000 ('', 'removed', None, _('include revisions where files were removed')),
4001 ('m', 'only-merges', None, _('show only merges (DEPRECATED)')),
4001 ('m', 'only-merges', None, _('show only merges (DEPRECATED)')),
4002 ('u', 'user', [], _('revisions committed by user'), _('USER')),
4002 ('u', 'user', [], _('revisions committed by user'), _('USER')),
4003 ('', 'only-branch', [],
4003 ('', 'only-branch', [],
4004 _('show only changesets within the given named branch (DEPRECATED)'),
4004 _('show only changesets within the given named branch (DEPRECATED)'),
4005 _('BRANCH')),
4005 _('BRANCH')),
4006 ('b', 'branch', [],
4006 ('b', 'branch', [],
4007 _('show changesets within the given named branch'), _('BRANCH')),
4007 _('show changesets within the given named branch'), _('BRANCH')),
4008 ('P', 'prune', [],
4008 ('P', 'prune', [],
4009 _('do not display revision or any of its ancestors'), _('REV')),
4009 _('do not display revision or any of its ancestors'), _('REV')),
4010 ] + logopts + walkopts,
4010 ] + logopts + walkopts,
4011 _('[OPTION]... [FILE]'))
4011 _('[OPTION]... [FILE]'))
4012 def log(ui, repo, *pats, **opts):
4012 def log(ui, repo, *pats, **opts):
4013 """show revision history of entire repository or files
4013 """show revision history of entire repository or files
4014
4014
4015 Print the revision history of the specified files or the entire
4015 Print the revision history of the specified files or the entire
4016 project.
4016 project.
4017
4017
4018 If no revision range is specified, the default is ``tip:0`` unless
4018 If no revision range is specified, the default is ``tip:0`` unless
4019 --follow is set, in which case the working directory parent is
4019 --follow is set, in which case the working directory parent is
4020 used as the starting revision.
4020 used as the starting revision.
4021
4021
4022 File history is shown without following rename or copy history of
4022 File history is shown without following rename or copy history of
4023 files. Use -f/--follow with a filename to follow history across
4023 files. Use -f/--follow with a filename to follow history across
4024 renames and copies. --follow without a filename will only show
4024 renames and copies. --follow without a filename will only show
4025 ancestors or descendants of the starting revision.
4025 ancestors or descendants of the starting revision.
4026
4026
4027 By default this command prints revision number and changeset id,
4027 By default this command prints revision number and changeset id,
4028 tags, non-trivial parents, user, date and time, and a summary for
4028 tags, non-trivial parents, user, date and time, and a summary for
4029 each commit. When the -v/--verbose switch is used, the list of
4029 each commit. When the -v/--verbose switch is used, the list of
4030 changed files and full commit message are shown.
4030 changed files and full commit message are shown.
4031
4031
4032 With --graph the revisions are shown as an ASCII art DAG with the most
4032 With --graph the revisions are shown as an ASCII art DAG with the most
4033 recent changeset at the top.
4033 recent changeset at the top.
4034 'o' is a changeset, '@' is a working directory parent, 'x' is obsolete,
4034 'o' is a changeset, '@' is a working directory parent, 'x' is obsolete,
4035 and '+' represents a fork where the changeset from the lines below is a
4035 and '+' represents a fork where the changeset from the lines below is a
4036 parent of the 'o' merge on the same line.
4036 parent of the 'o' merge on the same line.
4037
4037
4038 .. note::
4038 .. note::
4039
4039
4040 log -p/--patch may generate unexpected diff output for merge
4040 log -p/--patch may generate unexpected diff output for merge
4041 changesets, as it will only compare the merge changeset against
4041 changesets, as it will only compare the merge changeset against
4042 its first parent. Also, only files different from BOTH parents
4042 its first parent. Also, only files different from BOTH parents
4043 will appear in files:.
4043 will appear in files:.
4044
4044
4045 .. note::
4045 .. note::
4046
4046
4047 For performance reasons, log FILE may omit duplicate changes
4047 For performance reasons, log FILE may omit duplicate changes
4048 made on branches and will not show deletions. To see all
4048 made on branches and will not show deletions. To see all
4049 changes including duplicates and deletions, use the --removed
4049 changes including duplicates and deletions, use the --removed
4050 switch.
4050 switch.
4051
4051
4052 .. container:: verbose
4052 .. container:: verbose
4053
4053
4054 Some examples:
4054 Some examples:
4055
4055
4056 - changesets with full descriptions and file lists::
4056 - changesets with full descriptions and file lists::
4057
4057
4058 hg log -v
4058 hg log -v
4059
4059
4060 - changesets ancestral to the working directory::
4060 - changesets ancestral to the working directory::
4061
4061
4062 hg log -f
4062 hg log -f
4063
4063
4064 - last 10 commits on the current branch::
4064 - last 10 commits on the current branch::
4065
4065
4066 hg log -l 10 -b .
4066 hg log -l 10 -b .
4067
4067
4068 - changesets showing all modifications of a file, including removals::
4068 - changesets showing all modifications of a file, including removals::
4069
4069
4070 hg log --removed file.c
4070 hg log --removed file.c
4071
4071
4072 - all changesets that touch a directory, with diffs, excluding merges::
4072 - all changesets that touch a directory, with diffs, excluding merges::
4073
4073
4074 hg log -Mp lib/
4074 hg log -Mp lib/
4075
4075
4076 - all revision numbers that match a keyword::
4076 - all revision numbers that match a keyword::
4077
4077
4078 hg log -k bug --template "{rev}\\n"
4078 hg log -k bug --template "{rev}\\n"
4079
4079
4080 - check if a given changeset is included in a tagged release::
4080 - check if a given changeset is included in a tagged release::
4081
4081
4082 hg log -r "a21ccf and ancestor(1.9)"
4082 hg log -r "a21ccf and ancestor(1.9)"
4083
4083
4084 - find all changesets by some user in a date range::
4084 - find all changesets by some user in a date range::
4085
4085
4086 hg log -k alice -d "may 2008 to jul 2008"
4086 hg log -k alice -d "may 2008 to jul 2008"
4087
4087
4088 - summary of all changesets after the last tag::
4088 - summary of all changesets after the last tag::
4089
4089
4090 hg log -r "last(tagged())::" --template "{desc|firstline}\\n"
4090 hg log -r "last(tagged())::" --template "{desc|firstline}\\n"
4091
4091
4092 See :hg:`help dates` for a list of formats valid for -d/--date.
4092 See :hg:`help dates` for a list of formats valid for -d/--date.
4093
4093
4094 See :hg:`help revisions` and :hg:`help revsets` for more about
4094 See :hg:`help revisions` and :hg:`help revsets` for more about
4095 specifying revisions.
4095 specifying revisions.
4096
4096
4097 See :hg:`help templates` for more about pre-packaged styles and
4097 See :hg:`help templates` for more about pre-packaged styles and
4098 specifying custom templates.
4098 specifying custom templates.
4099
4099
4100 Returns 0 on success.
4100 Returns 0 on success.
4101 """
4101 """
4102 if opts.get('graph'):
4102 if opts.get('graph'):
4103 return cmdutil.graphlog(ui, repo, *pats, **opts)
4103 return cmdutil.graphlog(ui, repo, *pats, **opts)
4104
4104
4105 revs, expr, filematcher = cmdutil.getlogrevs(repo, pats, opts)
4105 revs, expr, filematcher = cmdutil.getlogrevs(repo, pats, opts)
4106 limit = cmdutil.loglimit(opts)
4106 limit = cmdutil.loglimit(opts)
4107 count = 0
4107 count = 0
4108
4108
4109 getrenamed = None
4109 getrenamed = None
4110 if opts.get('copies'):
4110 if opts.get('copies'):
4111 endrev = None
4111 endrev = None
4112 if opts.get('rev'):
4112 if opts.get('rev'):
4113 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
4113 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
4114 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
4114 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
4115
4115
4116 displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True)
4116 displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True)
4117 for rev in revs:
4117 for rev in revs:
4118 if count == limit:
4118 if count == limit:
4119 break
4119 break
4120 ctx = repo[rev]
4120 ctx = repo[rev]
4121 copies = None
4121 copies = None
4122 if getrenamed is not None and rev:
4122 if getrenamed is not None and rev:
4123 copies = []
4123 copies = []
4124 for fn in ctx.files():
4124 for fn in ctx.files():
4125 rename = getrenamed(fn, rev)
4125 rename = getrenamed(fn, rev)
4126 if rename:
4126 if rename:
4127 copies.append((fn, rename[0]))
4127 copies.append((fn, rename[0]))
4128 revmatchfn = filematcher and filematcher(ctx.rev()) or None
4128 revmatchfn = filematcher and filematcher(ctx.rev()) or None
4129 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
4129 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
4130 if displayer.flush(rev):
4130 if displayer.flush(rev):
4131 count += 1
4131 count += 1
4132
4132
4133 displayer.close()
4133 displayer.close()
4134
4134
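# Illustrative sketch (assumption, not part of commands.py): collecting the
# first-line summaries of every changeset since the last tag, mirroring the
# revset and template example given in the log docstring above.
import subprocess
out = subprocess.check_output(['hg', 'log', '-r', 'last(tagged())::',
                               '--template', '{desc|firstline}\n'])
summaries = out.splitlines()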
4135 @command('manifest',
4135 @command('manifest',
4136 [('r', 'rev', '', _('revision to display'), _('REV')),
4136 [('r', 'rev', '', _('revision to display'), _('REV')),
4137 ('', 'all', False, _("list files from all revisions"))],
4137 ('', 'all', False, _("list files from all revisions"))],
4138 _('[-r REV]'))
4138 _('[-r REV]'))
4139 def manifest(ui, repo, node=None, rev=None, **opts):
4139 def manifest(ui, repo, node=None, rev=None, **opts):
4140 """output the current or given revision of the project manifest
4140 """output the current or given revision of the project manifest
4141
4141
4142 Print a list of version controlled files for the given revision.
4142 Print a list of version controlled files for the given revision.
4143 If no revision is given, the first parent of the working directory
4143 If no revision is given, the first parent of the working directory
4144 is used, or the null revision if no revision is checked out.
4144 is used, or the null revision if no revision is checked out.
4145
4145
4146 With -v, print file permissions, symlink and executable bits.
4146 With -v, print file permissions, symlink and executable bits.
4147 With --debug, print file revision hashes.
4147 With --debug, print file revision hashes.
4148
4148
4149 If option --all is specified, the list of all files from all revisions
4149 If option --all is specified, the list of all files from all revisions
4150 is printed. This includes deleted and renamed files.
4150 is printed. This includes deleted and renamed files.
4151
4151
4152 Returns 0 on success.
4152 Returns 0 on success.
4153 """
4153 """
4154
4154
4155 fm = ui.formatter('manifest', opts)
4155 fm = ui.formatter('manifest', opts)
4156
4156
4157 if opts.get('all'):
4157 if opts.get('all'):
4158 if rev or node:
4158 if rev or node:
4159 raise util.Abort(_("can't specify a revision with --all"))
4159 raise util.Abort(_("can't specify a revision with --all"))
4160
4160
4161 res = []
4161 res = []
4162 prefix = "data/"
4162 prefix = "data/"
4163 suffix = ".i"
4163 suffix = ".i"
4164 plen = len(prefix)
4164 plen = len(prefix)
4165 slen = len(suffix)
4165 slen = len(suffix)
4166 lock = repo.lock()
4166 lock = repo.lock()
4167 try:
4167 try:
4168 for fn, b, size in repo.store.datafiles():
4168 for fn, b, size in repo.store.datafiles():
4169 if size != 0 and fn[-slen:] == suffix and fn[:plen] == prefix:
4169 if size != 0 and fn[-slen:] == suffix and fn[:plen] == prefix:
4170 res.append(fn[plen:-slen])
4170 res.append(fn[plen:-slen])
4171 finally:
4171 finally:
4172 lock.release()
4172 lock.release()
4173 for f in res:
4173 for f in res:
4174 fm.startitem()
4174 fm.startitem()
4175 fm.write("path", '%s\n', f)
4175 fm.write("path", '%s\n', f)
4176 fm.end()
4176 fm.end()
4177 return
4177 return
4178
4178
4179 if rev and node:
4179 if rev and node:
4180 raise util.Abort(_("please specify just one revision"))
4180 raise util.Abort(_("please specify just one revision"))
4181
4181
4182 if not node:
4182 if not node:
4183 node = rev
4183 node = rev
4184
4184
4185 char = {'l': '@', 'x': '*', '': ''}
4185 char = {'l': '@', 'x': '*', '': ''}
4186 mode = {'l': '644', 'x': '755', '': '644'}
4186 mode = {'l': '644', 'x': '755', '': '644'}
4187 ctx = scmutil.revsingle(repo, node)
4187 ctx = scmutil.revsingle(repo, node)
4188 mf = ctx.manifest()
4188 mf = ctx.manifest()
4189 for f in ctx:
4189 for f in ctx:
4190 fm.startitem()
4190 fm.startitem()
4191 fl = ctx[f].flags()
4191 fl = ctx[f].flags()
4192 fm.condwrite(ui.debugflag, 'hash', '%s ', hex(mf[f]))
4192 fm.condwrite(ui.debugflag, 'hash', '%s ', hex(mf[f]))
4193 fm.condwrite(ui.verbose, 'mode type', '%s %1s ', mode[fl], char[fl])
4193 fm.condwrite(ui.verbose, 'mode type', '%s %1s ', mode[fl], char[fl])
4194 fm.write('path', '%s\n', f)
4194 fm.write('path', '%s\n', f)
4195 fm.end()
4195 fm.end()
4196
4196
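# Illustrative sketch (assumption, not part of commands.py): reading a
# revision's manifest through the same internal API the command uses above.
# ui, hg and scmutil are the real mercurial modules; the repository path and
# revision are example values, and flags are 'l' (symlink), 'x' (executable)
# or '' (regular file).
from mercurial import ui as uimod, hg, scmutil
repo = hg.repository(uimod.ui(), '.')
ctx = scmutil.revsingle(repo, 'tip')
for f in ctx:
    print f, ctx[f].flags()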
4197 @command('^merge',
4197 @command('^merge',
4198 [('f', 'force', None,
4198 [('f', 'force', None,
4199 _('force a merge including outstanding changes (DEPRECATED)')),
4199 _('force a merge including outstanding changes (DEPRECATED)')),
4200 ('r', 'rev', '', _('revision to merge'), _('REV')),
4200 ('r', 'rev', '', _('revision to merge'), _('REV')),
4201 ('P', 'preview', None,
4201 ('P', 'preview', None,
4202 _('review revisions to merge (no merge is performed)'))
4202 _('review revisions to merge (no merge is performed)'))
4203 ] + mergetoolopts,
4203 ] + mergetoolopts,
4204 _('[-P] [-f] [[-r] REV]'))
4204 _('[-P] [-f] [[-r] REV]'))
4205 def merge(ui, repo, node=None, **opts):
4205 def merge(ui, repo, node=None, **opts):
4206 """merge working directory with another revision
4206 """merge working directory with another revision
4207
4207
4208 The current working directory is updated with all changes made in
4208 The current working directory is updated with all changes made in
4209 the requested revision since the last common predecessor revision.
4209 the requested revision since the last common predecessor revision.
4210
4210
4211 Files that changed between either parent are marked as changed for
4211 Files that changed between either parent are marked as changed for
4212 the next commit and a commit must be performed before any further
4212 the next commit and a commit must be performed before any further
4213 updates to the repository are allowed. The next commit will have
4213 updates to the repository are allowed. The next commit will have
4214 two parents.
4214 two parents.
4215
4215
4216 ``--tool`` can be used to specify the merge tool used for file
4216 ``--tool`` can be used to specify the merge tool used for file
4217 merges. It overrides the HGMERGE environment variable and your
4217 merges. It overrides the HGMERGE environment variable and your
4218 configuration files. See :hg:`help merge-tools` for options.
4218 configuration files. See :hg:`help merge-tools` for options.
4219
4219
4220 If no revision is specified, the working directory's parent is a
4220 If no revision is specified, the working directory's parent is a
4221 head revision, and the current branch contains exactly one other
4221 head revision, and the current branch contains exactly one other
4222 head, the other head is merged with by default. Otherwise, an
4222 head, the other head is merged with by default. Otherwise, an
4223 explicit revision with which to merge must be provided.
4223 explicit revision with which to merge must be provided.
4224
4224
4225 :hg:`resolve` must be used to resolve unresolved files.
4225 :hg:`resolve` must be used to resolve unresolved files.
4226
4226
4227 To undo an uncommitted merge, use :hg:`update --clean .` which
4227 To undo an uncommitted merge, use :hg:`update --clean .` which
4228 will check out a clean copy of the original merge parent, losing
4228 will check out a clean copy of the original merge parent, losing
4229 all changes.
4229 all changes.
4230
4230
4231 Returns 0 on success, 1 if there are unresolved files.
4231 Returns 0 on success, 1 if there are unresolved files.
4232 """
4232 """
4233
4233
4234 if opts.get('rev') and node:
4234 if opts.get('rev') and node:
4235 raise util.Abort(_("please specify just one revision"))
4235 raise util.Abort(_("please specify just one revision"))
4236 if not node:
4236 if not node:
4237 node = opts.get('rev')
4237 node = opts.get('rev')
4238
4238
4239 if node:
4239 if node:
4240 node = scmutil.revsingle(repo, node).node()
4240 node = scmutil.revsingle(repo, node).node()
4241
4241
4242 if not node and repo._bookmarkcurrent:
4242 if not node and repo._bookmarkcurrent:
4243 bmheads = repo.bookmarkheads(repo._bookmarkcurrent)
4243 bmheads = repo.bookmarkheads(repo._bookmarkcurrent)
4244 curhead = repo[repo._bookmarkcurrent].node()
4244 curhead = repo[repo._bookmarkcurrent].node()
4245 if len(bmheads) == 2:
4245 if len(bmheads) == 2:
4246 if curhead == bmheads[0]:
4246 if curhead == bmheads[0]:
4247 node = bmheads[1]
4247 node = bmheads[1]
4248 else:
4248 else:
4249 node = bmheads[0]
4249 node = bmheads[0]
4250 elif len(bmheads) > 2:
4250 elif len(bmheads) > 2:
4251 raise util.Abort(_("multiple matching bookmarks to merge - "
4251 raise util.Abort(_("multiple matching bookmarks to merge - "
4252 "please merge with an explicit rev or bookmark"),
4252 "please merge with an explicit rev or bookmark"),
4253 hint=_("run 'hg heads' to see all heads"))
4253 hint=_("run 'hg heads' to see all heads"))
4254 elif len(bmheads) <= 1:
4254 elif len(bmheads) <= 1:
4255 raise util.Abort(_("no matching bookmark to merge - "
4255 raise util.Abort(_("no matching bookmark to merge - "
4256 "please merge with an explicit rev or bookmark"),
4256 "please merge with an explicit rev or bookmark"),
4257 hint=_("run 'hg heads' to see all heads"))
4257 hint=_("run 'hg heads' to see all heads"))
4258
4258
4259 if not node and not repo._bookmarkcurrent:
4259 if not node and not repo._bookmarkcurrent:
4260 branch = repo[None].branch()
4260 branch = repo[None].branch()
4261 bheads = repo.branchheads(branch)
4261 bheads = repo.branchheads(branch)
4262 nbhs = [bh for bh in bheads if not repo[bh].bookmarks()]
4262 nbhs = [bh for bh in bheads if not repo[bh].bookmarks()]
4263
4263
4264 if len(nbhs) > 2:
4264 if len(nbhs) > 2:
4265 raise util.Abort(_("branch '%s' has %d heads - "
4265 raise util.Abort(_("branch '%s' has %d heads - "
4266 "please merge with an explicit rev")
4266 "please merge with an explicit rev")
4267 % (branch, len(bheads)),
4267 % (branch, len(bheads)),
4268 hint=_("run 'hg heads .' to see heads"))
4268 hint=_("run 'hg heads .' to see heads"))
4269
4269
4270 parent = repo.dirstate.p1()
4270 parent = repo.dirstate.p1()
4271 if len(nbhs) <= 1:
4271 if len(nbhs) <= 1:
4272 if len(bheads) > 1:
4272 if len(bheads) > 1:
4273 raise util.Abort(_("heads are bookmarked - "
4273 raise util.Abort(_("heads are bookmarked - "
4274 "please merge with an explicit rev"),
4274 "please merge with an explicit rev"),
4275 hint=_("run 'hg heads' to see all heads"))
4275 hint=_("run 'hg heads' to see all heads"))
4276 if len(repo.heads()) > 1:
4276 if len(repo.heads()) > 1:
4277 raise util.Abort(_("branch '%s' has one head - "
4277 raise util.Abort(_("branch '%s' has one head - "
4278 "please merge with an explicit rev")
4278 "please merge with an explicit rev")
4279 % branch,
4279 % branch,
4280 hint=_("run 'hg heads' to see all heads"))
4280 hint=_("run 'hg heads' to see all heads"))
4281 msg, hint = _('nothing to merge'), None
4281 msg, hint = _('nothing to merge'), None
4282 if parent != repo.lookup(branch):
4282 if parent != repo.lookup(branch):
4283 hint = _("use 'hg update' instead")
4283 hint = _("use 'hg update' instead")
4284 raise util.Abort(msg, hint=hint)
4284 raise util.Abort(msg, hint=hint)
4285
4285
4286 if parent not in bheads:
4286 if parent not in bheads:
4287 raise util.Abort(_('working directory not at a head revision'),
4287 raise util.Abort(_('working directory not at a head revision'),
4288 hint=_("use 'hg update' or merge with an "
4288 hint=_("use 'hg update' or merge with an "
4289 "explicit revision"))
4289 "explicit revision"))
4290 if parent == nbhs[0]:
4290 if parent == nbhs[0]:
4291 node = nbhs[-1]
4291 node = nbhs[-1]
4292 else:
4292 else:
4293 node = nbhs[0]
4293 node = nbhs[0]
4294
4294
4295 if opts.get('preview'):
4295 if opts.get('preview'):
4296 # find nodes that are ancestors of p2 but not of p1
4296 # find nodes that are ancestors of p2 but not of p1
4297 p1 = repo.lookup('.')
4297 p1 = repo.lookup('.')
4298 p2 = repo.lookup(node)
4298 p2 = repo.lookup(node)
4299 nodes = repo.changelog.findmissing(common=[p1], heads=[p2])
4299 nodes = repo.changelog.findmissing(common=[p1], heads=[p2])
4300
4300
4301 displayer = cmdutil.show_changeset(ui, repo, opts)
4301 displayer = cmdutil.show_changeset(ui, repo, opts)
4302 for node in nodes:
4302 for node in nodes:
4303 displayer.show(repo[node])
4303 displayer.show(repo[node])
4304 displayer.close()
4304 displayer.close()
4305 return 0
4305 return 0
4306
4306
4307 try:
4307 try:
4308 # ui.forcemerge is an internal variable, do not document
4308 # ui.forcemerge is an internal variable, do not document
4309 repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''), 'merge')
4309 repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''), 'merge')
4310 return hg.merge(repo, node, force=opts.get('force'))
4310 return hg.merge(repo, node, force=opts.get('force'))
4311 finally:
4311 finally:
4312 ui.setconfig('ui', 'forcemerge', '', 'merge')
4312 ui.setconfig('ui', 'forcemerge', '', 'merge')
4313
4313
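# Illustrative sketch (assumption, not part of commands.py): previewing and
# then performing a merge from a script, using the --preview and --tool
# behaviour described in the merge docstring; the target revision and tool
# are example values.
import subprocess
subprocess.check_call(['hg', 'merge', '--preview', '-r', 'stable'])
ret = subprocess.call(['hg', 'merge', '-r', 'stable', '--tool', 'internal:merge'])
if ret == 1:
    print 'unresolved files remain; run `hg resolve`, then commit the merge'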
4314 @command('outgoing|out',
4314 @command('outgoing|out',
4315 [('f', 'force', None, _('run even when the destination is unrelated')),
4315 [('f', 'force', None, _('run even when the destination is unrelated')),
4316 ('r', 'rev', [],
4316 ('r', 'rev', [],
4317 _('a changeset intended to be included in the destination'), _('REV')),
4317 _('a changeset intended to be included in the destination'), _('REV')),
4318 ('n', 'newest-first', None, _('show newest record first')),
4318 ('n', 'newest-first', None, _('show newest record first')),
4319 ('B', 'bookmarks', False, _('compare bookmarks')),
4319 ('B', 'bookmarks', False, _('compare bookmarks')),
4320 ('b', 'branch', [], _('a specific branch you would like to push'),
4320 ('b', 'branch', [], _('a specific branch you would like to push'),
4321 _('BRANCH')),
4321 _('BRANCH')),
4322 ] + logopts + remoteopts + subrepoopts,
4322 ] + logopts + remoteopts + subrepoopts,
4323 _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]'))
4323 _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]'))
4324 def outgoing(ui, repo, dest=None, **opts):
4324 def outgoing(ui, repo, dest=None, **opts):
4325 """show changesets not found in the destination
4325 """show changesets not found in the destination
4326
4326
4327 Show changesets not found in the specified destination repository
4327 Show changesets not found in the specified destination repository
4328 or the default push location. These are the changesets that would
4328 or the default push location. These are the changesets that would
4329 be pushed if a push was requested.
4329 be pushed if a push was requested.
4330
4330
4331 See pull for details of valid destination formats.
4331 See pull for details of valid destination formats.
4332
4332
4333 Returns 0 if there are outgoing changes, 1 otherwise.
4333 Returns 0 if there are outgoing changes, 1 otherwise.
4334 """
4334 """
4335 if opts.get('graph'):
4335 if opts.get('graph'):
4336 cmdutil.checkunsupportedgraphflags([], opts)
4336 cmdutil.checkunsupportedgraphflags([], opts)
4337 o, other = hg._outgoing(ui, repo, dest, opts)
4337 o, other = hg._outgoing(ui, repo, dest, opts)
4338 if not o:
4338 if not o:
4339 cmdutil.outgoinghooks(ui, repo, other, opts, o)
4339 cmdutil.outgoinghooks(ui, repo, other, opts, o)
4340 return
4340 return
4341
4341
4342 revdag = cmdutil.graphrevs(repo, o, opts)
4342 revdag = cmdutil.graphrevs(repo, o, opts)
4343 displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True)
4343 displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True)
4344 showparents = [ctx.node() for ctx in repo[None].parents()]
4344 showparents = [ctx.node() for ctx in repo[None].parents()]
4345 cmdutil.displaygraph(ui, revdag, displayer, showparents,
4345 cmdutil.displaygraph(ui, revdag, displayer, showparents,
4346 graphmod.asciiedges)
4346 graphmod.asciiedges)
4347 cmdutil.outgoinghooks(ui, repo, other, opts, o)
4347 cmdutil.outgoinghooks(ui, repo, other, opts, o)
4348 return 0
4348 return 0
4349
4349
4350 if opts.get('bookmarks'):
4350 if opts.get('bookmarks'):
4351 dest = ui.expandpath(dest or 'default-push', dest or 'default')
4351 dest = ui.expandpath(dest or 'default-push', dest or 'default')
4352 dest, branches = hg.parseurl(dest, opts.get('branch'))
4352 dest, branches = hg.parseurl(dest, opts.get('branch'))
4353 other = hg.peer(repo, opts, dest)
4353 other = hg.peer(repo, opts, dest)
4354 if 'bookmarks' not in other.listkeys('namespaces'):
4354 if 'bookmarks' not in other.listkeys('namespaces'):
4355 ui.warn(_("remote doesn't support bookmarks\n"))
4355 ui.warn(_("remote doesn't support bookmarks\n"))
4356 return 0
4356 return 0
4357 ui.status(_('comparing with %s\n') % util.hidepassword(dest))
4357 ui.status(_('comparing with %s\n') % util.hidepassword(dest))
4358 return bookmarks.diff(ui, other, repo)
4358 return bookmarks.diff(ui, other, repo)
4359
4359
4360 repo._subtoppath = ui.expandpath(dest or 'default-push', dest or 'default')
4360 repo._subtoppath = ui.expandpath(dest or 'default-push', dest or 'default')
4361 try:
4361 try:
4362 return hg.outgoing(ui, repo, dest, opts)
4362 return hg.outgoing(ui, repo, dest, opts)
4363 finally:
4363 finally:
4364 del repo._subtoppath
4364 del repo._subtoppath
4365
4365
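# Illustrative sketch (assumption, not part of commands.py): pushing only
# when something would actually be sent, relying on the exit codes documented
# in the outgoing docstring (0 if there are outgoing changes, 1 otherwise).
import subprocess
if subprocess.call(['hg', 'outgoing', '-q']) == 0:
    subprocess.check_call(['hg', 'push'])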
4366 @command('parents',
4366 @command('parents',
4367 [('r', 'rev', '', _('show parents of the specified revision'), _('REV')),
4367 [('r', 'rev', '', _('show parents of the specified revision'), _('REV')),
4368 ] + templateopts,
4368 ] + templateopts,
4369 _('[-r REV] [FILE]'))
4369 _('[-r REV] [FILE]'))
4370 def parents(ui, repo, file_=None, **opts):
4370 def parents(ui, repo, file_=None, **opts):
4371 """show the parents of the working directory or revision
4371 """show the parents of the working directory or revision
4372
4372
4373 Print the working directory's parent revisions. If a revision is
4373 Print the working directory's parent revisions. If a revision is
4374 given via -r/--rev, the parent of that revision will be printed.
4374 given via -r/--rev, the parent of that revision will be printed.
4375 If a file argument is given, the revision in which the file was
4375 If a file argument is given, the revision in which the file was
4376 last changed (before the working directory revision or the
4376 last changed (before the working directory revision or the
4377 argument to --rev if given) is printed.
4377 argument to --rev if given) is printed.
4378
4378
4379 Returns 0 on success.
4379 Returns 0 on success.
4380 """
4380 """
4381
4381
4382 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
4382 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
4383
4383
4384 if file_:
4384 if file_:
4385 m = scmutil.match(ctx, (file_,), opts)
4385 m = scmutil.match(ctx, (file_,), opts)
4386 if m.anypats() or len(m.files()) != 1:
4386 if m.anypats() or len(m.files()) != 1:
4387 raise util.Abort(_('can only specify an explicit filename'))
4387 raise util.Abort(_('can only specify an explicit filename'))
4388 file_ = m.files()[0]
4388 file_ = m.files()[0]
4389 filenodes = []
4389 filenodes = []
4390 for cp in ctx.parents():
4390 for cp in ctx.parents():
4391 if not cp:
4391 if not cp:
4392 continue
4392 continue
4393 try:
4393 try:
4394 filenodes.append(cp.filenode(file_))
4394 filenodes.append(cp.filenode(file_))
4395 except error.LookupError:
4395 except error.LookupError:
4396 pass
4396 pass
4397 if not filenodes:
4397 if not filenodes:
4398 raise util.Abort(_("'%s' not found in manifest!") % file_)
4398 raise util.Abort(_("'%s' not found in manifest!") % file_)
4399 p = []
4399 p = []
4400 for fn in filenodes:
4400 for fn in filenodes:
4401 fctx = repo.filectx(file_, fileid=fn)
4401 fctx = repo.filectx(file_, fileid=fn)
4402 p.append(fctx.node())
4402 p.append(fctx.node())
4403 else:
4403 else:
4404 p = [cp.node() for cp in ctx.parents()]
4404 p = [cp.node() for cp in ctx.parents()]
4405
4405
4406 displayer = cmdutil.show_changeset(ui, repo, opts)
4406 displayer = cmdutil.show_changeset(ui, repo, opts)
4407 for n in p:
4407 for n in p:
4408 if n != nullid:
4408 if n != nullid:
4409 displayer.show(repo[n])
4409 displayer.show(repo[n])
4410 displayer.close()
4410 displayer.close()
4411
4411
4412 @command('paths', [], _('[NAME]'))
4412 @command('paths', [], _('[NAME]'))
4413 def paths(ui, repo, search=None):
4413 def paths(ui, repo, search=None):
4414 """show aliases for remote repositories
4414 """show aliases for remote repositories
4415
4415
4416 Show definition of symbolic path name NAME. If no name is given,
4416 Show definition of symbolic path name NAME. If no name is given,
4417 show definition of all available names.
4417 show definition of all available names.
4418
4418
4419 Option -q/--quiet suppresses all output when searching for NAME
4419 Option -q/--quiet suppresses all output when searching for NAME
4420 and shows only the path names when listing all definitions.
4420 and shows only the path names when listing all definitions.
4421
4421
4422 Path names are defined in the [paths] section of your
4422 Path names are defined in the [paths] section of your
4423 configuration file and in ``/etc/mercurial/hgrc``. If run inside a
4423 configuration file and in ``/etc/mercurial/hgrc``. If run inside a
4424 repository, ``.hg/hgrc`` is used, too.
4424 repository, ``.hg/hgrc`` is used, too.
4425
4425
4426 The path names ``default`` and ``default-push`` have a special
4426 The path names ``default`` and ``default-push`` have a special
4427 meaning. When performing a push or pull operation, they are used
4427 meaning. When performing a push or pull operation, they are used
4428 as fallbacks if no location is specified on the command-line.
4428 as fallbacks if no location is specified on the command-line.
4429 When ``default-push`` is set, it will be used for push and
4429 When ``default-push`` is set, it will be used for push and
4430 ``default`` will be used for pull; otherwise ``default`` is used
4430 ``default`` will be used for pull; otherwise ``default`` is used
4431 as the fallback for both. When cloning a repository, the clone
4431 as the fallback for both. When cloning a repository, the clone
4432 source is written as ``default`` in ``.hg/hgrc``. Note that
4432 source is written as ``default`` in ``.hg/hgrc``. Note that
4433 ``default`` and ``default-push`` apply to all inbound (e.g.
4433 ``default`` and ``default-push`` apply to all inbound (e.g.
4434 :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email` and
4434 :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email` and
4435 :hg:`bundle`) operations.
4435 :hg:`bundle`) operations.
4436
4436
4437 See :hg:`help urls` for more information.
4437 See :hg:`help urls` for more information.
4438
4438
4439 Returns 0 on success.
4439 Returns 0 on success.
4440 """
4440 """
4441 if search:
4441 if search:
4442 for name, path in ui.configitems("paths"):
4442 for name, path in ui.configitems("paths"):
4443 if name == search:
4443 if name == search:
4444 ui.status("%s\n" % util.hidepassword(path))
4444 ui.status("%s\n" % util.hidepassword(path))
4445 return
4445 return
4446 if not ui.quiet:
4446 if not ui.quiet:
4447 ui.warn(_("not found!\n"))
4447 ui.warn(_("not found!\n"))
4448 return 1
4448 return 1
4449 else:
4449 else:
4450 for name, path in ui.configitems("paths"):
4450 for name, path in ui.configitems("paths"):
4451 if ui.quiet:
4451 if ui.quiet:
4452 ui.write("%s\n" % name)
4452 ui.write("%s\n" % name)
4453 else:
4453 else:
4454 ui.write("%s = %s\n" % (name, util.hidepassword(path)))
4454 ui.write("%s = %s\n" % (name, util.hidepassword(path)))
4455
4455
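# Illustrative sketch (assumption, not part of commands.py): listing the
# configured path aliases with the same ui API the command uses above.  A
# bare ui() only reads user and system hgrc files, not a repository's
# .hg/hgrc, so this is an approximation of `hg paths` outside a repository.
from mercurial import ui as uimod
from mercurial import util
u = uimod.ui()
for name, path in u.configitems("paths"):
    print "%s = %s" % (name, util.hidepassword(path))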
4456 @command('phase',
4456 @command('phase',
4457 [('p', 'public', False, _('set changeset phase to public')),
4457 [('p', 'public', False, _('set changeset phase to public')),
4458 ('d', 'draft', False, _('set changeset phase to draft')),
4458 ('d', 'draft', False, _('set changeset phase to draft')),
4459 ('s', 'secret', False, _('set changeset phase to secret')),
4459 ('s', 'secret', False, _('set changeset phase to secret')),
4460 ('f', 'force', False, _('allow to move boundary backward')),
4460 ('f', 'force', False, _('allow to move boundary backward')),
4461 ('r', 'rev', [], _('target revision'), _('REV')),
4461 ('r', 'rev', [], _('target revision'), _('REV')),
4462 ],
4462 ],
4463 _('[-p|-d|-s] [-f] [-r] REV...'))
4463 _('[-p|-d|-s] [-f] [-r] REV...'))
4464 def phase(ui, repo, *revs, **opts):
4464 def phase(ui, repo, *revs, **opts):
4465 """set or show the current phase name
4465 """set or show the current phase name
4466
4466
4467 With no argument, show the phase name of specified revisions.
4467 With no argument, show the phase name of specified revisions.
4468
4468
4469 With one of -p/--public, -d/--draft or -s/--secret, change the
4469 With one of -p/--public, -d/--draft or -s/--secret, change the
4470 phase value of the specified revisions.
4470 phase value of the specified revisions.
4471
4471
4472 Unless -f/--force is specified, :hg:`phase` won't move changeset from a
4472 Unless -f/--force is specified, :hg:`phase` won't move changeset from a
4473 lower phase to a higher phase. Phases are ordered as follows::
4473 lower phase to a higher phase. Phases are ordered as follows::
4474
4474
4475 public < draft < secret
4475 public < draft < secret
4476
4476
4477 Returns 0 on success, 1 if no phases were changed or some could not
4477 Returns 0 on success, 1 if no phases were changed or some could not
4478 be changed.
4478 be changed.
4479 """
4479 """
4480 # search for a unique phase argument
4480 # search for a unique phase argument
4481 targetphase = None
4481 targetphase = None
4482 for idx, name in enumerate(phases.phasenames):
4482 for idx, name in enumerate(phases.phasenames):
4483 if opts[name]:
4483 if opts[name]:
4484 if targetphase is not None:
4484 if targetphase is not None:
4485 raise util.Abort(_('only one phase can be specified'))
4485 raise util.Abort(_('only one phase can be specified'))
4486 targetphase = idx
4486 targetphase = idx
4487
4487
4488 # look for specified revision
4488 # look for specified revision
4489 revs = list(revs)
4489 revs = list(revs)
4490 revs.extend(opts['rev'])
4490 revs.extend(opts['rev'])
4491 if not revs:
4491 if not revs:
4492 raise util.Abort(_('no revisions specified'))
4492 raise util.Abort(_('no revisions specified'))
4493
4493
4494 revs = scmutil.revrange(repo, revs)
4494 revs = scmutil.revrange(repo, revs)
4495
4495
4496 lock = None
4496 lock = None
4497 ret = 0
4497 ret = 0
4498 if targetphase is None:
4498 if targetphase is None:
4499 # display
4499 # display
4500 for r in revs:
4500 for r in revs:
4501 ctx = repo[r]
4501 ctx = repo[r]
4502 ui.write('%i: %s\n' % (ctx.rev(), ctx.phasestr()))
4502 ui.write('%i: %s\n' % (ctx.rev(), ctx.phasestr()))
4503 else:
4503 else:
4504 lock = repo.lock()
4504 lock = repo.lock()
4505 try:
4505 try:
4506 # set phase
4506 # set phase
4507 if not revs:
4507 if not revs:
4508 raise util.Abort(_('empty revision set'))
4508 raise util.Abort(_('empty revision set'))
4509 nodes = [repo[r].node() for r in revs]
4509 nodes = [repo[r].node() for r in revs]
4510 olddata = repo._phasecache.getphaserevs(repo)[:]
4510 olddata = repo._phasecache.getphaserevs(repo)[:]
4511 phases.advanceboundary(repo, targetphase, nodes)
4511 phases.advanceboundary(repo, targetphase, nodes)
4512 if opts['force']:
4512 if opts['force']:
4513 phases.retractboundary(repo, targetphase, nodes)
4513 phases.retractboundary(repo, targetphase, nodes)
4514 finally:
4514 finally:
4515 lock.release()
4515 lock.release()
4516 # moving revision from public to draft may hide them
4516 # moving revision from public to draft may hide them
4517 # We have to check result on an unfiltered repository
4517 # We have to check result on an unfiltered repository
4518 unfi = repo.unfiltered()
4518 unfi = repo.unfiltered()
4519 newdata = repo._phasecache.getphaserevs(unfi)
4519 newdata = repo._phasecache.getphaserevs(unfi)
4520 changes = sum(o != newdata[i] for i, o in enumerate(olddata))
4520 changes = sum(o != newdata[i] for i, o in enumerate(olddata))
4521 cl = unfi.changelog
4521 cl = unfi.changelog
4522 rejected = [n for n in nodes
4522 rejected = [n for n in nodes
4523 if newdata[cl.rev(n)] < targetphase]
4523 if newdata[cl.rev(n)] < targetphase]
4524 if rejected:
4524 if rejected:
4525 ui.warn(_('cannot move %i changesets to a higher '
4525 ui.warn(_('cannot move %i changesets to a higher '
4526 'phase, use --force\n') % len(rejected))
4526 'phase, use --force\n') % len(rejected))
4527 ret = 1
4527 ret = 1
4528 if changes:
4528 if changes:
4529 msg = _('phase changed for %i changesets\n') % changes
4529 msg = _('phase changed for %i changesets\n') % changes
4530 if ret:
4530 if ret:
4531 ui.status(msg)
4531 ui.status(msg)
4532 else:
4532 else:
4533 ui.note(msg)
4533 ui.note(msg)
4534 else:
4534 else:
4535 ui.warn(_('no phases changed\n'))
4535 ui.warn(_('no phases changed\n'))
4536 ret = 1
4536 ret = 1
4537 return ret
4537 return ret
4538
4538
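# Illustrative sketch (assumption, not part of commands.py): marking a
# release revision public from a script.  Per the phase docstring above,
# exit status 1 means no phase changed or the change was rejected; the
# revision is an example value.
import subprocess
if subprocess.call(['hg', 'phase', '--public', '-r', '1.9']):
    print 'no phases changed (already public or change rejected)'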
4539 def postincoming(ui, repo, modheads, optupdate, checkout):
4539 def postincoming(ui, repo, modheads, optupdate, checkout):
4540 if modheads == 0:
4540 if modheads == 0:
4541 return
4541 return
4542 if optupdate:
4542 if optupdate:
4543 checkout, movemarkfrom = bookmarks.calculateupdate(ui, repo, checkout)
4543 checkout, movemarkfrom = bookmarks.calculateupdate(ui, repo, checkout)
4544 try:
4544 try:
4545 ret = hg.update(repo, checkout)
4545 ret = hg.update(repo, checkout)
4546 except util.Abort, inst:
4546 except util.Abort, inst:
4547 ui.warn(_("not updating: %s\n") % str(inst))
4547 ui.warn(_("not updating: %s\n") % str(inst))
4548 if inst.hint:
4548 if inst.hint:
4549 ui.warn(_("(%s)\n") % inst.hint)
4549 ui.warn(_("(%s)\n") % inst.hint)
4550 return 0
4550 return 0
4551 if not ret and not checkout:
4551 if not ret and not checkout:
4552 if bookmarks.update(repo, [movemarkfrom], repo['.'].node()):
4552 if bookmarks.update(repo, [movemarkfrom], repo['.'].node()):
4553 ui.status(_("updating bookmark %s\n") % repo._bookmarkcurrent)
4553 ui.status(_("updating bookmark %s\n") % repo._bookmarkcurrent)
4554 return ret
4554 return ret
4555 if modheads > 1:
4555 if modheads > 1:
4556 currentbranchheads = len(repo.branchheads())
4556 currentbranchheads = len(repo.branchheads())
4557 if currentbranchheads == modheads:
4557 if currentbranchheads == modheads:
4558 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
4558 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
4559 elif currentbranchheads > 1:
4559 elif currentbranchheads > 1:
4560 ui.status(_("(run 'hg heads .' to see heads, 'hg merge' to "
4560 ui.status(_("(run 'hg heads .' to see heads, 'hg merge' to "
4561 "merge)\n"))
4561 "merge)\n"))
4562 else:
4562 else:
4563 ui.status(_("(run 'hg heads' to see heads)\n"))
4563 ui.status(_("(run 'hg heads' to see heads)\n"))
4564 else:
4564 else:
4565 ui.status(_("(run 'hg update' to get a working copy)\n"))
4565 ui.status(_("(run 'hg update' to get a working copy)\n"))
4566
4566
4567 @command('^pull',
4567 @command('^pull',
4568 [('u', 'update', None,
4568 [('u', 'update', None,
4569 _('update to new branch head if changesets were pulled')),
4569 _('update to new branch head if changesets were pulled')),
4570 ('f', 'force', None, _('run even when remote repository is unrelated')),
4570 ('f', 'force', None, _('run even when remote repository is unrelated')),
4571 ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
4571 ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
4572 ('B', 'bookmark', [], _("bookmark to pull"), _('BOOKMARK')),
4572 ('B', 'bookmark', [], _("bookmark to pull"), _('BOOKMARK')),
4573 ('b', 'branch', [], _('a specific branch you would like to pull'),
4573 ('b', 'branch', [], _('a specific branch you would like to pull'),
4574 _('BRANCH')),
4574 _('BRANCH')),
4575 ] + remoteopts,
4575 ] + remoteopts,
4576 _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]'))
4576 _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]'))
4577 def pull(ui, repo, source="default", **opts):
4577 def pull(ui, repo, source="default", **opts):
4578 """pull changes from the specified source
4578 """pull changes from the specified source
4579
4579
4580 Pull changes from a remote repository to a local one.
4580 Pull changes from a remote repository to a local one.
4581
4581
4582 This finds all changes from the repository at the specified path
4582 This finds all changes from the repository at the specified path
4583 or URL and adds them to a local repository (the current one unless
4583 or URL and adds them to a local repository (the current one unless
4584 -R is specified). By default, this does not update the copy of the
4584 -R is specified). By default, this does not update the copy of the
4585 project in the working directory.
4585 project in the working directory.
4586
4586
4587 Use :hg:`incoming` if you want to see what would have been added
4587 Use :hg:`incoming` if you want to see what would have been added
4588 by a pull at the time you issued this command. If you then decide
4588 by a pull at the time you issued this command. If you then decide
4589 to add those changes to the repository, you should use :hg:`pull
4589 to add those changes to the repository, you should use :hg:`pull
4590 -r X` where ``X`` is the last changeset listed by :hg:`incoming`.
4590 -r X` where ``X`` is the last changeset listed by :hg:`incoming`.
4591
4591
4592 If SOURCE is omitted, the 'default' path will be used.
4592 If SOURCE is omitted, the 'default' path will be used.
4593 See :hg:`help urls` for more information.
4593 See :hg:`help urls` for more information.
4594
4594
4595 Returns 0 on success, 1 if an update had unresolved files.
4595 Returns 0 on success, 1 if an update had unresolved files.
4596 """
4596 """
4597 source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
4597 source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
4598 other = hg.peer(repo, opts, source)
4598 other = hg.peer(repo, opts, source)
4599 try:
4599 try:
4600 ui.status(_('pulling from %s\n') % util.hidepassword(source))
4600 ui.status(_('pulling from %s\n') % util.hidepassword(source))
4601 revs, checkout = hg.addbranchrevs(repo, other, branches,
4601 revs, checkout = hg.addbranchrevs(repo, other, branches,
4602 opts.get('rev'))
4602 opts.get('rev'))
4603
4603
4604 remotebookmarks = other.listkeys('bookmarks')
4604 remotebookmarks = other.listkeys('bookmarks')
4605
4605
4606 if opts.get('bookmark'):
4606 if opts.get('bookmark'):
4607 if not revs:
4607 if not revs:
4608 revs = []
4608 revs = []
4609 for b in opts['bookmark']:
4609 for b in opts['bookmark']:
4610 if b not in remotebookmarks:
4610 if b not in remotebookmarks:
4611 raise util.Abort(_('remote bookmark %s not found!') % b)
4611 raise util.Abort(_('remote bookmark %s not found!') % b)
4612 revs.append(remotebookmarks[b])
4612 revs.append(remotebookmarks[b])
4613
4613
4614 if revs:
4614 if revs:
4615 try:
4615 try:
4616 revs = [other.lookup(rev) for rev in revs]
4616 revs = [other.lookup(rev) for rev in revs]
4617 except error.CapabilityError:
4617 except error.CapabilityError:
4618 err = _("other repository doesn't support revision lookup, "
4618 err = _("other repository doesn't support revision lookup, "
4619 "so a rev cannot be specified.")
4619 "so a rev cannot be specified.")
4620 raise util.Abort(err)
4620 raise util.Abort(err)
4621
4621
4622 modheads = repo.pull(other, heads=revs, force=opts.get('force'))
4622 modheads = repo.pull(other, heads=revs, force=opts.get('force'))
4623 bookmarks.updatefromremote(ui, repo, remotebookmarks, source)
4623 bookmarks.updatefromremote(ui, repo, remotebookmarks, source)
4624 if checkout:
4624 if checkout:
4625 checkout = str(repo.changelog.rev(other.lookup(checkout)))
4625 checkout = str(repo.changelog.rev(other.lookup(checkout)))
4626 repo._subtoppath = source
4626 repo._subtoppath = source
4627 try:
4627 try:
4628 ret = postincoming(ui, repo, modheads, opts.get('update'), checkout)
4628 ret = postincoming(ui, repo, modheads, opts.get('update'), checkout)
4629
4629
4630 finally:
4630 finally:
4631 del repo._subtoppath
4631 del repo._subtoppath
4632
4632
4633 # update specified bookmarks
4633 # update specified bookmarks
4634 if opts.get('bookmark'):
4634 if opts.get('bookmark'):
4635 marks = repo._bookmarks
4635 marks = repo._bookmarks
4636 for b in opts['bookmark']:
4636 for b in opts['bookmark']:
4637 # explicit pull overrides local bookmark if any
4637 # explicit pull overrides local bookmark if any
4638 ui.status(_("importing bookmark %s\n") % b)
4638 ui.status(_("importing bookmark %s\n") % b)
4639 marks[b] = repo[remotebookmarks[b]].node()
4639 marks[b] = repo[remotebookmarks[b]].node()
4640 marks.write()
4640 marks.write()
4641 finally:
4641 finally:
4642 other.close()
4642 other.close()
4643 return ret
4643 return ret
4644
4644
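# Illustrative usage sketch, not part of the original file; the changeset
# hash and bookmark name below are invented. It follows the docstring above:
# preview with 'hg incoming', then pull selectively by revision or bookmark.
#
#   $ hg incoming                  # list what a pull would bring in
#   $ hg pull -r 4f2d8ae1          # pull up to (and including) that changeset
#   $ hg pull -B feature-x -u      # pull a remote bookmark and update the working copy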
4645 @command('^push',
4645 @command('^push',
4646 [('f', 'force', None, _('force push')),
4646 [('f', 'force', None, _('force push')),
4647 ('r', 'rev', [],
4647 ('r', 'rev', [],
4648 _('a changeset intended to be included in the destination'),
4648 _('a changeset intended to be included in the destination'),
4649 _('REV')),
4649 _('REV')),
4650 ('B', 'bookmark', [], _("bookmark to push"), _('BOOKMARK')),
4650 ('B', 'bookmark', [], _("bookmark to push"), _('BOOKMARK')),
4651 ('b', 'branch', [],
4651 ('b', 'branch', [],
4652 _('a specific branch you would like to push'), _('BRANCH')),
4652 _('a specific branch you would like to push'), _('BRANCH')),
4653 ('', 'new-branch', False, _('allow pushing a new branch')),
4653 ('', 'new-branch', False, _('allow pushing a new branch')),
4654 ] + remoteopts,
4654 ] + remoteopts,
4655 _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]'))
4655 _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]'))
4656 def push(ui, repo, dest=None, **opts):
4656 def push(ui, repo, dest=None, **opts):
4657 """push changes to the specified destination
4657 """push changes to the specified destination
4658
4658
4659 Push changesets from the local repository to the specified
4659 Push changesets from the local repository to the specified
4660 destination.
4660 destination.
4661
4661
4662 This operation is symmetrical to pull: it is identical to a pull
4662 This operation is symmetrical to pull: it is identical to a pull
4663 in the destination repository from the current one.
4663 in the destination repository from the current one.
4664
4664
4665 By default, push will not allow creation of new heads at the
4665 By default, push will not allow creation of new heads at the
4666 destination, since multiple heads would make it unclear which head
4666 destination, since multiple heads would make it unclear which head
4667 to use. In this situation, it is recommended to pull and merge
4667 to use. In this situation, it is recommended to pull and merge
4668 before pushing.
4668 before pushing.
4669
4669
4670 Use --new-branch if you want to allow push to create a new named
4671 branch that is not present at the destination. This allows you to
4672 create only the new branch, without forcing other changes.
4673
4673
4674 .. note::
4674 .. note::
4675
4675
4676 Extra care should be taken with the -f/--force option,
4676 Extra care should be taken with the -f/--force option,
4677 which will push all new heads on all branches, an action which will
4677 which will push all new heads on all branches, an action which will
4678 almost always cause confusion for collaborators.
4678 almost always cause confusion for collaborators.
4679
4679
4680 If -r/--rev is used, the specified revision and all its ancestors
4680 If -r/--rev is used, the specified revision and all its ancestors
4681 will be pushed to the remote repository.
4681 will be pushed to the remote repository.
4682
4682
4683 If -B/--bookmark is used, the specified bookmarked revision, its
4683 If -B/--bookmark is used, the specified bookmarked revision, its
4684 ancestors, and the bookmark will be pushed to the remote
4684 ancestors, and the bookmark will be pushed to the remote
4685 repository.
4685 repository.
4686
4686
4687 Please see :hg:`help urls` for important details about ``ssh://``
4687 Please see :hg:`help urls` for important details about ``ssh://``
4688 URLs. If DESTINATION is omitted, a default path will be used.
4688 URLs. If DESTINATION is omitted, a default path will be used.
4689
4689
4690 Returns 0 if push was successful, 1 if nothing to push.
4690 Returns 0 if push was successful, 1 if nothing to push.
4691 """
4691 """
4692
4692
4693 if opts.get('bookmark'):
4693 if opts.get('bookmark'):
4694 ui.setconfig('bookmarks', 'pushing', opts['bookmark'], 'push')
4694 ui.setconfig('bookmarks', 'pushing', opts['bookmark'], 'push')
4695 for b in opts['bookmark']:
4695 for b in opts['bookmark']:
4696 # translate -B options to -r so changesets get pushed
4696 # translate -B options to -r so changesets get pushed
4697 if b in repo._bookmarks:
4697 if b in repo._bookmarks:
4698 opts.setdefault('rev', []).append(b)
4698 opts.setdefault('rev', []).append(b)
4699 else:
4699 else:
4700 # if we try to push a deleted bookmark, translate it to null
4700 # if we try to push a deleted bookmark, translate it to null
4701 # this lets simultaneous -r, -b options continue working
4701 # this lets simultaneous -r, -b options continue working
4702 opts.setdefault('rev', []).append("null")
4702 opts.setdefault('rev', []).append("null")
4703
4703
4704 dest = ui.expandpath(dest or 'default-push', dest or 'default')
4704 dest = ui.expandpath(dest or 'default-push', dest or 'default')
4705 dest, branches = hg.parseurl(dest, opts.get('branch'))
4705 dest, branches = hg.parseurl(dest, opts.get('branch'))
4706 ui.status(_('pushing to %s\n') % util.hidepassword(dest))
4706 ui.status(_('pushing to %s\n') % util.hidepassword(dest))
4707 revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
4707 revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
4708 try:
4708 try:
4709 other = hg.peer(repo, opts, dest)
4709 other = hg.peer(repo, opts, dest)
4710 except error.RepoError:
4710 except error.RepoError:
4711 if dest == "default-push":
4711 if dest == "default-push":
4712 raise util.Abort(_("default repository not configured!"),
4712 raise util.Abort(_("default repository not configured!"),
4713 hint=_('see the "path" section in "hg help config"'))
4713 hint=_('see the "path" section in "hg help config"'))
4714 else:
4714 else:
4715 raise
4715 raise
4716
4716
4717 if revs:
4717 if revs:
4718 revs = [repo.lookup(r) for r in scmutil.revrange(repo, revs)]
4718 revs = [repo.lookup(r) for r in scmutil.revrange(repo, revs)]
4719
4719
4720 repo._subtoppath = dest
4720 repo._subtoppath = dest
4721 try:
4721 try:
4722 # push subrepos depth-first for coherent ordering
4722 # push subrepos depth-first for coherent ordering
4723 c = repo['']
4723 c = repo['']
4724 subs = c.substate # only repos that are committed
4724 subs = c.substate # only repos that are committed
4725 for s in sorted(subs):
4725 for s in sorted(subs):
4726 result = c.sub(s).push(opts)
4726 result = c.sub(s).push(opts)
4727 if result == 0:
4727 if result == 0:
4728 return not result
4728 return not result
4729 finally:
4729 finally:
4730 del repo._subtoppath
4730 del repo._subtoppath
4731 result = repo.push(other, opts.get('force'), revs=revs,
4731 result = repo.push(other, opts.get('force'), revs=revs,
4732 newbranch=opts.get('new_branch'))
4732 newbranch=opts.get('new_branch'))
4733
4733
4734 result = not result
4734 result = not result
4735
4735
4736 if opts.get('bookmark'):
4736 if opts.get('bookmark'):
4737 bresult = bookmarks.pushtoremote(ui, repo, other, opts['bookmark'])
4737 bresult = bookmarks.pushtoremote(ui, repo, other, opts['bookmark'])
4738 if bresult == 2:
4738 if bresult == 2:
4739 return 2
4739 return 2
4740 if not result and bresult:
4740 if not result and bresult:
4741 result = 2
4741 result = 2
4742
4742
4743 return result
4743 return result
4744
4744
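# Illustrative usage sketch, not part of the original file; the bookmark name
# is invented. As the code above shows, each -B/--bookmark argument is turned
# into a -r revision ("null" for a locally deleted bookmark) before the
# outgoing changesets are computed.
#
#   $ hg push -B feature-x             # push the bookmarked head and the bookmark itself
#   $ hg push --new-branch             # allow a branch name the destination lacks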
4745 @command('recover', [])
4746 def recover(ui, repo):
4747 """roll back an interrupted transaction
4748
4749 Recover from an interrupted commit or pull.
4750
4751 This command tries to fix the repository status after an
4752 interrupted operation. It should only be necessary when Mercurial
4753 suggests it.
4754
4755 Returns 0 if successful, 1 if nothing to recover or verify fails.
4756 """
4757 if repo.recover():
4758 return hg.verify(repo)
4759 return 1
4760
4760
4761 @command('^remove|rm',
4761 @command('^remove|rm',
4762 [('A', 'after', None, _('record delete for missing files')),
4762 [('A', 'after', None, _('record delete for missing files')),
4763 ('f', 'force', None,
4763 ('f', 'force', None,
4764 _('remove (and delete) file even if added or modified')),
4764 _('remove (and delete) file even if added or modified')),
4765 ] + walkopts,
4765 ] + walkopts,
4766 _('[OPTION]... FILE...'))
4766 _('[OPTION]... FILE...'))
4767 def remove(ui, repo, *pats, **opts):
4767 def remove(ui, repo, *pats, **opts):
4768 """remove the specified files on the next commit
4768 """remove the specified files on the next commit
4769
4769
4770 Schedule the indicated files for removal from the current branch.
4770 Schedule the indicated files for removal from the current branch.
4771
4771
4772 This command schedules the files to be removed at the next commit.
4772 This command schedules the files to be removed at the next commit.
4773 To undo a remove before that, see :hg:`revert`. To undo added
4773 To undo a remove before that, see :hg:`revert`. To undo added
4774 files, see :hg:`forget`.
4774 files, see :hg:`forget`.
4775
4775
4776 .. container:: verbose
4776 .. container:: verbose
4777
4777
4778 -A/--after can be used to remove only files that have already
4778 -A/--after can be used to remove only files that have already
4779 been deleted, -f/--force can be used to force deletion, and -Af
4779 been deleted, -f/--force can be used to force deletion, and -Af
4780 can be used to remove files from the next revision without
4780 can be used to remove files from the next revision without
4781 deleting them from the working directory.
4781 deleting them from the working directory.
4782
4782
4783 The following table details the behavior of remove for different
4783 The following table details the behavior of remove for different
4784 file states (columns) and option combinations (rows). The file
4784 file states (columns) and option combinations (rows). The file
4785 states are Added [A], Clean [C], Modified [M] and Missing [!]
4785 states are Added [A], Clean [C], Modified [M] and Missing [!]
4786 (as reported by :hg:`status`). The actions are Warn, Remove
4786 (as reported by :hg:`status`). The actions are Warn, Remove
4787 (from branch) and Delete (from disk):
4787 (from branch) and Delete (from disk):
4788
4788
4789 ========= == == == ==
4790 opt/state A  C  M  !
4791 ========= == == == ==
4792 none      W  RD W  R
4793 -f        R  RD RD R
4794 -A        W  W  W  R
4795 -Af       R  R  R  R
4796 ========= == == == ==
4797
4797
4798 Note that remove never deletes files in Added [A] state from the
4798 Note that remove never deletes files in Added [A] state from the
4799 working directory, not even if option --force is specified.
4799 working directory, not even if option --force is specified.
4800
4800
4801 Returns 0 on success, 1 if any warnings encountered.
4801 Returns 0 on success, 1 if any warnings encountered.
4802 """
4802 """
4803
4803
4804 ret = 0
4804 ret = 0
4805 after, force = opts.get('after'), opts.get('force')
4805 after, force = opts.get('after'), opts.get('force')
4806 if not pats and not after:
4806 if not pats and not after:
4807 raise util.Abort(_('no files specified'))
4807 raise util.Abort(_('no files specified'))
4808
4808
4809 m = scmutil.match(repo[None], pats, opts)
4809 m = scmutil.match(repo[None], pats, opts)
4810 s = repo.status(match=m, clean=True)
4810 s = repo.status(match=m, clean=True)
4811 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
4811 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
4812
4812
4813 # warn about failure to delete explicit files/dirs
4813 # warn about failure to delete explicit files/dirs
4814 wctx = repo[None]
4814 wctx = repo[None]
4815 for f in m.files():
4815 for f in m.files():
4816 if f in repo.dirstate or f in wctx.dirs():
4816 if f in repo.dirstate or f in wctx.dirs():
4817 continue
4817 continue
4818 if os.path.exists(m.rel(f)):
4818 if os.path.exists(m.rel(f)):
4819 if os.path.isdir(m.rel(f)):
4819 if os.path.isdir(m.rel(f)):
4820 ui.warn(_('not removing %s: no tracked files\n') % m.rel(f))
4820 ui.warn(_('not removing %s: no tracked files\n') % m.rel(f))
4821 else:
4821 else:
4822 ui.warn(_('not removing %s: file is untracked\n') % m.rel(f))
4822 ui.warn(_('not removing %s: file is untracked\n') % m.rel(f))
4823 # missing files will generate a warning elsewhere
4823 # missing files will generate a warning elsewhere
4824 ret = 1
4824 ret = 1
4825
4825
4826 if force:
4826 if force:
4827 list = modified + deleted + clean + added
4827 list = modified + deleted + clean + added
4828 elif after:
4828 elif after:
4829 list = deleted
4829 list = deleted
4830 for f in modified + added + clean:
4830 for f in modified + added + clean:
4831 ui.warn(_('not removing %s: file still exists\n') % m.rel(f))
4831 ui.warn(_('not removing %s: file still exists\n') % m.rel(f))
4832 ret = 1
4832 ret = 1
4833 else:
4833 else:
4834 list = deleted + clean
4834 list = deleted + clean
4835 for f in modified:
4835 for f in modified:
4836 ui.warn(_('not removing %s: file is modified (use -f'
4836 ui.warn(_('not removing %s: file is modified (use -f'
4837 ' to force removal)\n') % m.rel(f))
4837 ' to force removal)\n') % m.rel(f))
4838 ret = 1
4838 ret = 1
4839 for f in added:
4839 for f in added:
4840 ui.warn(_('not removing %s: file has been marked for add'
4840 ui.warn(_('not removing %s: file has been marked for add'
4841 ' (use forget to undo)\n') % m.rel(f))
4841 ' (use forget to undo)\n') % m.rel(f))
4842 ret = 1
4842 ret = 1
4843
4843
4844 for f in sorted(list):
4844 for f in sorted(list):
4845 if ui.verbose or not m.exact(f):
4845 if ui.verbose or not m.exact(f):
4846 ui.status(_('removing %s\n') % m.rel(f))
4846 ui.status(_('removing %s\n') % m.rel(f))
4847
4847
4848 wlock = repo.wlock()
4848 wlock = repo.wlock()
4849 try:
4849 try:
4850 if not after:
4850 if not after:
4851 for f in list:
4851 for f in list:
4852 if f in added:
4852 if f in added:
4853 continue # we never unlink added files on remove
4853 continue # we never unlink added files on remove
4854 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
4854 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
4855 repo[None].forget(list)
4855 repo[None].forget(list)
4856 finally:
4856 finally:
4857 wlock.release()
4857 wlock.release()
4858
4858
4859 return ret
4859 return ret
4860
4860
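# Illustrative sketch, not part of the original commands.py: the behavior
# table from the remove() docstring above as a plain mapping, handy for
# looking up what a given option/file-state combination does.
# W = warn, R = remove from the branch, D = delete from disk.
_remove_behavior_example = {
    'none': {'A': 'W', 'C': 'RD', 'M': 'W',  '!': 'R'},
    '-f':   {'A': 'R', 'C': 'RD', 'M': 'RD', '!': 'R'},
    '-A':   {'A': 'W', 'C': 'W',  'M': 'W',  '!': 'R'},
    '-Af':  {'A': 'R', 'C': 'R',  'M': 'R',  '!': 'R'},
}
# e.g. _remove_behavior_example['-Af']['C'] == 'R': drop the file from the
# next revision but keep it on disk.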
4861 @command('rename|move|mv',
4861 @command('rename|move|mv',
4862 [('A', 'after', None, _('record a rename that has already occurred')),
4862 [('A', 'after', None, _('record a rename that has already occurred')),
4863 ('f', 'force', None, _('forcibly copy over an existing managed file')),
4863 ('f', 'force', None, _('forcibly copy over an existing managed file')),
4864 ] + walkopts + dryrunopts,
4864 ] + walkopts + dryrunopts,
4865 _('[OPTION]... SOURCE... DEST'))
4865 _('[OPTION]... SOURCE... DEST'))
4866 def rename(ui, repo, *pats, **opts):
4866 def rename(ui, repo, *pats, **opts):
4867 """rename files; equivalent of copy + remove
4867 """rename files; equivalent of copy + remove
4868
4868
4869 Mark dest as copies of sources; mark sources for deletion. If dest
4869 Mark dest as copies of sources; mark sources for deletion. If dest
4870 is a directory, copies are put in that directory. If dest is a
4870 is a directory, copies are put in that directory. If dest is a
4871 file, there can only be one source.
4871 file, there can only be one source.
4872
4872
4873 By default, this command copies the contents of files as they
4873 By default, this command copies the contents of files as they
4874 exist in the working directory. If invoked with -A/--after, the
4874 exist in the working directory. If invoked with -A/--after, the
4875 operation is recorded, but no copying is performed.
4875 operation is recorded, but no copying is performed.
4876
4876
4877 This command takes effect at the next commit. To undo a rename
4877 This command takes effect at the next commit. To undo a rename
4878 before that, see :hg:`revert`.
4878 before that, see :hg:`revert`.
4879
4879
4880 Returns 0 on success, 1 if errors are encountered.
4880 Returns 0 on success, 1 if errors are encountered.
4881 """
4881 """
4882 wlock = repo.wlock(False)
4882 wlock = repo.wlock(False)
4883 try:
4883 try:
4884 return cmdutil.copy(ui, repo, pats, opts, rename=True)
4884 return cmdutil.copy(ui, repo, pats, opts, rename=True)
4885 finally:
4885 finally:
4886 wlock.release()
4886 wlock.release()
4887
4887
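# Illustrative usage sketch, not part of the original file; the file names
# are invented. -A/--after records a rename that was already performed
# outside Mercurial, as described in the docstring above.
#
#   $ hg rename src/old.py src/new.py        # copy + schedule removal of the source
#   $ mv docs/a.txt docs/b.txt
#   $ hg rename -A docs/a.txt docs/b.txt     # record the move without copying again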
4888 @command('resolve',
4888 @command('resolve',
4889 [('a', 'all', None, _('select all unresolved files')),
4889 [('a', 'all', None, _('select all unresolved files')),
4890 ('l', 'list', None, _('list state of files needing merge')),
4890 ('l', 'list', None, _('list state of files needing merge')),
4891 ('m', 'mark', None, _('mark files as resolved')),
4891 ('m', 'mark', None, _('mark files as resolved')),
4892 ('u', 'unmark', None, _('mark files as unresolved')),
4892 ('u', 'unmark', None, _('mark files as unresolved')),
4893 ('n', 'no-status', None, _('hide status prefix'))]
4893 ('n', 'no-status', None, _('hide status prefix'))]
4894 + mergetoolopts + walkopts,
4894 + mergetoolopts + walkopts,
4895 _('[OPTION]... [FILE]...'))
4895 _('[OPTION]... [FILE]...'))
4896 def resolve(ui, repo, *pats, **opts):
4896 def resolve(ui, repo, *pats, **opts):
4897 """redo merges or set/view the merge status of files
4897 """redo merges or set/view the merge status of files
4898
4898
4899 Merges with unresolved conflicts are often the result of
4899 Merges with unresolved conflicts are often the result of
4900 non-interactive merging using the ``internal:merge`` configuration
4900 non-interactive merging using the ``internal:merge`` configuration
4901 setting, or a command-line merge tool like ``diff3``. The resolve
4901 setting, or a command-line merge tool like ``diff3``. The resolve
4902 command is used to manage the files involved in a merge, after
4902 command is used to manage the files involved in a merge, after
4903 :hg:`merge` has been run, and before :hg:`commit` is run (i.e. the
4903 :hg:`merge` has been run, and before :hg:`commit` is run (i.e. the
4904 working directory must have two parents). See :hg:`help
4904 working directory must have two parents). See :hg:`help
4905 merge-tools` for information on configuring merge tools.
4905 merge-tools` for information on configuring merge tools.
4906
4906
4907 The resolve command can be used in the following ways:
4907 The resolve command can be used in the following ways:
4908
4908
4909 - :hg:`resolve [--tool TOOL] FILE...`: attempt to re-merge the specified
4909 - :hg:`resolve [--tool TOOL] FILE...`: attempt to re-merge the specified
4910 files, discarding any previous merge attempts. Re-merging is not
4910 files, discarding any previous merge attempts. Re-merging is not
4911 performed for files already marked as resolved. Use ``--all/-a``
4911 performed for files already marked as resolved. Use ``--all/-a``
4912 to select all unresolved files. ``--tool`` can be used to specify
4912 to select all unresolved files. ``--tool`` can be used to specify
4913 the merge tool used for the given files. It overrides the HGMERGE
4913 the merge tool used for the given files. It overrides the HGMERGE
4914 environment variable and your configuration files. Previous file
4914 environment variable and your configuration files. Previous file
4915 contents are saved with a ``.orig`` suffix.
4915 contents are saved with a ``.orig`` suffix.
4916
4916
4917 - :hg:`resolve -m [FILE]`: mark a file as having been resolved
4917 - :hg:`resolve -m [FILE]`: mark a file as having been resolved
4918 (e.g. after having manually fixed-up the files). The default is
4918 (e.g. after having manually fixed-up the files). The default is
4919 to mark all unresolved files.
4919 to mark all unresolved files.
4920
4920
4921 - :hg:`resolve -u [FILE]...`: mark a file as unresolved. The
4921 - :hg:`resolve -u [FILE]...`: mark a file as unresolved. The
4922 default is to mark all resolved files.
4922 default is to mark all resolved files.
4923
4923
4924 - :hg:`resolve -l`: list files which had or still have conflicts.
4924 - :hg:`resolve -l`: list files which had or still have conflicts.
4925 In the printed list, ``U`` = unresolved and ``R`` = resolved.
4925 In the printed list, ``U`` = unresolved and ``R`` = resolved.
4926
4926
4927 Note that Mercurial will not let you commit files with unresolved
4927 Note that Mercurial will not let you commit files with unresolved
4928 merge conflicts. You must use :hg:`resolve -m ...` before you can
4928 merge conflicts. You must use :hg:`resolve -m ...` before you can
4929 commit after a conflicting merge.
4929 commit after a conflicting merge.
4930
4930
4931 Returns 0 on success, 1 if any files fail a resolve attempt.
4931 Returns 0 on success, 1 if any files fail a resolve attempt.
4932 """
4932 """
4933
4933
4934 all, mark, unmark, show, nostatus = \
4934 all, mark, unmark, show, nostatus = \
4935 [opts.get(o) for o in 'all mark unmark list no_status'.split()]
4935 [opts.get(o) for o in 'all mark unmark list no_status'.split()]
4936
4936
4937 if (show and (mark or unmark)) or (mark and unmark):
4937 if (show and (mark or unmark)) or (mark and unmark):
4938 raise util.Abort(_("too many options specified"))
4938 raise util.Abort(_("too many options specified"))
4939 if pats and all:
4939 if pats and all:
4940 raise util.Abort(_("can't specify --all and patterns"))
4940 raise util.Abort(_("can't specify --all and patterns"))
4941 if not (all or pats or show or mark or unmark):
4941 if not (all or pats or show or mark or unmark):
4942 raise util.Abort(_('no files or directories specified; '
4942 raise util.Abort(_('no files or directories specified; '
4943 'use --all to remerge all files'))
4943 'use --all to remerge all files'))
4944
4944
4945 ms = mergemod.mergestate(repo)
4945 ms = mergemod.mergestate(repo)
4946
4946
4947 if not ms.active() and not show:
4947 if not ms.active() and not show:
4948 raise util.Abort(_('resolve command not applicable when not merging'))
4948 raise util.Abort(_('resolve command not applicable when not merging'))
4949
4949
4950 m = scmutil.match(repo[None], pats, opts)
4950 m = scmutil.match(repo[None], pats, opts)
4951 ret = 0
4951 ret = 0
4952
4952
4953 didwork = False
4953 didwork = False
4954 for f in ms:
4954 for f in ms:
4955 if not m(f):
4955 if not m(f):
4956 continue
4956 continue
4957
4957
4958 didwork = True
4958 didwork = True
4959
4959
4960 if show:
4960 if show:
4961 if nostatus:
4961 if nostatus:
4962 ui.write("%s\n" % f)
4962 ui.write("%s\n" % f)
4963 else:
4963 else:
4964 ui.write("%s %s\n" % (ms[f].upper(), f),
4964 ui.write("%s %s\n" % (ms[f].upper(), f),
4965 label='resolve.' +
4965 label='resolve.' +
4966 {'u': 'unresolved', 'r': 'resolved'}[ms[f]])
4966 {'u': 'unresolved', 'r': 'resolved'}[ms[f]])
4967 elif mark:
4967 elif mark:
4968 ms.mark(f, "r")
4968 ms.mark(f, "r")
4969 elif unmark:
4969 elif unmark:
4970 ms.mark(f, "u")
4970 ms.mark(f, "u")
4971 else:
4971 else:
4972 wctx = repo[None]
4972 wctx = repo[None]
4973
4973
4974 # backup pre-resolve (merge uses .orig for its own purposes)
4974 # backup pre-resolve (merge uses .orig for its own purposes)
4975 a = repo.wjoin(f)
4975 a = repo.wjoin(f)
4976 util.copyfile(a, a + ".resolve")
4976 util.copyfile(a, a + ".resolve")
4977
4977
4978 try:
4978 try:
4979 # resolve file
4979 # resolve file
4980 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
4980 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
4981 'resolve')
4981 'resolve')
4982 if ms.resolve(f, wctx):
4982 if ms.resolve(f, wctx):
4983 ret = 1
4983 ret = 1
4984 finally:
4984 finally:
4985 ui.setconfig('ui', 'forcemerge', '', 'resolve')
4985 ui.setconfig('ui', 'forcemerge', '', 'resolve')
4986 ms.commit()
4986 ms.commit()
4987
4987
4988 # replace filemerge's .orig file with our resolve file
4988 # replace filemerge's .orig file with our resolve file
4989 util.rename(a + ".resolve", a + ".orig")
4989 util.rename(a + ".resolve", a + ".orig")
4990
4990
4991 ms.commit()
4991 ms.commit()
4992
4992
4993 if not didwork and pats:
4993 if not didwork and pats:
4994 ui.warn(_("arguments do not match paths that need resolved\n"))
4994 ui.warn(_("arguments do not match paths that need resolved\n"))
4995
4995
4996 # Nudge users into finishing an unfinished operation. We don't print
4996 # Nudge users into finishing an unfinished operation. We don't print
4997 # this with the list/show operation because we want list/show to remain
4997 # this with the list/show operation because we want list/show to remain
4998 # machine readable.
4998 # machine readable.
4999 if not list(ms.unresolved()) and not show:
4999 if not list(ms.unresolved()) and not show:
5000 ui.status(_('no more unresolved files\n'))
5000 ui.status(_('no more unresolved files\n'))
5001
5001
5002 return ret
5002 return ret
5003
5003
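# Illustrative workflow sketch, not part of the original file; the file name
# and merge tool are invented. It follows the docstring above.
#
#   $ hg resolve --list                      # U = unresolved, R = resolved
#   $ hg resolve --tool kdiff3 src/main.c    # re-run the merge for one file
#   $ hg resolve -m src/main.c               # mark it resolved after manual fixes
#   $ hg commit                              # only possible once nothing is unresolved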
5004 @command('revert',
5004 @command('revert',
5005 [('a', 'all', None, _('revert all changes when no arguments given')),
5005 [('a', 'all', None, _('revert all changes when no arguments given')),
5006 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
5006 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
5007 ('r', 'rev', '', _('revert to the specified revision'), _('REV')),
5007 ('r', 'rev', '', _('revert to the specified revision'), _('REV')),
5008 ('C', 'no-backup', None, _('do not save backup copies of files')),
5008 ('C', 'no-backup', None, _('do not save backup copies of files')),
5009 ] + walkopts + dryrunopts,
5009 ] + walkopts + dryrunopts,
5010 _('[OPTION]... [-r REV] [NAME]...'))
5010 _('[OPTION]... [-r REV] [NAME]...'))
5011 def revert(ui, repo, *pats, **opts):
5011 def revert(ui, repo, *pats, **opts):
5012 """restore files to their checkout state
5012 """restore files to their checkout state
5013
5013
5014 .. note::
5014 .. note::
5015
5015
5016 To check out earlier revisions, you should use :hg:`update REV`.
5016 To check out earlier revisions, you should use :hg:`update REV`.
5017 To cancel an uncommitted merge (and lose your changes),
5017 To cancel an uncommitted merge (and lose your changes),
5018 use :hg:`update --clean .`.
5018 use :hg:`update --clean .`.
5019
5019
5020 With no revision specified, revert the specified files or directories
5020 With no revision specified, revert the specified files or directories
5021 to the contents they had in the parent of the working directory.
5021 to the contents they had in the parent of the working directory.
5022 This restores the contents of files to an unmodified
5022 This restores the contents of files to an unmodified
5023 state and unschedules adds, removes, copies, and renames. If the
5023 state and unschedules adds, removes, copies, and renames. If the
5024 working directory has two parents, you must explicitly specify a
5024 working directory has two parents, you must explicitly specify a
5025 revision.
5025 revision.
5026
5026
5027 Using the -r/--rev or -d/--date options, revert the given files or
5027 Using the -r/--rev or -d/--date options, revert the given files or
5028 directories to their states as of a specific revision. Because
5028 directories to their states as of a specific revision. Because
5029 revert does not change the working directory parents, this will
5029 revert does not change the working directory parents, this will
5030 cause these files to appear modified. This can be helpful to "back
5030 cause these files to appear modified. This can be helpful to "back
5031 out" some or all of an earlier change. See :hg:`backout` for a
5031 out" some or all of an earlier change. See :hg:`backout` for a
5032 related method.
5032 related method.
5033
5033
5034 Modified files are saved with a .orig suffix before reverting.
5034 Modified files are saved with a .orig suffix before reverting.
5035 To disable these backups, use --no-backup.
5035 To disable these backups, use --no-backup.
5036
5036
5037 See :hg:`help dates` for a list of formats valid for -d/--date.
5037 See :hg:`help dates` for a list of formats valid for -d/--date.
5038
5038
5039 Returns 0 on success.
5039 Returns 0 on success.
5040 """
5040 """
5041
5041
5042 if opts.get("date"):
5042 if opts.get("date"):
5043 if opts.get("rev"):
5043 if opts.get("rev"):
5044 raise util.Abort(_("you can't specify a revision and a date"))
5044 raise util.Abort(_("you can't specify a revision and a date"))
5045 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
5045 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
5046
5046
5047 parent, p2 = repo.dirstate.parents()
5047 parent, p2 = repo.dirstate.parents()
5048 if not opts.get('rev') and p2 != nullid:
5048 if not opts.get('rev') and p2 != nullid:
5049 # revert after merge is a trap for new users (issue2915)
5049 # revert after merge is a trap for new users (issue2915)
5050 raise util.Abort(_('uncommitted merge with no revision specified'),
5050 raise util.Abort(_('uncommitted merge with no revision specified'),
5051 hint=_('use "hg update" or see "hg help revert"'))
5051 hint=_('use "hg update" or see "hg help revert"'))
5052
5052
5053 ctx = scmutil.revsingle(repo, opts.get('rev'))
5053 ctx = scmutil.revsingle(repo, opts.get('rev'))
5054
5054
5055 if not pats and not opts.get('all'):
5055 if not pats and not opts.get('all'):
5056 msg = _("no files or directories specified")
5056 msg = _("no files or directories specified")
5057 if p2 != nullid:
5057 if p2 != nullid:
5058 hint = _("uncommitted merge, use --all to discard all changes,"
5058 hint = _("uncommitted merge, use --all to discard all changes,"
5059 " or 'hg update -C .' to abort the merge")
5059 " or 'hg update -C .' to abort the merge")
5060 raise util.Abort(msg, hint=hint)
5060 raise util.Abort(msg, hint=hint)
5061 dirty = util.any(repo.status())
5061 dirty = util.any(repo.status())
5062 node = ctx.node()
5062 node = ctx.node()
5063 if node != parent:
5063 if node != parent:
5064 if dirty:
5064 if dirty:
5065 hint = _("uncommitted changes, use --all to discard all"
5065 hint = _("uncommitted changes, use --all to discard all"
5066 " changes, or 'hg update %s' to update") % ctx.rev()
5066 " changes, or 'hg update %s' to update") % ctx.rev()
5067 else:
5067 else:
5068 hint = _("use --all to revert all files,"
5068 hint = _("use --all to revert all files,"
5069 " or 'hg update %s' to update") % ctx.rev()
5069 " or 'hg update %s' to update") % ctx.rev()
5070 elif dirty:
5070 elif dirty:
5071 hint = _("uncommitted changes, use --all to discard all changes")
5071 hint = _("uncommitted changes, use --all to discard all changes")
5072 else:
5072 else:
5073 hint = _("use --all to revert all files")
5073 hint = _("use --all to revert all files")
5074 raise util.Abort(msg, hint=hint)
5074 raise util.Abort(msg, hint=hint)
5075
5075
5076 return cmdutil.revert(ui, repo, ctx, (parent, p2), *pats, **opts)
5076 return cmdutil.revert(ui, repo, ctx, (parent, p2), *pats, **opts)
5077
5077
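# Illustrative usage sketch, not part of the original file; the revision and
# file names are invented. Reverting to an older revision does not change the
# working directory parents, so the reverted files then show up as modified.
#
#   $ hg revert foo.c                  # back to the working directory parent's version
#   $ hg revert -r 120 foo.c           # contents as of revision 120; foo.c.orig is kept
#   $ hg revert --all --no-backup      # discard all local changes, write no .orig files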
5078 @command('rollback', dryrunopts +
5078 @command('rollback', dryrunopts +
5079 [('f', 'force', False, _('ignore safety measures'))])
5079 [('f', 'force', False, _('ignore safety measures'))])
5080 def rollback(ui, repo, **opts):
5080 def rollback(ui, repo, **opts):
5081 """roll back the last transaction (DANGEROUS) (DEPRECATED)
5081 """roll back the last transaction (DANGEROUS) (DEPRECATED)
5082
5082
5083 Please use :hg:`commit --amend` instead of rollback to correct
5083 Please use :hg:`commit --amend` instead of rollback to correct
5084 mistakes in the last commit.
5084 mistakes in the last commit.
5085
5085
5086 This command should be used with care. There is only one level of
5086 This command should be used with care. There is only one level of
5087 rollback, and there is no way to undo a rollback. It will also
5087 rollback, and there is no way to undo a rollback. It will also
5088 restore the dirstate at the time of the last transaction, losing
5088 restore the dirstate at the time of the last transaction, losing
5089 any dirstate changes since that time. This command does not alter
5089 any dirstate changes since that time. This command does not alter
5090 the working directory.
5090 the working directory.
5091
5091
5092 Transactions are used to encapsulate the effects of all commands
5092 Transactions are used to encapsulate the effects of all commands
5093 that create new changesets or propagate existing changesets into a
5093 that create new changesets or propagate existing changesets into a
5094 repository.
5094 repository.
5095
5095
5096 .. container:: verbose
5096 .. container:: verbose
5097
5097
5098 For example, the following commands are transactional, and their
5098 For example, the following commands are transactional, and their
5099 effects can be rolled back:
5099 effects can be rolled back:
5100
5100
5101 - commit
5101 - commit
5102 - import
5102 - import
5103 - pull
5103 - pull
5104 - push (with this repository as the destination)
5104 - push (with this repository as the destination)
5105 - unbundle
5105 - unbundle
5106
5106
5107 To avoid permanent data loss, rollback will refuse to roll back a
5108 commit transaction if it isn't checked out. Use --force to
5109 override this protection.
5110
5110
5111 This command is not intended for use on public repositories. Once
5111 This command is not intended for use on public repositories. Once
5112 changes are visible for pull by other users, rolling a transaction
5112 changes are visible for pull by other users, rolling a transaction
5113 back locally is ineffective (someone else may already have pulled
5113 back locally is ineffective (someone else may already have pulled
5114 the changes). Furthermore, a race is possible with readers of the
5114 the changes). Furthermore, a race is possible with readers of the
5115 repository; for example an in-progress pull from the repository
5115 repository; for example an in-progress pull from the repository
5116 may fail if a rollback is performed.
5116 may fail if a rollback is performed.
5117
5117
5118 Returns 0 on success, 1 if no rollback data is available.
5118 Returns 0 on success, 1 if no rollback data is available.
5119 """
5119 """
5120 return repo.rollback(dryrun=opts.get('dry_run'),
5120 return repo.rollback(dryrun=opts.get('dry_run'),
5121 force=opts.get('force'))
5121 force=opts.get('force'))
5122
5122
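# Illustrative usage sketch, not part of the original file:
#
#   $ hg rollback --dry-run            # show what would be undone, change nothing
#   $ hg rollback                      # undo the last transaction, e.g. the last pull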
5123 @command('root', [])
5124 def root(ui, repo):
5125 """print the root (top) of the current working directory
5126
5127 Print the root directory of the current repository.
5128
5129 Returns 0 on success.
5130 """
5131 ui.write(repo.root + "\n")
5132
5132
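# Illustrative usage sketch, not part of the original file; the path is
# invented.
#
#   $ hg root
#   /home/alice/project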
5133 @command('^serve',
5133 @command('^serve',
5134 [('A', 'accesslog', '', _('name of access log file to write to'),
5134 [('A', 'accesslog', '', _('name of access log file to write to'),
5135 _('FILE')),
5135 _('FILE')),
5136 ('d', 'daemon', None, _('run server in background')),
5136 ('d', 'daemon', None, _('run server in background')),
5137 ('', 'daemon-pipefds', '', _('used internally by daemon mode'), _('NUM')),
5137 ('', 'daemon-pipefds', '', _('used internally by daemon mode'), _('NUM')),
5138 ('E', 'errorlog', '', _('name of error log file to write to'), _('FILE')),
5138 ('E', 'errorlog', '', _('name of error log file to write to'), _('FILE')),
5139 # use string type, then we can check if something was passed
5139 # use string type, then we can check if something was passed
5140 ('p', 'port', '', _('port to listen on (default: 8000)'), _('PORT')),
5140 ('p', 'port', '', _('port to listen on (default: 8000)'), _('PORT')),
5141 ('a', 'address', '', _('address to listen on (default: all interfaces)'),
5141 ('a', 'address', '', _('address to listen on (default: all interfaces)'),
5142 _('ADDR')),
5142 _('ADDR')),
5143 ('', 'prefix', '', _('prefix path to serve from (default: server root)'),
5143 ('', 'prefix', '', _('prefix path to serve from (default: server root)'),
5144 _('PREFIX')),
5144 _('PREFIX')),
5145 ('n', 'name', '',
5145 ('n', 'name', '',
5146 _('name to show in web pages (default: working directory)'), _('NAME')),
5146 _('name to show in web pages (default: working directory)'), _('NAME')),
5147 ('', 'web-conf', '',
5147 ('', 'web-conf', '',
5148 _('name of the hgweb config file (see "hg help hgweb")'), _('FILE')),
5148 _('name of the hgweb config file (see "hg help hgweb")'), _('FILE')),
5149 ('', 'webdir-conf', '', _('name of the hgweb config file (DEPRECATED)'),
5149 ('', 'webdir-conf', '', _('name of the hgweb config file (DEPRECATED)'),
5150 _('FILE')),
5150 _('FILE')),
5151 ('', 'pid-file', '', _('name of file to write process ID to'), _('FILE')),
5151 ('', 'pid-file', '', _('name of file to write process ID to'), _('FILE')),
5152 ('', 'stdio', None, _('for remote clients')),
5152 ('', 'stdio', None, _('for remote clients')),
5153 ('', 'cmdserver', '', _('for remote clients'), _('MODE')),
5153 ('', 'cmdserver', '', _('for remote clients'), _('MODE')),
5154 ('t', 'templates', '', _('web templates to use'), _('TEMPLATE')),
5154 ('t', 'templates', '', _('web templates to use'), _('TEMPLATE')),
5155 ('', 'style', '', _('template style to use'), _('STYLE')),
5155 ('', 'style', '', _('template style to use'), _('STYLE')),
5156 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
5156 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
5157 ('', 'certificate', '', _('SSL certificate file'), _('FILE'))],
5157 ('', 'certificate', '', _('SSL certificate file'), _('FILE'))],
5158 _('[OPTION]...'))
5158 _('[OPTION]...'))
5159 def serve(ui, repo, **opts):
5159 def serve(ui, repo, **opts):
5160 """start stand-alone webserver
5160 """start stand-alone webserver
5161
5161
5162 Start a local HTTP repository browser and pull server. You can use
5162 Start a local HTTP repository browser and pull server. You can use
5163 this for ad-hoc sharing and browsing of repositories. It is
5163 this for ad-hoc sharing and browsing of repositories. It is
5164 recommended to use a real web server to serve a repository for
5164 recommended to use a real web server to serve a repository for
5165 longer periods of time.
5165 longer periods of time.
5166
5166
5167 Please note that the server does not implement access control.
5168 This means that, by default, anybody can read from the server and
5169 nobody can write to it. Set the ``web.allow_push`` option to ``*``
5170 to allow everybody to push to the server. You should use a real
5171 web server if you need to authenticate users.
5172
5172
5173 By default, the server logs accesses to stdout and errors to
5173 By default, the server logs accesses to stdout and errors to
5174 stderr. Use the -A/--accesslog and -E/--errorlog options to log to
5174 stderr. Use the -A/--accesslog and -E/--errorlog options to log to
5175 files.
5175 files.
5176
5176
5177 To have the server choose a free port number to listen on, specify
5177 To have the server choose a free port number to listen on, specify
5178 a port number of 0; in this case, the server will print the port
5178 a port number of 0; in this case, the server will print the port
5179 number it uses.
5179 number it uses.
5180
5180
5181 Returns 0 on success.
5181 Returns 0 on success.
5182 """
5182 """
5183
5183
5184 if opts["stdio"] and opts["cmdserver"]:
5184 if opts["stdio"] and opts["cmdserver"]:
5185 raise util.Abort(_("cannot use --stdio with --cmdserver"))
5185 raise util.Abort(_("cannot use --stdio with --cmdserver"))
5186
5186
5187 def checkrepo():
5187 def checkrepo():
5188 if repo is None:
5188 if repo is None:
5189 raise error.RepoError(_("there is no Mercurial repository here"
5189 raise error.RepoError(_("there is no Mercurial repository here"
5190 " (.hg not found)"))
5190 " (.hg not found)"))
5191
5191
5192 if opts["stdio"]:
5192 if opts["stdio"]:
5193 checkrepo()
5193 checkrepo()
5194 s = sshserver.sshserver(ui, repo)
5194 s = sshserver.sshserver(ui, repo)
5195 s.serve_forever()
5195 s.serve_forever()
5196
5196
5197 if opts["cmdserver"]:
5197 if opts["cmdserver"]:
5198 s = commandserver.server(ui, repo, opts["cmdserver"])
5198 s = commandserver.server(ui, repo, opts["cmdserver"])
5199 return s.serve()
5199 return s.serve()
5200
5200
5201 # this way we can check if something was given in the command-line
5201 # this way we can check if something was given in the command-line
5202 if opts.get('port'):
5202 if opts.get('port'):
5203 opts['port'] = util.getport(opts.get('port'))
5203 opts['port'] = util.getport(opts.get('port'))
5204
5204
5205 baseui = repo and repo.baseui or ui
5205 baseui = repo and repo.baseui or ui
5206 optlist = ("name templates style address port prefix ipv6"
5206 optlist = ("name templates style address port prefix ipv6"
5207 " accesslog errorlog certificate encoding")
5207 " accesslog errorlog certificate encoding")
5208 for o in optlist.split():
5208 for o in optlist.split():
5209 val = opts.get(o, '')
5209 val = opts.get(o, '')
5210 if val in (None, ''): # should check against default options instead
5210 if val in (None, ''): # should check against default options instead
5211 continue
5211 continue
5212 baseui.setconfig("web", o, val, 'serve')
5212 baseui.setconfig("web", o, val, 'serve')
5213 if repo and repo.ui != baseui:
5213 if repo and repo.ui != baseui:
5214 repo.ui.setconfig("web", o, val, 'serve')
5214 repo.ui.setconfig("web", o, val, 'serve')
5215
5215
5216 o = opts.get('web_conf') or opts.get('webdir_conf')
5216 o = opts.get('web_conf') or opts.get('webdir_conf')
5217 if not o:
5217 if not o:
5218 if not repo:
5218 if not repo:
5219 raise error.RepoError(_("there is no Mercurial repository"
5219 raise error.RepoError(_("there is no Mercurial repository"
5220 " here (.hg not found)"))
5220 " here (.hg not found)"))
5221 o = repo
5221 o = repo
5222
5222
5223 app = hgweb.hgweb(o, baseui=baseui)
5223 app = hgweb.hgweb(o, baseui=baseui)
5224 service = httpservice(ui, app, opts)
5224 service = httpservice(ui, app, opts)
5225 cmdutil.service(opts, initfn=service.init, runfn=service.run)
5225 cmdutil.service(opts, initfn=service.init, runfn=service.run)
5226
5226
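# Illustrative usage sketch, not part of the original file; the host name,
# port and file names are invented. A port of 0 asks the server to pick a
# free port and print it, matching the "listening at ..." line built in
# httpservice below.
#
#   $ hg serve -p 0 -A access.log -E error.log
#   listening at http://dev.example.com:37429/ (bound to *:37429)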
5227 class httpservice(object):
5227 class httpservice(object):
5228 def __init__(self, ui, app, opts):
5228 def __init__(self, ui, app, opts):
5229 self.ui = ui
5229 self.ui = ui
5230 self.app = app
5230 self.app = app
5231 self.opts = opts
5231 self.opts = opts
5232
5232
5233 def init(self):
5233 def init(self):
5234 util.setsignalhandler()
5234 util.setsignalhandler()
5235 self.httpd = hgweb_server.create_server(self.ui, self.app)
5235 self.httpd = hgweb_server.create_server(self.ui, self.app)
5236
5236
5237 if self.opts['port'] and not self.ui.verbose:
5237 if self.opts['port'] and not self.ui.verbose:
5238 return
5238 return
5239
5239
5240 if self.httpd.prefix:
5240 if self.httpd.prefix:
5241 prefix = self.httpd.prefix.strip('/') + '/'
5241 prefix = self.httpd.prefix.strip('/') + '/'
5242 else:
5242 else:
5243 prefix = ''
5243 prefix = ''
5244
5244
5245 port = ':%d' % self.httpd.port
5245 port = ':%d' % self.httpd.port
5246 if port == ':80':
5246 if port == ':80':
5247 port = ''
5247 port = ''
5248
5248
5249 bindaddr = self.httpd.addr
5249 bindaddr = self.httpd.addr
5250 if bindaddr == '0.0.0.0':
5250 if bindaddr == '0.0.0.0':
5251 bindaddr = '*'
5251 bindaddr = '*'
5252 elif ':' in bindaddr: # IPv6
5252 elif ':' in bindaddr: # IPv6
5253 bindaddr = '[%s]' % bindaddr
5253 bindaddr = '[%s]' % bindaddr
5254
5254
5255 fqaddr = self.httpd.fqaddr
5255 fqaddr = self.httpd.fqaddr
5256 if ':' in fqaddr:
5256 if ':' in fqaddr:
5257 fqaddr = '[%s]' % fqaddr
5257 fqaddr = '[%s]' % fqaddr
5258 if self.opts['port']:
5258 if self.opts['port']:
5259 write = self.ui.status
5259 write = self.ui.status
5260 else:
5260 else:
5261 write = self.ui.write
5261 write = self.ui.write
5262 write(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
5262 write(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
5263 (fqaddr, port, prefix, bindaddr, self.httpd.port))
5263 (fqaddr, port, prefix, bindaddr, self.httpd.port))
5264
5264
5265 def run(self):
5265 def run(self):
5266 self.httpd.serve_forever()
5266 self.httpd.serve_forever()
5267
5267
5268
5268
5269 @command('^status|st',
5269 @command('^status|st',
5270 [('A', 'all', None, _('show status of all files')),
5270 [('A', 'all', None, _('show status of all files')),
5271 ('m', 'modified', None, _('show only modified files')),
5271 ('m', 'modified', None, _('show only modified files')),
5272 ('a', 'added', None, _('show only added files')),
5272 ('a', 'added', None, _('show only added files')),
5273 ('r', 'removed', None, _('show only removed files')),
5273 ('r', 'removed', None, _('show only removed files')),
5274 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
5274 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
5275 ('c', 'clean', None, _('show only files without changes')),
5275 ('c', 'clean', None, _('show only files without changes')),
5276 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
5276 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
5277 ('i', 'ignored', None, _('show only ignored files')),
5277 ('i', 'ignored', None, _('show only ignored files')),
5278 ('n', 'no-status', None, _('hide status prefix')),
5278 ('n', 'no-status', None, _('hide status prefix')),
5279 ('C', 'copies', None, _('show source of copied files')),
5279 ('C', 'copies', None, _('show source of copied files')),
5280 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
5280 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
5281 ('', 'rev', [], _('show difference from revision'), _('REV')),
5281 ('', 'rev', [], _('show difference from revision'), _('REV')),
5282 ('', 'change', '', _('list the changed files of a revision'), _('REV')),
5282 ('', 'change', '', _('list the changed files of a revision'), _('REV')),
5283 ] + walkopts + subrepoopts,
5283 ] + walkopts + subrepoopts,
5284 _('[OPTION]... [FILE]...'))
5284 _('[OPTION]... [FILE]...'))
5285 def status(ui, repo, *pats, **opts):
5285 def status(ui, repo, *pats, **opts):
5286 """show changed files in the working directory
5286 """show changed files in the working directory
5287
5287
5288 Show status of files in the repository. If names are given, only
5289 files that match are shown. Files that are clean or ignored, or
5290 the source of a copy/move operation, are not listed unless
5291 -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
5292 Unless options described with "show only ..." are given, the
5293 options -mardu are used.
5294
5294
5295 Option -q/--quiet hides untracked (unknown and ignored) files
5295 Option -q/--quiet hides untracked (unknown and ignored) files
5296 unless explicitly requested with -u/--unknown or -i/--ignored.
5296 unless explicitly requested with -u/--unknown or -i/--ignored.
5297
5297
5298 .. note::
5298 .. note::
5299
5299
5300 status may appear to disagree with diff if permissions have
5300 status may appear to disagree with diff if permissions have
5301 changed or a merge has occurred. The standard diff format does
5301 changed or a merge has occurred. The standard diff format does
5302 not report permission changes and diff only reports changes
5302 not report permission changes and diff only reports changes
5303 relative to one merge parent.
5303 relative to one merge parent.
5304
5304
5305 If one revision is given, it is used as the base revision.
5305 If one revision is given, it is used as the base revision.
5306 If two revisions are given, the differences between them are
5306 If two revisions are given, the differences between them are
5307 shown. The --change option can also be used as a shortcut to list
5307 shown. The --change option can also be used as a shortcut to list
5308 the changed files of a revision from its first parent.
5308 the changed files of a revision from its first parent.
5309
5309
5310 The codes used to show the status of files are::
5310 The codes used to show the status of files are::
5311
5311
5312 M = modified
5313 A = added
5314 R = removed
5315 C = clean
5316 ! = missing (deleted by non-hg command, but still tracked)
5317 ? = not tracked
5318 I = ignored
5319   = origin of the previous file (with --copies)
5320
5320
5321 .. container:: verbose
5321 .. container:: verbose
5322
5322
5323 Examples:
5323 Examples:
5324
5324
5325 - show changes in the working directory relative to a
5325 - show changes in the working directory relative to a
5326 changeset::
5326 changeset::
5327
5327
5328 hg status --rev 9353
5328 hg status --rev 9353
5329
5329
5330 - show all changes including copies in an existing changeset::
5330 - show all changes including copies in an existing changeset::
5331
5331
5332 hg status --copies --change 9353
5332 hg status --copies --change 9353
5333
5333
5334 - get a NUL separated list of added files, suitable for xargs::
5334 - get a NUL separated list of added files, suitable for xargs::
5335
5335
5336 hg status -an0
5336 hg status -an0
5337
5337
5338 Returns 0 on success.
5338 Returns 0 on success.
5339 """
5339 """
5340
5340
5341 revs = opts.get('rev')
5341 revs = opts.get('rev')
5342 change = opts.get('change')
5342 change = opts.get('change')
5343
5343
5344 if revs and change:
5344 if revs and change:
5345 msg = _('cannot specify --rev and --change at the same time')
5345 msg = _('cannot specify --rev and --change at the same time')
5346 raise util.Abort(msg)
5346 raise util.Abort(msg)
5347 elif change:
5347 elif change:
5348 node2 = scmutil.revsingle(repo, change, None).node()
5348 node2 = scmutil.revsingle(repo, change, None).node()
5349 node1 = repo[node2].p1().node()
5349 node1 = repo[node2].p1().node()
5350 else:
5350 else:
5351 node1, node2 = scmutil.revpair(repo, revs)
5351 node1, node2 = scmutil.revpair(repo, revs)
5352
5352
5353 cwd = (pats and repo.getcwd()) or ''
5353 cwd = (pats and repo.getcwd()) or ''
5354 end = opts.get('print0') and '\0' or '\n'
5354 end = opts.get('print0') and '\0' or '\n'
5355 copy = {}
5355 copy = {}
5356 states = 'modified added removed deleted unknown ignored clean'.split()
5356 states = 'modified added removed deleted unknown ignored clean'.split()
5357 show = [k for k in states if opts.get(k)]
5357 show = [k for k in states if opts.get(k)]
5358 if opts.get('all'):
5358 if opts.get('all'):
5359 show += ui.quiet and (states[:4] + ['clean']) or states
5359 show += ui.quiet and (states[:4] + ['clean']) or states
5360 if not show:
5360 if not show:
5361 show = ui.quiet and states[:4] or states[:5]
5361 show = ui.quiet and states[:4] or states[:5]
5362
5362
5363 stat = repo.status(node1, node2, scmutil.match(repo[node2], pats, opts),
5363 stat = repo.status(node1, node2, scmutil.match(repo[node2], pats, opts),
5364 'ignored' in show, 'clean' in show, 'unknown' in show,
5364 'ignored' in show, 'clean' in show, 'unknown' in show,
5365 opts.get('subrepos'))
5365 opts.get('subrepos'))
5366 changestates = zip(states, 'MAR!?IC', stat)
5366 changestates = zip(states, 'MAR!?IC', stat)
5367
5367
5368 if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'):
5368 if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'):
5369 copy = copies.pathcopies(repo[node1], repo[node2])
5369 copy = copies.pathcopies(repo[node1], repo[node2])
5370
5370
5371 fm = ui.formatter('status', opts)
5371 fm = ui.formatter('status', opts)
5372 fmt = '%s' + end
5372 fmt = '%s' + end
5373 showchar = not opts.get('no_status')
5373 showchar = not opts.get('no_status')
5374
5374
5375 for state, char, files in changestates:
5375 for state, char, files in changestates:
5376 if state in show:
5376 if state in show:
5377 label = 'status.' + state
5377 label = 'status.' + state
5378 for f in files:
5378 for f in files:
5379 fm.startitem()
5379 fm.startitem()
5380 fm.condwrite(showchar, 'status', '%s ', char, label=label)
5380 fm.condwrite(showchar, 'status', '%s ', char, label=label)
5381 fm.write('path', fmt, repo.pathto(f, cwd), label=label)
5381 fm.write('path', fmt, repo.pathto(f, cwd), label=label)
5382 if f in copy:
5382 if f in copy:
5383 fm.write("copy", ' %s' + end, repo.pathto(copy[f], cwd),
5383 fm.write("copy", ' %s' + end, repo.pathto(copy[f], cwd),
5384 label='status.copied')
5384 label='status.copied')
5385 fm.end()
5385 fm.end()
5386
5386
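
The status loop above pairs each state name with its one-character code via zip() and emits one line per file through the formatter. A minimal standalone sketch of the same pairing, with a made-up status result and plain print() in place of the formatter:

    states = 'modified added removed deleted unknown ignored clean'.split()
    codes = 'MAR!?IC'
    # hypothetical status result: one list of paths per state
    stat = [['a.txt'], [], ['old.txt'], [], ['junk.pyc'], [], []]

    for state, char, files in zip(states, codes, stat):
        for f in files:
            print('%s %s' % (char, f))
    # M a.txt
    # R old.txt
    # ? junk.pyc
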
5387 @command('^summary|sum',
5387 @command('^summary|sum',
5388 [('', 'remote', None, _('check for push and pull'))], '[--remote]')
5388 [('', 'remote', None, _('check for push and pull'))], '[--remote]')
5389 def summary(ui, repo, **opts):
5389 def summary(ui, repo, **opts):
5390 """summarize working directory state
5390 """summarize working directory state
5391
5391
5392 This generates a brief summary of the working directory state,
5392 This generates a brief summary of the working directory state,
5393 including parents, branch, commit status, and available updates.
5393 including parents, branch, commit status, and available updates.
5394
5394
5395 With the --remote option, this will check the default paths for
5395 With the --remote option, this will check the default paths for
5396 incoming and outgoing changes. This can be time-consuming.
5396 incoming and outgoing changes. This can be time-consuming.
5397
5397
5398 Returns 0 on success.
5398 Returns 0 on success.
5399 """
5399 """
5400
5400
5401 ctx = repo[None]
5401 ctx = repo[None]
5402 parents = ctx.parents()
5402 parents = ctx.parents()
5403 pnode = parents[0].node()
5403 pnode = parents[0].node()
5404 marks = []
5404 marks = []
5405
5405
5406 for p in parents:
5406 for p in parents:
5407 # label with log.changeset (instead of log.parent) since this
5407 # label with log.changeset (instead of log.parent) since this
5408 # shows a working directory parent *changeset*:
5408 # shows a working directory parent *changeset*:
5409 # i18n: column positioning for "hg summary"
5409 # i18n: column positioning for "hg summary"
5410 ui.write(_('parent: %d:%s ') % (p.rev(), str(p)),
5410 ui.write(_('parent: %d:%s ') % (p.rev(), str(p)),
5411 label='log.changeset changeset.%s' % p.phasestr())
5411 label='log.changeset changeset.%s' % p.phasestr())
5412 ui.write(' '.join(p.tags()), label='log.tag')
5412 ui.write(' '.join(p.tags()), label='log.tag')
5413 if p.bookmarks():
5413 if p.bookmarks():
5414 marks.extend(p.bookmarks())
5414 marks.extend(p.bookmarks())
5415 if p.rev() == -1:
5415 if p.rev() == -1:
5416 if not len(repo):
5416 if not len(repo):
5417 ui.write(_(' (empty repository)'))
5417 ui.write(_(' (empty repository)'))
5418 else:
5418 else:
5419 ui.write(_(' (no revision checked out)'))
5419 ui.write(_(' (no revision checked out)'))
5420 ui.write('\n')
5420 ui.write('\n')
5421 if p.description():
5421 if p.description():
5422 ui.status(' ' + p.description().splitlines()[0].strip() + '\n',
5422 ui.status(' ' + p.description().splitlines()[0].strip() + '\n',
5423 label='log.summary')
5423 label='log.summary')
5424
5424
5425 branch = ctx.branch()
5425 branch = ctx.branch()
5426 bheads = repo.branchheads(branch)
5426 bheads = repo.branchheads(branch)
5427 # i18n: column positioning for "hg summary"
5427 # i18n: column positioning for "hg summary"
5428 m = _('branch: %s\n') % branch
5428 m = _('branch: %s\n') % branch
5429 if branch != 'default':
5429 if branch != 'default':
5430 ui.write(m, label='log.branch')
5430 ui.write(m, label='log.branch')
5431 else:
5431 else:
5432 ui.status(m, label='log.branch')
5432 ui.status(m, label='log.branch')
5433
5433
5434 if marks:
5434 if marks:
5435 current = repo._bookmarkcurrent
5435 current = repo._bookmarkcurrent
5436 # i18n: column positioning for "hg summary"
5436 # i18n: column positioning for "hg summary"
5437 ui.write(_('bookmarks:'), label='log.bookmark')
5437 ui.write(_('bookmarks:'), label='log.bookmark')
5438 if current is not None:
5438 if current is not None:
5439 if current in marks:
5439 if current in marks:
5440 ui.write(' *' + current, label='bookmarks.current')
5440 ui.write(' *' + current, label='bookmarks.current')
5441 marks.remove(current)
5441 marks.remove(current)
5442 else:
5442 else:
5443 ui.write(' [%s]' % current, label='bookmarks.current')
5443 ui.write(' [%s]' % current, label='bookmarks.current')
5444 for m in marks:
5444 for m in marks:
5445 ui.write(' ' + m, label='log.bookmark')
5445 ui.write(' ' + m, label='log.bookmark')
5446 ui.write('\n', label='log.bookmark')
5446 ui.write('\n', label='log.bookmark')
5447
5447
5448 st = list(repo.status(unknown=True))[:6]
5448 st = list(repo.status(unknown=True))[:6]
5449
5449
5450 c = repo.dirstate.copies()
5450 c = repo.dirstate.copies()
5451 copied, renamed = [], []
5451 copied, renamed = [], []
5452 for d, s in c.iteritems():
5452 for d, s in c.iteritems():
5453 if s in st[2]:
5453 if s in st[2]:
5454 st[2].remove(s)
5454 st[2].remove(s)
5455 renamed.append(d)
5455 renamed.append(d)
5456 else:
5456 else:
5457 copied.append(d)
5457 copied.append(d)
5458 if d in st[1]:
5458 if d in st[1]:
5459 st[1].remove(d)
5459 st[1].remove(d)
5460 st.insert(3, renamed)
5460 st.insert(3, renamed)
5461 st.insert(4, copied)
5461 st.insert(4, copied)
5462
5462
5463 ms = mergemod.mergestate(repo)
5463 ms = mergemod.mergestate(repo)
5464 st.append([f for f in ms if ms[f] == 'u'])
5464 st.append([f for f in ms if ms[f] == 'u'])
5465
5465
5466 subs = [s for s in ctx.substate if ctx.sub(s).dirty()]
5466 subs = [s for s in ctx.substate if ctx.sub(s).dirty()]
5467 st.append(subs)
5467 st.append(subs)
5468
5468
5469 labels = [ui.label(_('%d modified'), 'status.modified'),
5469 labels = [ui.label(_('%d modified'), 'status.modified'),
5470 ui.label(_('%d added'), 'status.added'),
5470 ui.label(_('%d added'), 'status.added'),
5471 ui.label(_('%d removed'), 'status.removed'),
5471 ui.label(_('%d removed'), 'status.removed'),
5472 ui.label(_('%d renamed'), 'status.copied'),
5472 ui.label(_('%d renamed'), 'status.copied'),
5473 ui.label(_('%d copied'), 'status.copied'),
5473 ui.label(_('%d copied'), 'status.copied'),
5474 ui.label(_('%d deleted'), 'status.deleted'),
5474 ui.label(_('%d deleted'), 'status.deleted'),
5475 ui.label(_('%d unknown'), 'status.unknown'),
5475 ui.label(_('%d unknown'), 'status.unknown'),
5476 ui.label(_('%d ignored'), 'status.ignored'),
5476 ui.label(_('%d ignored'), 'status.ignored'),
5477 ui.label(_('%d unresolved'), 'resolve.unresolved'),
5477 ui.label(_('%d unresolved'), 'resolve.unresolved'),
5478 ui.label(_('%d subrepos'), 'status.modified')]
5478 ui.label(_('%d subrepos'), 'status.modified')]
5479 t = []
5479 t = []
5480 for s, l in zip(st, labels):
5480 for s, l in zip(st, labels):
5481 if s:
5481 if s:
5482 t.append(l % len(s))
5482 t.append(l % len(s))
5483
5483
5484 t = ', '.join(t)
5484 t = ', '.join(t)
5485 cleanworkdir = False
5485 cleanworkdir = False
5486
5486
5487 if repo.vfs.exists('updatestate'):
5487 if repo.vfs.exists('updatestate'):
5488 t += _(' (interrupted update)')
5488 t += _(' (interrupted update)')
5489 elif len(parents) > 1:
5489 elif len(parents) > 1:
5490 t += _(' (merge)')
5490 t += _(' (merge)')
5491 elif branch != parents[0].branch():
5491 elif branch != parents[0].branch():
5492 t += _(' (new branch)')
5492 t += _(' (new branch)')
5493 elif (parents[0].closesbranch() and
5493 elif (parents[0].closesbranch() and
5494 pnode in repo.branchheads(branch, closed=True)):
5494 pnode in repo.branchheads(branch, closed=True)):
5495 t += _(' (head closed)')
5495 t += _(' (head closed)')
5496 elif not (st[0] or st[1] or st[2] or st[3] or st[4] or st[9]):
5496 elif not (st[0] or st[1] or st[2] or st[3] or st[4] or st[9]):
5497 t += _(' (clean)')
5497 t += _(' (clean)')
5498 cleanworkdir = True
5498 cleanworkdir = True
5499 elif pnode not in bheads:
5499 elif pnode not in bheads:
5500 t += _(' (new branch head)')
5500 t += _(' (new branch head)')
5501
5501
5502 if cleanworkdir:
5502 if cleanworkdir:
5503 # i18n: column positioning for "hg summary"
5503 # i18n: column positioning for "hg summary"
5504 ui.status(_('commit: %s\n') % t.strip())
5504 ui.status(_('commit: %s\n') % t.strip())
5505 else:
5505 else:
5506 # i18n: column positioning for "hg summary"
5506 # i18n: column positioning for "hg summary"
5507 ui.write(_('commit: %s\n') % t.strip())
5507 ui.write(_('commit: %s\n') % t.strip())
5508
5508
5509 # all ancestors of branch heads - all ancestors of parent = new csets
5509 # all ancestors of branch heads - all ancestors of parent = new csets
5510 new = len(repo.changelog.findmissing([ctx.node() for ctx in parents],
5510 new = len(repo.changelog.findmissing([ctx.node() for ctx in parents],
5511 bheads))
5511 bheads))
5512
5512
5513 if new == 0:
5513 if new == 0:
5514 # i18n: column positioning for "hg summary"
5514 # i18n: column positioning for "hg summary"
5515 ui.status(_('update: (current)\n'))
5515 ui.status(_('update: (current)\n'))
5516 elif pnode not in bheads:
5516 elif pnode not in bheads:
5517 # i18n: column positioning for "hg summary"
5517 # i18n: column positioning for "hg summary"
5518 ui.write(_('update: %d new changesets (update)\n') % new)
5518 ui.write(_('update: %d new changesets (update)\n') % new)
5519 else:
5519 else:
5520 # i18n: column positioning for "hg summary"
5520 # i18n: column positioning for "hg summary"
5521 ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
5521 ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
5522 (new, len(bheads)))
5522 (new, len(bheads)))
5523
5523
5524 cmdutil.summaryhooks(ui, repo)
5524 cmdutil.summaryhooks(ui, repo)
5525
5525
5526 if opts.get('remote'):
5526 if opts.get('remote'):
5527 needsincoming, needsoutgoing = True, True
5527 needsincoming, needsoutgoing = True, True
5528 else:
5528 else:
5529 needsincoming, needsoutgoing = False, False
5529 needsincoming, needsoutgoing = False, False
5530 for i, o in cmdutil.summaryremotehooks(ui, repo, opts, None):
5530 for i, o in cmdutil.summaryremotehooks(ui, repo, opts, None):
5531 if i:
5531 if i:
5532 needsincoming = True
5532 needsincoming = True
5533 if o:
5533 if o:
5534 needsoutgoing = True
5534 needsoutgoing = True
5535 if not needsincoming and not needsoutgoing:
5535 if not needsincoming and not needsoutgoing:
5536 return
5536 return
5537
5537
5538 def getincoming():
5538 def getincoming():
5539 source, branches = hg.parseurl(ui.expandpath('default'))
5539 source, branches = hg.parseurl(ui.expandpath('default'))
5540 sbranch = branches[0]
5540 sbranch = branches[0]
5541 try:
5541 try:
5542 other = hg.peer(repo, {}, source)
5542 other = hg.peer(repo, {}, source)
5543 except error.RepoError:
5543 except error.RepoError:
5544 if opts.get('remote'):
5544 if opts.get('remote'):
5545 raise
5545 raise
5546 return source, sbranch, None, None, None
5546 return source, sbranch, None, None, None
5547 revs, checkout = hg.addbranchrevs(repo, other, branches, None)
5547 revs, checkout = hg.addbranchrevs(repo, other, branches, None)
5548 if revs:
5548 if revs:
5549 revs = [other.lookup(rev) for rev in revs]
5549 revs = [other.lookup(rev) for rev in revs]
5550 ui.debug('comparing with %s\n' % util.hidepassword(source))
5550 ui.debug('comparing with %s\n' % util.hidepassword(source))
5551 repo.ui.pushbuffer()
5551 repo.ui.pushbuffer()
5552 commoninc = discovery.findcommonincoming(repo, other, heads=revs)
5552 commoninc = discovery.findcommonincoming(repo, other, heads=revs)
5553 repo.ui.popbuffer()
5553 repo.ui.popbuffer()
5554 return source, sbranch, other, commoninc, commoninc[1]
5554 return source, sbranch, other, commoninc, commoninc[1]
5555
5555
5556 if needsincoming:
5556 if needsincoming:
5557 source, sbranch, sother, commoninc, incoming = getincoming()
5557 source, sbranch, sother, commoninc, incoming = getincoming()
5558 else:
5558 else:
5559 source = sbranch = sother = commoninc = incoming = None
5559 source = sbranch = sother = commoninc = incoming = None
5560
5560
5561 def getoutgoing():
5561 def getoutgoing():
5562 dest, branches = hg.parseurl(ui.expandpath('default-push', 'default'))
5562 dest, branches = hg.parseurl(ui.expandpath('default-push', 'default'))
5563 dbranch = branches[0]
5563 dbranch = branches[0]
5564 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
5564 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
5565 if source != dest:
5565 if source != dest:
5566 try:
5566 try:
5567 dother = hg.peer(repo, {}, dest)
5567 dother = hg.peer(repo, {}, dest)
5568 except error.RepoError:
5568 except error.RepoError:
5569 if opts.get('remote'):
5569 if opts.get('remote'):
5570 raise
5570 raise
5571 return dest, dbranch, None, None
5571 return dest, dbranch, None, None
5572 ui.debug('comparing with %s\n' % util.hidepassword(dest))
5572 ui.debug('comparing with %s\n' % util.hidepassword(dest))
5573 elif sother is None:
5573 elif sother is None:
5574 # there is no explicit destination peer, but source one is invalid
5574 # there is no explicit destination peer, but source one is invalid
5575 return dest, dbranch, None, None
5575 return dest, dbranch, None, None
5576 else:
5576 else:
5577 dother = sother
5577 dother = sother
5578 if (source != dest or (sbranch is not None and sbranch != dbranch)):
5578 if (source != dest or (sbranch is not None and sbranch != dbranch)):
5579 common = None
5579 common = None
5580 else:
5580 else:
5581 common = commoninc
5581 common = commoninc
5582 if revs:
5582 if revs:
5583 revs = [repo.lookup(rev) for rev in revs]
5583 revs = [repo.lookup(rev) for rev in revs]
5584 repo.ui.pushbuffer()
5584 repo.ui.pushbuffer()
5585 outgoing = discovery.findcommonoutgoing(repo, dother, onlyheads=revs,
5585 outgoing = discovery.findcommonoutgoing(repo, dother, onlyheads=revs,
5586 commoninc=common)
5586 commoninc=common)
5587 repo.ui.popbuffer()
5587 repo.ui.popbuffer()
5588 return dest, dbranch, dother, outgoing
5588 return dest, dbranch, dother, outgoing
5589
5589
5590 if needsoutgoing:
5590 if needsoutgoing:
5591 dest, dbranch, dother, outgoing = getoutgoing()
5591 dest, dbranch, dother, outgoing = getoutgoing()
5592 else:
5592 else:
5593 dest = dbranch = dother = outgoing = None
5593 dest = dbranch = dother = outgoing = None
5594
5594
5595 if opts.get('remote'):
5595 if opts.get('remote'):
5596 t = []
5596 t = []
5597 if incoming:
5597 if incoming:
5598 t.append(_('1 or more incoming'))
5598 t.append(_('1 or more incoming'))
5599 o = outgoing.missing
5599 o = outgoing.missing
5600 if o:
5600 if o:
5601 t.append(_('%d outgoing') % len(o))
5601 t.append(_('%d outgoing') % len(o))
5602 other = dother or sother
5602 other = dother or sother
5603 if 'bookmarks' in other.listkeys('namespaces'):
5603 if 'bookmarks' in other.listkeys('namespaces'):
5604 lmarks = repo.listkeys('bookmarks')
5604 lmarks = repo.listkeys('bookmarks')
5605 rmarks = other.listkeys('bookmarks')
5605 rmarks = other.listkeys('bookmarks')
5606 diff = set(rmarks) - set(lmarks)
5606 diff = set(rmarks) - set(lmarks)
5607 if len(diff) > 0:
5607 if len(diff) > 0:
5608 t.append(_('%d incoming bookmarks') % len(diff))
5608 t.append(_('%d incoming bookmarks') % len(diff))
5609 diff = set(lmarks) - set(rmarks)
5609 diff = set(lmarks) - set(rmarks)
5610 if len(diff) > 0:
5610 if len(diff) > 0:
5611 t.append(_('%d outgoing bookmarks') % len(diff))
5611 t.append(_('%d outgoing bookmarks') % len(diff))
5612
5612
5613 if t:
5613 if t:
5614 # i18n: column positioning for "hg summary"
5614 # i18n: column positioning for "hg summary"
5615 ui.write(_('remote: %s\n') % (', '.join(t)))
5615 ui.write(_('remote: %s\n') % (', '.join(t)))
5616 else:
5616 else:
5617 # i18n: column positioning for "hg summary"
5617 # i18n: column positioning for "hg summary"
5618 ui.status(_('remote: (synced)\n'))
5618 ui.status(_('remote: (synced)\n'))
5619
5619
5620 cmdutil.summaryremotehooks(ui, repo, opts,
5620 cmdutil.summaryremotehooks(ui, repo, opts,
5621 ((source, sbranch, sother, commoninc),
5621 ((source, sbranch, sother, commoninc),
5622 (dest, dbranch, dother, outgoing)))
5622 (dest, dbranch, dother, outgoing)))
5623
5623
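
The "update:" line above relies on the identity stated in the comment inside summary(): the ancestors of the branch heads minus the ancestors of the working directory parent are the changesets an update or merge would bring in. A toy, self-contained illustration with a made-up DAG and plain sets in place of the changelog:

    # toy DAG: each node mapped to its parents
    parents = {'a': [], 'b': ['a'], 'c': ['b'], 'd': ['b']}

    def ancestors(node):
        seen, stack = set(), [node]
        while stack:
            n = stack.pop()
            if n not in seen:
                seen.add(n)
                stack.extend(parents[n])
        return seen

    bheads = ['c', 'd']      # branch heads
    wdirparent = 'c'         # working directory parent
    new = set()
    for h in bheads:
        new |= ancestors(h)
    new -= ancestors(wdirparent)
    print('%d new changeset(s): %s' % (len(new), sorted(new)))
    # 1 new changeset(s): ['d']
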
5624 @command('tag',
5624 @command('tag',
5625 [('f', 'force', None, _('force tag')),
5625 [('f', 'force', None, _('force tag')),
5626 ('l', 'local', None, _('make the tag local')),
5626 ('l', 'local', None, _('make the tag local')),
5627 ('r', 'rev', '', _('revision to tag'), _('REV')),
5627 ('r', 'rev', '', _('revision to tag'), _('REV')),
5628 ('', 'remove', None, _('remove a tag')),
5628 ('', 'remove', None, _('remove a tag')),
5629 # -l/--local is already there, commitopts cannot be used
5629 # -l/--local is already there, commitopts cannot be used
5630 ('e', 'edit', None, _('edit commit message')),
5630 ('e', 'edit', None, _('edit commit message')),
5631 ('m', 'message', '', _('use <text> as commit message'), _('TEXT')),
5631 ('m', 'message', '', _('use <text> as commit message'), _('TEXT')),
5632 ] + commitopts2,
5632 ] + commitopts2,
5633 _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...'))
5633 _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...'))
5634 def tag(ui, repo, name1, *names, **opts):
5634 def tag(ui, repo, name1, *names, **opts):
5635 """add one or more tags for the current or given revision
5635 """add one or more tags for the current or given revision
5636
5636
5637 Name a particular revision using <name>.
5637 Name a particular revision using <name>.
5638
5638
5639 Tags are used to name particular revisions of the repository and are
5639 Tags are used to name particular revisions of the repository and are
5640 very useful to compare different revisions, to go back to significant
5640 very useful to compare different revisions, to go back to significant
5641 earlier versions or to mark branch points as releases, etc. Changing
5641 earlier versions or to mark branch points as releases, etc. Changing
5642 an existing tag is normally disallowed; use -f/--force to override.
5642 an existing tag is normally disallowed; use -f/--force to override.
5643
5643
5644 If no revision is given, the parent of the working directory is
5644 If no revision is given, the parent of the working directory is
5645 used.
5645 used.
5646
5646
5647 To facilitate version control, distribution, and merging of tags,
5647 To facilitate version control, distribution, and merging of tags,
5648 they are stored as a file named ".hgtags" which is managed similarly
5648 they are stored as a file named ".hgtags" which is managed similarly
5649 to other project files and can be hand-edited if necessary. This
5649 to other project files and can be hand-edited if necessary. This
5650 also means that tagging creates a new commit. The file
5650 also means that tagging creates a new commit. The file
5651 ".hg/localtags" is used for local tags (not shared among
5651 ".hg/localtags" is used for local tags (not shared among
5652 repositories).
5652 repositories).
5653
5653
5654 Tag commits are usually made at the head of a branch. If the parent
5654 Tag commits are usually made at the head of a branch. If the parent
5655 of the working directory is not a branch head, :hg:`tag` aborts; use
5655 of the working directory is not a branch head, :hg:`tag` aborts; use
5656 -f/--force to force the tag commit to be based on a non-head
5656 -f/--force to force the tag commit to be based on a non-head
5657 changeset.
5657 changeset.
5658
5658
5659 See :hg:`help dates` for a list of formats valid for -d/--date.
5659 See :hg:`help dates` for a list of formats valid for -d/--date.
5660
5660
5661 Since tag names have priority over branch names during revision
5661 Since tag names have priority over branch names during revision
5662 lookup, using an existing branch name as a tag name is discouraged.
5662 lookup, using an existing branch name as a tag name is discouraged.
5663
5663
5664 Returns 0 on success.
5664 Returns 0 on success.
5665 """
5665 """
5666 wlock = lock = None
5666 wlock = lock = None
5667 try:
5667 try:
5668 wlock = repo.wlock()
5668 wlock = repo.wlock()
5669 lock = repo.lock()
5669 lock = repo.lock()
5670 rev_ = "."
5670 rev_ = "."
5671 names = [t.strip() for t in (name1,) + names]
5671 names = [t.strip() for t in (name1,) + names]
5672 if len(names) != len(set(names)):
5672 if len(names) != len(set(names)):
5673 raise util.Abort(_('tag names must be unique'))
5673 raise util.Abort(_('tag names must be unique'))
5674 for n in names:
5674 for n in names:
5675 scmutil.checknewlabel(repo, n, 'tag')
5675 scmutil.checknewlabel(repo, n, 'tag')
5676 if not n:
5676 if not n:
5677 raise util.Abort(_('tag names cannot consist entirely of '
5677 raise util.Abort(_('tag names cannot consist entirely of '
5678 'whitespace'))
5678 'whitespace'))
5679 if opts.get('rev') and opts.get('remove'):
5679 if opts.get('rev') and opts.get('remove'):
5680 raise util.Abort(_("--rev and --remove are incompatible"))
5680 raise util.Abort(_("--rev and --remove are incompatible"))
5681 if opts.get('rev'):
5681 if opts.get('rev'):
5682 rev_ = opts['rev']
5682 rev_ = opts['rev']
5683 message = opts.get('message')
5683 message = opts.get('message')
5684 if opts.get('remove'):
5684 if opts.get('remove'):
5685 expectedtype = opts.get('local') and 'local' or 'global'
5685 expectedtype = opts.get('local') and 'local' or 'global'
5686 for n in names:
5686 for n in names:
5687 if not repo.tagtype(n):
5687 if not repo.tagtype(n):
5688 raise util.Abort(_("tag '%s' does not exist") % n)
5688 raise util.Abort(_("tag '%s' does not exist") % n)
5689 if repo.tagtype(n) != expectedtype:
5689 if repo.tagtype(n) != expectedtype:
5690 if expectedtype == 'global':
5690 if expectedtype == 'global':
5691 raise util.Abort(_("tag '%s' is not a global tag") % n)
5691 raise util.Abort(_("tag '%s' is not a global tag") % n)
5692 else:
5692 else:
5693 raise util.Abort(_("tag '%s' is not a local tag") % n)
5693 raise util.Abort(_("tag '%s' is not a local tag") % n)
5694 rev_ = nullid
5694 rev_ = nullid
5695 if not message:
5695 if not message:
5696 # we don't translate commit messages
5696 # we don't translate commit messages
5697 message = 'Removed tag %s' % ', '.join(names)
5697 message = 'Removed tag %s' % ', '.join(names)
5698 elif not opts.get('force'):
5698 elif not opts.get('force'):
5699 for n in names:
5699 for n in names:
5700 if n in repo.tags():
5700 if n in repo.tags():
5701 raise util.Abort(_("tag '%s' already exists "
5701 raise util.Abort(_("tag '%s' already exists "
5702 "(use -f to force)") % n)
5702 "(use -f to force)") % n)
5703 if not opts.get('local'):
5703 if not opts.get('local'):
5704 p1, p2 = repo.dirstate.parents()
5704 p1, p2 = repo.dirstate.parents()
5705 if p2 != nullid:
5705 if p2 != nullid:
5706 raise util.Abort(_('uncommitted merge'))
5706 raise util.Abort(_('uncommitted merge'))
5707 bheads = repo.branchheads()
5707 bheads = repo.branchheads()
5708 if not opts.get('force') and bheads and p1 not in bheads:
5708 if not opts.get('force') and bheads and p1 not in bheads:
5709 raise util.Abort(_('not at a branch head (use -f to force)'))
5709 raise util.Abort(_('not at a branch head (use -f to force)'))
5710 r = scmutil.revsingle(repo, rev_).node()
5710 r = scmutil.revsingle(repo, rev_).node()
5711
5711
5712 if not message:
5712 if not message:
5713 # we don't translate commit messages
5713 # we don't translate commit messages
5714 message = ('Added tag %s for changeset %s' %
5714 message = ('Added tag %s for changeset %s' %
5715 (', '.join(names), short(r)))
5715 (', '.join(names), short(r)))
5716
5716
5717 date = opts.get('date')
5717 date = opts.get('date')
5718 if date:
5718 if date:
5719 date = util.parsedate(date)
5719 date = util.parsedate(date)
5720
5720
5721 editor = cmdutil.getcommiteditor(**opts)
5721 editor = cmdutil.getcommiteditor(**opts)
5722
5722
5723 # don't allow tagging the null rev
5723 # don't allow tagging the null rev
5724 if (not opts.get('remove') and
5724 if (not opts.get('remove') and
5725 scmutil.revsingle(repo, rev_).rev() == nullrev):
5725 scmutil.revsingle(repo, rev_).rev() == nullrev):
5726 raise util.Abort(_("cannot tag null revision"))
5726 raise util.Abort(_("cannot tag null revision"))
5727
5727
5728 repo.tag(names, r, message, opts.get('local'), opts.get('user'), date,
5728 repo.tag(names, r, message, opts.get('local'), opts.get('user'), date,
5729 editor=editor)
5729 editor=editor)
5730 finally:
5730 finally:
5731 release(lock, wlock)
5731 release(lock, wlock)
5732
5732
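
The name handling at the top of tag() (strip each name, require uniqueness, reject all-whitespace names) can be exercised on its own; a sketch with ValueError standing in for util.Abort:

    def checknames(name1, *names):
        names = [t.strip() for t in (name1,) + names]
        if len(names) != len(set(names)):
            raise ValueError('tag names must be unique')
        for n in names:
            if not n:
                raise ValueError('tag names cannot consist entirely of whitespace')
        return names

    print(checknames(' v1.0 ', 'stable'))   # ['v1.0', 'stable']
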
5733 @command('tags', [], '')
5733 @command('tags', [], '')
5734 def tags(ui, repo, **opts):
5734 def tags(ui, repo, **opts):
5735 """list repository tags
5735 """list repository tags
5736
5736
5737 This lists both regular and local tags. When the -v/--verbose
5737 This lists both regular and local tags. When the -v/--verbose
5738 switch is used, a third column "local" is printed for local tags.
5738 switch is used, a third column "local" is printed for local tags.
5739
5739
5740 Returns 0 on success.
5740 Returns 0 on success.
5741 """
5741 """
5742
5742
5743 fm = ui.formatter('tags', opts)
5743 fm = ui.formatter('tags', opts)
5744 hexfunc = ui.debugflag and hex or short
5744 hexfunc = ui.debugflag and hex or short
5745 tagtype = ""
5745 tagtype = ""
5746
5746
5747 for t, n in reversed(repo.tagslist()):
5747 for t, n in reversed(repo.tagslist()):
5748 hn = hexfunc(n)
5748 hn = hexfunc(n)
5749 label = 'tags.normal'
5749 label = 'tags.normal'
5750 tagtype = ''
5750 tagtype = ''
5751 if repo.tagtype(t) == 'local':
5751 if repo.tagtype(t) == 'local':
5752 label = 'tags.local'
5752 label = 'tags.local'
5753 tagtype = 'local'
5753 tagtype = 'local'
5754
5754
5755 fm.startitem()
5755 fm.startitem()
5756 fm.write('tag', '%s', t, label=label)
5756 fm.write('tag', '%s', t, label=label)
5757 fmt = " " * (30 - encoding.colwidth(t)) + ' %5d:%s'
5757 fmt = " " * (30 - encoding.colwidth(t)) + ' %5d:%s'
5758 fm.condwrite(not ui.quiet, 'rev id', fmt,
5758 fm.condwrite(not ui.quiet, 'rev id', fmt,
5759 repo.changelog.rev(n), hn, label=label)
5759 repo.changelog.rev(n), hn, label=label)
5760 fm.condwrite(ui.verbose and tagtype, 'type', ' %s',
5760 fm.condwrite(ui.verbose and tagtype, 'type', ' %s',
5761 tagtype, label=label)
5761 tagtype, label=label)
5762 fm.plain('\n')
5762 fm.plain('\n')
5763 fm.end()
5763 fm.end()
5764
5764
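
tags() pads each tag name to a fixed column before writing "rev:node", using encoding.colwidth() so that wide characters pad correctly. A plain-Python approximation of the layout, with len() standing in for colwidth and made-up revisions:

    def fmtline(tag, rev, node, width=30):
        # len() is a stand-in for encoding.colwidth(), which is width-aware
        pad = ' ' * max(0, width - len(tag))
        return '%s%s %5d:%s' % (tag, pad, rev, node)

    print(fmtline('tip', 42, '6a3d9c7c8f1a'))
    print(fmtline('v1.0', 40, '0f8c2b11d2e4'))
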
5765 @command('tip',
5765 @command('tip',
5766 [('p', 'patch', None, _('show patch')),
5766 [('p', 'patch', None, _('show patch')),
5767 ('g', 'git', None, _('use git extended diff format')),
5767 ('g', 'git', None, _('use git extended diff format')),
5768 ] + templateopts,
5768 ] + templateopts,
5769 _('[-p] [-g]'))
5769 _('[-p] [-g]'))
5770 def tip(ui, repo, **opts):
5770 def tip(ui, repo, **opts):
5771 """show the tip revision (DEPRECATED)
5771 """show the tip revision (DEPRECATED)
5772
5772
5773 The tip revision (usually just called the tip) is the changeset
5773 The tip revision (usually just called the tip) is the changeset
5774 most recently added to the repository (and therefore the most
5774 most recently added to the repository (and therefore the most
5775 recently changed head).
5775 recently changed head).
5776
5776
5777 If you have just made a commit, that commit will be the tip. If
5777 If you have just made a commit, that commit will be the tip. If
5778 you have just pulled changes from another repository, the tip of
5778 you have just pulled changes from another repository, the tip of
5779 that repository becomes the current tip. The "tip" tag is special
5779 that repository becomes the current tip. The "tip" tag is special
5780 and cannot be renamed or assigned to a different changeset.
5780 and cannot be renamed or assigned to a different changeset.
5781
5781
5782 This command is deprecated, please use :hg:`heads` instead.
5782 This command is deprecated, please use :hg:`heads` instead.
5783
5783
5784 Returns 0 on success.
5784 Returns 0 on success.
5785 """
5785 """
5786 displayer = cmdutil.show_changeset(ui, repo, opts)
5786 displayer = cmdutil.show_changeset(ui, repo, opts)
5787 displayer.show(repo['tip'])
5787 displayer.show(repo['tip'])
5788 displayer.close()
5788 displayer.close()
5789
5789
5790 @command('unbundle',
5790 @command('unbundle',
5791 [('u', 'update', None,
5791 [('u', 'update', None,
5792 _('update to new branch head if changesets were unbundled'))],
5792 _('update to new branch head if changesets were unbundled'))],
5793 _('[-u] FILE...'))
5793 _('[-u] FILE...'))
5794 def unbundle(ui, repo, fname1, *fnames, **opts):
5794 def unbundle(ui, repo, fname1, *fnames, **opts):
5795 """apply one or more changegroup files
5795 """apply one or more changegroup files
5796
5796
5797 Apply one or more compressed changegroup files generated by the
5797 Apply one or more compressed changegroup files generated by the
5798 bundle command.
5798 bundle command.
5799
5799
5800 Returns 0 on success, 1 if an update has unresolved files.
5800 Returns 0 on success, 1 if an update has unresolved files.
5801 """
5801 """
5802 fnames = (fname1,) + fnames
5802 fnames = (fname1,) + fnames
5803
5803
5804 lock = repo.lock()
5804 lock = repo.lock()
5805 wc = repo['.']
5805 wc = repo['.']
5806 try:
5806 try:
5807 for fname in fnames:
5807 for fname in fnames:
5808 f = hg.openpath(ui, fname)
5808 f = hg.openpath(ui, fname)
5809 gen = exchange.readbundle(ui, f, fname)
5809 gen = exchange.readbundle(ui, f, fname)
5810 modheads = changegroup.addchangegroup(repo, gen, 'unbundle',
5810 modheads = changegroup.addchangegroup(repo, gen, 'unbundle',
5811 'bundle:' + fname)
5811 'bundle:' + fname)
5812 finally:
5812 finally:
5813 lock.release()
5813 lock.release()
5814 bookmarks.updatecurrentbookmark(repo, wc.node(), wc.branch())
5814 bookmarks.updatecurrentbookmark(repo, wc.node(), wc.branch())
5815 return postincoming(ui, repo, modheads, opts.get('update'), None)
5815 return postincoming(ui, repo, modheads, opts.get('update'), None)
5816
5816
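
unbundle() takes the repository lock once, applies each bundle in turn, and releases the lock in a finally block so an error cannot leave it held; the bookmark update and postincoming() run only after the lock is released. The same shape in a small generic sketch (all names hypothetical):

    def applyall(fnames, acquire, release, applyone):
        lock = acquire()
        result = None
        try:
            for fname in fnames:
                result = applyone(fname)
        finally:
            release(lock)
        return result

    print(applyall(['a.hg', 'b.hg'],
                   acquire=lambda: 'lock',
                   release=lambda l: None,
                   applyone=lambda f: 'applied %s' % f))
    # applied b.hg
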
5817 @command('^update|up|checkout|co',
5817 @command('^update|up|checkout|co',
5818 [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
5818 [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
5819 ('c', 'check', None,
5819 ('c', 'check', None,
5820 _('update across branches if no uncommitted changes')),
5820 _('update across branches if no uncommitted changes')),
5821 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
5821 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
5822 ('r', 'rev', '', _('revision'), _('REV'))
5822 ('r', 'rev', '', _('revision'), _('REV'))
5823 ] + mergetoolopts,
5823 ] + mergetoolopts,
5824 _('[-c] [-C] [-d DATE] [[-r] REV]'))
5824 _('[-c] [-C] [-d DATE] [[-r] REV]'))
5825 def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False,
5825 def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False,
5826 tool=None):
5826 tool=None):
5827 """update working directory (or switch revisions)
5827 """update working directory (or switch revisions)
5828
5828
5829 Update the repository's working directory to the specified
5829 Update the repository's working directory to the specified
5830 changeset. If no changeset is specified, update to the tip of the
5830 changeset. If no changeset is specified, update to the tip of the
5831 current named branch and move the current bookmark (see :hg:`help
5831 current named branch and move the current bookmark (see :hg:`help
5832 bookmarks`).
5832 bookmarks`).
5833
5833
5834 Update sets the working directory's parent revision to the specified
5834 Update sets the working directory's parent revision to the specified
5835 changeset (see :hg:`help parents`).
5835 changeset (see :hg:`help parents`).
5836
5836
5837 If the changeset is not a descendant or ancestor of the working
5837 If the changeset is not a descendant or ancestor of the working
5838 directory's parent, the update is aborted. With the -c/--check
5838 directory's parent, the update is aborted. With the -c/--check
5839 option, the working directory is checked for uncommitted changes; if
5839 option, the working directory is checked for uncommitted changes; if
5840 none are found, the working directory is updated to the specified
5840 none are found, the working directory is updated to the specified
5841 changeset.
5841 changeset.
5842
5842
5843 .. container:: verbose
5843 .. container:: verbose
5844
5844
5845 The following rules apply when the working directory contains
5845 The following rules apply when the working directory contains
5846 uncommitted changes:
5846 uncommitted changes:
5847
5847
5848 1. If neither -c/--check nor -C/--clean is specified, and if
5848 1. If neither -c/--check nor -C/--clean is specified, and if
5849 the requested changeset is an ancestor or descendant of
5849 the requested changeset is an ancestor or descendant of
5850 the working directory's parent, the uncommitted changes
5850 the working directory's parent, the uncommitted changes
5851 are merged into the requested changeset and the merged
5851 are merged into the requested changeset and the merged
5852 result is left uncommitted. If the requested changeset is
5852 result is left uncommitted. If the requested changeset is
5853 not an ancestor or descendant (that is, it is on another
5853 not an ancestor or descendant (that is, it is on another
5854 branch), the update is aborted and the uncommitted changes
5854 branch), the update is aborted and the uncommitted changes
5855 are preserved.
5855 are preserved.
5856
5856
5857 2. With the -c/--check option, the update is aborted and the
5857 2. With the -c/--check option, the update is aborted and the
5858 uncommitted changes are preserved.
5858 uncommitted changes are preserved.
5859
5859
5860 3. With the -C/--clean option, uncommitted changes are discarded and
5860 3. With the -C/--clean option, uncommitted changes are discarded and
5861 the working directory is updated to the requested changeset.
5861 the working directory is updated to the requested changeset.
5862
5862
5863 To cancel an uncommitted merge (and lose your changes), use
5863 To cancel an uncommitted merge (and lose your changes), use
5864 :hg:`update --clean .`.
5864 :hg:`update --clean .`.
5865
5865
5866 Use null as the changeset to remove the working directory (like
5866 Use null as the changeset to remove the working directory (like
5867 :hg:`clone -U`).
5867 :hg:`clone -U`).
5868
5868
5869 If you want to revert just one file to an older revision, use
5869 If you want to revert just one file to an older revision, use
5870 :hg:`revert [-r REV] NAME`.
5870 :hg:`revert [-r REV] NAME`.
5871
5871
5872 See :hg:`help dates` for a list of formats valid for -d/--date.
5872 See :hg:`help dates` for a list of formats valid for -d/--date.
5873
5873
5874 Returns 0 on success, 1 if there are unresolved files.
5874 Returns 0 on success, 1 if there are unresolved files.
5875 """
5875 """
5876 if rev and node:
5876 if rev and node:
5877 raise util.Abort(_("please specify just one revision"))
5877 raise util.Abort(_("please specify just one revision"))
5878
5878
5879 if rev is None or rev == '':
5879 if rev is None or rev == '':
5880 rev = node
5880 rev = node
5881
5881
5882 cmdutil.clearunfinished(repo)
5882 cmdutil.clearunfinished(repo)
5883
5883
5884 # with no argument, we also move the current bookmark, if any
5884 # with no argument, we also move the current bookmark, if any
5885 rev, movemarkfrom = bookmarks.calculateupdate(ui, repo, rev)
5885 rev, movemarkfrom = bookmarks.calculateupdate(ui, repo, rev)
5886
5886
5887 # if we defined a bookmark, we have to remember the original bookmark name
5887 # if we defined a bookmark, we have to remember the original bookmark name
5888 brev = rev
5888 brev = rev
5889 rev = scmutil.revsingle(repo, rev, rev).rev()
5889 rev = scmutil.revsingle(repo, rev, rev).rev()
5890
5890
5891 if check and clean:
5891 if check and clean:
5892 raise util.Abort(_("cannot specify both -c/--check and -C/--clean"))
5892 raise util.Abort(_("cannot specify both -c/--check and -C/--clean"))
5893
5893
5894 if date:
5894 if date:
5895 if rev is not None:
5895 if rev is not None:
5896 raise util.Abort(_("you can't specify a revision and a date"))
5896 raise util.Abort(_("you can't specify a revision and a date"))
5897 rev = cmdutil.finddate(ui, repo, date)
5897 rev = cmdutil.finddate(ui, repo, date)
5898
5898
5899 if check:
5899 if check:
5900 c = repo[None]
5900 c = repo[None]
5901 if c.dirty(merge=False, branch=False, missing=True):
5901 if c.dirty(merge=False, branch=False, missing=True):
5902 raise util.Abort(_("uncommitted changes"))
5902 raise util.Abort(_("uncommitted changes"))
5903 if rev is None:
5903 if rev is None:
5904 rev = repo[repo[None].branch()].rev()
5904 rev = repo[repo[None].branch()].rev()
5905 mergemod._checkunknown(repo, repo[None], repo[rev])
5905 mergemod._checkunknown(repo, repo[None], repo[rev])
5906
5906
5907 repo.ui.setconfig('ui', 'forcemerge', tool, 'update')
5907 repo.ui.setconfig('ui', 'forcemerge', tool, 'update')
5908
5908
5909 if clean:
5909 if clean:
5910 ret = hg.clean(repo, rev)
5910 ret = hg.clean(repo, rev)
5911 else:
5911 else:
5912 ret = hg.update(repo, rev)
5912 ret = hg.update(repo, rev)
5913
5913
5914 if not ret and movemarkfrom:
5914 if not ret and movemarkfrom:
5915 if bookmarks.update(repo, [movemarkfrom], repo['.'].node()):
5915 if bookmarks.update(repo, [movemarkfrom], repo['.'].node()):
5916 ui.status(_("updating bookmark %s\n") % repo._bookmarkcurrent)
5916 ui.status(_("updating bookmark %s\n") % repo._bookmarkcurrent)
5917 elif brev in repo._bookmarks:
5917 elif brev in repo._bookmarks:
5918 bookmarks.setcurrent(repo, brev)
5918 bookmarks.setcurrent(repo, brev)
5919 ui.status(_("(activating bookmark %s)\n") % brev)
5919 ui.status(_("(activating bookmark %s)\n") % brev)
5920 elif brev:
5920 elif brev:
5921 if repo._bookmarkcurrent:
5921 if repo._bookmarkcurrent:
5922 ui.status(_("(leaving bookmark %s)\n") %
5922 ui.status(_("(leaving bookmark %s)\n") %
5923 repo._bookmarkcurrent)
5923 repo._bookmarkcurrent)
5924 bookmarks.unsetcurrent(repo)
5924 bookmarks.unsetcurrent(repo)
5925
5925
5926 return ret
5926 return ret
5927
5927
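
The three numbered rules in the update docstring, together with the explicit abort when both flags are given, reduce to a small decision table. A hedged sketch that returns a description instead of performing the update:

    def updatemode(clean, check):
        # mirrors the precedence above; combining the flags is an error
        if clean and check:
            raise ValueError('cannot specify both -c/--check and -C/--clean')
        if clean:
            return 'discard uncommitted changes, then update'
        if check:
            return 'abort if the working directory has uncommitted changes'
        return 'merge uncommitted changes into the target when possible'

    print(updatemode(clean=False, check=True))
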
5928 @command('verify', [])
5928 @command('verify', [])
5929 def verify(ui, repo):
5929 def verify(ui, repo):
5930 """verify the integrity of the repository
5930 """verify the integrity of the repository
5931
5931
5932 Verify the integrity of the current repository.
5932 Verify the integrity of the current repository.
5933
5933
5934 This will perform an extensive check of the repository's
5934 This will perform an extensive check of the repository's
5935 integrity, validating the hashes and checksums of each entry in
5935 integrity, validating the hashes and checksums of each entry in
5936 the changelog, manifest, and tracked files, as well as the
5936 the changelog, manifest, and tracked files, as well as the
5937 integrity of their crosslinks and indices.
5937 integrity of their crosslinks and indices.
5938
5938
5939 Please see http://mercurial.selenic.com/wiki/RepositoryCorruption
5939 Please see http://mercurial.selenic.com/wiki/RepositoryCorruption
5940 for more information about recovery from corruption of the
5940 for more information about recovery from corruption of the
5941 repository.
5941 repository.
5942
5942
5943 Returns 0 on success, 1 if errors are encountered.
5943 Returns 0 on success, 1 if errors are encountered.
5944 """
5944 """
5945 return hg.verify(repo)
5945 return hg.verify(repo)
5946
5946
5947 @command('version', [])
5947 @command('version', [])
5948 def version_(ui):
5948 def version_(ui):
5949 """output version and copyright information"""
5949 """output version and copyright information"""
5950 ui.write(_("Mercurial Distributed SCM (version %s)\n")
5950 ui.write(_("Mercurial Distributed SCM (version %s)\n")
5951 % util.version())
5951 % util.version())
5952 ui.status(_(
5952 ui.status(_(
5953 "(see http://mercurial.selenic.com for more information)\n"
5953 "(see http://mercurial.selenic.com for more information)\n"
5954 "\nCopyright (C) 2005-2014 Matt Mackall and others\n"
5954 "\nCopyright (C) 2005-2014 Matt Mackall and others\n"
5955 "This is free software; see the source for copying conditions. "
5955 "This is free software; see the source for copying conditions. "
5956 "There is NO\nwarranty; "
5956 "There is NO\nwarranty; "
5957 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
5957 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
5958 ))
5958 ))
5959
5959
5960 norepo = ("clone init version help debugcommands debugcomplete"
5960 norepo = ("clone init version help debugcommands debugcomplete"
5961 " debugdate debuginstall debugfsinfo debugpushkey debugwireargs"
5961 " debugdate debuginstall debugfsinfo debugpushkey debugwireargs"
5962 " debugknown debuggetbundle debugbundle")
5962 " debugknown debuggetbundle debugbundle")
5963 optionalrepo = ("identify paths serve config showconfig debugancestor debugdag"
5963 optionalrepo = ("identify paths serve config showconfig debugancestor debugdag"
5964 " debugdata debugindex debugindexdot debugrevlog")
5964 " debugdata debugindex debugindexdot debugrevlog")
5965 inferrepo = ("add addremove annotate cat commit diff grep forget log parents"
5965 inferrepo = ("add addremove annotate cat commit diff grep forget log parents"
5966 " remove resolve status debugwalk")
5966 " remove resolve status debugwalk")
@@ -1,1624 +1,1625
1 # context.py - changeset and file context objects for mercurial
1 # context.py - changeset and file context objects for mercurial
2 #
2 #
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import nullid, nullrev, short, hex, bin
8 from node import nullid, nullrev, short, hex, bin
9 from i18n import _
9 from i18n import _
10 import mdiff, error, util, scmutil, subrepo, patch, encoding, phases
10 import mdiff, error, util, scmutil, subrepo, patch, encoding, phases
11 import match as matchmod
11 import match as matchmod
12 import os, errno, stat
12 import os, errno, stat
13 import obsolete as obsmod
13 import obsolete as obsmod
14 import repoview
14 import repoview
15 import fileset
15 import fileset
16
16
17 propertycache = util.propertycache
17 propertycache = util.propertycache
18
18
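
util.propertycache, aliased above, is assumed to be a compute-once descriptor: the first access stores the result on the instance so later lookups bypass the function. A minimal sketch of that idea (hypothetical cachedprop, not Mercurial's implementation):

    class cachedprop(object):
        def __init__(self, func):
            self.func = func
            self.name = func.__name__
        def __get__(self, obj, objtype=None):
            if obj is None:
                return self
            value = self.func(obj)
            # store under the same name so the descriptor is no longer consulted
            obj.__dict__[self.name] = value
            return value

    class Demo(object):
        @cachedprop
        def answer(self):
            print('computing...')
            return 42

    d = Demo()
    print(d.answer)   # computing... then 42
    print(d.answer)   # 42, no recomputation
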
19 class basectx(object):
19 class basectx(object):
20 """A basectx object represents the common logic for its children:
20 """A basectx object represents the common logic for its children:
21 changectx: read-only context that is already present in the repo,
21 changectx: read-only context that is already present in the repo,
22 workingctx: a context that represents the working directory and can
22 workingctx: a context that represents the working directory and can
23 be committed,
23 be committed,
24 memctx: a context that represents changes in-memory and can also
24 memctx: a context that represents changes in-memory and can also
25 be committed."""
25 be committed."""
26 def __new__(cls, repo, changeid='', *args, **kwargs):
26 def __new__(cls, repo, changeid='', *args, **kwargs):
27 if isinstance(changeid, basectx):
27 if isinstance(changeid, basectx):
28 return changeid
28 return changeid
29
29
30 o = super(basectx, cls).__new__(cls)
30 o = super(basectx, cls).__new__(cls)
31
31
32 o._repo = repo
32 o._repo = repo
33 o._rev = nullrev
33 o._rev = nullrev
34 o._node = nullid
34 o._node = nullid
35
35
36 return o
36 return o
37
37
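
basectx.__new__ hands back an existing context untouched when one is passed in, so wrapping a context a second time is a no-op. The same idiom in a tiny standalone class:

    class Box(object):
        # mirrors the pass-through pattern above
        def __new__(cls, value):
            if isinstance(value, Box):
                return value
            o = super(Box, cls).__new__(cls)
            o.value = value
            return o

    b = Box(3)
    print(Box(b) is b)   # True
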
38 def __str__(self):
38 def __str__(self):
39 return short(self.node())
39 return short(self.node())
40
40
41 def __int__(self):
41 def __int__(self):
42 return self.rev()
42 return self.rev()
43
43
44 def __repr__(self):
44 def __repr__(self):
45 return "<%s %s>" % (type(self).__name__, str(self))
45 return "<%s %s>" % (type(self).__name__, str(self))
46
46
47 def __eq__(self, other):
47 def __eq__(self, other):
48 try:
48 try:
49 return type(self) == type(other) and self._rev == other._rev
49 return type(self) == type(other) and self._rev == other._rev
50 except AttributeError:
50 except AttributeError:
51 return False
51 return False
52
52
53 def __ne__(self, other):
53 def __ne__(self, other):
54 return not (self == other)
54 return not (self == other)
55
55
56 def __contains__(self, key):
56 def __contains__(self, key):
57 return key in self._manifest
57 return key in self._manifest
58
58
59 def __getitem__(self, key):
59 def __getitem__(self, key):
60 return self.filectx(key)
60 return self.filectx(key)
61
61
62 def __iter__(self):
62 def __iter__(self):
63 for f in sorted(self._manifest):
63 for f in sorted(self._manifest):
64 yield f
64 yield f
65
65
66 def _manifestmatches(self, match, s):
66 def _manifestmatches(self, match, s):
67 """generate a new manifest filtered by the match argument
67 """generate a new manifest filtered by the match argument
68
68
69 This method is for internal use only and mainly exists to provide an
69 This method is for internal use only and mainly exists to provide an
70 object oriented way for other contexts to customize the manifest
70 object oriented way for other contexts to customize the manifest
71 generation.
71 generation.
72 """
72 """
73 mf = self.manifest().copy()
73 mf = self.manifest().copy()
74 if match.always():
74 if match.always():
75 return mf
75 return mf
76 for fn in mf.keys():
76 for fn in mf.keys():
77 if not match(fn):
77 if not match(fn):
78 del mf[fn]
78 del mf[fn]
79 return mf
79 return mf
80
80
81 def _matchstatus(self, other, s, match, listignored, listclean,
81 def _matchstatus(self, other, s, match, listignored, listclean,
82 listunknown):
82 listunknown):
83 """return match.always if match is none
83 """return match.always if match is none
84
84
85 This internal method provides a way for child objects to override the
85 This internal method provides a way for child objects to override the
86 match operator.
86 match operator.
87 """
87 """
88 return match or matchmod.always(self._repo.root, self._repo.getcwd())
88 return match or matchmod.always(self._repo.root, self._repo.getcwd())
89
89
90 def _prestatus(self, other, s, match, listignored, listclean, listunknown):
90 def _prestatus(self, other, s, match, listignored, listclean, listunknown):
91 """provide a hook to allow child objects to preprocess status results
91 """provide a hook to allow child objects to preprocess status results
92
92
93 For example, this allows other contexts, such as workingctx, to query
93 For example, this allows other contexts, such as workingctx, to query
94 the dirstate before comparing the manifests.
94 the dirstate before comparing the manifests.
95 """
95 """
96 # load earliest manifest first for caching reasons
96 # load earliest manifest first for caching reasons
97 if self.rev() < other.rev():
97 if self.rev() < other.rev():
98 self.manifest()
98 self.manifest()
99 return s
99 return s
100
100
101 def _poststatus(self, other, s, match, listignored, listclean, listunknown):
101 def _poststatus(self, other, s, match, listignored, listclean, listunknown):
102 """provide a hook to allow child objects to postprocess status results
102 """provide a hook to allow child objects to postprocess status results
103
103
104 For example, this allows other contexts, such as workingctx, to filter
104 For example, this allows other contexts, such as workingctx, to filter
105 suspect symlinks in the case of FAT32 and NTFS filesystems.
105 suspect symlinks in the case of FAT32 and NTFS filesystems.
106 """
106 """
107 return s
107 return s
108
108
109 def _buildstatus(self, other, s, match, listignored, listclean,
109 def _buildstatus(self, other, s, match, listignored, listclean,
110 listunknown):
110 listunknown):
111 """build a status with respect to another context"""
111 """build a status with respect to another context"""
112 mf1 = other._manifestmatches(match, s)
112 mf1 = other._manifestmatches(match, s)
113 mf2 = self._manifestmatches(match, s)
113 mf2 = self._manifestmatches(match, s)
114
114
115 modified, added, clean = [], [], []
115 modified, added, clean = [], [], []
116 deleted, unknown, ignored = s[3], [], []
116 deleted, unknown, ignored = s[3], [], []
117 withflags = mf1.withflags() | mf2.withflags()
117 withflags = mf1.withflags() | mf2.withflags()
118 for fn, mf2node in mf2.iteritems():
118 for fn, mf2node in mf2.iteritems():
119 if fn in mf1:
119 if fn in mf1:
120 if (fn not in deleted and
120 if (fn not in deleted and
121 ((fn in withflags and mf1.flags(fn) != mf2.flags(fn)) or
121 ((fn in withflags and mf1.flags(fn) != mf2.flags(fn)) or
122 (mf1[fn] != mf2node and
122 (mf1[fn] != mf2node and
123 (mf2node or self[fn].cmp(other[fn]))))):
123 (mf2node or self[fn].cmp(other[fn]))))):
124 modified.append(fn)
124 modified.append(fn)
125 elif listclean:
125 elif listclean:
126 clean.append(fn)
126 clean.append(fn)
127 del mf1[fn]
127 del mf1[fn]
128 elif fn not in deleted:
128 elif fn not in deleted:
129 added.append(fn)
129 added.append(fn)
130 removed = mf1.keys()
130 removed = mf1.keys()
131
131
132 return [modified, added, removed, deleted, unknown, ignored, clean]
132 return [modified, added, removed, deleted, unknown, ignored, clean]
133
133
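
_buildstatus classifies files by walking the newer manifest against the older one; flag handling and the precomputed deleted list are what make the real version longer. A simplified, runnable version of the core comparison, with plain dicts standing in for manifests:

    def buildstatus(mf1, mf2, listclean=False):
        # mf1 is the older manifest, mf2 the newer; values stand in for file nodes
        mf1 = dict(mf1)
        modified, added, clean = [], [], []
        for fn, node in mf2.items():
            if fn in mf1:
                if mf1[fn] != node:
                    modified.append(fn)
                elif listclean:
                    clean.append(fn)
                del mf1[fn]
            else:
                added.append(fn)
        removed = sorted(mf1)    # anything left in mf1 is gone from mf2
        return modified, added, removed, clean

    print(buildstatus({'a': 1, 'b': 2}, {'a': 1, 'b': 3, 'c': 4}, True))
    # (['b'], ['c'], [], ['a'])
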
134 @propertycache
134 @propertycache
135 def substate(self):
135 def substate(self):
136 return subrepo.state(self, self._repo.ui)
136 return subrepo.state(self, self._repo.ui)
137
137
138 def subrev(self, subpath):
138 def subrev(self, subpath):
139 return self.substate[subpath][1]
139 return self.substate[subpath][1]
140
140
141 def rev(self):
141 def rev(self):
142 return self._rev
142 return self._rev
143 def node(self):
143 def node(self):
144 return self._node
144 return self._node
145 def hex(self):
145 def hex(self):
146 return hex(self.node())
146 return hex(self.node())
147 def manifest(self):
147 def manifest(self):
148 return self._manifest
148 return self._manifest
149 def phasestr(self):
149 def phasestr(self):
150 return phases.phasenames[self.phase()]
150 return phases.phasenames[self.phase()]
151 def mutable(self):
151 def mutable(self):
152 return self.phase() > phases.public
152 return self.phase() > phases.public
153
153
154 def getfileset(self, expr):
154 def getfileset(self, expr):
155 return fileset.getfileset(self, expr)
155 return fileset.getfileset(self, expr)
156
156
157 def obsolete(self):
157 def obsolete(self):
158 """True if the changeset is obsolete"""
158 """True if the changeset is obsolete"""
159 return self.rev() in obsmod.getrevs(self._repo, 'obsolete')
159 return self.rev() in obsmod.getrevs(self._repo, 'obsolete')
160
160
161 def extinct(self):
161 def extinct(self):
162 """True if the changeset is extinct"""
162 """True if the changeset is extinct"""
163 return self.rev() in obsmod.getrevs(self._repo, 'extinct')
163 return self.rev() in obsmod.getrevs(self._repo, 'extinct')
164
164
165 def unstable(self):
165 def unstable(self):
166 """True if the changeset is not obsolete but it's ancestor are"""
166 """True if the changeset is not obsolete but it's ancestor are"""
167 return self.rev() in obsmod.getrevs(self._repo, 'unstable')
167 return self.rev() in obsmod.getrevs(self._repo, 'unstable')
168
168
169 def bumped(self):
169 def bumped(self):
170 """True if the changeset try to be a successor of a public changeset
170 """True if the changeset try to be a successor of a public changeset
171
171
172 Only non-public and non-obsolete changesets may be bumped.
172 Only non-public and non-obsolete changesets may be bumped.
173 """
173 """
174 return self.rev() in obsmod.getrevs(self._repo, 'bumped')
174 return self.rev() in obsmod.getrevs(self._repo, 'bumped')
175
175
176 def divergent(self):
176 def divergent(self):
177 """Is a successors of a changeset with multiple possible successors set
177 """Is a successors of a changeset with multiple possible successors set
178
178
179 Only non-public and non-obsolete changesets may be divergent.
179 Only non-public and non-obsolete changesets may be divergent.
180 """
180 """
181 return self.rev() in obsmod.getrevs(self._repo, 'divergent')
181 return self.rev() in obsmod.getrevs(self._repo, 'divergent')
182
182
183 def troubled(self):
183 def troubled(self):
184 """True if the changeset is either unstable, bumped or divergent"""
184 """True if the changeset is either unstable, bumped or divergent"""
185 return self.unstable() or self.bumped() or self.divergent()
185 return self.unstable() or self.bumped() or self.divergent()
186
186
187 def troubles(self):
187 def troubles(self):
188 """return the list of troubles affecting this changesets.
188 """return the list of troubles affecting this changesets.
189
189
190 Troubles are returned as strings. Possible values are:
190 Troubles are returned as strings. Possible values are:
191 - unstable,
191 - unstable,
192 - bumped,
192 - bumped,
193 - divergent.
193 - divergent.
194 """
194 """
195 troubles = []
195 troubles = []
196 if self.unstable():
196 if self.unstable():
197 troubles.append('unstable')
197 troubles.append('unstable')
198 if self.bumped():
198 if self.bumped():
199 troubles.append('bumped')
199 troubles.append('bumped')
200 if self.divergent():
200 if self.divergent():
201 troubles.append('divergent')
201 troubles.append('divergent')
202 return troubles
202 return troubles
203
203
204 def parents(self):
204 def parents(self):
205 """return contexts for each parent changeset"""
205 """return contexts for each parent changeset"""
206 return self._parents
206 return self._parents
207
207
208 def p1(self):
208 def p1(self):
209 return self._parents[0]
209 return self._parents[0]
210
210
211 def p2(self):
211 def p2(self):
212 if len(self._parents) == 2:
212 if len(self._parents) == 2:
213 return self._parents[1]
213 return self._parents[1]
214 return changectx(self._repo, -1)
214 return changectx(self._repo, -1)
215
215
216 def _fileinfo(self, path):
216 def _fileinfo(self, path):
217 if '_manifest' in self.__dict__:
217 if '_manifest' in self.__dict__:
218 try:
218 try:
219 return self._manifest[path], self._manifest.flags(path)
219 return self._manifest[path], self._manifest.flags(path)
220 except KeyError:
220 except KeyError:
221 raise error.ManifestLookupError(self._node, path,
221 raise error.ManifestLookupError(self._node, path,
222 _('not found in manifest'))
222 _('not found in manifest'))
223 if '_manifestdelta' in self.__dict__ or path in self.files():
223 if '_manifestdelta' in self.__dict__ or path in self.files():
224 if path in self._manifestdelta:
224 if path in self._manifestdelta:
225 return (self._manifestdelta[path],
225 return (self._manifestdelta[path],
226 self._manifestdelta.flags(path))
226 self._manifestdelta.flags(path))
227 node, flag = self._repo.manifest.find(self._changeset[0], path)
227 node, flag = self._repo.manifest.find(self._changeset[0], path)
228 if not node:
228 if not node:
229 raise error.ManifestLookupError(self._node, path,
229 raise error.ManifestLookupError(self._node, path,
230 _('not found in manifest'))
230 _('not found in manifest'))
231
231
232 return node, flag
232 return node, flag
233
233
234 def filenode(self, path):
234 def filenode(self, path):
235 return self._fileinfo(path)[0]
235 return self._fileinfo(path)[0]
236
236
237 def flags(self, path):
237 def flags(self, path):
238 try:
238 try:
239 return self._fileinfo(path)[1]
239 return self._fileinfo(path)[1]
240 except error.LookupError:
240 except error.LookupError:
241 return ''
241 return ''
242
242
243 def sub(self, path):
243 def sub(self, path):
244 return subrepo.subrepo(self, path)
244 return subrepo.subrepo(self, path)
245
245
246 def match(self, pats=[], include=None, exclude=None, default='glob'):
246 def match(self, pats=[], include=None, exclude=None, default='glob'):
247 r = self._repo
247 r = self._repo
248 return matchmod.match(r.root, r.getcwd(), pats,
248 return matchmod.match(r.root, r.getcwd(), pats,
249 include, exclude, default,
249 include, exclude, default,
250 auditor=r.auditor, ctx=self)
250 auditor=r.auditor, ctx=self)
251
251
252 def diff(self, ctx2=None, match=None, **opts):
252 def diff(self, ctx2=None, match=None, **opts):
253 """Returns a diff generator for the given contexts and matcher"""
253 """Returns a diff generator for the given contexts and matcher"""
254 if ctx2 is None:
254 if ctx2 is None:
255 ctx2 = self.p1()
255 ctx2 = self.p1()
256 if ctx2 is not None:
256 if ctx2 is not None:
257 ctx2 = self._repo[ctx2]
257 ctx2 = self._repo[ctx2]
258 diffopts = patch.diffopts(self._repo.ui, opts)
258 diffopts = patch.diffopts(self._repo.ui, opts)
259 return patch.diff(self._repo, ctx2.node(), self.node(),
259 return patch.diff(self._repo, ctx2.node(), self.node(),
260 match=match, opts=diffopts)
260 match=match, opts=diffopts)
261
261
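A short usage sketch for the diff() helper above (assuming an open `repo`; the git option is just passed through to patch.diffopts):

    ctx = repo['tip']
    for chunk in ctx.diff(git=True):   # diff against the first parent by default
        repo.ui.write(chunk)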
262 @propertycache
262 @propertycache
263 def _dirs(self):
263 def _dirs(self):
264 return scmutil.dirs(self._manifest)
264 return scmutil.dirs(self._manifest)
265
265
266 def dirs(self):
266 def dirs(self):
267 return self._dirs
267 return self._dirs
268
268
269 def dirty(self):
269 def dirty(self):
270 return False
270 return False
271
271
272 def status(self, other=None, match=None, listignored=False,
272 def status(self, other=None, match=None, listignored=False,
273 listclean=False, listunknown=False, listsubrepos=False):
273 listclean=False, listunknown=False, listsubrepos=False):
274 """return status of files between two nodes or node and working
274 """return status of files between two nodes or node and working
275 directory.
275 directory.
276
276
277 If other is None, compare this node with working directory.
277 If other is None, compare this node with working directory.
278 """
278 """
279
279
280 ctx1 = self
280 ctx1 = self
281 ctx2 = self._repo[other]
281 ctx2 = self._repo[other]
282
282
283 # This next code block is, admittedly, fragile logic that tests for
283 # This next code block is, admittedly, fragile logic that tests for
284 # reversing the contexts and wouldn't need to exist if it weren't for
284 # reversing the contexts and wouldn't need to exist if it weren't for
285 # the fast (and common) code path of comparing the working directory
285 # the fast (and common) code path of comparing the working directory
286 # with its first parent.
286 # with its first parent.
287 #
287 #
288 # What we're aiming for here is the ability to call:
288 # What we're aiming for here is the ability to call:
289 #
289 #
290 # workingctx.status(parentctx)
290 # workingctx.status(parentctx)
291 #
291 #
292 # If we always built the manifest for each context and compared those,
292 # If we always built the manifest for each context and compared those,
293 # then we'd be done. But the special case of the above call means we
293 # then we'd be done. But the special case of the above call means we
294 # just copy the manifest of the parent.
294 # just copy the manifest of the parent.
295 reversed = False
295 reversed = False
296 if (not isinstance(ctx1, changectx)
296 if (not isinstance(ctx1, changectx)
297 and isinstance(ctx2, changectx)):
297 and isinstance(ctx2, changectx)):
298 reversed = True
298 reversed = True
299 ctx1, ctx2 = ctx2, ctx1
299 ctx1, ctx2 = ctx2, ctx1
300
300
301 r = [[], [], [], [], [], [], []]
301 r = [[], [], [], [], [], [], []]
302 match = ctx2._matchstatus(ctx1, r, match, listignored, listclean,
302 match = ctx2._matchstatus(ctx1, r, match, listignored, listclean,
303 listunknown)
303 listunknown)
304 r = ctx2._prestatus(ctx1, r, match, listignored, listclean, listunknown)
304 r = ctx2._prestatus(ctx1, r, match, listignored, listclean, listunknown)
305 r = ctx2._buildstatus(ctx1, r, match, listignored, listclean,
305 r = ctx2._buildstatus(ctx1, r, match, listignored, listclean,
306 listunknown)
306 listunknown)
307 r = ctx2._poststatus(ctx1, r, match, listignored, listclean,
307 r = ctx2._poststatus(ctx1, r, match, listignored, listclean,
308 listunknown)
308 listunknown)
309
309
310 if reversed:
310 if reversed:
311 r[1], r[2], r[3], r[4] = r[2], r[1], r[4], r[3]
311 r[1], r[2], r[3], r[4] = r[2], r[1], r[4], r[3]
312
312
313 if listsubrepos:
313 if listsubrepos:
314 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
314 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
315 rev2 = ctx2.subrev(subpath)
315 rev2 = ctx2.subrev(subpath)
316 try:
316 try:
317 submatch = matchmod.narrowmatcher(subpath, match)
317 submatch = matchmod.narrowmatcher(subpath, match)
318 s = sub.status(rev2, match=submatch, ignored=listignored,
318 s = sub.status(rev2, match=submatch, ignored=listignored,
319 clean=listclean, unknown=listunknown,
319 clean=listclean, unknown=listunknown,
320 listsubrepos=True)
320 listsubrepos=True)
321 for rfiles, sfiles in zip(r, s):
321 for rfiles, sfiles in zip(r, s):
322 rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
322 rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
323 except error.LookupError:
323 except error.LookupError:
324 self._repo.ui.status(_("skipping missing "
324 self._repo.ui.status(_("skipping missing "
325 "subrepository: %s\n") % subpath)
325 "subrepository: %s\n") % subpath)
326
326
327 for l in r:
327 for l in r:
328 l.sort()
328 l.sort()
329
329
330 # we return a tuple to signify that this list isn't changing
330 # we return a tuple to signify that this list isn't changing
331 return tuple(r)
331 return tuple(r)
332
332
333
333
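The fast path discussed in the comments above is the working-directory-versus-first-parent comparison; a sketch of that call (assuming an open `repo`), unpacking the seven result lists in the conventional status order:

    wctx = repo[None]    # working directory context
    pctx = repo['.']     # its first parent
    modified, added, removed, deleted, unknown, ignored, clean = \
        wctx.status(pctx, listclean=True)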
334 def makememctx(repo, parents, text, user, date, branch, files, store,
334 def makememctx(repo, parents, text, user, date, branch, files, store,
335 editor=None):
335 editor=None):
336 def getfilectx(repo, memctx, path):
336 def getfilectx(repo, memctx, path):
337 data, (islink, isexec), copied = store.getfile(path)
337 data, (islink, isexec), copied = store.getfile(path)
338 return memfilectx(path, data, islink=islink, isexec=isexec,
338 return memfilectx(repo, path, data, islink=islink, isexec=isexec,
339 copied=copied)
339 copied=copied, memctx=memctx)
340 extra = {}
340 extra = {}
341 if branch:
341 if branch:
342 extra['branch'] = encoding.fromlocal(branch)
342 extra['branch'] = encoding.fromlocal(branch)
343 ctx = memctx(repo, parents, text, files, getfilectx, user,
343 ctx = memctx(repo, parents, text, files, getfilectx, user,
344 date, extra, editor)
344 date, extra, editor)
345 return ctx
345 return ctx
346
346
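makememctx() above builds an in-memory commit from a file store, and its memfilectx call now passes repo and memctx through, matching the updated constructor. A sketch of a minimal in-memory commit along the same lines (assuming an open `repo`; the file name and contents are illustrative):

    from mercurial import context

    def getfilectx(repo, memctx, path):
        # literal contents instead of a store lookup, for illustration only
        return context.memfilectx(repo, path, 'hello\n', islink=False,
                                  isexec=False, copied=None, memctx=memctx)

    mctx = context.memctx(repo, (repo['.'].node(), None), 'example commit',
                          ['hello.txt'], getfilectx, user='someone', date='0 0')
    node = repo.commitctx(mctx)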
347 class changectx(basectx):
347 class changectx(basectx):
348 """A changecontext object makes access to data related to a particular
348 """A changecontext object makes access to data related to a particular
349 changeset convenient. It represents a read-only context already present in
349 changeset convenient. It represents a read-only context already present in
350 the repo."""
350 the repo."""
351 def __init__(self, repo, changeid=''):
351 def __init__(self, repo, changeid=''):
352 """changeid is a revision number, node, or tag"""
352 """changeid is a revision number, node, or tag"""
353
353
354 # since basectx.__new__ already took care of copying the object, we
354 # since basectx.__new__ already took care of copying the object, we
355 # don't need to do anything in __init__, so we just exit here
355 # don't need to do anything in __init__, so we just exit here
356 if isinstance(changeid, basectx):
356 if isinstance(changeid, basectx):
357 return
357 return
358
358
359 if changeid == '':
359 if changeid == '':
360 changeid = '.'
360 changeid = '.'
361 self._repo = repo
361 self._repo = repo
362
362
363 if isinstance(changeid, int):
363 if isinstance(changeid, int):
364 try:
364 try:
365 self._node = repo.changelog.node(changeid)
365 self._node = repo.changelog.node(changeid)
366 except IndexError:
366 except IndexError:
367 raise error.RepoLookupError(
367 raise error.RepoLookupError(
368 _("unknown revision '%s'") % changeid)
368 _("unknown revision '%s'") % changeid)
369 self._rev = changeid
369 self._rev = changeid
370 return
370 return
371 if isinstance(changeid, long):
371 if isinstance(changeid, long):
372 changeid = str(changeid)
372 changeid = str(changeid)
373 if changeid == '.':
373 if changeid == '.':
374 self._node = repo.dirstate.p1()
374 self._node = repo.dirstate.p1()
375 self._rev = repo.changelog.rev(self._node)
375 self._rev = repo.changelog.rev(self._node)
376 return
376 return
377 if changeid == 'null':
377 if changeid == 'null':
378 self._node = nullid
378 self._node = nullid
379 self._rev = nullrev
379 self._rev = nullrev
380 return
380 return
381 if changeid == 'tip':
381 if changeid == 'tip':
382 self._node = repo.changelog.tip()
382 self._node = repo.changelog.tip()
383 self._rev = repo.changelog.rev(self._node)
383 self._rev = repo.changelog.rev(self._node)
384 return
384 return
385 if len(changeid) == 20:
385 if len(changeid) == 20:
386 try:
386 try:
387 self._node = changeid
387 self._node = changeid
388 self._rev = repo.changelog.rev(changeid)
388 self._rev = repo.changelog.rev(changeid)
389 return
389 return
390 except LookupError:
390 except LookupError:
391 pass
391 pass
392
392
393 try:
393 try:
394 r = int(changeid)
394 r = int(changeid)
395 if str(r) != changeid:
395 if str(r) != changeid:
396 raise ValueError
396 raise ValueError
397 l = len(repo.changelog)
397 l = len(repo.changelog)
398 if r < 0:
398 if r < 0:
399 r += l
399 r += l
400 if r < 0 or r >= l:
400 if r < 0 or r >= l:
401 raise ValueError
401 raise ValueError
402 self._rev = r
402 self._rev = r
403 self._node = repo.changelog.node(r)
403 self._node = repo.changelog.node(r)
404 return
404 return
405 except (ValueError, OverflowError, IndexError):
405 except (ValueError, OverflowError, IndexError):
406 pass
406 pass
407
407
408 if len(changeid) == 40:
408 if len(changeid) == 40:
409 try:
409 try:
410 self._node = bin(changeid)
410 self._node = bin(changeid)
411 self._rev = repo.changelog.rev(self._node)
411 self._rev = repo.changelog.rev(self._node)
412 return
412 return
413 except (TypeError, LookupError):
413 except (TypeError, LookupError):
414 pass
414 pass
415
415
416 if changeid in repo._bookmarks:
416 if changeid in repo._bookmarks:
417 self._node = repo._bookmarks[changeid]
417 self._node = repo._bookmarks[changeid]
418 self._rev = repo.changelog.rev(self._node)
418 self._rev = repo.changelog.rev(self._node)
419 return
419 return
420 if changeid in repo._tagscache.tags:
420 if changeid in repo._tagscache.tags:
421 self._node = repo._tagscache.tags[changeid]
421 self._node = repo._tagscache.tags[changeid]
422 self._rev = repo.changelog.rev(self._node)
422 self._rev = repo.changelog.rev(self._node)
423 return
423 return
424 try:
424 try:
425 self._node = repo.branchtip(changeid)
425 self._node = repo.branchtip(changeid)
426 self._rev = repo.changelog.rev(self._node)
426 self._rev = repo.changelog.rev(self._node)
427 return
427 return
428 except error.RepoLookupError:
428 except error.RepoLookupError:
429 pass
429 pass
430
430
431 self._node = repo.changelog._partialmatch(changeid)
431 self._node = repo.changelog._partialmatch(changeid)
432 if self._node is not None:
432 if self._node is not None:
433 self._rev = repo.changelog.rev(self._node)
433 self._rev = repo.changelog.rev(self._node)
434 return
434 return
435
435
436 # lookup failed
436 # lookup failed
437 # check if it might have come from damaged dirstate
437 # check if it might have come from damaged dirstate
438 #
438 #
439 # XXX we could avoid the unfiltered if we had a recognizable exception
439 # XXX we could avoid the unfiltered if we had a recognizable exception
440 # for filtered changeset access
440 # for filtered changeset access
441 if changeid in repo.unfiltered().dirstate.parents():
441 if changeid in repo.unfiltered().dirstate.parents():
442 raise error.Abort(_("working directory has unknown parent '%s'!")
442 raise error.Abort(_("working directory has unknown parent '%s'!")
443 % short(changeid))
443 % short(changeid))
444 try:
444 try:
445 if len(changeid) == 20:
445 if len(changeid) == 20:
446 changeid = hex(changeid)
446 changeid = hex(changeid)
447 except TypeError:
447 except TypeError:
448 pass
448 pass
449 raise error.RepoLookupError(
449 raise error.RepoLookupError(
450 _("unknown revision '%s'") % changeid)
450 _("unknown revision '%s'") % changeid)
451
451
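The constructor above resolves many identifier forms before giving up; for example (a sketch assuming an open `repo` with at least one commit and a 'default' branch):

    ctx = repo[0]          # integer revision number
    ctx = repo['tip']      # the special 'tip' name
    ctx = repo['.']        # first parent of the working directory
    ctx = repo['default']  # branch name, resolved through branchtip()
    ctx = repo['null']     # the null revision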
452 def __hash__(self):
452 def __hash__(self):
453 try:
453 try:
454 return hash(self._rev)
454 return hash(self._rev)
455 except AttributeError:
455 except AttributeError:
456 return id(self)
456 return id(self)
457
457
458 def __nonzero__(self):
458 def __nonzero__(self):
459 return self._rev != nullrev
459 return self._rev != nullrev
460
460
461 @propertycache
461 @propertycache
462 def _changeset(self):
462 def _changeset(self):
463 return self._repo.changelog.read(self.rev())
463 return self._repo.changelog.read(self.rev())
464
464
465 @propertycache
465 @propertycache
466 def _manifest(self):
466 def _manifest(self):
467 return self._repo.manifest.read(self._changeset[0])
467 return self._repo.manifest.read(self._changeset[0])
468
468
469 @propertycache
469 @propertycache
470 def _manifestdelta(self):
470 def _manifestdelta(self):
471 return self._repo.manifest.readdelta(self._changeset[0])
471 return self._repo.manifest.readdelta(self._changeset[0])
472
472
473 @propertycache
473 @propertycache
474 def _parents(self):
474 def _parents(self):
475 p = self._repo.changelog.parentrevs(self._rev)
475 p = self._repo.changelog.parentrevs(self._rev)
476 if p[1] == nullrev:
476 if p[1] == nullrev:
477 p = p[:-1]
477 p = p[:-1]
478 return [changectx(self._repo, x) for x in p]
478 return [changectx(self._repo, x) for x in p]
479
479
480 def changeset(self):
480 def changeset(self):
481 return self._changeset
481 return self._changeset
482 def manifestnode(self):
482 def manifestnode(self):
483 return self._changeset[0]
483 return self._changeset[0]
484
484
485 def user(self):
485 def user(self):
486 return self._changeset[1]
486 return self._changeset[1]
487 def date(self):
487 def date(self):
488 return self._changeset[2]
488 return self._changeset[2]
489 def files(self):
489 def files(self):
490 return self._changeset[3]
490 return self._changeset[3]
491 def description(self):
491 def description(self):
492 return self._changeset[4]
492 return self._changeset[4]
493 def branch(self):
493 def branch(self):
494 return encoding.tolocal(self._changeset[5].get("branch"))
494 return encoding.tolocal(self._changeset[5].get("branch"))
495 def closesbranch(self):
495 def closesbranch(self):
496 return 'close' in self._changeset[5]
496 return 'close' in self._changeset[5]
497 def extra(self):
497 def extra(self):
498 return self._changeset[5]
498 return self._changeset[5]
499 def tags(self):
499 def tags(self):
500 return self._repo.nodetags(self._node)
500 return self._repo.nodetags(self._node)
501 def bookmarks(self):
501 def bookmarks(self):
502 return self._repo.nodebookmarks(self._node)
502 return self._repo.nodebookmarks(self._node)
503 def phase(self):
503 def phase(self):
504 return self._repo._phasecache.phase(self._repo, self._rev)
504 return self._repo._phasecache.phase(self._repo, self._rev)
505 def hidden(self):
505 def hidden(self):
506 return self._rev in repoview.filterrevs(self._repo, 'visible')
506 return self._rev in repoview.filterrevs(self._repo, 'visible')
507
507
508 def children(self):
508 def children(self):
509 """return contexts for each child changeset"""
509 """return contexts for each child changeset"""
510 c = self._repo.changelog.children(self._node)
510 c = self._repo.changelog.children(self._node)
511 return [changectx(self._repo, x) for x in c]
511 return [changectx(self._repo, x) for x in c]
512
512
513 def ancestors(self):
513 def ancestors(self):
514 for a in self._repo.changelog.ancestors([self._rev]):
514 for a in self._repo.changelog.ancestors([self._rev]):
515 yield changectx(self._repo, a)
515 yield changectx(self._repo, a)
516
516
517 def descendants(self):
517 def descendants(self):
518 for d in self._repo.changelog.descendants([self._rev]):
518 for d in self._repo.changelog.descendants([self._rev]):
519 yield changectx(self._repo, d)
519 yield changectx(self._repo, d)
520
520
521 def filectx(self, path, fileid=None, filelog=None):
521 def filectx(self, path, fileid=None, filelog=None):
522 """get a file context from this changeset"""
522 """get a file context from this changeset"""
523 if fileid is None:
523 if fileid is None:
524 fileid = self.filenode(path)
524 fileid = self.filenode(path)
525 return filectx(self._repo, path, fileid=fileid,
525 return filectx(self._repo, path, fileid=fileid,
526 changectx=self, filelog=filelog)
526 changectx=self, filelog=filelog)
527
527
528 def ancestor(self, c2, warn=False):
528 def ancestor(self, c2, warn=False):
529 """
529 """
530 return the "best" ancestor context of self and c2
530 return the "best" ancestor context of self and c2
531 """
531 """
532 # deal with workingctxs
532 # deal with workingctxs
533 n2 = c2._node
533 n2 = c2._node
534 if n2 is None:
534 if n2 is None:
535 n2 = c2._parents[0]._node
535 n2 = c2._parents[0]._node
536 cahs = self._repo.changelog.commonancestorsheads(self._node, n2)
536 cahs = self._repo.changelog.commonancestorsheads(self._node, n2)
537 if not cahs:
537 if not cahs:
538 anc = nullid
538 anc = nullid
539 elif len(cahs) == 1:
539 elif len(cahs) == 1:
540 anc = cahs[0]
540 anc = cahs[0]
541 else:
541 else:
542 for r in self._repo.ui.configlist('merge', 'preferancestor'):
542 for r in self._repo.ui.configlist('merge', 'preferancestor'):
543 ctx = changectx(self._repo, r)
543 ctx = changectx(self._repo, r)
544 anc = ctx.node()
544 anc = ctx.node()
545 if anc in cahs:
545 if anc in cahs:
546 break
546 break
547 else:
547 else:
548 anc = self._repo.changelog.ancestor(self._node, n2)
548 anc = self._repo.changelog.ancestor(self._node, n2)
549 if warn:
549 if warn:
550 self._repo.ui.status(
550 self._repo.ui.status(
551 (_("note: using %s as ancestor of %s and %s\n") %
551 (_("note: using %s as ancestor of %s and %s\n") %
552 (short(anc), short(self._node), short(n2))) +
552 (short(anc), short(self._node), short(n2))) +
553 ''.join(_(" alternatively, use --config "
553 ''.join(_(" alternatively, use --config "
554 "merge.preferancestor=%s\n") %
554 "merge.preferancestor=%s\n") %
555 short(n) for n in sorted(cahs) if n != anc))
555 short(n) for n in sorted(cahs) if n != anc))
556 return changectx(self._repo, anc)
556 return changectx(self._repo, anc)
557
557
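A usage sketch for ancestor() above (the branch names are illustrative); when several common-ancestor heads exist, the merge.preferancestor configuration consulted above decides, and warn=True reports the choice:

    anc = repo['default'].ancestor(repo['stable'], warn=True)
    repo.ui.write('merge base: %s\n' % anc)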
558 def descendant(self, other):
558 def descendant(self, other):
559 """True if other is descendant of this changeset"""
559 """True if other is descendant of this changeset"""
560 return self._repo.changelog.descendant(self._rev, other._rev)
560 return self._repo.changelog.descendant(self._rev, other._rev)
561
561
562 def walk(self, match):
562 def walk(self, match):
563 fset = set(match.files())
563 fset = set(match.files())
564 # for dirstate.walk, files=['.'] means "walk the whole tree".
564 # for dirstate.walk, files=['.'] means "walk the whole tree".
565 # follow that here, too
565 # follow that here, too
566 fset.discard('.')
566 fset.discard('.')
567
567
568 # avoid the entire walk if we're only looking for specific files
568 # avoid the entire walk if we're only looking for specific files
569 if fset and not match.anypats():
569 if fset and not match.anypats():
570 if util.all([fn in self for fn in fset]):
570 if util.all([fn in self for fn in fset]):
571 for fn in sorted(fset):
571 for fn in sorted(fset):
572 if match(fn):
572 if match(fn):
573 yield fn
573 yield fn
574 raise StopIteration
574 raise StopIteration
575
575
576 for fn in self:
576 for fn in self:
577 if fn in fset:
577 if fn in fset:
578 # specified pattern is the exact name
578 # specified pattern is the exact name
579 fset.remove(fn)
579 fset.remove(fn)
580 if match(fn):
580 if match(fn):
581 yield fn
581 yield fn
582 for fn in sorted(fset):
582 for fn in sorted(fset):
583 if fn in self._dirs:
583 if fn in self._dirs:
584 # specified pattern is a directory
584 # specified pattern is a directory
585 continue
585 continue
586 match.bad(fn, _('no such file in rev %s') % self)
586 match.bad(fn, _('no such file in rev %s') % self)
587
587
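The match()/walk() pair above is the usual way to enumerate a revision's files under a pattern; a sketch (the pattern is illustrative):

    ctx = repo['tip']
    m = ctx.match(['glob:**.py'])
    for fn in ctx.walk(m):
        repo.ui.write(fn + '\n')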
588 class basefilectx(object):
588 class basefilectx(object):
589 """A filecontext object represents the common logic for its children:
589 """A filecontext object represents the common logic for its children:
590 filectx: read-only access to a filerevision that is already present
590 filectx: read-only access to a filerevision that is already present
591 in the repo,
591 in the repo,
592 workingfilectx: a filecontext that represents files from the working
592 workingfilectx: a filecontext that represents files from the working
593 directory,
593 directory,
594 memfilectx: a filecontext that represents files in-memory."""
594 memfilectx: a filecontext that represents files in-memory."""
595 def __new__(cls, repo, path, *args, **kwargs):
595 def __new__(cls, repo, path, *args, **kwargs):
596 return super(basefilectx, cls).__new__(cls)
596 return super(basefilectx, cls).__new__(cls)
597
597
598 @propertycache
598 @propertycache
599 def _filelog(self):
599 def _filelog(self):
600 return self._repo.file(self._path)
600 return self._repo.file(self._path)
601
601
602 @propertycache
602 @propertycache
603 def _changeid(self):
603 def _changeid(self):
604 if '_changeid' in self.__dict__:
604 if '_changeid' in self.__dict__:
605 return self._changeid
605 return self._changeid
606 elif '_changectx' in self.__dict__:
606 elif '_changectx' in self.__dict__:
607 return self._changectx.rev()
607 return self._changectx.rev()
608 else:
608 else:
609 return self._filelog.linkrev(self._filerev)
609 return self._filelog.linkrev(self._filerev)
610
610
611 @propertycache
611 @propertycache
612 def _filenode(self):
612 def _filenode(self):
613 if '_fileid' in self.__dict__:
613 if '_fileid' in self.__dict__:
614 return self._filelog.lookup(self._fileid)
614 return self._filelog.lookup(self._fileid)
615 else:
615 else:
616 return self._changectx.filenode(self._path)
616 return self._changectx.filenode(self._path)
617
617
618 @propertycache
618 @propertycache
619 def _filerev(self):
619 def _filerev(self):
620 return self._filelog.rev(self._filenode)
620 return self._filelog.rev(self._filenode)
621
621
622 @propertycache
622 @propertycache
623 def _repopath(self):
623 def _repopath(self):
624 return self._path
624 return self._path
625
625
626 def __nonzero__(self):
626 def __nonzero__(self):
627 try:
627 try:
628 self._filenode
628 self._filenode
629 return True
629 return True
630 except error.LookupError:
630 except error.LookupError:
631 # file is missing
631 # file is missing
632 return False
632 return False
633
633
634 def __str__(self):
634 def __str__(self):
635 return "%s@%s" % (self.path(), self._changectx)
635 return "%s@%s" % (self.path(), self._changectx)
636
636
637 def __repr__(self):
637 def __repr__(self):
638 return "<%s %s>" % (type(self).__name__, str(self))
638 return "<%s %s>" % (type(self).__name__, str(self))
639
639
640 def __hash__(self):
640 def __hash__(self):
641 try:
641 try:
642 return hash((self._path, self._filenode))
642 return hash((self._path, self._filenode))
643 except AttributeError:
643 except AttributeError:
644 return id(self)
644 return id(self)
645
645
646 def __eq__(self, other):
646 def __eq__(self, other):
647 try:
647 try:
648 return (type(self) == type(other) and self._path == other._path
648 return (type(self) == type(other) and self._path == other._path
649 and self._filenode == other._filenode)
649 and self._filenode == other._filenode)
650 except AttributeError:
650 except AttributeError:
651 return False
651 return False
652
652
653 def __ne__(self, other):
653 def __ne__(self, other):
654 return not (self == other)
654 return not (self == other)
655
655
656 def filerev(self):
656 def filerev(self):
657 return self._filerev
657 return self._filerev
658 def filenode(self):
658 def filenode(self):
659 return self._filenode
659 return self._filenode
660 def flags(self):
660 def flags(self):
661 return self._changectx.flags(self._path)
661 return self._changectx.flags(self._path)
662 def filelog(self):
662 def filelog(self):
663 return self._filelog
663 return self._filelog
664 def rev(self):
664 def rev(self):
665 return self._changeid
665 return self._changeid
666 def linkrev(self):
666 def linkrev(self):
667 return self._filelog.linkrev(self._filerev)
667 return self._filelog.linkrev(self._filerev)
668 def node(self):
668 def node(self):
669 return self._changectx.node()
669 return self._changectx.node()
670 def hex(self):
670 def hex(self):
671 return self._changectx.hex()
671 return self._changectx.hex()
672 def user(self):
672 def user(self):
673 return self._changectx.user()
673 return self._changectx.user()
674 def date(self):
674 def date(self):
675 return self._changectx.date()
675 return self._changectx.date()
676 def files(self):
676 def files(self):
677 return self._changectx.files()
677 return self._changectx.files()
678 def description(self):
678 def description(self):
679 return self._changectx.description()
679 return self._changectx.description()
680 def branch(self):
680 def branch(self):
681 return self._changectx.branch()
681 return self._changectx.branch()
682 def extra(self):
682 def extra(self):
683 return self._changectx.extra()
683 return self._changectx.extra()
684 def phase(self):
684 def phase(self):
685 return self._changectx.phase()
685 return self._changectx.phase()
686 def phasestr(self):
686 def phasestr(self):
687 return self._changectx.phasestr()
687 return self._changectx.phasestr()
688 def manifest(self):
688 def manifest(self):
689 return self._changectx.manifest()
689 return self._changectx.manifest()
690 def changectx(self):
690 def changectx(self):
691 return self._changectx
691 return self._changectx
692
692
693 def path(self):
693 def path(self):
694 return self._path
694 return self._path
695
695
696 def isbinary(self):
696 def isbinary(self):
697 try:
697 try:
698 return util.binary(self.data())
698 return util.binary(self.data())
699 except IOError:
699 except IOError:
700 return False
700 return False
701
701
702 def cmp(self, fctx):
702 def cmp(self, fctx):
703 """compare with other file context
703 """compare with other file context
704
704
705 returns True if different than fctx.
705 returns True if different than fctx.
706 """
706 """
707 if (fctx._filerev is None
707 if (fctx._filerev is None
708 and (self._repo._encodefilterpats
708 and (self._repo._encodefilterpats
709 # if file data starts with '\1\n', empty metadata block is
709 # if file data starts with '\1\n', empty metadata block is
710 # prepended, which adds 4 bytes to filelog.size().
710 # prepended, which adds 4 bytes to filelog.size().
711 or self.size() - 4 == fctx.size())
711 or self.size() - 4 == fctx.size())
712 or self.size() == fctx.size()):
712 or self.size() == fctx.size()):
713 return self._filelog.cmp(self._filenode, fctx.data())
713 return self._filelog.cmp(self._filenode, fctx.data())
714
714
715 return True
715 return True
716
716
717 def parents(self):
717 def parents(self):
718 p = self._path
718 p = self._path
719 fl = self._filelog
719 fl = self._filelog
720 pl = [(p, n, fl) for n in self._filelog.parents(self._filenode)]
720 pl = [(p, n, fl) for n in self._filelog.parents(self._filenode)]
721
721
722 r = self._filelog.renamed(self._filenode)
722 r = self._filelog.renamed(self._filenode)
723 if r:
723 if r:
724 pl[0] = (r[0], r[1], None)
724 pl[0] = (r[0], r[1], None)
725
725
726 return [filectx(self._repo, p, fileid=n, filelog=l)
726 return [filectx(self._repo, p, fileid=n, filelog=l)
727 for p, n, l in pl if n != nullid]
727 for p, n, l in pl if n != nullid]
728
728
729 def p1(self):
729 def p1(self):
730 return self.parents()[0]
730 return self.parents()[0]
731
731
732 def p2(self):
732 def p2(self):
733 p = self.parents()
733 p = self.parents()
734 if len(p) == 2:
734 if len(p) == 2:
735 return p[1]
735 return p[1]
736 return filectx(self._repo, self._path, fileid=-1, filelog=self._filelog)
736 return filectx(self._repo, self._path, fileid=-1, filelog=self._filelog)
737
737
738 def annotate(self, follow=False, linenumber=None, diffopts=None):
738 def annotate(self, follow=False, linenumber=None, diffopts=None):
739 '''returns a list of tuples of (ctx, line) for each line
739 '''returns a list of tuples of (ctx, line) for each line
740 in the file, where ctx is the filectx of the node where
740 in the file, where ctx is the filectx of the node where
741 that line was last changed.
741 that line was last changed.
742 This returns tuples of ((ctx, linenumber), line) for each line,
742 This returns tuples of ((ctx, linenumber), line) for each line,
743 if "linenumber" parameter is NOT "None".
743 if "linenumber" parameter is NOT "None".
744 In such tuples, linenumber means one at the first appearance
744 In such tuples, linenumber means one at the first appearance
745 in the managed file.
745 in the managed file.
746 To reduce annotation cost,
746 To reduce annotation cost,
747 this returns fixed value(False is used) as linenumber,
747 this returns fixed value(False is used) as linenumber,
748 if "linenumber" parameter is "False".'''
748 if "linenumber" parameter is "False".'''
749
749
750 def decorate_compat(text, rev):
750 def decorate_compat(text, rev):
751 return ([rev] * len(text.splitlines()), text)
751 return ([rev] * len(text.splitlines()), text)
752
752
753 def without_linenumber(text, rev):
753 def without_linenumber(text, rev):
754 return ([(rev, False)] * len(text.splitlines()), text)
754 return ([(rev, False)] * len(text.splitlines()), text)
755
755
756 def with_linenumber(text, rev):
756 def with_linenumber(text, rev):
757 size = len(text.splitlines())
757 size = len(text.splitlines())
758 return ([(rev, i) for i in xrange(1, size + 1)], text)
758 return ([(rev, i) for i in xrange(1, size + 1)], text)
759
759
760 decorate = (((linenumber is None) and decorate_compat) or
760 decorate = (((linenumber is None) and decorate_compat) or
761 (linenumber and with_linenumber) or
761 (linenumber and with_linenumber) or
762 without_linenumber)
762 without_linenumber)
763
763
764 def pair(parent, child):
764 def pair(parent, child):
765 blocks = mdiff.allblocks(parent[1], child[1], opts=diffopts,
765 blocks = mdiff.allblocks(parent[1], child[1], opts=diffopts,
766 refine=True)
766 refine=True)
767 for (a1, a2, b1, b2), t in blocks:
767 for (a1, a2, b1, b2), t in blocks:
768 # Changed blocks ('!') or blocks made only of blank lines ('~')
768 # Changed blocks ('!') or blocks made only of blank lines ('~')
769 # belong to the child.
769 # belong to the child.
770 if t == '=':
770 if t == '=':
771 child[0][b1:b2] = parent[0][a1:a2]
771 child[0][b1:b2] = parent[0][a1:a2]
772 return child
772 return child
773
773
774 getlog = util.lrucachefunc(lambda x: self._repo.file(x))
774 getlog = util.lrucachefunc(lambda x: self._repo.file(x))
775
775
776 def parents(f):
776 def parents(f):
777 pl = f.parents()
777 pl = f.parents()
778
778
779 # Don't return renamed parents if we aren't following.
779 # Don't return renamed parents if we aren't following.
780 if not follow:
780 if not follow:
781 pl = [p for p in pl if p.path() == f.path()]
781 pl = [p for p in pl if p.path() == f.path()]
782
782
783 # renamed filectx won't have a filelog yet, so set it
783 # renamed filectx won't have a filelog yet, so set it
784 # from the cache to save time
784 # from the cache to save time
785 for p in pl:
785 for p in pl:
786 if not '_filelog' in p.__dict__:
786 if not '_filelog' in p.__dict__:
787 p._filelog = getlog(p.path())
787 p._filelog = getlog(p.path())
788
788
789 return pl
789 return pl
790
790
791 # use linkrev to find the first changeset where self appeared
791 # use linkrev to find the first changeset where self appeared
792 if self.rev() != self.linkrev():
792 if self.rev() != self.linkrev():
793 base = self.filectx(self.filenode())
793 base = self.filectx(self.filenode())
794 else:
794 else:
795 base = self
795 base = self
796
796
797 # This algorithm would prefer to be recursive, but Python is a
797 # This algorithm would prefer to be recursive, but Python is a
798 # bit recursion-hostile. Instead we do an iterative
798 # bit recursion-hostile. Instead we do an iterative
799 # depth-first search.
799 # depth-first search.
800
800
801 visit = [base]
801 visit = [base]
802 hist = {}
802 hist = {}
803 pcache = {}
803 pcache = {}
804 needed = {base: 1}
804 needed = {base: 1}
805 while visit:
805 while visit:
806 f = visit[-1]
806 f = visit[-1]
807 pcached = f in pcache
807 pcached = f in pcache
808 if not pcached:
808 if not pcached:
809 pcache[f] = parents(f)
809 pcache[f] = parents(f)
810
810
811 ready = True
811 ready = True
812 pl = pcache[f]
812 pl = pcache[f]
813 for p in pl:
813 for p in pl:
814 if p not in hist:
814 if p not in hist:
815 ready = False
815 ready = False
816 visit.append(p)
816 visit.append(p)
817 if not pcached:
817 if not pcached:
818 needed[p] = needed.get(p, 0) + 1
818 needed[p] = needed.get(p, 0) + 1
819 if ready:
819 if ready:
820 visit.pop()
820 visit.pop()
821 reusable = f in hist
821 reusable = f in hist
822 if reusable:
822 if reusable:
823 curr = hist[f]
823 curr = hist[f]
824 else:
824 else:
825 curr = decorate(f.data(), f)
825 curr = decorate(f.data(), f)
826 for p in pl:
826 for p in pl:
827 if not reusable:
827 if not reusable:
828 curr = pair(hist[p], curr)
828 curr = pair(hist[p], curr)
829 if needed[p] == 1:
829 if needed[p] == 1:
830 del hist[p]
830 del hist[p]
831 del needed[p]
831 del needed[p]
832 else:
832 else:
833 needed[p] -= 1
833 needed[p] -= 1
834
834
835 hist[f] = curr
835 hist[f] = curr
836 pcache[f] = []
836 pcache[f] = []
837
837
838 return zip(hist[base][0], hist[base][1].splitlines(True))
838 return zip(hist[base][0], hist[base][1].splitlines(True))
839
839
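Given the return shape documented above (one (filectx, line) pair per line when linenumber is None), a sketch that prints the revision that last touched each line (the file path is illustrative):

    fctx = repo['tip']['mercurial/context.py']
    for fc, line in fctx.annotate(follow=True):
        repo.ui.write('%5d: %s' % (fc.rev(), line))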
840 def ancestors(self, followfirst=False):
840 def ancestors(self, followfirst=False):
841 visit = {}
841 visit = {}
842 c = self
842 c = self
843 cut = followfirst and 1 or None
843 cut = followfirst and 1 or None
844 while True:
844 while True:
845 for parent in c.parents()[:cut]:
845 for parent in c.parents()[:cut]:
846 visit[(parent.rev(), parent.node())] = parent
846 visit[(parent.rev(), parent.node())] = parent
847 if not visit:
847 if not visit:
848 break
848 break
849 c = visit.pop(max(visit))
849 c = visit.pop(max(visit))
850 yield c
850 yield c
851
851
852 class filectx(basefilectx):
852 class filectx(basefilectx):
853 """A filecontext object makes access to data related to a particular
853 """A filecontext object makes access to data related to a particular
854 filerevision convenient."""
854 filerevision convenient."""
855 def __init__(self, repo, path, changeid=None, fileid=None,
855 def __init__(self, repo, path, changeid=None, fileid=None,
856 filelog=None, changectx=None):
856 filelog=None, changectx=None):
857 """changeid can be a changeset revision, node, or tag.
857 """changeid can be a changeset revision, node, or tag.
858 fileid can be a file revision or node."""
858 fileid can be a file revision or node."""
859 self._repo = repo
859 self._repo = repo
860 self._path = path
860 self._path = path
861
861
862 assert (changeid is not None
862 assert (changeid is not None
863 or fileid is not None
863 or fileid is not None
864 or changectx is not None), \
864 or changectx is not None), \
865 ("bad args: changeid=%r, fileid=%r, changectx=%r"
865 ("bad args: changeid=%r, fileid=%r, changectx=%r"
866 % (changeid, fileid, changectx))
866 % (changeid, fileid, changectx))
867
867
868 if filelog is not None:
868 if filelog is not None:
869 self._filelog = filelog
869 self._filelog = filelog
870
870
871 if changeid is not None:
871 if changeid is not None:
872 self._changeid = changeid
872 self._changeid = changeid
873 if changectx is not None:
873 if changectx is not None:
874 self._changectx = changectx
874 self._changectx = changectx
875 if fileid is not None:
875 if fileid is not None:
876 self._fileid = fileid
876 self._fileid = fileid
877
877
878 @propertycache
878 @propertycache
879 def _changectx(self):
879 def _changectx(self):
880 try:
880 try:
881 return changectx(self._repo, self._changeid)
881 return changectx(self._repo, self._changeid)
882 except error.RepoLookupError:
882 except error.RepoLookupError:
883 # Linkrev may point to any revision in the repository. When the
883 # Linkrev may point to any revision in the repository. When the
884 # repository is filtered this may lead to `filectx` trying to build
884 # repository is filtered this may lead to `filectx` trying to build
885 # `changectx` for a filtered revision. In such a case we fall back to
885 # `changectx` for a filtered revision. In such a case we fall back to
886 # creating `changectx` on the unfiltered version of the repository.
886 # creating `changectx` on the unfiltered version of the repository.
887 # This fallback should not be an issue because `changectx` from
887 # This fallback should not be an issue because `changectx` from
888 # `filectx` are not used in complex operations that care about
888 # `filectx` are not used in complex operations that care about
889 # filtering.
889 # filtering.
890 #
890 #
891 # This fallback is a cheap and dirty fix that prevents several
891 # This fallback is a cheap and dirty fix that prevents several
892 # crashes. It does not ensure the behavior is correct. However the
892 # crashes. It does not ensure the behavior is correct. However the
893 # behavior was not correct before filtering either, and "incorrect
893 # behavior was not correct before filtering either, and "incorrect
894 # behavior" is seen as better than "crash".
894 # behavior" is seen as better than "crash".
895 #
895 #
896 # Linkrevs have several serious problems with filtering that are
896 # Linkrevs have several serious problems with filtering that are
897 # complicated to solve. Proper handling of the issue here should be
897 # complicated to solve. Proper handling of the issue here should be
898 # considered when solving the linkrev issues is on the table.
898 # considered when solving the linkrev issues is on the table.
899 return changectx(self._repo.unfiltered(), self._changeid)
899 return changectx(self._repo.unfiltered(), self._changeid)
900
900
901 def filectx(self, fileid):
901 def filectx(self, fileid):
902 '''opens an arbitrary revision of the file without
902 '''opens an arbitrary revision of the file without
903 opening a new filelog'''
903 opening a new filelog'''
904 return filectx(self._repo, self._path, fileid=fileid,
904 return filectx(self._repo, self._path, fileid=fileid,
905 filelog=self._filelog)
905 filelog=self._filelog)
906
906
907 def data(self):
907 def data(self):
908 return self._filelog.read(self._filenode)
908 return self._filelog.read(self._filenode)
909 def size(self):
909 def size(self):
910 return self._filelog.size(self._filerev)
910 return self._filelog.size(self._filerev)
911
911
912 def renamed(self):
912 def renamed(self):
913 """check if file was actually renamed in this changeset revision
913 """check if file was actually renamed in this changeset revision
914
914
915 If a rename is logged in the file revision, we report the copy for the
915 If a rename is logged in the file revision, we report the copy for the
916 changeset only if the file revision's linkrev points back to the changeset
916 changeset only if the file revision's linkrev points back to the changeset
917 in question or both changeset parents contain different file revisions.
917 in question or both changeset parents contain different file revisions.
918 """
918 """
919
919
920 renamed = self._filelog.renamed(self._filenode)
920 renamed = self._filelog.renamed(self._filenode)
921 if not renamed:
921 if not renamed:
922 return renamed
922 return renamed
923
923
924 if self.rev() == self.linkrev():
924 if self.rev() == self.linkrev():
925 return renamed
925 return renamed
926
926
927 name = self.path()
927 name = self.path()
928 fnode = self._filenode
928 fnode = self._filenode
929 for p in self._changectx.parents():
929 for p in self._changectx.parents():
930 try:
930 try:
931 if fnode == p.filenode(name):
931 if fnode == p.filenode(name):
932 return None
932 return None
933 except error.LookupError:
933 except error.LookupError:
934 pass
934 pass
935 return renamed
935 return renamed
936
936
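A sketch of consuming renamed() above, which returns either a false value or an (oldpath, oldnode) pair (the file path is illustrative):

    fctx = repo['tip']['mercurial/context.py']
    r = fctx.renamed()
    if r:
        oldpath, oldnode = r
        repo.ui.write('%s was copied from %s\n' % (fctx.path(), oldpath))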
937 def children(self):
937 def children(self):
938 # hard for renames
938 # hard for renames
939 c = self._filelog.children(self._filenode)
939 c = self._filelog.children(self._filenode)
940 return [filectx(self._repo, self._path, fileid=x,
940 return [filectx(self._repo, self._path, fileid=x,
941 filelog=self._filelog) for x in c]
941 filelog=self._filelog) for x in c]
942
942
943 class committablectx(basectx):
943 class committablectx(basectx):
944 """A committablectx object provides common functionality for a context that
944 """A committablectx object provides common functionality for a context that
945 wants the ability to commit, e.g. workingctx or memctx."""
945 wants the ability to commit, e.g. workingctx or memctx."""
946 def __init__(self, repo, text="", user=None, date=None, extra=None,
946 def __init__(self, repo, text="", user=None, date=None, extra=None,
947 changes=None):
947 changes=None):
948 self._repo = repo
948 self._repo = repo
949 self._rev = None
949 self._rev = None
950 self._node = None
950 self._node = None
951 self._text = text
951 self._text = text
952 if date:
952 if date:
953 self._date = util.parsedate(date)
953 self._date = util.parsedate(date)
954 if user:
954 if user:
955 self._user = user
955 self._user = user
956 if changes:
956 if changes:
957 self._status = changes
957 self._status = changes
958
958
959 self._extra = {}
959 self._extra = {}
960 if extra:
960 if extra:
961 self._extra = extra.copy()
961 self._extra = extra.copy()
962 if 'branch' not in self._extra:
962 if 'branch' not in self._extra:
963 try:
963 try:
964 branch = encoding.fromlocal(self._repo.dirstate.branch())
964 branch = encoding.fromlocal(self._repo.dirstate.branch())
965 except UnicodeDecodeError:
965 except UnicodeDecodeError:
966 raise util.Abort(_('branch name not in UTF-8!'))
966 raise util.Abort(_('branch name not in UTF-8!'))
967 self._extra['branch'] = branch
967 self._extra['branch'] = branch
968 if self._extra['branch'] == '':
968 if self._extra['branch'] == '':
969 self._extra['branch'] = 'default'
969 self._extra['branch'] = 'default'
970
970
971 def __str__(self):
971 def __str__(self):
972 return str(self._parents[0]) + "+"
972 return str(self._parents[0]) + "+"
973
973
974 def __nonzero__(self):
974 def __nonzero__(self):
975 return True
975 return True
976
976
977 def __contains__(self, key):
977 def __contains__(self, key):
978 return self._repo.dirstate[key] not in "?r"
978 return self._repo.dirstate[key] not in "?r"
979
979
980 def _buildflagfunc(self):
980 def _buildflagfunc(self):
981 # Create a fallback function for getting file flags when the
981 # Create a fallback function for getting file flags when the
982 # filesystem doesn't support them
982 # filesystem doesn't support them
983
983
984 copiesget = self._repo.dirstate.copies().get
984 copiesget = self._repo.dirstate.copies().get
985
985
986 if len(self._parents) < 2:
986 if len(self._parents) < 2:
987 # when we have one parent, it's easy: copy from parent
987 # when we have one parent, it's easy: copy from parent
988 man = self._parents[0].manifest()
988 man = self._parents[0].manifest()
989 def func(f):
989 def func(f):
990 f = copiesget(f, f)
990 f = copiesget(f, f)
991 return man.flags(f)
991 return man.flags(f)
992 else:
992 else:
993 # merges are tricky: we try to reconstruct the unstored
993 # merges are tricky: we try to reconstruct the unstored
994 # result from the merge (issue1802)
994 # result from the merge (issue1802)
995 p1, p2 = self._parents
995 p1, p2 = self._parents
996 pa = p1.ancestor(p2)
996 pa = p1.ancestor(p2)
997 m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest()
997 m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest()
998
998
999 def func(f):
999 def func(f):
1000 f = copiesget(f, f) # may be wrong for merges with copies
1000 f = copiesget(f, f) # may be wrong for merges with copies
1001 fl1, fl2, fla = m1.flags(f), m2.flags(f), ma.flags(f)
1001 fl1, fl2, fla = m1.flags(f), m2.flags(f), ma.flags(f)
1002 if fl1 == fl2:
1002 if fl1 == fl2:
1003 return fl1
1003 return fl1
1004 if fl1 == fla:
1004 if fl1 == fla:
1005 return fl2
1005 return fl2
1006 if fl2 == fla:
1006 if fl2 == fla:
1007 return fl1
1007 return fl1
1008 return '' # punt for conflicts
1008 return '' # punt for conflicts
1009
1009
1010 return func
1010 return func
1011
1011
1012 @propertycache
1012 @propertycache
1013 def _flagfunc(self):
1013 def _flagfunc(self):
1014 return self._repo.dirstate.flagfunc(self._buildflagfunc)
1014 return self._repo.dirstate.flagfunc(self._buildflagfunc)
1015
1015
1016 @propertycache
1016 @propertycache
1017 def _manifest(self):
1017 def _manifest(self):
1018 """generate a manifest corresponding to the values in self._status"""
1018 """generate a manifest corresponding to the values in self._status"""
1019
1019
1020 man = self._parents[0].manifest().copy()
1020 man = self._parents[0].manifest().copy()
1021 if len(self._parents) > 1:
1021 if len(self._parents) > 1:
1022 man2 = self.p2().manifest()
1022 man2 = self.p2().manifest()
1023 def getman(f):
1023 def getman(f):
1024 if f in man:
1024 if f in man:
1025 return man
1025 return man
1026 return man2
1026 return man2
1027 else:
1027 else:
1028 getman = lambda f: man
1028 getman = lambda f: man
1029
1029
1030 copied = self._repo.dirstate.copies()
1030 copied = self._repo.dirstate.copies()
1031 ff = self._flagfunc
1031 ff = self._flagfunc
1032 modified, added, removed, deleted = self._status[:4]
1032 modified, added, removed, deleted = self._status[:4]
1033 for i, l in (("a", added), ("m", modified)):
1033 for i, l in (("a", added), ("m", modified)):
1034 for f in l:
1034 for f in l:
1035 orig = copied.get(f, f)
1035 orig = copied.get(f, f)
1036 man[f] = getman(orig).get(orig, nullid) + i
1036 man[f] = getman(orig).get(orig, nullid) + i
1037 try:
1037 try:
1038 man.set(f, ff(f))
1038 man.set(f, ff(f))
1039 except OSError:
1039 except OSError:
1040 pass
1040 pass
1041
1041
1042 for f in deleted + removed:
1042 for f in deleted + removed:
1043 if f in man:
1043 if f in man:
1044 del man[f]
1044 del man[f]
1045
1045
1046 return man
1046 return man
1047
1047
1048 @propertycache
1048 @propertycache
1049 def _status(self):
1049 def _status(self):
1050 return self._repo.status()
1050 return self._repo.status()
1051
1051
1052 @propertycache
1052 @propertycache
1053 def _user(self):
1053 def _user(self):
1054 return self._repo.ui.username()
1054 return self._repo.ui.username()
1055
1055
1056 @propertycache
1056 @propertycache
1057 def _date(self):
1057 def _date(self):
1058 return util.makedate()
1058 return util.makedate()
1059
1059
1060 def subrev(self, subpath):
1060 def subrev(self, subpath):
1061 return None
1061 return None
1062
1062
1063 def user(self):
1063 def user(self):
1064 return self._user or self._repo.ui.username()
1064 return self._user or self._repo.ui.username()
1065 def date(self):
1065 def date(self):
1066 return self._date
1066 return self._date
1067 def description(self):
1067 def description(self):
1068 return self._text
1068 return self._text
1069 def files(self):
1069 def files(self):
1070 return sorted(self._status[0] + self._status[1] + self._status[2])
1070 return sorted(self._status[0] + self._status[1] + self._status[2])
1071
1071
1072 def modified(self):
1072 def modified(self):
1073 return self._status[0]
1073 return self._status[0]
1074 def added(self):
1074 def added(self):
1075 return self._status[1]
1075 return self._status[1]
1076 def removed(self):
1076 def removed(self):
1077 return self._status[2]
1077 return self._status[2]
1078 def deleted(self):
1078 def deleted(self):
1079 return self._status[3]
1079 return self._status[3]
1080 def unknown(self):
1080 def unknown(self):
1081 return self._status[4]
1081 return self._status[4]
1082 def ignored(self):
1082 def ignored(self):
1083 return self._status[5]
1083 return self._status[5]
1084 def clean(self):
1084 def clean(self):
1085 return self._status[6]
1085 return self._status[6]
1086 def branch(self):
1086 def branch(self):
1087 return encoding.tolocal(self._extra['branch'])
1087 return encoding.tolocal(self._extra['branch'])
1088 def closesbranch(self):
1088 def closesbranch(self):
1089 return 'close' in self._extra
1089 return 'close' in self._extra
1090 def extra(self):
1090 def extra(self):
1091 return self._extra
1091 return self._extra
1092
1092
1093 def tags(self):
1093 def tags(self):
1094 t = []
1094 t = []
1095 for p in self.parents():
1095 for p in self.parents():
1096 t.extend(p.tags())
1096 t.extend(p.tags())
1097 return t
1097 return t
1098
1098
1099 def bookmarks(self):
1099 def bookmarks(self):
1100 b = []
1100 b = []
1101 for p in self.parents():
1101 for p in self.parents():
1102 b.extend(p.bookmarks())
1102 b.extend(p.bookmarks())
1103 return b
1103 return b
1104
1104
1105 def phase(self):
1105 def phase(self):
1106 phase = phases.draft # default phase to draft
1106 phase = phases.draft # default phase to draft
1107 for p in self.parents():
1107 for p in self.parents():
1108 phase = max(phase, p.phase())
1108 phase = max(phase, p.phase())
1109 return phase
1109 return phase
1110
1110
1111 def hidden(self):
1111 def hidden(self):
1112 return False
1112 return False
1113
1113
1114 def children(self):
1114 def children(self):
1115 return []
1115 return []
1116
1116
1117 def flags(self, path):
1117 def flags(self, path):
1118 if '_manifest' in self.__dict__:
1118 if '_manifest' in self.__dict__:
1119 try:
1119 try:
1120 return self._manifest.flags(path)
1120 return self._manifest.flags(path)
1121 except KeyError:
1121 except KeyError:
1122 return ''
1122 return ''
1123
1123
1124 try:
1124 try:
1125 return self._flagfunc(path)
1125 return self._flagfunc(path)
1126 except OSError:
1126 except OSError:
1127 return ''
1127 return ''
1128
1128
1129 def ancestor(self, c2):
1129 def ancestor(self, c2):
1130 """return the ancestor context of self and c2"""
1130 """return the ancestor context of self and c2"""
1131 return self._parents[0].ancestor(c2) # punt on two parents for now
1131 return self._parents[0].ancestor(c2) # punt on two parents for now
1132
1132
1133 def walk(self, match):
1133 def walk(self, match):
1134 return sorted(self._repo.dirstate.walk(match, sorted(self.substate),
1134 return sorted(self._repo.dirstate.walk(match, sorted(self.substate),
1135 True, False))
1135 True, False))
1136
1136
1137 def ancestors(self):
1137 def ancestors(self):
1138 for a in self._repo.changelog.ancestors(
1138 for a in self._repo.changelog.ancestors(
1139 [p.rev() for p in self._parents]):
1139 [p.rev() for p in self._parents]):
1140 yield changectx(self._repo, a)
1140 yield changectx(self._repo, a)
1141
1141
1142 def markcommitted(self, node):
1142 def markcommitted(self, node):
1143 """Perform post-commit cleanup necessary after committing this ctx
1143 """Perform post-commit cleanup necessary after committing this ctx
1144
1144
1145 Specifically, this updates the backing stores this working context
1145 Specifically, this updates the backing stores this working context
1146 wraps, to reflect the fact that the changes reflected by this
1146 wraps, to reflect the fact that the changes reflected by this
1147 workingctx have been committed. For example, it marks
1147 workingctx have been committed. For example, it marks
1148 modified and added files as normal in the dirstate.
1148 modified and added files as normal in the dirstate.
1149
1149
1150 """
1150 """
1151
1151
1152 for f in self.modified() + self.added():
1152 for f in self.modified() + self.added():
1153 self._repo.dirstate.normal(f)
1153 self._repo.dirstate.normal(f)
1154 for f in self.removed():
1154 for f in self.removed():
1155 self._repo.dirstate.drop(f)
1155 self._repo.dirstate.drop(f)
1156 self._repo.dirstate.setparents(node)
1156 self._repo.dirstate.setparents(node)
1157
1157
1158 def dirs(self):
1158 def dirs(self):
1159 return self._repo.dirstate.dirs()
1159 return self._repo.dirstate.dirs()
1160
1160
1161 class workingctx(committablectx):
1161 class workingctx(committablectx):
1162 """A workingctx object makes access to data related to
1162 """A workingctx object makes access to data related to
1163 the current working directory convenient.
1163 the current working directory convenient.
1164 date - any valid date string or (unixtime, offset), or None.
1164 date - any valid date string or (unixtime, offset), or None.
1165 user - username string, or None.
1165 user - username string, or None.
1166 extra - a dictionary of extra values, or None.
1166 extra - a dictionary of extra values, or None.
1167 changes - a list of file lists as returned by localrepo.status()
1167 changes - a list of file lists as returned by localrepo.status()
1168 or None to use the repository status.
1168 or None to use the repository status.
1169 """
1169 """
1170 def __init__(self, repo, text="", user=None, date=None, extra=None,
1170 def __init__(self, repo, text="", user=None, date=None, extra=None,
1171 changes=None):
1171 changes=None):
1172 super(workingctx, self).__init__(repo, text, user, date, extra, changes)
1172 super(workingctx, self).__init__(repo, text, user, date, extra, changes)
1173
1173
1174 def __iter__(self):
1174 def __iter__(self):
1175 d = self._repo.dirstate
1175 d = self._repo.dirstate
1176 for f in d:
1176 for f in d:
1177 if d[f] != 'r':
1177 if d[f] != 'r':
1178 yield f
1178 yield f
1179
1179
1180 @propertycache
1180 @propertycache
1181 def _parents(self):
1181 def _parents(self):
1182 p = self._repo.dirstate.parents()
1182 p = self._repo.dirstate.parents()
1183 if p[1] == nullid:
1183 if p[1] == nullid:
1184 p = p[:-1]
1184 p = p[:-1]
1185 return [changectx(self._repo, x) for x in p]
1185 return [changectx(self._repo, x) for x in p]
1186
1186
1187 def filectx(self, path, filelog=None):
1187 def filectx(self, path, filelog=None):
1188 """get a file context from the working directory"""
1188 """get a file context from the working directory"""
1189 return workingfilectx(self._repo, path, workingctx=self,
1189 return workingfilectx(self._repo, path, workingctx=self,
1190 filelog=filelog)
1190 filelog=filelog)
1191
1191
1192 def dirty(self, missing=False, merge=True, branch=True):
1192 def dirty(self, missing=False, merge=True, branch=True):
1193 "check whether a working directory is modified"
1193 "check whether a working directory is modified"
1194 # check subrepos first
1194 # check subrepos first
1195 for s in sorted(self.substate):
1195 for s in sorted(self.substate):
1196 if self.sub(s).dirty():
1196 if self.sub(s).dirty():
1197 return True
1197 return True
1198 # check current working dir
1198 # check current working dir
1199 return ((merge and self.p2()) or
1199 return ((merge and self.p2()) or
1200 (branch and self.branch() != self.p1().branch()) or
1200 (branch and self.branch() != self.p1().branch()) or
1201 self.modified() or self.added() or self.removed() or
1201 self.modified() or self.added() or self.removed() or
1202 (missing and self.deleted()))
1202 (missing and self.deleted()))
1203
1203
1204 def add(self, list, prefix=""):
1204 def add(self, list, prefix=""):
1205 join = lambda f: os.path.join(prefix, f)
1205 join = lambda f: os.path.join(prefix, f)
1206 wlock = self._repo.wlock()
1206 wlock = self._repo.wlock()
1207 ui, ds = self._repo.ui, self._repo.dirstate
1207 ui, ds = self._repo.ui, self._repo.dirstate
1208 try:
1208 try:
1209 rejected = []
1209 rejected = []
1210 lstat = self._repo.wvfs.lstat
1210 lstat = self._repo.wvfs.lstat
1211 for f in list:
1211 for f in list:
1212 scmutil.checkportable(ui, join(f))
1212 scmutil.checkportable(ui, join(f))
1213 try:
1213 try:
1214 st = lstat(f)
1214 st = lstat(f)
1215 except OSError:
1215 except OSError:
1216 ui.warn(_("%s does not exist!\n") % join(f))
1216 ui.warn(_("%s does not exist!\n") % join(f))
1217 rejected.append(f)
1217 rejected.append(f)
1218 continue
1218 continue
1219 if st.st_size > 10000000:
1219 if st.st_size > 10000000:
1220 ui.warn(_("%s: up to %d MB of RAM may be required "
1220 ui.warn(_("%s: up to %d MB of RAM may be required "
1221 "to manage this file\n"
1221 "to manage this file\n"
1222 "(use 'hg revert %s' to cancel the "
1222 "(use 'hg revert %s' to cancel the "
1223 "pending addition)\n")
1223 "pending addition)\n")
1224 % (f, 3 * st.st_size // 1000000, join(f)))
1224 % (f, 3 * st.st_size // 1000000, join(f)))
1225 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1225 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1226 ui.warn(_("%s not added: only files and symlinks "
1226 ui.warn(_("%s not added: only files and symlinks "
1227 "supported currently\n") % join(f))
1227 "supported currently\n") % join(f))
1228 rejected.append(f)
1228 rejected.append(f)
1229 elif ds[f] in 'amn':
1229 elif ds[f] in 'amn':
1230 ui.warn(_("%s already tracked!\n") % join(f))
1230 ui.warn(_("%s already tracked!\n") % join(f))
1231 elif ds[f] == 'r':
1231 elif ds[f] == 'r':
1232 ds.normallookup(f)
1232 ds.normallookup(f)
1233 else:
1233 else:
1234 ds.add(f)
1234 ds.add(f)
1235 return rejected
1235 return rejected
1236 finally:
1236 finally:
1237 wlock.release()
1237 wlock.release()
1238
1238
1239 def forget(self, files, prefix=""):
1239 def forget(self, files, prefix=""):
1240 join = lambda f: os.path.join(prefix, f)
1240 join = lambda f: os.path.join(prefix, f)
1241 wlock = self._repo.wlock()
1241 wlock = self._repo.wlock()
1242 try:
1242 try:
1243 rejected = []
1243 rejected = []
1244 for f in files:
1244 for f in files:
1245 if f not in self._repo.dirstate:
1245 if f not in self._repo.dirstate:
1246 self._repo.ui.warn(_("%s not tracked!\n") % join(f))
1246 self._repo.ui.warn(_("%s not tracked!\n") % join(f))
1247 rejected.append(f)
1247 rejected.append(f)
1248 elif self._repo.dirstate[f] != 'a':
1248 elif self._repo.dirstate[f] != 'a':
1249 self._repo.dirstate.remove(f)
1249 self._repo.dirstate.remove(f)
1250 else:
1250 else:
1251 self._repo.dirstate.drop(f)
1251 self._repo.dirstate.drop(f)
1252 return rejected
1252 return rejected
1253 finally:
1253 finally:
1254 wlock.release()
1254 wlock.release()
1255
1255
1256 def undelete(self, list):
1256 def undelete(self, list):
1257 pctxs = self.parents()
1257 pctxs = self.parents()
1258 wlock = self._repo.wlock()
1258 wlock = self._repo.wlock()
1259 try:
1259 try:
1260 for f in list:
1260 for f in list:
1261 if self._repo.dirstate[f] != 'r':
1261 if self._repo.dirstate[f] != 'r':
1262 self._repo.ui.warn(_("%s not removed!\n") % f)
1262 self._repo.ui.warn(_("%s not removed!\n") % f)
1263 else:
1263 else:
1264 fctx = f in pctxs[0] and pctxs[0][f] or pctxs[1][f]
1264 fctx = f in pctxs[0] and pctxs[0][f] or pctxs[1][f]
1265 t = fctx.data()
1265 t = fctx.data()
1266 self._repo.wwrite(f, t, fctx.flags())
1266 self._repo.wwrite(f, t, fctx.flags())
1267 self._repo.dirstate.normal(f)
1267 self._repo.dirstate.normal(f)
1268 finally:
1268 finally:
1269 wlock.release()
1269 wlock.release()
1270
1270
1271 def copy(self, source, dest):
1271 def copy(self, source, dest):
1272 try:
1272 try:
1273 st = self._repo.wvfs.lstat(dest)
1273 st = self._repo.wvfs.lstat(dest)
1274 except OSError, err:
1274 except OSError, err:
1275 if err.errno != errno.ENOENT:
1275 if err.errno != errno.ENOENT:
1276 raise
1276 raise
1277 self._repo.ui.warn(_("%s does not exist!\n") % dest)
1277 self._repo.ui.warn(_("%s does not exist!\n") % dest)
1278 return
1278 return
1279 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1279 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1280 self._repo.ui.warn(_("copy failed: %s is not a file or a "
1280 self._repo.ui.warn(_("copy failed: %s is not a file or a "
1281 "symbolic link\n") % dest)
1281 "symbolic link\n") % dest)
1282 else:
1282 else:
1283 wlock = self._repo.wlock()
1283 wlock = self._repo.wlock()
1284 try:
1284 try:
1285 if self._repo.dirstate[dest] in '?r':
1285 if self._repo.dirstate[dest] in '?r':
1286 self._repo.dirstate.add(dest)
1286 self._repo.dirstate.add(dest)
1287 self._repo.dirstate.copy(source, dest)
1287 self._repo.dirstate.copy(source, dest)
1288 finally:
1288 finally:
1289 wlock.release()
1289 wlock.release()
1290
1290
1291 def _filtersuspectsymlink(self, files):
1291 def _filtersuspectsymlink(self, files):
1292 if not files or self._repo.dirstate._checklink:
1292 if not files or self._repo.dirstate._checklink:
1293 return files
1293 return files
1294
1294
1295 # Symlink placeholders may get non-symlink-like contents
1295 # Symlink placeholders may get non-symlink-like contents
1296 # via user error or dereferencing by NFS or Samba servers,
1296 # via user error or dereferencing by NFS or Samba servers,
1297 # so we filter out any placeholders that don't look like a
1297 # so we filter out any placeholders that don't look like a
1298 # symlink
1298 # symlink
1299 sane = []
1299 sane = []
1300 for f in files:
1300 for f in files:
1301 if self.flags(f) == 'l':
1301 if self.flags(f) == 'l':
1302 d = self[f].data()
1302 d = self[f].data()
1303 if d == '' or len(d) >= 1024 or '\n' in d or util.binary(d):
1303 if d == '' or len(d) >= 1024 or '\n' in d or util.binary(d):
1304 self._repo.ui.debug('ignoring suspect symlink placeholder'
1304 self._repo.ui.debug('ignoring suspect symlink placeholder'
1305 ' "%s"\n' % f)
1305 ' "%s"\n' % f)
1306 continue
1306 continue
1307 sane.append(f)
1307 sane.append(f)
1308 return sane
1308 return sane
1309
1309
1310 def _checklookup(self, files):
1310 def _checklookup(self, files):
1311 # check for any possibly clean files
1311 # check for any possibly clean files
1312 if not files:
1312 if not files:
1313 return [], []
1313 return [], []
1314
1314
1315 modified = []
1315 modified = []
1316 fixup = []
1316 fixup = []
1317 pctx = self._parents[0]
1317 pctx = self._parents[0]
1318 # do a full compare of any files that might have changed
1318 # do a full compare of any files that might have changed
1319 for f in sorted(files):
1319 for f in sorted(files):
1320 if (f not in pctx or self.flags(f) != pctx.flags(f)
1320 if (f not in pctx or self.flags(f) != pctx.flags(f)
1321 or pctx[f].cmp(self[f])):
1321 or pctx[f].cmp(self[f])):
1322 modified.append(f)
1322 modified.append(f)
1323 else:
1323 else:
1324 fixup.append(f)
1324 fixup.append(f)
1325
1325
1326 # update dirstate for files that are actually clean
1326 # update dirstate for files that are actually clean
1327 if fixup:
1327 if fixup:
1328 try:
1328 try:
1329 # updating the dirstate is optional
1329 # updating the dirstate is optional
1330 # so we don't wait on the lock
1330 # so we don't wait on the lock
1331 normal = self._repo.dirstate.normal
1331 normal = self._repo.dirstate.normal
1332 wlock = self._repo.wlock(False)
1332 wlock = self._repo.wlock(False)
1333 try:
1333 try:
1334 for f in fixup:
1334 for f in fixup:
1335 normal(f)
1335 normal(f)
1336 finally:
1336 finally:
1337 wlock.release()
1337 wlock.release()
1338 except error.LockError:
1338 except error.LockError:
1339 pass
1339 pass
1340 return modified, fixup
1340 return modified, fixup
1341
1341
1342 def _manifestmatches(self, match, s):
1342 def _manifestmatches(self, match, s):
1343 """Slow path for workingctx
1343 """Slow path for workingctx
1344
1344
1345 The fast path is when we compare the working directory to its parent
1345 The fast path is when we compare the working directory to its parent
1346 which means this function is comparing with a non-parent; therefore we
1346 which means this function is comparing with a non-parent; therefore we
1347 need to build a manifest and return what matches.
1347 need to build a manifest and return what matches.
1348 """
1348 """
1349 mf = self._repo['.']._manifestmatches(match, s)
1349 mf = self._repo['.']._manifestmatches(match, s)
1350 modified, added, removed = s[0:3]
1350 modified, added, removed = s[0:3]
1351 for f in modified + added:
1351 for f in modified + added:
1352 mf[f] = None
1352 mf[f] = None
1353 mf.set(f, self.flags(f))
1353 mf.set(f, self.flags(f))
1354 for f in removed:
1354 for f in removed:
1355 if f in mf:
1355 if f in mf:
1356 del mf[f]
1356 del mf[f]
1357 return mf
1357 return mf
1358
1358
1359 def _prestatus(self, other, s, match, listignored, listclean, listunknown):
1359 def _prestatus(self, other, s, match, listignored, listclean, listunknown):
1360 """override the parent hook with a dirstate query
1360 """override the parent hook with a dirstate query
1361
1361
1362 We use this prestatus hook to populate the status with information from
1362 We use this prestatus hook to populate the status with information from
1363 the dirstate.
1363 the dirstate.
1364 """
1364 """
1365 # doesn't need to call super; if that changes, be aware that super
1365 # doesn't need to call super; if that changes, be aware that super
1366 # calls self.manifest which would slow down the common case of calling
1366 # calls self.manifest which would slow down the common case of calling
1367 # status against a workingctx's parent
1367 # status against a workingctx's parent
1368 return self._dirstatestatus(match, listignored, listclean, listunknown)
1368 return self._dirstatestatus(match, listignored, listclean, listunknown)
1369
1369
1370 def _poststatus(self, other, s, match, listignored, listclean, listunknown):
1370 def _poststatus(self, other, s, match, listignored, listclean, listunknown):
1371 """override the parent hook with a filter for suspect symlinks
1371 """override the parent hook with a filter for suspect symlinks
1372
1372
1373 We use this poststatus hook to filter out symlinks that might have
1373 We use this poststatus hook to filter out symlinks that might have
1374 accidentally ended up with the entire contents of the file they are
1374 accidentally ended up with the entire contents of the file they are
1375 supposed to be linking to.
1375 supposed to be linking to.
1376 """
1376 """
1377 s[0] = self._filtersuspectsymlink(s[0])
1377 s[0] = self._filtersuspectsymlink(s[0])
1378 self._status = s
1378 self._status = s
1379 return s
1379 return s
1380
1380
1381 def _dirstatestatus(self, match=None, ignored=False, clean=False,
1381 def _dirstatestatus(self, match=None, ignored=False, clean=False,
1382 unknown=False):
1382 unknown=False):
1383 '''Gets the status from the dirstate -- internal use only.'''
1383 '''Gets the status from the dirstate -- internal use only.'''
1384 listignored, listclean, listunknown = ignored, clean, unknown
1384 listignored, listclean, listunknown = ignored, clean, unknown
1385 match = match or matchmod.always(self._repo.root, self._repo.getcwd())
1385 match = match or matchmod.always(self._repo.root, self._repo.getcwd())
1386 subrepos = []
1386 subrepos = []
1387 if '.hgsub' in self:
1387 if '.hgsub' in self:
1388 subrepos = sorted(self.substate)
1388 subrepos = sorted(self.substate)
1389 s = self._repo.dirstate.status(match, subrepos, listignored,
1389 s = self._repo.dirstate.status(match, subrepos, listignored,
1390 listclean, listunknown)
1390 listclean, listunknown)
1391 cmp, modified, added, removed, deleted, unknown, ignored, clean = s
1391 cmp, modified, added, removed, deleted, unknown, ignored, clean = s
1392
1392
1393 # check for any possibly clean files
1393 # check for any possibly clean files
1394 if cmp:
1394 if cmp:
1395 modified2, fixup = self._checklookup(cmp)
1395 modified2, fixup = self._checklookup(cmp)
1396 modified += modified2
1396 modified += modified2
1397
1397
1398 # update dirstate for files that are actually clean
1398 # update dirstate for files that are actually clean
1399 if fixup and listclean:
1399 if fixup and listclean:
1400 clean += fixup
1400 clean += fixup
1401
1401
1402 return [modified, added, removed, deleted, unknown, ignored, clean]
1402 return [modified, added, removed, deleted, unknown, ignored, clean]
1403
1403
1404 def _buildstatus(self, other, s, match, listignored, listclean,
1404 def _buildstatus(self, other, s, match, listignored, listclean,
1405 listunknown):
1405 listunknown):
1406 """build a status with respect to another context
1406 """build a status with respect to another context
1407
1407
1408 This includes logic for maintaining the fast path of status when
1408 This includes logic for maintaining the fast path of status when
1409 comparing the working directory against its parent, which is to skip
1409 comparing the working directory against its parent, which is to skip
1410 building a new manifest if self (working directory) is not comparing
1410 building a new manifest if self (working directory) is not comparing
1411 against its parent (repo['.']).
1411 against its parent (repo['.']).
1412 """
1412 """
1413 if other != self._repo['.']:
1413 if other != self._repo['.']:
1414 s = super(workingctx, self)._buildstatus(other, s, match,
1414 s = super(workingctx, self)._buildstatus(other, s, match,
1415 listignored, listclean,
1415 listignored, listclean,
1416 listunknown)
1416 listunknown)
1417 return s
1417 return s
1418
1418
1419 def _matchstatus(self, other, s, match, listignored, listclean,
1419 def _matchstatus(self, other, s, match, listignored, listclean,
1420 listunknown):
1420 listunknown):
1421 """override the match method with a filter for directory patterns
1421 """override the match method with a filter for directory patterns
1422
1422
1423 We use inheritance to customize the match.bad method only in cases of
1423 We use inheritance to customize the match.bad method only in cases of
1424 workingctx since it belongs only to the working directory when
1424 workingctx since it belongs only to the working directory when
1425 comparing against the parent changeset.
1425 comparing against the parent changeset.
1426
1426
1427 If we aren't comparing against the working directory's parent, then we
1427 If we aren't comparing against the working directory's parent, then we
1428 just use the default match object sent to us.
1428 just use the default match object sent to us.
1429 """
1429 """
1430 superself = super(workingctx, self)
1430 superself = super(workingctx, self)
1431 match = superself._matchstatus(other, s, match, listignored, listclean,
1431 match = superself._matchstatus(other, s, match, listignored, listclean,
1432 listunknown)
1432 listunknown)
1433 if other != self._repo['.']:
1433 if other != self._repo['.']:
1434 def bad(f, msg):
1434 def bad(f, msg):
1435 # 'f' may be a directory pattern from 'match.files()',
1435 # 'f' may be a directory pattern from 'match.files()',
1436 # so 'f not in ctx1' is not enough
1436 # so 'f not in ctx1' is not enough
1437 if f not in other and f not in other.dirs():
1437 if f not in other and f not in other.dirs():
1438 self._repo.ui.warn('%s: %s\n' %
1438 self._repo.ui.warn('%s: %s\n' %
1439 (self._repo.dirstate.pathto(f), msg))
1439 (self._repo.dirstate.pathto(f), msg))
1440 match.bad = bad
1440 match.bad = bad
1441 return match
1441 return match
1442
1442
1443 def status(self, other='.', match=None, listignored=False,
1443 def status(self, other='.', match=None, listignored=False,
1444 listclean=False, listunknown=False, listsubrepos=False):
1444 listclean=False, listunknown=False, listsubrepos=False):
1445 # yet to be determined: what to do if 'other' is a 'workingctx' or a
1445 # yet to be determined: what to do if 'other' is a 'workingctx' or a
1446 # 'memctx'?
1446 # 'memctx'?
1447 s = super(workingctx, self).status(other, match, listignored, listclean,
1447 s = super(workingctx, self).status(other, match, listignored, listclean,
1448 listunknown, listsubrepos)
1448 listunknown, listsubrepos)
1449 # calling 'super' subtly reversed the contexts, so we flip the results
1449 # calling 'super' subtly reversed the contexts, so we flip the results
1450 # (s[1] is 'added' and s[2] is 'removed')
1450 # (s[1] is 'added' and s[2] is 'removed')
1451 s = list(s)
1451 s = list(s)
1452 s[1], s[2] = s[2], s[1]
1452 s[1], s[2] = s[2], s[1]
1453 return tuple(s)
1453 return tuple(s)
1454
1454
1455 class committablefilectx(basefilectx):
1455 class committablefilectx(basefilectx):
1456 """A committablefilectx provides common functionality for a file context
1456 """A committablefilectx provides common functionality for a file context
1457 that wants the ability to commit, e.g. workingfilectx or memfilectx."""
1457 that wants the ability to commit, e.g. workingfilectx or memfilectx."""
1458 def __init__(self, repo, path, filelog=None, ctx=None):
1458 def __init__(self, repo, path, filelog=None, ctx=None):
1459 self._repo = repo
1459 self._repo = repo
1460 self._path = path
1460 self._path = path
1461 self._changeid = None
1461 self._changeid = None
1462 self._filerev = self._filenode = None
1462 self._filerev = self._filenode = None
1463
1463
1464 if filelog is not None:
1464 if filelog is not None:
1465 self._filelog = filelog
1465 self._filelog = filelog
1466 if ctx:
1466 if ctx:
1467 self._changectx = ctx
1467 self._changectx = ctx
1468
1468
1469 def __nonzero__(self):
1469 def __nonzero__(self):
1470 return True
1470 return True
1471
1471
1472 def parents(self):
1472 def parents(self):
1473 '''return parent filectxs, following copies if necessary'''
1473 '''return parent filectxs, following copies if necessary'''
1474 def filenode(ctx, path):
1474 def filenode(ctx, path):
1475 return ctx._manifest.get(path, nullid)
1475 return ctx._manifest.get(path, nullid)
1476
1476
1477 path = self._path
1477 path = self._path
1478 fl = self._filelog
1478 fl = self._filelog
1479 pcl = self._changectx._parents
1479 pcl = self._changectx._parents
1480 renamed = self.renamed()
1480 renamed = self.renamed()
1481
1481
1482 if renamed:
1482 if renamed:
1483 pl = [renamed + (None,)]
1483 pl = [renamed + (None,)]
1484 else:
1484 else:
1485 pl = [(path, filenode(pcl[0], path), fl)]
1485 pl = [(path, filenode(pcl[0], path), fl)]
1486
1486
1487 for pc in pcl[1:]:
1487 for pc in pcl[1:]:
1488 pl.append((path, filenode(pc, path), fl))
1488 pl.append((path, filenode(pc, path), fl))
1489
1489
1490 return [filectx(self._repo, p, fileid=n, filelog=l)
1490 return [filectx(self._repo, p, fileid=n, filelog=l)
1491 for p, n, l in pl if n != nullid]
1491 for p, n, l in pl if n != nullid]
1492
1492
1493 def children(self):
1493 def children(self):
1494 return []
1494 return []
1495
1495
1496 class workingfilectx(committablefilectx):
1496 class workingfilectx(committablefilectx):
1497 """A workingfilectx object makes access to data related to a particular
1497 """A workingfilectx object makes access to data related to a particular
1498 file in the working directory convenient."""
1498 file in the working directory convenient."""
1499 def __init__(self, repo, path, filelog=None, workingctx=None):
1499 def __init__(self, repo, path, filelog=None, workingctx=None):
1500 super(workingfilectx, self).__init__(repo, path, filelog, workingctx)
1500 super(workingfilectx, self).__init__(repo, path, filelog, workingctx)
1501
1501
1502 @propertycache
1502 @propertycache
1503 def _changectx(self):
1503 def _changectx(self):
1504 return workingctx(self._repo)
1504 return workingctx(self._repo)
1505
1505
1506 def data(self):
1506 def data(self):
1507 return self._repo.wread(self._path)
1507 return self._repo.wread(self._path)
1508 def renamed(self):
1508 def renamed(self):
1509 rp = self._repo.dirstate.copied(self._path)
1509 rp = self._repo.dirstate.copied(self._path)
1510 if not rp:
1510 if not rp:
1511 return None
1511 return None
1512 return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
1512 return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
1513
1513
1514 def size(self):
1514 def size(self):
1515 return self._repo.wvfs.lstat(self._path).st_size
1515 return self._repo.wvfs.lstat(self._path).st_size
1516 def date(self):
1516 def date(self):
1517 t, tz = self._changectx.date()
1517 t, tz = self._changectx.date()
1518 try:
1518 try:
1519 return (int(self._repo.wvfs.lstat(self._path).st_mtime), tz)
1519 return (int(self._repo.wvfs.lstat(self._path).st_mtime), tz)
1520 except OSError, err:
1520 except OSError, err:
1521 if err.errno != errno.ENOENT:
1521 if err.errno != errno.ENOENT:
1522 raise
1522 raise
1523 return (t, tz)
1523 return (t, tz)
1524
1524
1525 def cmp(self, fctx):
1525 def cmp(self, fctx):
1526 """compare with other file context
1526 """compare with other file context
1527
1527
1528 returns True if different than fctx.
1528 returns True if different than fctx.
1529 """
1529 """
1530 # fctx should be a filectx (not a workingfilectx)
1530 # fctx should be a filectx (not a workingfilectx)
1531 # invert comparison to reuse the same code path
1531 # invert comparison to reuse the same code path
1532 return fctx.cmp(self)
1532 return fctx.cmp(self)
1533
1533
1534 class memctx(committablectx):
1534 class memctx(committablectx):
1535 """Use memctx to perform in-memory commits via localrepo.commitctx().
1535 """Use memctx to perform in-memory commits via localrepo.commitctx().
1536
1536
1537 Revision information is supplied at initialization time while
1537 Revision information is supplied at initialization time while
1538 related files data is made available through a callback
1538 related files data is made available through a callback
1539 mechanism. 'repo' is the current localrepo, 'parents' is a
1539 mechanism. 'repo' is the current localrepo, 'parents' is a
1540 sequence of two parent revisions identifiers (pass None for every
1540 sequence of two parent revisions identifiers (pass None for every
1541 missing parent), 'text' is the commit message and 'files' lists
1541 missing parent), 'text' is the commit message and 'files' lists
1542 names of files touched by the revision (normalized and relative to
1542 names of files touched by the revision (normalized and relative to
1543 repository root).
1543 repository root).
1544
1544
1545 filectxfn(repo, memctx, path) is a callable receiving the
1545 filectxfn(repo, memctx, path) is a callable receiving the
1546 repository, the current memctx object and the normalized path of
1546 repository, the current memctx object and the normalized path of
1547 requested file, relative to repository root. It is fired by the
1547 requested file, relative to repository root. It is fired by the
1548 commit function for every file in 'files', but calls order is
1548 commit function for every file in 'files', but calls order is
1549 undefined. If the file is available in the revision being
1549 undefined. If the file is available in the revision being
1550 committed (updated or added), filectxfn returns a memfilectx
1550 committed (updated or added), filectxfn returns a memfilectx
1551 object. If the file was removed, filectxfn raises an
1551 object. If the file was removed, filectxfn raises an
1552 IOError. Moved files are represented by marking the source file
1552 IOError. Moved files are represented by marking the source file
1553 removed and the new file added with copy information (see
1553 removed and the new file added with copy information (see
1554 memfilectx).
1554 memfilectx).
1555
1555
1556 user receives the committer name and defaults to current
1556 user receives the committer name and defaults to current
1557 repository username, date is the commit date in any format
1557 repository username, date is the commit date in any format
1558 supported by util.parsedate() and defaults to current date, extra
1558 supported by util.parsedate() and defaults to current date, extra
1559 is a dictionary of metadata or is left empty.
1559 is a dictionary of metadata or is left empty.
1560 """
1560 """
1561 def __init__(self, repo, parents, text, files, filectxfn, user=None,
1561 def __init__(self, repo, parents, text, files, filectxfn, user=None,
1562 date=None, extra=None, editor=False):
1562 date=None, extra=None, editor=False):
1563 super(memctx, self).__init__(repo, text, user, date, extra)
1563 super(memctx, self).__init__(repo, text, user, date, extra)
1564 self._rev = None
1564 self._rev = None
1565 self._node = None
1565 self._node = None
1566 parents = [(p or nullid) for p in parents]
1566 parents = [(p or nullid) for p in parents]
1567 p1, p2 = parents
1567 p1, p2 = parents
1568 self._parents = [changectx(self._repo, p) for p in (p1, p2)]
1568 self._parents = [changectx(self._repo, p) for p in (p1, p2)]
1569 files = sorted(set(files))
1569 files = sorted(set(files))
1570 self._status = [files, [], [], [], []]
1570 self._status = [files, [], [], [], []]
1571 self._filectxfn = filectxfn
1571 self._filectxfn = filectxfn
1572
1572
1573 self._extra = extra and extra.copy() or {}
1573 self._extra = extra and extra.copy() or {}
1574 if self._extra.get('branch', '') == '':
1574 if self._extra.get('branch', '') == '':
1575 self._extra['branch'] = 'default'
1575 self._extra['branch'] = 'default'
1576
1576
1577 if editor:
1577 if editor:
1578 self._text = editor(self._repo, self, [])
1578 self._text = editor(self._repo, self, [])
1579 self._repo.savecommitmessage(self._text)
1579 self._repo.savecommitmessage(self._text)
1580
1580
1581 def filectx(self, path, filelog=None):
1581 def filectx(self, path, filelog=None):
1582 """get a file context from the working directory"""
1582 """get a file context from the working directory"""
1583 return self._filectxfn(self._repo, self, path)
1583 return self._filectxfn(self._repo, self, path)
1584
1584
1585 def commit(self):
1585 def commit(self):
1586 """commit context to the repo"""
1586 """commit context to the repo"""
1587 return self._repo.commitctx(self)
1587 return self._repo.commitctx(self)
1588
1588
1589 class memfilectx(committablefilectx):
1589 class memfilectx(committablefilectx):
1590 """memfilectx represents an in-memory file to commit.
1590 """memfilectx represents an in-memory file to commit.
1591
1591
1592 See memctx for more details.
1592 See memctx and committablefilectx for more details.
1593 """
1593 """
1594 def __init__(self, path, data, islink=False, isexec=False, copied=None):
1594 def __init__(self, repo, path, data, islink=False,
1595 isexec=False, copied=None, memctx=None):
1595 """
1596 """
1596 path is the normalized file path relative to repository root.
1597 path is the normalized file path relative to repository root.
1597 data is the file content as a string.
1598 data is the file content as a string.
1598 islink is True if the file is a symbolic link.
1599 islink is True if the file is a symbolic link.
1599 isexec is True if the file is executable.
1600 isexec is True if the file is executable.
1600 copied is the source file path if current file was copied in the
1601 copied is the source file path if current file was copied in the
1601 revision being committed, or None."""
1602 revision being committed, or None."""
1602 self._path = path
1603 super(memfilectx, self).__init__(repo, path, None, memctx)
1603 self._data = data
1604 self._data = data
1604 self._flags = (islink and 'l' or '') + (isexec and 'x' or '')
1605 self._flags = (islink and 'l' or '') + (isexec and 'x' or '')
1605 self._copied = None
1606 self._copied = None
1606 if copied:
1607 if copied:
1607 self._copied = (copied, nullid)
1608 self._copied = (copied, nullid)
1608
1609
1609 def __nonzero__(self):
1610 def __nonzero__(self):
1610 return True
1611 return True
1611 def __str__(self):
1612 def __str__(self):
1612 return "%s@%s" % (self.path(), self._changectx)
1613 return "%s@%s" % (self.path(), self._changectx)
1613 def path(self):
1614 def path(self):
1614 return self._path
1615 return self._path
1615 def data(self):
1616 def data(self):
1616 return self._data
1617 return self._data
1617 def flags(self):
1618 def flags(self):
1618 return self._flags
1619 return self._flags
1619 def isexec(self):
1620 def isexec(self):
1620 return 'x' in self._flags
1621 return 'x' in self._flags
1621 def islink(self):
1622 def islink(self):
1622 return 'l' in self._flags
1623 return 'l' in self._flags
1623 def renamed(self):
1624 def renamed(self):
1624 return self._copied
1625 return self._copied
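The hunk above is the heart of this changeset: memfilectx.__init__ now takes the repository as its first argument and delegates to committablefilectx.__init__ instead of assigning self._path itself. A minimal sketch of how a caller adapts, assuming only the API shown in this diff (the path and contents are invented for illustration):

from mercurial import context

def filectxfn(repo, memctx, path):
    # before this change: context.memfilectx(path, data, ...)
    # after this change the repo comes first, mirroring
    # committablefilectx.__init__(repo, path, filelog, ctx)
    return context.memfilectx(repo, path, 'contents of %s\n' % path,
                              islink=False, isexec=False, copied=None)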
@@ -1,32 +1,32
1 import os
1 import os
2 from mercurial import hg, ui, context, encoding
2 from mercurial import hg, ui, context, encoding
3
3
4 u = ui.ui()
4 u = ui.ui()
5
5
6 repo = hg.repository(u, 'test1', create=1)
6 repo = hg.repository(u, 'test1', create=1)
7 os.chdir('test1')
7 os.chdir('test1')
8
8
9 # create 'foo' with fixed time stamp
9 # create 'foo' with fixed time stamp
10 f = open('foo', 'w')
10 f = open('foo', 'w')
11 f.write('foo\n')
11 f.write('foo\n')
12 f.close()
12 f.close()
13 os.utime('foo', (1000, 1000))
13 os.utime('foo', (1000, 1000))
14
14
15 # add+commit 'foo'
15 # add+commit 'foo'
16 repo[None].add(['foo'])
16 repo[None].add(['foo'])
17 repo.commit(text='commit1', date="0 0")
17 repo.commit(text='commit1', date="0 0")
18
18
19 print "workingfilectx.date =", repo[None]['foo'].date()
19 print "workingfilectx.date =", repo[None]['foo'].date()
20
20
21 # test memctx with non-ASCII commit message
21 # test memctx with non-ASCII commit message
22
22
23 def filectxfn(repo, memctx, path):
23 def filectxfn(repo, memctx, path):
24 return context.memfilectx("foo", "")
24 return context.memfilectx(repo, "foo", "")
25
25
26 ctx = context.memctx(repo, ['tip', None],
26 ctx = context.memctx(repo, ['tip', None],
27 encoding.tolocal("Gr\xc3\xbcezi!"),
27 encoding.tolocal("Gr\xc3\xbcezi!"),
28 ["foo"], filectxfn)
28 ["foo"], filectxfn)
29 ctx.commit()
29 ctx.commit()
30 for enc in "ASCII", "Latin-1", "UTF-8":
30 for enc in "ASCII", "Latin-1", "UTF-8":
31 encoding.encoding = enc
31 encoding.encoding = enc
32 print "%-8s: %s" % (enc, repo["tip"].description())
32 print "%-8s: %s" % (enc, repo["tip"].description())