##// END OF EJS Templates
memfilectx: make changectx argument mandatory in constructor (API)...
Martin von Zweigbergk -
r35401:8a0cac20 default
parent child Browse files
Show More

The requested changes are too big and content was truncated. Show full diff

@@ -1,516 +1,516 b''
1 # synthrepo.py - repo synthesis
1 # synthrepo.py - repo synthesis
2 #
2 #
3 # Copyright 2012 Facebook
3 # Copyright 2012 Facebook
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''synthesize structurally interesting change history
8 '''synthesize structurally interesting change history
9
9
10 This extension is useful for creating a repository with properties
10 This extension is useful for creating a repository with properties
11 that are statistically similar to an existing repository. During
11 that are statistically similar to an existing repository. During
12 analysis, a simple probability table is constructed from the history
12 analysis, a simple probability table is constructed from the history
13 of an existing repository. During synthesis, these properties are
13 of an existing repository. During synthesis, these properties are
14 reconstructed.
14 reconstructed.
15
15
16 Properties that are analyzed and synthesized include the following:
16 Properties that are analyzed and synthesized include the following:
17
17
18 - Lines added or removed when an existing file is modified
18 - Lines added or removed when an existing file is modified
19 - Number and sizes of files added
19 - Number and sizes of files added
20 - Number of files removed
20 - Number of files removed
21 - Line lengths
21 - Line lengths
22 - Topological distance to parent changeset(s)
22 - Topological distance to parent changeset(s)
23 - Probability of a commit being a merge
23 - Probability of a commit being a merge
24 - Probability of a newly added file being added to a new directory
24 - Probability of a newly added file being added to a new directory
25 - Interarrival time, and time zone, of commits
25 - Interarrival time, and time zone, of commits
26 - Number of files in each directory
26 - Number of files in each directory
27
27
28 A few obvious properties that are not currently handled realistically:
28 A few obvious properties that are not currently handled realistically:
29
29
30 - Merges are treated as regular commits with two parents, which is not
30 - Merges are treated as regular commits with two parents, which is not
31 realistic
31 realistic
32 - Modifications are not treated as operations on hunks of lines, but
32 - Modifications are not treated as operations on hunks of lines, but
33 as insertions and deletions of randomly chosen single lines
33 as insertions and deletions of randomly chosen single lines
34 - Committer ID (always random)
34 - Committer ID (always random)
35 - Executability of files
35 - Executability of files
36 - Symlinks and binary files are ignored
36 - Symlinks and binary files are ignored
37 '''
37 '''
38
38
39 from __future__ import absolute_import
39 from __future__ import absolute_import
40 import bisect
40 import bisect
41 import collections
41 import collections
42 import itertools
42 import itertools
43 import json
43 import json
44 import os
44 import os
45 import random
45 import random
46 import sys
46 import sys
47 import time
47 import time
48
48
49 from mercurial.i18n import _
49 from mercurial.i18n import _
50 from mercurial.node import (
50 from mercurial.node import (
51 nullid,
51 nullid,
52 nullrev,
52 nullrev,
53 short,
53 short,
54 )
54 )
55 from mercurial import (
55 from mercurial import (
56 context,
56 context,
57 error,
57 error,
58 hg,
58 hg,
59 patch,
59 patch,
60 registrar,
60 registrar,
61 scmutil,
61 scmutil,
62 util,
62 util,
63 )
63 )
64
64
65 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
65 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
66 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
66 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
67 # be specifying the version(s) of Mercurial they are tested with, or
67 # be specifying the version(s) of Mercurial they are tested with, or
68 # leave the attribute unspecified.
68 # leave the attribute unspecified.
69 testedwith = 'ships-with-hg-core'
69 testedwith = 'ships-with-hg-core'
70
70
71 cmdtable = {}
71 cmdtable = {}
72 command = registrar.command(cmdtable)
72 command = registrar.command(cmdtable)
73
73
74 newfile = {'new fi', 'rename', 'copy f', 'copy t'}
74 newfile = {'new fi', 'rename', 'copy f', 'copy t'}
75
75
76 def zerodict():
76 def zerodict():
77 return collections.defaultdict(lambda: 0)
77 return collections.defaultdict(lambda: 0)
78
78
79 def roundto(x, k):
79 def roundto(x, k):
80 if x > k * 2:
80 if x > k * 2:
81 return int(round(x / float(k)) * k)
81 return int(round(x / float(k)) * k)
82 return int(round(x))
82 return int(round(x))
83
83
84 def parsegitdiff(lines):
84 def parsegitdiff(lines):
85 filename, mar, lineadd, lineremove = None, None, zerodict(), 0
85 filename, mar, lineadd, lineremove = None, None, zerodict(), 0
86 binary = False
86 binary = False
87 for line in lines:
87 for line in lines:
88 start = line[:6]
88 start = line[:6]
89 if start == 'diff -':
89 if start == 'diff -':
90 if filename:
90 if filename:
91 yield filename, mar, lineadd, lineremove, binary
91 yield filename, mar, lineadd, lineremove, binary
92 mar, lineadd, lineremove, binary = 'm', zerodict(), 0, False
92 mar, lineadd, lineremove, binary = 'm', zerodict(), 0, False
93 filename = patch.gitre.match(line).group(1)
93 filename = patch.gitre.match(line).group(1)
94 elif start in newfile:
94 elif start in newfile:
95 mar = 'a'
95 mar = 'a'
96 elif start == 'GIT bi':
96 elif start == 'GIT bi':
97 binary = True
97 binary = True
98 elif start == 'delete':
98 elif start == 'delete':
99 mar = 'r'
99 mar = 'r'
100 elif start:
100 elif start:
101 s = start[0]
101 s = start[0]
102 if s == '-' and not line.startswith('--- '):
102 if s == '-' and not line.startswith('--- '):
103 lineremove += 1
103 lineremove += 1
104 elif s == '+' and not line.startswith('+++ '):
104 elif s == '+' and not line.startswith('+++ '):
105 lineadd[roundto(len(line) - 1, 5)] += 1
105 lineadd[roundto(len(line) - 1, 5)] += 1
106 if filename:
106 if filename:
107 yield filename, mar, lineadd, lineremove, binary
107 yield filename, mar, lineadd, lineremove, binary
108
108
109 @command('analyze',
109 @command('analyze',
110 [('o', 'output', '', _('write output to given file'), _('FILE')),
110 [('o', 'output', '', _('write output to given file'), _('FILE')),
111 ('r', 'rev', [], _('analyze specified revisions'), _('REV'))],
111 ('r', 'rev', [], _('analyze specified revisions'), _('REV'))],
112 _('hg analyze'), optionalrepo=True)
112 _('hg analyze'), optionalrepo=True)
113 def analyze(ui, repo, *revs, **opts):
113 def analyze(ui, repo, *revs, **opts):
114 '''create a simple model of a repository to use for later synthesis
114 '''create a simple model of a repository to use for later synthesis
115
115
116 This command examines every changeset in the given range (or all
116 This command examines every changeset in the given range (or all
117 of history if none are specified) and creates a simple statistical
117 of history if none are specified) and creates a simple statistical
118 model of the history of the repository. It also measures the directory
118 model of the history of the repository. It also measures the directory
119 structure of the repository as checked out.
119 structure of the repository as checked out.
120
120
121 The model is written out to a JSON file, and can be used by
121 The model is written out to a JSON file, and can be used by
122 :hg:`synthesize` to create or augment a repository with synthetic
122 :hg:`synthesize` to create or augment a repository with synthetic
123 commits that have a structure that is statistically similar to the
123 commits that have a structure that is statistically similar to the
124 analyzed repository.
124 analyzed repository.
125 '''
125 '''
126 root = repo.root
126 root = repo.root
127 if not root.endswith(os.path.sep):
127 if not root.endswith(os.path.sep):
128 root += os.path.sep
128 root += os.path.sep
129
129
130 revs = list(revs)
130 revs = list(revs)
131 revs.extend(opts['rev'])
131 revs.extend(opts['rev'])
132 if not revs:
132 if not revs:
133 revs = [':']
133 revs = [':']
134
134
135 output = opts['output']
135 output = opts['output']
136 if not output:
136 if not output:
137 output = os.path.basename(root) + '.json'
137 output = os.path.basename(root) + '.json'
138
138
139 if output == '-':
139 if output == '-':
140 fp = sys.stdout
140 fp = sys.stdout
141 else:
141 else:
142 fp = open(output, 'w')
142 fp = open(output, 'w')
143
143
144 # Always obtain file counts of each directory in the given root directory.
144 # Always obtain file counts of each directory in the given root directory.
145 def onerror(e):
145 def onerror(e):
146 ui.warn(_('error walking directory structure: %s\n') % e)
146 ui.warn(_('error walking directory structure: %s\n') % e)
147
147
148 dirs = {}
148 dirs = {}
149 rootprefixlen = len(root)
149 rootprefixlen = len(root)
150 for dirpath, dirnames, filenames in os.walk(root, onerror=onerror):
150 for dirpath, dirnames, filenames in os.walk(root, onerror=onerror):
151 dirpathfromroot = dirpath[rootprefixlen:]
151 dirpathfromroot = dirpath[rootprefixlen:]
152 dirs[dirpathfromroot] = len(filenames)
152 dirs[dirpathfromroot] = len(filenames)
153 if '.hg' in dirnames:
153 if '.hg' in dirnames:
154 dirnames.remove('.hg')
154 dirnames.remove('.hg')
155
155
156 lineschanged = zerodict()
156 lineschanged = zerodict()
157 children = zerodict()
157 children = zerodict()
158 p1distance = zerodict()
158 p1distance = zerodict()
159 p2distance = zerodict()
159 p2distance = zerodict()
160 linesinfilesadded = zerodict()
160 linesinfilesadded = zerodict()
161 fileschanged = zerodict()
161 fileschanged = zerodict()
162 filesadded = zerodict()
162 filesadded = zerodict()
163 filesremoved = zerodict()
163 filesremoved = zerodict()
164 linelengths = zerodict()
164 linelengths = zerodict()
165 interarrival = zerodict()
165 interarrival = zerodict()
166 parents = zerodict()
166 parents = zerodict()
167 dirsadded = zerodict()
167 dirsadded = zerodict()
168 tzoffset = zerodict()
168 tzoffset = zerodict()
169
169
170 # If a mercurial repo is available, also model the commit history.
170 # If a mercurial repo is available, also model the commit history.
171 if repo:
171 if repo:
172 revs = scmutil.revrange(repo, revs)
172 revs = scmutil.revrange(repo, revs)
173 revs.sort()
173 revs.sort()
174
174
175 progress = ui.progress
175 progress = ui.progress
176 _analyzing = _('analyzing')
176 _analyzing = _('analyzing')
177 _changesets = _('changesets')
177 _changesets = _('changesets')
178 _total = len(revs)
178 _total = len(revs)
179
179
180 for i, rev in enumerate(revs):
180 for i, rev in enumerate(revs):
181 progress(_analyzing, i, unit=_changesets, total=_total)
181 progress(_analyzing, i, unit=_changesets, total=_total)
182 ctx = repo[rev]
182 ctx = repo[rev]
183 pl = ctx.parents()
183 pl = ctx.parents()
184 pctx = pl[0]
184 pctx = pl[0]
185 prev = pctx.rev()
185 prev = pctx.rev()
186 children[prev] += 1
186 children[prev] += 1
187 p1distance[rev - prev] += 1
187 p1distance[rev - prev] += 1
188 parents[len(pl)] += 1
188 parents[len(pl)] += 1
189 tzoffset[ctx.date()[1]] += 1
189 tzoffset[ctx.date()[1]] += 1
190 if len(pl) > 1:
190 if len(pl) > 1:
191 p2distance[rev - pl[1].rev()] += 1
191 p2distance[rev - pl[1].rev()] += 1
192 if prev == rev - 1:
192 if prev == rev - 1:
193 lastctx = pctx
193 lastctx = pctx
194 else:
194 else:
195 lastctx = repo[rev - 1]
195 lastctx = repo[rev - 1]
196 if lastctx.rev() != nullrev:
196 if lastctx.rev() != nullrev:
197 timedelta = ctx.date()[0] - lastctx.date()[0]
197 timedelta = ctx.date()[0] - lastctx.date()[0]
198 interarrival[roundto(timedelta, 300)] += 1
198 interarrival[roundto(timedelta, 300)] += 1
199 diff = sum((d.splitlines() for d in ctx.diff(pctx, git=True)), [])
199 diff = sum((d.splitlines() for d in ctx.diff(pctx, git=True)), [])
200 fileadds, diradds, fileremoves, filechanges = 0, 0, 0, 0
200 fileadds, diradds, fileremoves, filechanges = 0, 0, 0, 0
201 for filename, mar, lineadd, lineremove, isbin in parsegitdiff(diff):
201 for filename, mar, lineadd, lineremove, isbin in parsegitdiff(diff):
202 if isbin:
202 if isbin:
203 continue
203 continue
204 added = sum(lineadd.itervalues(), 0)
204 added = sum(lineadd.itervalues(), 0)
205 if mar == 'm':
205 if mar == 'm':
206 if added and lineremove:
206 if added and lineremove:
207 lineschanged[roundto(added, 5),
207 lineschanged[roundto(added, 5),
208 roundto(lineremove, 5)] += 1
208 roundto(lineremove, 5)] += 1
209 filechanges += 1
209 filechanges += 1
210 elif mar == 'a':
210 elif mar == 'a':
211 fileadds += 1
211 fileadds += 1
212 if '/' in filename:
212 if '/' in filename:
213 filedir = filename.rsplit('/', 1)[0]
213 filedir = filename.rsplit('/', 1)[0]
214 if filedir not in pctx.dirs():
214 if filedir not in pctx.dirs():
215 diradds += 1
215 diradds += 1
216 linesinfilesadded[roundto(added, 5)] += 1
216 linesinfilesadded[roundto(added, 5)] += 1
217 elif mar == 'r':
217 elif mar == 'r':
218 fileremoves += 1
218 fileremoves += 1
219 for length, count in lineadd.iteritems():
219 for length, count in lineadd.iteritems():
220 linelengths[length] += count
220 linelengths[length] += count
221 fileschanged[filechanges] += 1
221 fileschanged[filechanges] += 1
222 filesadded[fileadds] += 1
222 filesadded[fileadds] += 1
223 dirsadded[diradds] += 1
223 dirsadded[diradds] += 1
224 filesremoved[fileremoves] += 1
224 filesremoved[fileremoves] += 1
225
225
226 invchildren = zerodict()
226 invchildren = zerodict()
227
227
228 for rev, count in children.iteritems():
228 for rev, count in children.iteritems():
229 invchildren[count] += 1
229 invchildren[count] += 1
230
230
231 if output != '-':
231 if output != '-':
232 ui.status(_('writing output to %s\n') % output)
232 ui.status(_('writing output to %s\n') % output)
233
233
234 def pronk(d):
234 def pronk(d):
235 return sorted(d.iteritems(), key=lambda x: x[1], reverse=True)
235 return sorted(d.iteritems(), key=lambda x: x[1], reverse=True)
236
236
237 json.dump({'revs': len(revs),
237 json.dump({'revs': len(revs),
238 'initdirs': pronk(dirs),
238 'initdirs': pronk(dirs),
239 'lineschanged': pronk(lineschanged),
239 'lineschanged': pronk(lineschanged),
240 'children': pronk(invchildren),
240 'children': pronk(invchildren),
241 'fileschanged': pronk(fileschanged),
241 'fileschanged': pronk(fileschanged),
242 'filesadded': pronk(filesadded),
242 'filesadded': pronk(filesadded),
243 'linesinfilesadded': pronk(linesinfilesadded),
243 'linesinfilesadded': pronk(linesinfilesadded),
244 'dirsadded': pronk(dirsadded),
244 'dirsadded': pronk(dirsadded),
245 'filesremoved': pronk(filesremoved),
245 'filesremoved': pronk(filesremoved),
246 'linelengths': pronk(linelengths),
246 'linelengths': pronk(linelengths),
247 'parents': pronk(parents),
247 'parents': pronk(parents),
248 'p1distance': pronk(p1distance),
248 'p1distance': pronk(p1distance),
249 'p2distance': pronk(p2distance),
249 'p2distance': pronk(p2distance),
250 'interarrival': pronk(interarrival),
250 'interarrival': pronk(interarrival),
251 'tzoffset': pronk(tzoffset),
251 'tzoffset': pronk(tzoffset),
252 },
252 },
253 fp)
253 fp)
254 fp.close()
254 fp.close()
255
255
256 @command('synthesize',
256 @command('synthesize',
257 [('c', 'count', 0, _('create given number of commits'), _('COUNT')),
257 [('c', 'count', 0, _('create given number of commits'), _('COUNT')),
258 ('', 'dict', '', _('path to a dictionary of words'), _('FILE')),
258 ('', 'dict', '', _('path to a dictionary of words'), _('FILE')),
259 ('', 'initfiles', 0, _('initial file count to create'), _('COUNT'))],
259 ('', 'initfiles', 0, _('initial file count to create'), _('COUNT'))],
260 _('hg synthesize [OPTION].. DESCFILE'))
260 _('hg synthesize [OPTION].. DESCFILE'))
261 def synthesize(ui, repo, descpath, **opts):
261 def synthesize(ui, repo, descpath, **opts):
262 '''synthesize commits based on a model of an existing repository
262 '''synthesize commits based on a model of an existing repository
263
263
264 The model must have been generated by :hg:`analyze`. Commits will
264 The model must have been generated by :hg:`analyze`. Commits will
265 be generated randomly according to the probabilities described in
265 be generated randomly according to the probabilities described in
266 the model. If --initfiles is set, the repository will be seeded with
266 the model. If --initfiles is set, the repository will be seeded with
267 the given number files following the modeled repository's directory
267 the given number files following the modeled repository's directory
268 structure.
268 structure.
269
269
270 When synthesizing new content, commit descriptions, and user
270 When synthesizing new content, commit descriptions, and user
271 names, words will be chosen randomly from a dictionary that is
271 names, words will be chosen randomly from a dictionary that is
272 presumed to contain one word per line. Use --dict to specify the
272 presumed to contain one word per line. Use --dict to specify the
273 path to an alternate dictionary to use.
273 path to an alternate dictionary to use.
274 '''
274 '''
275 try:
275 try:
276 fp = hg.openpath(ui, descpath)
276 fp = hg.openpath(ui, descpath)
277 except Exception as err:
277 except Exception as err:
278 raise error.Abort('%s: %s' % (descpath, err[0].strerror))
278 raise error.Abort('%s: %s' % (descpath, err[0].strerror))
279 desc = json.load(fp)
279 desc = json.load(fp)
280 fp.close()
280 fp.close()
281
281
282 def cdf(l):
282 def cdf(l):
283 if not l:
283 if not l:
284 return [], []
284 return [], []
285 vals, probs = zip(*sorted(l, key=lambda x: x[1], reverse=True))
285 vals, probs = zip(*sorted(l, key=lambda x: x[1], reverse=True))
286 t = float(sum(probs, 0))
286 t = float(sum(probs, 0))
287 s, cdfs = 0, []
287 s, cdfs = 0, []
288 for v in probs:
288 for v in probs:
289 s += v
289 s += v
290 cdfs.append(s / t)
290 cdfs.append(s / t)
291 return vals, cdfs
291 return vals, cdfs
292
292
293 lineschanged = cdf(desc['lineschanged'])
293 lineschanged = cdf(desc['lineschanged'])
294 fileschanged = cdf(desc['fileschanged'])
294 fileschanged = cdf(desc['fileschanged'])
295 filesadded = cdf(desc['filesadded'])
295 filesadded = cdf(desc['filesadded'])
296 dirsadded = cdf(desc['dirsadded'])
296 dirsadded = cdf(desc['dirsadded'])
297 filesremoved = cdf(desc['filesremoved'])
297 filesremoved = cdf(desc['filesremoved'])
298 linelengths = cdf(desc['linelengths'])
298 linelengths = cdf(desc['linelengths'])
299 parents = cdf(desc['parents'])
299 parents = cdf(desc['parents'])
300 p1distance = cdf(desc['p1distance'])
300 p1distance = cdf(desc['p1distance'])
301 p2distance = cdf(desc['p2distance'])
301 p2distance = cdf(desc['p2distance'])
302 interarrival = cdf(desc['interarrival'])
302 interarrival = cdf(desc['interarrival'])
303 linesinfilesadded = cdf(desc['linesinfilesadded'])
303 linesinfilesadded = cdf(desc['linesinfilesadded'])
304 tzoffset = cdf(desc['tzoffset'])
304 tzoffset = cdf(desc['tzoffset'])
305
305
306 dictfile = opts.get('dict') or '/usr/share/dict/words'
306 dictfile = opts.get('dict') or '/usr/share/dict/words'
307 try:
307 try:
308 fp = open(dictfile, 'rU')
308 fp = open(dictfile, 'rU')
309 except IOError as err:
309 except IOError as err:
310 raise error.Abort('%s: %s' % (dictfile, err.strerror))
310 raise error.Abort('%s: %s' % (dictfile, err.strerror))
311 words = fp.read().splitlines()
311 words = fp.read().splitlines()
312 fp.close()
312 fp.close()
313
313
314 initdirs = {}
314 initdirs = {}
315 if desc['initdirs']:
315 if desc['initdirs']:
316 for k, v in desc['initdirs']:
316 for k, v in desc['initdirs']:
317 initdirs[k.encode('utf-8').replace('.hg', '_hg')] = v
317 initdirs[k.encode('utf-8').replace('.hg', '_hg')] = v
318 initdirs = renamedirs(initdirs, words)
318 initdirs = renamedirs(initdirs, words)
319 initdirscdf = cdf(initdirs)
319 initdirscdf = cdf(initdirs)
320
320
321 def pick(cdf):
321 def pick(cdf):
322 return cdf[0][bisect.bisect_left(cdf[1], random.random())]
322 return cdf[0][bisect.bisect_left(cdf[1], random.random())]
323
323
324 def pickpath():
324 def pickpath():
325 return os.path.join(pick(initdirscdf), random.choice(words))
325 return os.path.join(pick(initdirscdf), random.choice(words))
326
326
327 def makeline(minimum=0):
327 def makeline(minimum=0):
328 total = max(minimum, pick(linelengths))
328 total = max(minimum, pick(linelengths))
329 c, l = 0, []
329 c, l = 0, []
330 while c < total:
330 while c < total:
331 w = random.choice(words)
331 w = random.choice(words)
332 c += len(w) + 1
332 c += len(w) + 1
333 l.append(w)
333 l.append(w)
334 return ' '.join(l)
334 return ' '.join(l)
335
335
336 wlock = repo.wlock()
336 wlock = repo.wlock()
337 lock = repo.lock()
337 lock = repo.lock()
338
338
339 nevertouch = {'.hgsub', '.hgignore', '.hgtags'}
339 nevertouch = {'.hgsub', '.hgignore', '.hgtags'}
340
340
341 progress = ui.progress
341 progress = ui.progress
342 _synthesizing = _('synthesizing')
342 _synthesizing = _('synthesizing')
343 _files = _('initial files')
343 _files = _('initial files')
344 _changesets = _('changesets')
344 _changesets = _('changesets')
345
345
346 # Synthesize a single initial revision adding files to the repo according
346 # Synthesize a single initial revision adding files to the repo according
347 # to the modeled directory structure.
347 # to the modeled directory structure.
348 initcount = int(opts['initfiles'])
348 initcount = int(opts['initfiles'])
349 if initcount and initdirs:
349 if initcount and initdirs:
350 pctx = repo[None].parents()[0]
350 pctx = repo[None].parents()[0]
351 dirs = set(pctx.dirs())
351 dirs = set(pctx.dirs())
352 files = {}
352 files = {}
353
353
354 def validpath(path):
354 def validpath(path):
355 # Don't pick filenames which are already directory names.
355 # Don't pick filenames which are already directory names.
356 if path in dirs:
356 if path in dirs:
357 return False
357 return False
358 # Don't pick directories which were used as file names.
358 # Don't pick directories which were used as file names.
359 while path:
359 while path:
360 if path in files:
360 if path in files:
361 return False
361 return False
362 path = os.path.dirname(path)
362 path = os.path.dirname(path)
363 return True
363 return True
364
364
365 for i in xrange(0, initcount):
365 for i in xrange(0, initcount):
366 ui.progress(_synthesizing, i, unit=_files, total=initcount)
366 ui.progress(_synthesizing, i, unit=_files, total=initcount)
367
367
368 path = pickpath()
368 path = pickpath()
369 while not validpath(path):
369 while not validpath(path):
370 path = pickpath()
370 path = pickpath()
371 data = '%s contents\n' % path
371 data = '%s contents\n' % path
372 files[path] = data
372 files[path] = data
373 dir = os.path.dirname(path)
373 dir = os.path.dirname(path)
374 while dir and dir not in dirs:
374 while dir and dir not in dirs:
375 dirs.add(dir)
375 dirs.add(dir)
376 dir = os.path.dirname(dir)
376 dir = os.path.dirname(dir)
377
377
378 def filectxfn(repo, memctx, path):
378 def filectxfn(repo, memctx, path):
379 return context.memfilectx(repo, path, files[path])
379 return context.memfilectx(repo, memctx, path, files[path])
380
380
381 ui.progress(_synthesizing, None)
381 ui.progress(_synthesizing, None)
382 message = 'synthesized wide repo with %d files' % (len(files),)
382 message = 'synthesized wide repo with %d files' % (len(files),)
383 mc = context.memctx(repo, [pctx.node(), nullid], message,
383 mc = context.memctx(repo, [pctx.node(), nullid], message,
384 files.iterkeys(), filectxfn, ui.username(),
384 files.iterkeys(), filectxfn, ui.username(),
385 '%d %d' % util.makedate())
385 '%d %d' % util.makedate())
386 initnode = mc.commit()
386 initnode = mc.commit()
387 if ui.debugflag:
387 if ui.debugflag:
388 hexfn = hex
388 hexfn = hex
389 else:
389 else:
390 hexfn = short
390 hexfn = short
391 ui.status(_('added commit %s with %d files\n')
391 ui.status(_('added commit %s with %d files\n')
392 % (hexfn(initnode), len(files)))
392 % (hexfn(initnode), len(files)))
393
393
394 # Synthesize incremental revisions to the repository, adding repo depth.
394 # Synthesize incremental revisions to the repository, adding repo depth.
395 count = int(opts['count'])
395 count = int(opts['count'])
396 heads = set(map(repo.changelog.rev, repo.heads()))
396 heads = set(map(repo.changelog.rev, repo.heads()))
397 for i in xrange(count):
397 for i in xrange(count):
398 progress(_synthesizing, i, unit=_changesets, total=count)
398 progress(_synthesizing, i, unit=_changesets, total=count)
399
399
400 node = repo.changelog.node
400 node = repo.changelog.node
401 revs = len(repo)
401 revs = len(repo)
402
402
403 def pickhead(heads, distance):
403 def pickhead(heads, distance):
404 if heads:
404 if heads:
405 lheads = sorted(heads)
405 lheads = sorted(heads)
406 rev = revs - min(pick(distance), revs)
406 rev = revs - min(pick(distance), revs)
407 if rev < lheads[-1]:
407 if rev < lheads[-1]:
408 rev = lheads[bisect.bisect_left(lheads, rev)]
408 rev = lheads[bisect.bisect_left(lheads, rev)]
409 else:
409 else:
410 rev = lheads[-1]
410 rev = lheads[-1]
411 return rev, node(rev)
411 return rev, node(rev)
412 return nullrev, nullid
412 return nullrev, nullid
413
413
414 r1 = revs - min(pick(p1distance), revs)
414 r1 = revs - min(pick(p1distance), revs)
415 p1 = node(r1)
415 p1 = node(r1)
416
416
417 # the number of heads will grow without bound if we use a pure
417 # the number of heads will grow without bound if we use a pure
418 # model, so artificially constrain their proliferation
418 # model, so artificially constrain their proliferation
419 toomanyheads = len(heads) > random.randint(1, 20)
419 toomanyheads = len(heads) > random.randint(1, 20)
420 if p2distance[0] and (pick(parents) == 2 or toomanyheads):
420 if p2distance[0] and (pick(parents) == 2 or toomanyheads):
421 r2, p2 = pickhead(heads.difference([r1]), p2distance)
421 r2, p2 = pickhead(heads.difference([r1]), p2distance)
422 else:
422 else:
423 r2, p2 = nullrev, nullid
423 r2, p2 = nullrev, nullid
424
424
425 pl = [p1, p2]
425 pl = [p1, p2]
426 pctx = repo[r1]
426 pctx = repo[r1]
427 mf = pctx.manifest()
427 mf = pctx.manifest()
428 mfk = mf.keys()
428 mfk = mf.keys()
429 changes = {}
429 changes = {}
430 if mfk:
430 if mfk:
431 for __ in xrange(pick(fileschanged)):
431 for __ in xrange(pick(fileschanged)):
432 for __ in xrange(10):
432 for __ in xrange(10):
433 fctx = pctx.filectx(random.choice(mfk))
433 fctx = pctx.filectx(random.choice(mfk))
434 path = fctx.path()
434 path = fctx.path()
435 if not (path in nevertouch or fctx.isbinary() or
435 if not (path in nevertouch or fctx.isbinary() or
436 'l' in fctx.flags()):
436 'l' in fctx.flags()):
437 break
437 break
438 lines = fctx.data().splitlines()
438 lines = fctx.data().splitlines()
439 add, remove = pick(lineschanged)
439 add, remove = pick(lineschanged)
440 for __ in xrange(remove):
440 for __ in xrange(remove):
441 if not lines:
441 if not lines:
442 break
442 break
443 del lines[random.randrange(0, len(lines))]
443 del lines[random.randrange(0, len(lines))]
444 for __ in xrange(add):
444 for __ in xrange(add):
445 lines.insert(random.randint(0, len(lines)), makeline())
445 lines.insert(random.randint(0, len(lines)), makeline())
446 path = fctx.path()
446 path = fctx.path()
447 changes[path] = '\n'.join(lines) + '\n'
447 changes[path] = '\n'.join(lines) + '\n'
448 for __ in xrange(pick(filesremoved)):
448 for __ in xrange(pick(filesremoved)):
449 path = random.choice(mfk)
449 path = random.choice(mfk)
450 for __ in xrange(10):
450 for __ in xrange(10):
451 path = random.choice(mfk)
451 path = random.choice(mfk)
452 if path not in changes:
452 if path not in changes:
453 break
453 break
454 if filesadded:
454 if filesadded:
455 dirs = list(pctx.dirs())
455 dirs = list(pctx.dirs())
456 dirs.insert(0, '')
456 dirs.insert(0, '')
457 for __ in xrange(pick(filesadded)):
457 for __ in xrange(pick(filesadded)):
458 pathstr = ''
458 pathstr = ''
459 while pathstr in dirs:
459 while pathstr in dirs:
460 path = [random.choice(dirs)]
460 path = [random.choice(dirs)]
461 if pick(dirsadded):
461 if pick(dirsadded):
462 path.append(random.choice(words))
462 path.append(random.choice(words))
463 path.append(random.choice(words))
463 path.append(random.choice(words))
464 pathstr = '/'.join(filter(None, path))
464 pathstr = '/'.join(filter(None, path))
465 data = '\n'.join(makeline()
465 data = '\n'.join(makeline()
466 for __ in xrange(pick(linesinfilesadded))) + '\n'
466 for __ in xrange(pick(linesinfilesadded))) + '\n'
467 changes[pathstr] = data
467 changes[pathstr] = data
468 def filectxfn(repo, memctx, path):
468 def filectxfn(repo, memctx, path):
469 if path not in changes:
469 if path not in changes:
470 return None
470 return None
471 return context.memfilectx(repo, path, changes[path])
471 return context.memfilectx(repo, memctx, path, changes[path])
472 if not changes:
472 if not changes:
473 continue
473 continue
474 if revs:
474 if revs:
475 date = repo['tip'].date()[0] + pick(interarrival)
475 date = repo['tip'].date()[0] + pick(interarrival)
476 else:
476 else:
477 date = time.time() - (86400 * count)
477 date = time.time() - (86400 * count)
478 # dates in mercurial must be positive, fit in 32-bit signed integers.
478 # dates in mercurial must be positive, fit in 32-bit signed integers.
479 date = min(0x7fffffff, max(0, date))
479 date = min(0x7fffffff, max(0, date))
480 user = random.choice(words) + '@' + random.choice(words)
480 user = random.choice(words) + '@' + random.choice(words)
481 mc = context.memctx(repo, pl, makeline(minimum=2),
481 mc = context.memctx(repo, pl, makeline(minimum=2),
482 sorted(changes),
482 sorted(changes),
483 filectxfn, user, '%d %d' % (date, pick(tzoffset)))
483 filectxfn, user, '%d %d' % (date, pick(tzoffset)))
484 newnode = mc.commit()
484 newnode = mc.commit()
485 heads.add(repo.changelog.rev(newnode))
485 heads.add(repo.changelog.rev(newnode))
486 heads.discard(r1)
486 heads.discard(r1)
487 heads.discard(r2)
487 heads.discard(r2)
488
488
489 lock.release()
489 lock.release()
490 wlock.release()
490 wlock.release()
491
491
492 def renamedirs(dirs, words):
492 def renamedirs(dirs, words):
493 '''Randomly rename the directory names in the per-dir file count dict.'''
493 '''Randomly rename the directory names in the per-dir file count dict.'''
494 wordgen = itertools.cycle(words)
494 wordgen = itertools.cycle(words)
495 replacements = {'': ''}
495 replacements = {'': ''}
496 def rename(dirpath):
496 def rename(dirpath):
497 '''Recursively rename the directory and all path prefixes.
497 '''Recursively rename the directory and all path prefixes.
498
498
499 The mapping from path to renamed path is stored for all path prefixes
499 The mapping from path to renamed path is stored for all path prefixes
500 as in dynamic programming, ensuring linear runtime and consistent
500 as in dynamic programming, ensuring linear runtime and consistent
501 renaming regardless of iteration order through the model.
501 renaming regardless of iteration order through the model.
502 '''
502 '''
503 if dirpath in replacements:
503 if dirpath in replacements:
504 return replacements[dirpath]
504 return replacements[dirpath]
505 head, _ = os.path.split(dirpath)
505 head, _ = os.path.split(dirpath)
506 if head:
506 if head:
507 head = rename(head)
507 head = rename(head)
508 else:
508 else:
509 head = ''
509 head = ''
510 renamed = os.path.join(head, next(wordgen))
510 renamed = os.path.join(head, next(wordgen))
511 replacements[dirpath] = renamed
511 replacements[dirpath] = renamed
512 return renamed
512 return renamed
513 result = []
513 result = []
514 for dirpath, count in dirs.iteritems():
514 for dirpath, count in dirs.iteritems():
515 result.append([rename(dirpath.lstrip(os.sep)), count])
515 result.append([rename(dirpath.lstrip(os.sep)), count])
516 return result
516 return result
@@ -1,655 +1,655 b''
1 # hg.py - hg backend for convert extension
1 # hg.py - hg backend for convert extension
2 #
2 #
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 # Notes for hg->hg conversion:
8 # Notes for hg->hg conversion:
9 #
9 #
10 # * Old versions of Mercurial didn't trim the whitespace from the ends
10 # * Old versions of Mercurial didn't trim the whitespace from the ends
11 # of commit messages, but new versions do. Changesets created by
11 # of commit messages, but new versions do. Changesets created by
12 # those older versions, then converted, may thus have different
12 # those older versions, then converted, may thus have different
13 # hashes for changesets that are otherwise identical.
13 # hashes for changesets that are otherwise identical.
14 #
14 #
15 # * Using "--config convert.hg.saverev=true" will make the source
15 # * Using "--config convert.hg.saverev=true" will make the source
16 # identifier to be stored in the converted revision. This will cause
16 # identifier to be stored in the converted revision. This will cause
17 # the converted revision to have a different identity than the
17 # the converted revision to have a different identity than the
18 # source.
18 # source.
19 from __future__ import absolute_import
19 from __future__ import absolute_import
20
20
21 import os
21 import os
22 import re
22 import re
23 import time
23 import time
24
24
25 from mercurial.i18n import _
25 from mercurial.i18n import _
26 from mercurial import (
26 from mercurial import (
27 bookmarks,
27 bookmarks,
28 context,
28 context,
29 error,
29 error,
30 exchange,
30 exchange,
31 hg,
31 hg,
32 lock as lockmod,
32 lock as lockmod,
33 merge as mergemod,
33 merge as mergemod,
34 node as nodemod,
34 node as nodemod,
35 phases,
35 phases,
36 scmutil,
36 scmutil,
37 util,
37 util,
38 )
38 )
39 stringio = util.stringio
39 stringio = util.stringio
40
40
41 from . import common
41 from . import common
42 mapfile = common.mapfile
42 mapfile = common.mapfile
43 NoRepo = common.NoRepo
43 NoRepo = common.NoRepo
44
44
45 sha1re = re.compile(r'\b[0-9a-f]{12,40}\b')
45 sha1re = re.compile(r'\b[0-9a-f]{12,40}\b')
46
46
class mercurial_sink(common.converter_sink):
    """Conversion sink that writes converted history into a local
    Mercurial repository (creating it if needed)."""

    def __init__(self, ui, repotype, path):
        common.converter_sink.__init__(self, ui, repotype, path)
        self.branchnames = ui.configbool('convert', 'hg.usebranchnames')
        self.clonebranches = ui.configbool('convert', 'hg.clonebranches')
        self.tagsbranch = ui.config('convert', 'hg.tagsbranch')
        self.lastbranch = None
        if os.path.isdir(path) and len(os.listdir(path)) > 0:
            # Non-empty directory: it must already be a local hg repo.
            try:
                self.repo = hg.repository(self.ui, path)
                if not self.repo.local():
                    raise NoRepo(_('%s is not a local Mercurial repository')
                                 % path)
            except error.RepoError as err:
                ui.traceback()
                raise NoRepo(err.args[0])
        else:
            # Empty or missing directory: create the destination repo.
            try:
                ui.status(_('initializing destination %s repository\n') % path)
                self.repo = hg.repository(self.ui, path, create=True)
                if not self.repo.local():
                    raise NoRepo(_('%s is not a local Mercurial repository')
                                 % path)
                self.created.append(path)
            except error.RepoError:
                ui.traceback()
                raise NoRepo(_("could not create hg repository %s as sink")
                             % path)
        self.lock = None
        self.wlock = None
        self.filemapmode = False
        # Per-subrepo shamap caches used when rewriting .hgsubstate.
        self.subrevmaps = {}

    def before(self):
        # Take both locks for the duration of the conversion.
        self.ui.debug('run hg sink pre-conversion action\n')
        self.wlock = self.repo.wlock()
        self.lock = self.repo.lock()

    def after(self):
        self.ui.debug('run hg sink post-conversion action\n')
        if self.lock:
            self.lock.release()
        if self.wlock:
            self.wlock.release()

    def revmapfile(self):
        return self.repo.vfs.join("shamap")

    def authorfile(self):
        return self.repo.vfs.join("authormap")

    def setbranch(self, branch, pbranches):
        # Only meaningful when each branch is converted into its own clone.
        if not self.clonebranches:
            return

        setbranch = (branch != self.lastbranch)
        self.lastbranch = branch
        if not branch:
            branch = 'default'
        pbranches = [(b[0], b[1] and b[1] or 'default') for b in pbranches]
        if pbranches:
            pbranch = pbranches[0][1]
        else:
            pbranch = 'default'

        branchpath = os.path.join(self.path, branch)
        if setbranch:
            # Switch self.repo to the per-branch clone, creating it if
            # necessary; locks must be dropped and re-taken around the swap.
            self.after()
            try:
                self.repo = hg.repository(self.ui, branchpath)
            except Exception:
                self.repo = hg.repository(self.ui, branchpath, create=True)
            self.before()

        # pbranches may bring revisions from other branches (merge parents)
        # Make sure we have them, or pull them.
        missings = {}
        for b in pbranches:
            try:
                self.repo.lookup(b[0])
            except Exception:
                missings.setdefault(b[1], []).append(b[0])

        if missings:
            self.after()
            for pbranch, heads in sorted(missings.iteritems()):
                pbranchpath = os.path.join(self.path, pbranch)
                prepo = hg.peer(self.ui, {}, pbranchpath)
                self.ui.note(_('pulling from %s into %s\n') % (pbranch, branch))
                exchange.pull(self.repo, prepo,
                              [prepo.lookup(h) for h in heads])
            self.before()

    def _rewritetags(self, source, revmap, data):
        # Rewrite each "<node> <tag>" line of .hgtags so node hashes refer
        # to the converted revisions; lines whose node is unknown are dropped.
        fp = stringio()
        for line in data.splitlines():
            s = line.split(' ', 1)
            if len(s) != 2:
                continue
            revid = revmap.get(source.lookuprev(s[0]))
            if not revid:
                if s[0] == nodemod.nullhex:
                    revid = s[0]
                else:
                    continue
            fp.write('%s %s\n' % (revid, s[1]))
        return fp.getvalue()

    def _rewritesubstate(self, source, data):
        # Rewrite each "<node> <subpath>" line of .hgsubstate using the
        # subrepo's own shamap, caching one mapfile per subrepo path.
        fp = stringio()
        for line in data.splitlines():
            s = line.split(' ', 1)
            if len(s) != 2:
                continue

            revid = s[0]
            subpath = s[1]
            if revid != nodemod.nullhex:
                revmap = self.subrevmaps.get(subpath)
                if revmap is None:
                    revmap = mapfile(self.ui,
                                     self.repo.wjoin(subpath, '.hg/shamap'))
                    self.subrevmaps[subpath] = revmap

                    # It is reasonable that one or more of the subrepos don't
                    # need to be converted, in which case they can be cloned
                    # into place instead of converted. Therefore, only warn
                    # once.
                    msg = _('no ".hgsubstate" updates will be made for "%s"\n')
                    if len(revmap) == 0:
                        sub = self.repo.wvfs.reljoin(subpath, '.hg')

                        if self.repo.wvfs.exists(sub):
                            self.ui.warn(msg % subpath)

                newid = revmap.get(revid)
                if not newid:
                    if len(revmap) > 0:
                        self.ui.warn(_("%s is missing from %s/.hg/shamap\n") %
                                     (revid, subpath))
                else:
                    revid = newid

            fp.write('%s %s\n' % (revid, subpath))

        return fp.getvalue()

    def _calculatemergedfiles(self, source, p1ctx, p2ctx):
        """Calculates the files from p2 that we need to pull in when merging p1
        and p2, given that the merge is coming from the given source.

        This prevents us from losing files that only exist in the target p2 and
        that don't come from the source repo (like if you're merging multiple
        repositories together).
        """
        anc = [p1ctx.ancestor(p2ctx)]
        # Calculate what files are coming from p2
        actions, diverge, rename = mergemod.calculateupdates(
            self.repo, p1ctx, p2ctx, anc,
            True, # branchmerge
            True, # force
            False, # acceptremote
            False, # followcopies
        )

        for file, (action, info, msg) in actions.iteritems():
            if source.targetfilebelongstosource(file):
                # If the file belongs to the source repo, ignore the p2
                # since it will be covered by the existing fileset.
                continue

            # If the file requires actual merging, abort. We don't have enough
            # context to resolve merges correctly.
            if action in ['m', 'dm', 'cd', 'dc']:
                raise error.Abort(_("unable to convert merge commit "
                    "since target parents do not merge cleanly (file "
                    "%s, parents %s and %s)") % (file, p1ctx,
                                                 p2ctx))
            elif action == 'k':
                # 'keep' means nothing changed from p1
                continue
            else:
                # Any other change means we want to take the p2 version
                yield file

    def putcommit(self, files, copies, parents, commit, source, revmap, full,
                  cleanp2):
        files = dict(files)

        def getfilectx(repo, memctx, f):
            # Prefer reusing an unchanged file straight from p2 on merges.
            if p2ctx and f in p2files and f not in copies:
                self.ui.debug('reusing %s from p2\n' % f)
                try:
                    return p2ctx[f]
                except error.ManifestLookupError:
                    # If the file doesn't exist in p2, then we're syncing a
                    # delete, so just return None.
                    return None
            try:
                v = files[f]
            except KeyError:
                return None
            data, mode = source.getfile(f, v)
            if data is None:
                return None
            if f == '.hgtags':
                data = self._rewritetags(source, revmap, data)
            if f == '.hgsubstate':
                data = self._rewritesubstate(source, data)
            return context.memfilectx(self.repo, memctx, f, data, 'l' in mode,
                                      'x' in mode, copies.get(f))

        # De-duplicate parents while preserving order.
        pl = []
        for p in parents:
            if p not in pl:
                pl.append(p)
        parents = pl
        nparents = len(parents)
        if self.filemapmode and nparents == 1:
            m1node = self.repo.changelog.read(nodemod.bin(parents[0]))[0]
            parent = parents[0]

        # Pad to two parents; octopus merges are handled by chaining below.
        if len(parents) < 2:
            parents.append(nodemod.nullid)
        if len(parents) < 2:
            parents.append(nodemod.nullid)
        p2 = parents.pop(0)

        text = commit.desc

        # Rewrite hash references in the commit message to converted hashes.
        sha1s = re.findall(sha1re, text)
        for sha1 in sha1s:
            oldrev = source.lookuprev(sha1)
            newrev = revmap.get(oldrev)
            if newrev is not None:
                text = text.replace(sha1, newrev[:len(sha1)])

        extra = commit.extra.copy()

        sourcename = self.repo.ui.config('convert', 'hg.sourcename')
        if sourcename:
            extra['convert_source'] = sourcename

        for label in ('source', 'transplant_source', 'rebase_source',
                      'intermediate-source'):
            node = extra.get(label)

            if node is None:
                continue

            # Only transplant stores its reference in binary
            if label == 'transplant_source':
                node = nodemod.hex(node)

            newrev = revmap.get(node)
            if newrev is not None:
                if label == 'transplant_source':
                    newrev = nodemod.bin(newrev)

                extra[label] = newrev

        if self.branchnames and commit.branch:
            extra['branch'] = commit.branch
        if commit.rev and commit.saverev:
            extra['convert_revision'] = commit.rev

        # Commit once per parent pair; an octopus merge becomes a chain of
        # two-parent fixup commits.
        while parents:
            p1 = p2
            p2 = parents.pop(0)
            p1ctx = self.repo[p1]
            p2ctx = None
            if p2 != nodemod.nullid:
                p2ctx = self.repo[p2]
            fileset = set(files)
            if full:
                fileset.update(self.repo[p1])
                fileset.update(self.repo[p2])

            if p2ctx:
                p2files = set(cleanp2)
                for file in self._calculatemergedfiles(source, p1ctx, p2ctx):
                    p2files.add(file)
                    fileset.add(file)

            ctx = context.memctx(self.repo, (p1, p2), text, fileset,
                                 getfilectx, commit.author, commit.date, extra)

            # We won't know if the conversion changes the node until after the
            # commit, so copy the source's phase for now.
            self.repo.ui.setconfig('phases', 'new-commit',
                                   phases.phasenames[commit.phase], 'convert')

            with self.repo.transaction("convert") as tr:
                node = nodemod.hex(self.repo.commitctx(ctx))

                # If the node value has changed, but the phase is lower than
                # draft, set it back to draft since it hasn't been exposed
                # anywhere.
                if commit.rev != node:
                    ctx = self.repo[node]
                    if ctx.phase() < phases.draft:
                        phases.registernew(self.repo, tr, phases.draft,
                                           [ctx.node()])

            text = "(octopus merge fixup)\n"
            p2 = node

        if self.filemapmode and nparents == 1:
            # With a filemap, drop revisions whose manifest is unchanged.
            man = self.repo.manifestlog._revlog
            mnode = self.repo.changelog.read(nodemod.bin(p2))[0]
            closed = 'close' in commit.extra
            if not closed and not man.cmp(m1node, man.revision(mnode)):
                self.ui.status(_("filtering out empty revision\n"))
                self.repo.rollback(force=True)
                return parent
        return p2

    def puttags(self, tags):
        try:
            parentctx = self.repo[self.tagsbranch]
            tagparent = parentctx.node()
        except error.RepoError:
            parentctx = None
            tagparent = nodemod.nullid

        # Collect existing .hgtags lines from every branch head.
        oldlines = set()
        for branch, heads in self.repo.branchmap().iteritems():
            for h in heads:
                if '.hgtags' in self.repo[h]:
                    oldlines.update(
                        set(self.repo[h]['.hgtags'].data().splitlines(True)))
        oldlines = sorted(list(oldlines))

        newlines = sorted([("%s %s\n" % (tags[tag], tag)) for tag in tags])
        if newlines == oldlines:
            return None, None

        # if the old and new tags match, then there is nothing to update
        oldtags = set()
        newtags = set()
        for line in oldlines:
            s = line.strip().split(' ', 1)
            if len(s) != 2:
                continue
            oldtags.add(s[1])
        for line in newlines:
            s = line.strip().split(' ', 1)
            if len(s) != 2:
                continue
            if s[1] not in oldtags:
                newtags.add(s[1].strip())

        if not newtags:
            return None, None

        data = "".join(newlines)
        def getfilectx(repo, memctx, f):
            return context.memfilectx(repo, memctx, f, data, False, False, None)

        self.ui.status(_("updating tags\n"))
        date = "%s 0" % int(time.mktime(time.gmtime()))
        extra = {'branch': self.tagsbranch}
        ctx = context.memctx(self.repo, (tagparent, None), "update tags",
                             [".hgtags"], getfilectx, "convert-repo", date,
                             extra)
        node = self.repo.commitctx(ctx)
        return nodemod.hex(node), nodemod.hex(tagparent)

    def setfilemapmode(self, active):
        self.filemapmode = active

    def putbookmarks(self, updatedbookmark):
        if not len(updatedbookmark):
            return
        wlock = lock = tr = None
        try:
            wlock = self.repo.wlock()
            lock = self.repo.lock()
            tr = self.repo.transaction('bookmark')
            self.ui.status(_("updating bookmarks\n"))
            destmarks = self.repo._bookmarks
            changes = [(bookmark, nodemod.bin(updatedbookmark[bookmark]))
                       for bookmark in updatedbookmark]
            destmarks.applychanges(self.repo, tr, changes)
            tr.close()
        finally:
            lockmod.release(lock, wlock, tr)

    def hascommitfrommap(self, rev):
        # the exact semantics of clonebranches is unclear so we can't say no
        return rev in self.repo or self.clonebranches

    def hascommitforsplicemap(self, rev):
        if rev not in self.repo and self.clonebranches:
            raise error.Abort(_('revision %s not found in destination '
                                'repository (lookups with clonebranches=true '
                                'are not implemented)') % rev)
        return rev in self.repo
445
445
446 class mercurial_source(common.converter_source):
446 class mercurial_source(common.converter_source):
447 def __init__(self, ui, repotype, path, revs=None):
447 def __init__(self, ui, repotype, path, revs=None):
448 common.converter_source.__init__(self, ui, repotype, path, revs)
448 common.converter_source.__init__(self, ui, repotype, path, revs)
449 self.ignoreerrors = ui.configbool('convert', 'hg.ignoreerrors')
449 self.ignoreerrors = ui.configbool('convert', 'hg.ignoreerrors')
450 self.ignored = set()
450 self.ignored = set()
451 self.saverev = ui.configbool('convert', 'hg.saverev')
451 self.saverev = ui.configbool('convert', 'hg.saverev')
452 try:
452 try:
453 self.repo = hg.repository(self.ui, path)
453 self.repo = hg.repository(self.ui, path)
454 # try to provoke an exception if this isn't really a hg
454 # try to provoke an exception if this isn't really a hg
455 # repo, but some other bogus compatible-looking url
455 # repo, but some other bogus compatible-looking url
456 if not self.repo.local():
456 if not self.repo.local():
457 raise error.RepoError
457 raise error.RepoError
458 except error.RepoError:
458 except error.RepoError:
459 ui.traceback()
459 ui.traceback()
460 raise NoRepo(_("%s is not a local Mercurial repository") % path)
460 raise NoRepo(_("%s is not a local Mercurial repository") % path)
461 self.lastrev = None
461 self.lastrev = None
462 self.lastctx = None
462 self.lastctx = None
463 self._changescache = None, None
463 self._changescache = None, None
464 self.convertfp = None
464 self.convertfp = None
465 # Restrict converted revisions to startrev descendants
465 # Restrict converted revisions to startrev descendants
466 startnode = ui.config('convert', 'hg.startrev')
466 startnode = ui.config('convert', 'hg.startrev')
467 hgrevs = ui.config('convert', 'hg.revs')
467 hgrevs = ui.config('convert', 'hg.revs')
468 if hgrevs is None:
468 if hgrevs is None:
469 if startnode is not None:
469 if startnode is not None:
470 try:
470 try:
471 startnode = self.repo.lookup(startnode)
471 startnode = self.repo.lookup(startnode)
472 except error.RepoError:
472 except error.RepoError:
473 raise error.Abort(_('%s is not a valid start revision')
473 raise error.Abort(_('%s is not a valid start revision')
474 % startnode)
474 % startnode)
475 startrev = self.repo.changelog.rev(startnode)
475 startrev = self.repo.changelog.rev(startnode)
476 children = {startnode: 1}
476 children = {startnode: 1}
477 for r in self.repo.changelog.descendants([startrev]):
477 for r in self.repo.changelog.descendants([startrev]):
478 children[self.repo.changelog.node(r)] = 1
478 children[self.repo.changelog.node(r)] = 1
479 self.keep = children.__contains__
479 self.keep = children.__contains__
480 else:
480 else:
481 self.keep = util.always
481 self.keep = util.always
482 if revs:
482 if revs:
483 self._heads = [self.repo[r].node() for r in revs]
483 self._heads = [self.repo[r].node() for r in revs]
484 else:
484 else:
485 self._heads = self.repo.heads()
485 self._heads = self.repo.heads()
486 else:
486 else:
487 if revs or startnode is not None:
487 if revs or startnode is not None:
488 raise error.Abort(_('hg.revs cannot be combined with '
488 raise error.Abort(_('hg.revs cannot be combined with '
489 'hg.startrev or --rev'))
489 'hg.startrev or --rev'))
490 nodes = set()
490 nodes = set()
491 parents = set()
491 parents = set()
492 for r in scmutil.revrange(self.repo, [hgrevs]):
492 for r in scmutil.revrange(self.repo, [hgrevs]):
493 ctx = self.repo[r]
493 ctx = self.repo[r]
494 nodes.add(ctx.node())
494 nodes.add(ctx.node())
495 parents.update(p.node() for p in ctx.parents())
495 parents.update(p.node() for p in ctx.parents())
496 self.keep = nodes.__contains__
496 self.keep = nodes.__contains__
497 self._heads = nodes - parents
497 self._heads = nodes - parents
498
498
499 def _changectx(self, rev):
499 def _changectx(self, rev):
500 if self.lastrev != rev:
500 if self.lastrev != rev:
501 self.lastctx = self.repo[rev]
501 self.lastctx = self.repo[rev]
502 self.lastrev = rev
502 self.lastrev = rev
503 return self.lastctx
503 return self.lastctx
504
504
505 def _parents(self, ctx):
505 def _parents(self, ctx):
506 return [p for p in ctx.parents() if p and self.keep(p.node())]
506 return [p for p in ctx.parents() if p and self.keep(p.node())]
507
507
508 def getheads(self):
508 def getheads(self):
509 return [nodemod.hex(h) for h in self._heads if self.keep(h)]
509 return [nodemod.hex(h) for h in self._heads if self.keep(h)]
510
510
511 def getfile(self, name, rev):
511 def getfile(self, name, rev):
512 try:
512 try:
513 fctx = self._changectx(rev)[name]
513 fctx = self._changectx(rev)[name]
514 return fctx.data(), fctx.flags()
514 return fctx.data(), fctx.flags()
515 except error.LookupError:
515 except error.LookupError:
516 return None, None
516 return None, None
517
517
518 def _changedfiles(self, ctx1, ctx2):
518 def _changedfiles(self, ctx1, ctx2):
519 ma, r = [], []
519 ma, r = [], []
520 maappend = ma.append
520 maappend = ma.append
521 rappend = r.append
521 rappend = r.append
522 d = ctx1.manifest().diff(ctx2.manifest())
522 d = ctx1.manifest().diff(ctx2.manifest())
523 for f, ((node1, flag1), (node2, flag2)) in d.iteritems():
523 for f, ((node1, flag1), (node2, flag2)) in d.iteritems():
524 if node2 is None:
524 if node2 is None:
525 rappend(f)
525 rappend(f)
526 else:
526 else:
527 maappend(f)
527 maappend(f)
528 return ma, r
528 return ma, r
529
529
530 def getchanges(self, rev, full):
530 def getchanges(self, rev, full):
531 ctx = self._changectx(rev)
531 ctx = self._changectx(rev)
532 parents = self._parents(ctx)
532 parents = self._parents(ctx)
533 if full or not parents:
533 if full or not parents:
534 files = copyfiles = ctx.manifest()
534 files = copyfiles = ctx.manifest()
535 if parents:
535 if parents:
536 if self._changescache[0] == rev:
536 if self._changescache[0] == rev:
537 ma, r = self._changescache[1]
537 ma, r = self._changescache[1]
538 else:
538 else:
539 ma, r = self._changedfiles(parents[0], ctx)
539 ma, r = self._changedfiles(parents[0], ctx)
540 if not full:
540 if not full:
541 files = ma + r
541 files = ma + r
542 copyfiles = ma
542 copyfiles = ma
543 # _getcopies() is also run for roots and before filtering so missing
543 # _getcopies() is also run for roots and before filtering so missing
544 # revlogs are detected early
544 # revlogs are detected early
545 copies = self._getcopies(ctx, parents, copyfiles)
545 copies = self._getcopies(ctx, parents, copyfiles)
546 cleanp2 = set()
546 cleanp2 = set()
547 if len(parents) == 2:
547 if len(parents) == 2:
548 d = parents[1].manifest().diff(ctx.manifest(), clean=True)
548 d = parents[1].manifest().diff(ctx.manifest(), clean=True)
549 for f, value in d.iteritems():
549 for f, value in d.iteritems():
550 if value is None:
550 if value is None:
551 cleanp2.add(f)
551 cleanp2.add(f)
552 changes = [(f, rev) for f in files if f not in self.ignored]
552 changes = [(f, rev) for f in files if f not in self.ignored]
553 changes.sort()
553 changes.sort()
554 return changes, copies, cleanp2
554 return changes, copies, cleanp2
555
555
556 def _getcopies(self, ctx, parents, files):
556 def _getcopies(self, ctx, parents, files):
557 copies = {}
557 copies = {}
558 for name in files:
558 for name in files:
559 if name in self.ignored:
559 if name in self.ignored:
560 continue
560 continue
561 try:
561 try:
562 copysource, _copynode = ctx.filectx(name).renamed()
562 copysource, _copynode = ctx.filectx(name).renamed()
563 if copysource in self.ignored:
563 if copysource in self.ignored:
564 continue
564 continue
565 # Ignore copy sources not in parent revisions
565 # Ignore copy sources not in parent revisions
566 found = False
566 found = False
567 for p in parents:
567 for p in parents:
568 if copysource in p:
568 if copysource in p:
569 found = True
569 found = True
570 break
570 break
571 if not found:
571 if not found:
572 continue
572 continue
573 copies[name] = copysource
573 copies[name] = copysource
574 except TypeError:
574 except TypeError:
575 pass
575 pass
576 except error.LookupError as e:
576 except error.LookupError as e:
577 if not self.ignoreerrors:
577 if not self.ignoreerrors:
578 raise
578 raise
579 self.ignored.add(name)
579 self.ignored.add(name)
580 self.ui.warn(_('ignoring: %s\n') % e)
580 self.ui.warn(_('ignoring: %s\n') % e)
581 return copies
581 return copies
582
582
583 def getcommit(self, rev):
583 def getcommit(self, rev):
584 ctx = self._changectx(rev)
584 ctx = self._changectx(rev)
585 _parents = self._parents(ctx)
585 _parents = self._parents(ctx)
586 parents = [p.hex() for p in _parents]
586 parents = [p.hex() for p in _parents]
587 optparents = [p.hex() for p in ctx.parents() if p and p not in _parents]
587 optparents = [p.hex() for p in ctx.parents() if p and p not in _parents]
588 crev = rev
588 crev = rev
589
589
590 return common.commit(author=ctx.user(),
590 return common.commit(author=ctx.user(),
591 date=util.datestr(ctx.date(),
591 date=util.datestr(ctx.date(),
592 '%Y-%m-%d %H:%M:%S %1%2'),
592 '%Y-%m-%d %H:%M:%S %1%2'),
593 desc=ctx.description(),
593 desc=ctx.description(),
594 rev=crev,
594 rev=crev,
595 parents=parents,
595 parents=parents,
596 optparents=optparents,
596 optparents=optparents,
597 branch=ctx.branch(),
597 branch=ctx.branch(),
598 extra=ctx.extra(),
598 extra=ctx.extra(),
599 sortkey=ctx.rev(),
599 sortkey=ctx.rev(),
600 saverev=self.saverev,
600 saverev=self.saverev,
601 phase=ctx.phase())
601 phase=ctx.phase())
602
602
603 def gettags(self):
603 def gettags(self):
604 # This will get written to .hgtags, filter non global tags out.
604 # This will get written to .hgtags, filter non global tags out.
605 tags = [t for t in self.repo.tagslist()
605 tags = [t for t in self.repo.tagslist()
606 if self.repo.tagtype(t[0]) == 'global']
606 if self.repo.tagtype(t[0]) == 'global']
607 return dict([(name, nodemod.hex(node)) for name, node in tags
607 return dict([(name, nodemod.hex(node)) for name, node in tags
608 if self.keep(node)])
608 if self.keep(node)])
609
609
610 def getchangedfiles(self, rev, i):
610 def getchangedfiles(self, rev, i):
611 ctx = self._changectx(rev)
611 ctx = self._changectx(rev)
612 parents = self._parents(ctx)
612 parents = self._parents(ctx)
613 if not parents and i is None:
613 if not parents and i is None:
614 i = 0
614 i = 0
615 ma, r = ctx.manifest().keys(), []
615 ma, r = ctx.manifest().keys(), []
616 else:
616 else:
617 i = i or 0
617 i = i or 0
618 ma, r = self._changedfiles(parents[i], ctx)
618 ma, r = self._changedfiles(parents[i], ctx)
619 ma, r = [[f for f in l if f not in self.ignored] for l in (ma, r)]
619 ma, r = [[f for f in l if f not in self.ignored] for l in (ma, r)]
620
620
621 if i == 0:
621 if i == 0:
622 self._changescache = (rev, (ma, r))
622 self._changescache = (rev, (ma, r))
623
623
624 return ma + r
624 return ma + r
625
625
626 def converted(self, rev, destrev):
626 def converted(self, rev, destrev):
627 if self.convertfp is None:
627 if self.convertfp is None:
628 self.convertfp = open(self.repo.vfs.join('shamap'), 'a')
628 self.convertfp = open(self.repo.vfs.join('shamap'), 'a')
629 self.convertfp.write('%s %s\n' % (destrev, rev))
629 self.convertfp.write('%s %s\n' % (destrev, rev))
630 self.convertfp.flush()
630 self.convertfp.flush()
631
631
632 def before(self):
632 def before(self):
633 self.ui.debug('run hg source pre-conversion action\n')
633 self.ui.debug('run hg source pre-conversion action\n')
634
634
635 def after(self):
635 def after(self):
636 self.ui.debug('run hg source post-conversion action\n')
636 self.ui.debug('run hg source post-conversion action\n')
637
637
638 def hasnativeorder(self):
638 def hasnativeorder(self):
639 return True
639 return True
640
640
641 def hasnativeclose(self):
641 def hasnativeclose(self):
642 return True
642 return True
643
643
644 def lookuprev(self, rev):
644 def lookuprev(self, rev):
645 try:
645 try:
646 return nodemod.hex(self.repo.lookup(rev))
646 return nodemod.hex(self.repo.lookup(rev))
647 except (error.RepoError, error.LookupError):
647 except (error.RepoError, error.LookupError):
648 return None
648 return None
649
649
650 def getbookmarks(self):
650 def getbookmarks(self):
651 return bookmarks.listbookmarks(self.repo)
651 return bookmarks.listbookmarks(self.repo)
652
652
653 def checkrevformat(self, revstr, mapname='splicemap'):
653 def checkrevformat(self, revstr, mapname='splicemap'):
654 """ Mercurial, revision string is a 40 byte hex """
654 """ Mercurial, revision string is a 40 byte hex """
655 self.checkhexformat(revstr, mapname)
655 self.checkhexformat(revstr, mapname)
@@ -1,1647 +1,1647 b''
1 # histedit.py - interactive history editing for mercurial
1 # histedit.py - interactive history editing for mercurial
2 #
2 #
3 # Copyright 2009 Augie Fackler <raf@durin42.com>
3 # Copyright 2009 Augie Fackler <raf@durin42.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """interactive history editing
7 """interactive history editing
8
8
9 With this extension installed, Mercurial gains one new command: histedit. Usage
9 With this extension installed, Mercurial gains one new command: histedit. Usage
10 is as follows, assuming the following history::
10 is as follows, assuming the following history::
11
11
12 @ 3[tip] 7c2fd3b9020c 2009-04-27 18:04 -0500 durin42
12 @ 3[tip] 7c2fd3b9020c 2009-04-27 18:04 -0500 durin42
13 | Add delta
13 | Add delta
14 |
14 |
15 o 2 030b686bedc4 2009-04-27 18:04 -0500 durin42
15 o 2 030b686bedc4 2009-04-27 18:04 -0500 durin42
16 | Add gamma
16 | Add gamma
17 |
17 |
18 o 1 c561b4e977df 2009-04-27 18:04 -0500 durin42
18 o 1 c561b4e977df 2009-04-27 18:04 -0500 durin42
19 | Add beta
19 | Add beta
20 |
20 |
21 o 0 d8d2fcd0e319 2009-04-27 18:04 -0500 durin42
21 o 0 d8d2fcd0e319 2009-04-27 18:04 -0500 durin42
22 Add alpha
22 Add alpha
23
23
24 If you were to run ``hg histedit c561b4e977df``, you would see the following
24 If you were to run ``hg histedit c561b4e977df``, you would see the following
25 file open in your editor::
25 file open in your editor::
26
26
27 pick c561b4e977df Add beta
27 pick c561b4e977df Add beta
28 pick 030b686bedc4 Add gamma
28 pick 030b686bedc4 Add gamma
29 pick 7c2fd3b9020c Add delta
29 pick 7c2fd3b9020c Add delta
30
30
31 # Edit history between c561b4e977df and 7c2fd3b9020c
31 # Edit history between c561b4e977df and 7c2fd3b9020c
32 #
32 #
33 # Commits are listed from least to most recent
33 # Commits are listed from least to most recent
34 #
34 #
35 # Commands:
35 # Commands:
36 # p, pick = use commit
36 # p, pick = use commit
37 # e, edit = use commit, but stop for amending
37 # e, edit = use commit, but stop for amending
38 # f, fold = use commit, but combine it with the one above
38 # f, fold = use commit, but combine it with the one above
39 # r, roll = like fold, but discard this commit's description and date
39 # r, roll = like fold, but discard this commit's description and date
40 # d, drop = remove commit from history
40 # d, drop = remove commit from history
41 # m, mess = edit commit message without changing commit content
41 # m, mess = edit commit message without changing commit content
42 # b, base = checkout changeset and apply further changesets from there
42 # b, base = checkout changeset and apply further changesets from there
43 #
43 #
44
44
45 In this file, lines beginning with ``#`` are ignored. You must specify a rule
45 In this file, lines beginning with ``#`` are ignored. You must specify a rule
46 for each revision in your history. For example, if you had meant to add gamma
46 for each revision in your history. For example, if you had meant to add gamma
47 before beta, and then wanted to add delta in the same revision as beta, you
47 before beta, and then wanted to add delta in the same revision as beta, you
48 would reorganize the file to look like this::
48 would reorganize the file to look like this::
49
49
50 pick 030b686bedc4 Add gamma
50 pick 030b686bedc4 Add gamma
51 pick c561b4e977df Add beta
51 pick c561b4e977df Add beta
52 fold 7c2fd3b9020c Add delta
52 fold 7c2fd3b9020c Add delta
53
53
54 # Edit history between c561b4e977df and 7c2fd3b9020c
54 # Edit history between c561b4e977df and 7c2fd3b9020c
55 #
55 #
56 # Commits are listed from least to most recent
56 # Commits are listed from least to most recent
57 #
57 #
58 # Commands:
58 # Commands:
59 # p, pick = use commit
59 # p, pick = use commit
60 # e, edit = use commit, but stop for amending
60 # e, edit = use commit, but stop for amending
61 # f, fold = use commit, but combine it with the one above
61 # f, fold = use commit, but combine it with the one above
62 # r, roll = like fold, but discard this commit's description and date
62 # r, roll = like fold, but discard this commit's description and date
63 # d, drop = remove commit from history
63 # d, drop = remove commit from history
64 # m, mess = edit commit message without changing commit content
64 # m, mess = edit commit message without changing commit content
65 # b, base = checkout changeset and apply further changesets from there
65 # b, base = checkout changeset and apply further changesets from there
66 #
66 #
67
67
68 At which point you close the editor and ``histedit`` starts working. When you
68 At which point you close the editor and ``histedit`` starts working. When you
69 specify a ``fold`` operation, ``histedit`` will open an editor when it folds
69 specify a ``fold`` operation, ``histedit`` will open an editor when it folds
70 those revisions together, offering you a chance to clean up the commit message::
70 those revisions together, offering you a chance to clean up the commit message::
71
71
72 Add beta
72 Add beta
73 ***
73 ***
74 Add delta
74 Add delta
75
75
76 Edit the commit message to your liking, then close the editor. The date used
76 Edit the commit message to your liking, then close the editor. The date used
77 for the commit will be the later of the two commits' dates. For this example,
77 for the commit will be the later of the two commits' dates. For this example,
78 let's assume that the commit message was changed to ``Add beta and delta.``
78 let's assume that the commit message was changed to ``Add beta and delta.``
79 After histedit has run and had a chance to remove any old or temporary
79 After histedit has run and had a chance to remove any old or temporary
80 revisions it needed, the history looks like this::
80 revisions it needed, the history looks like this::
81
81
82 @ 2[tip] 989b4d060121 2009-04-27 18:04 -0500 durin42
82 @ 2[tip] 989b4d060121 2009-04-27 18:04 -0500 durin42
83 | Add beta and delta.
83 | Add beta and delta.
84 |
84 |
85 o 1 081603921c3f 2009-04-27 18:04 -0500 durin42
85 o 1 081603921c3f 2009-04-27 18:04 -0500 durin42
86 | Add gamma
86 | Add gamma
87 |
87 |
88 o 0 d8d2fcd0e319 2009-04-27 18:04 -0500 durin42
88 o 0 d8d2fcd0e319 2009-04-27 18:04 -0500 durin42
89 Add alpha
89 Add alpha
90
90
91 Note that ``histedit`` does *not* remove any revisions (even its own temporary
91 Note that ``histedit`` does *not* remove any revisions (even its own temporary
92 ones) until after it has completed all the editing operations, so it will
92 ones) until after it has completed all the editing operations, so it will
93 probably perform several strip operations when it's done. For the above example,
93 probably perform several strip operations when it's done. For the above example,
94 it had to run strip twice. Strip can be slow depending on a variety of factors,
94 it had to run strip twice. Strip can be slow depending on a variety of factors,
95 so you might need to be a little patient. You can choose to keep the original
95 so you might need to be a little patient. You can choose to keep the original
96 revisions by passing the ``--keep`` flag.
96 revisions by passing the ``--keep`` flag.
97
97
98 The ``edit`` operation will drop you back to a command prompt,
98 The ``edit`` operation will drop you back to a command prompt,
99 allowing you to edit files freely, or even use ``hg record`` to commit
99 allowing you to edit files freely, or even use ``hg record`` to commit
100 some changes as a separate commit. When you're done, any remaining
100 some changes as a separate commit. When you're done, any remaining
101 uncommitted changes will be committed as well. When done, run ``hg
101 uncommitted changes will be committed as well. When done, run ``hg
102 histedit --continue`` to finish this step. If there are uncommitted
102 histedit --continue`` to finish this step. If there are uncommitted
103 changes, you'll be prompted for a new commit message, but the default
103 changes, you'll be prompted for a new commit message, but the default
104 commit message will be the original message for the ``edit`` ed
104 commit message will be the original message for the ``edit`` ed
105 revision, and the date of the original commit will be preserved.
105 revision, and the date of the original commit will be preserved.
106
106
107 The ``message`` operation will give you a chance to revise a commit
107 The ``message`` operation will give you a chance to revise a commit
108 message without changing the contents. It's a shortcut for doing
108 message without changing the contents. It's a shortcut for doing
109 ``edit`` immediately followed by `hg histedit --continue``.
109 ``edit`` immediately followed by `hg histedit --continue``.
110
110
111 If ``histedit`` encounters a conflict when moving a revision (while
111 If ``histedit`` encounters a conflict when moving a revision (while
112 handling ``pick`` or ``fold``), it'll stop in a similar manner to
112 handling ``pick`` or ``fold``), it'll stop in a similar manner to
113 ``edit`` with the difference that it won't prompt you for a commit
113 ``edit`` with the difference that it won't prompt you for a commit
114 message when done. If you decide at this point that you don't like how
114 message when done. If you decide at this point that you don't like how
115 much work it will be to rearrange history, or that you made a mistake,
115 much work it will be to rearrange history, or that you made a mistake,
116 you can use ``hg histedit --abort`` to abandon the new changes you
116 you can use ``hg histedit --abort`` to abandon the new changes you
117 have made and return to the state before you attempted to edit your
117 have made and return to the state before you attempted to edit your
118 history.
118 history.
119
119
120 If we clone the histedit-ed example repository above and add four more
120 If we clone the histedit-ed example repository above and add four more
121 changes, such that we have the following history::
121 changes, such that we have the following history::
122
122
123 @ 6[tip] 038383181893 2009-04-27 18:04 -0500 stefan
123 @ 6[tip] 038383181893 2009-04-27 18:04 -0500 stefan
124 | Add theta
124 | Add theta
125 |
125 |
126 o 5 140988835471 2009-04-27 18:04 -0500 stefan
126 o 5 140988835471 2009-04-27 18:04 -0500 stefan
127 | Add eta
127 | Add eta
128 |
128 |
129 o 4 122930637314 2009-04-27 18:04 -0500 stefan
129 o 4 122930637314 2009-04-27 18:04 -0500 stefan
130 | Add zeta
130 | Add zeta
131 |
131 |
132 o 3 836302820282 2009-04-27 18:04 -0500 stefan
132 o 3 836302820282 2009-04-27 18:04 -0500 stefan
133 | Add epsilon
133 | Add epsilon
134 |
134 |
135 o 2 989b4d060121 2009-04-27 18:04 -0500 durin42
135 o 2 989b4d060121 2009-04-27 18:04 -0500 durin42
136 | Add beta and delta.
136 | Add beta and delta.
137 |
137 |
138 o 1 081603921c3f 2009-04-27 18:04 -0500 durin42
138 o 1 081603921c3f 2009-04-27 18:04 -0500 durin42
139 | Add gamma
139 | Add gamma
140 |
140 |
141 o 0 d8d2fcd0e319 2009-04-27 18:04 -0500 durin42
141 o 0 d8d2fcd0e319 2009-04-27 18:04 -0500 durin42
142 Add alpha
142 Add alpha
143
143
144 If you run ``hg histedit --outgoing`` on the clone then it is the same
144 If you run ``hg histedit --outgoing`` on the clone then it is the same
145 as running ``hg histedit 836302820282``. If you need plan to push to a
145 as running ``hg histedit 836302820282``. If you need plan to push to a
146 repository that Mercurial does not detect to be related to the source
146 repository that Mercurial does not detect to be related to the source
147 repo, you can add a ``--force`` option.
147 repo, you can add a ``--force`` option.
148
148
149 Config
149 Config
150 ------
150 ------
151
151
152 Histedit rule lines are truncated to 80 characters by default. You
152 Histedit rule lines are truncated to 80 characters by default. You
153 can customize this behavior by setting a different length in your
153 can customize this behavior by setting a different length in your
154 configuration file::
154 configuration file::
155
155
156 [histedit]
156 [histedit]
157 linelen = 120 # truncate rule lines at 120 characters
157 linelen = 120 # truncate rule lines at 120 characters
158
158
159 ``hg histedit`` attempts to automatically choose an appropriate base
159 ``hg histedit`` attempts to automatically choose an appropriate base
160 revision to use. To change which base revision is used, define a
160 revision to use. To change which base revision is used, define a
161 revset in your configuration file::
161 revset in your configuration file::
162
162
163 [histedit]
163 [histedit]
164 defaultrev = only(.) & draft()
164 defaultrev = only(.) & draft()
165
165
166 By default each edited revision needs to be present in histedit commands.
166 By default each edited revision needs to be present in histedit commands.
167 To remove revision you need to use ``drop`` operation. You can configure
167 To remove revision you need to use ``drop`` operation. You can configure
168 the drop to be implicit for missing commits by adding::
168 the drop to be implicit for missing commits by adding::
169
169
170 [histedit]
170 [histedit]
171 dropmissing = True
171 dropmissing = True
172
172
173 By default, histedit will close the transaction after each action. For
173 By default, histedit will close the transaction after each action. For
174 performance purposes, you can configure histedit to use a single transaction
174 performance purposes, you can configure histedit to use a single transaction
175 across the entire histedit. WARNING: This setting introduces a significant risk
175 across the entire histedit. WARNING: This setting introduces a significant risk
176 of losing the work you've done in a histedit if the histedit aborts
176 of losing the work you've done in a histedit if the histedit aborts
177 unexpectedly::
177 unexpectedly::
178
178
179 [histedit]
179 [histedit]
180 singletransaction = True
180 singletransaction = True
181
181
182 """
182 """
183
183
184 from __future__ import absolute_import
184 from __future__ import absolute_import
185
185
186 import errno
186 import errno
187 import os
187 import os
188
188
189 from mercurial.i18n import _
189 from mercurial.i18n import _
190 from mercurial import (
190 from mercurial import (
191 bundle2,
191 bundle2,
192 cmdutil,
192 cmdutil,
193 context,
193 context,
194 copies,
194 copies,
195 destutil,
195 destutil,
196 discovery,
196 discovery,
197 error,
197 error,
198 exchange,
198 exchange,
199 extensions,
199 extensions,
200 hg,
200 hg,
201 lock,
201 lock,
202 merge as mergemod,
202 merge as mergemod,
203 mergeutil,
203 mergeutil,
204 node,
204 node,
205 obsolete,
205 obsolete,
206 pycompat,
206 pycompat,
207 registrar,
207 registrar,
208 repair,
208 repair,
209 scmutil,
209 scmutil,
210 util,
210 util,
211 )
211 )
212
212
213 pickle = util.pickle
213 pickle = util.pickle
214 release = lock.release
214 release = lock.release
215 cmdtable = {}
215 cmdtable = {}
216 command = registrar.command(cmdtable)
216 command = registrar.command(cmdtable)
217
217
218 configtable = {}
218 configtable = {}
219 configitem = registrar.configitem(configtable)
219 configitem = registrar.configitem(configtable)
220 configitem('experimental', 'histedit.autoverb',
220 configitem('experimental', 'histedit.autoverb',
221 default=False,
221 default=False,
222 )
222 )
223 configitem('histedit', 'defaultrev',
223 configitem('histedit', 'defaultrev',
224 default=configitem.dynamicdefault,
224 default=configitem.dynamicdefault,
225 )
225 )
226 configitem('histedit', 'dropmissing',
226 configitem('histedit', 'dropmissing',
227 default=False,
227 default=False,
228 )
228 )
229 configitem('histedit', 'linelen',
229 configitem('histedit', 'linelen',
230 default=80,
230 default=80,
231 )
231 )
232 configitem('histedit', 'singletransaction',
232 configitem('histedit', 'singletransaction',
233 default=False,
233 default=False,
234 )
234 )
235
235
236 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
236 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
237 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
237 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
238 # be specifying the version(s) of Mercurial they are tested with, or
238 # be specifying the version(s) of Mercurial they are tested with, or
239 # leave the attribute unspecified.
239 # leave the attribute unspecified.
240 testedwith = 'ships-with-hg-core'
240 testedwith = 'ships-with-hg-core'
241
241
242 actiontable = {}
242 actiontable = {}
243 primaryactions = set()
243 primaryactions = set()
244 secondaryactions = set()
244 secondaryactions = set()
245 tertiaryactions = set()
245 tertiaryactions = set()
246 internalactions = set()
246 internalactions = set()
247
247
248 def geteditcomment(ui, first, last):
248 def geteditcomment(ui, first, last):
249 """ construct the editor comment
249 """ construct the editor comment
250 The comment includes::
250 The comment includes::
251 - an intro
251 - an intro
252 - sorted primary commands
252 - sorted primary commands
253 - sorted short commands
253 - sorted short commands
254 - sorted long commands
254 - sorted long commands
255 - additional hints
255 - additional hints
256
256
257 Commands are only included once.
257 Commands are only included once.
258 """
258 """
259 intro = _("""Edit history between %s and %s
259 intro = _("""Edit history between %s and %s
260
260
261 Commits are listed from least to most recent
261 Commits are listed from least to most recent
262
262
263 You can reorder changesets by reordering the lines
263 You can reorder changesets by reordering the lines
264
264
265 Commands:
265 Commands:
266 """)
266 """)
267 actions = []
267 actions = []
268 def addverb(v):
268 def addverb(v):
269 a = actiontable[v]
269 a = actiontable[v]
270 lines = a.message.split("\n")
270 lines = a.message.split("\n")
271 if len(a.verbs):
271 if len(a.verbs):
272 v = ', '.join(sorted(a.verbs, key=lambda v: len(v)))
272 v = ', '.join(sorted(a.verbs, key=lambda v: len(v)))
273 actions.append(" %s = %s" % (v, lines[0]))
273 actions.append(" %s = %s" % (v, lines[0]))
274 actions.extend([' %s' for l in lines[1:]])
274 actions.extend([' %s' for l in lines[1:]])
275
275
276 for v in (
276 for v in (
277 sorted(primaryactions) +
277 sorted(primaryactions) +
278 sorted(secondaryactions) +
278 sorted(secondaryactions) +
279 sorted(tertiaryactions)
279 sorted(tertiaryactions)
280 ):
280 ):
281 addverb(v)
281 addverb(v)
282 actions.append('')
282 actions.append('')
283
283
284 hints = []
284 hints = []
285 if ui.configbool('histedit', 'dropmissing'):
285 if ui.configbool('histedit', 'dropmissing'):
286 hints.append("Deleting a changeset from the list "
286 hints.append("Deleting a changeset from the list "
287 "will DISCARD it from the edited history!")
287 "will DISCARD it from the edited history!")
288
288
289 lines = (intro % (first, last)).split('\n') + actions + hints
289 lines = (intro % (first, last)).split('\n') + actions + hints
290
290
291 return ''.join(['# %s\n' % l if l else '#\n' for l in lines])
291 return ''.join(['# %s\n' % l if l else '#\n' for l in lines])
292
292
293 class histeditstate(object):
293 class histeditstate(object):
294 def __init__(self, repo, parentctxnode=None, actions=None, keep=None,
294 def __init__(self, repo, parentctxnode=None, actions=None, keep=None,
295 topmost=None, replacements=None, lock=None, wlock=None):
295 topmost=None, replacements=None, lock=None, wlock=None):
296 self.repo = repo
296 self.repo = repo
297 self.actions = actions
297 self.actions = actions
298 self.keep = keep
298 self.keep = keep
299 self.topmost = topmost
299 self.topmost = topmost
300 self.parentctxnode = parentctxnode
300 self.parentctxnode = parentctxnode
301 self.lock = lock
301 self.lock = lock
302 self.wlock = wlock
302 self.wlock = wlock
303 self.backupfile = None
303 self.backupfile = None
304 if replacements is None:
304 if replacements is None:
305 self.replacements = []
305 self.replacements = []
306 else:
306 else:
307 self.replacements = replacements
307 self.replacements = replacements
308
308
309 def read(self):
309 def read(self):
310 """Load histedit state from disk and set fields appropriately."""
310 """Load histedit state from disk and set fields appropriately."""
311 try:
311 try:
312 state = self.repo.vfs.read('histedit-state')
312 state = self.repo.vfs.read('histedit-state')
313 except IOError as err:
313 except IOError as err:
314 if err.errno != errno.ENOENT:
314 if err.errno != errno.ENOENT:
315 raise
315 raise
316 cmdutil.wrongtooltocontinue(self.repo, _('histedit'))
316 cmdutil.wrongtooltocontinue(self.repo, _('histedit'))
317
317
318 if state.startswith('v1\n'):
318 if state.startswith('v1\n'):
319 data = self._load()
319 data = self._load()
320 parentctxnode, rules, keep, topmost, replacements, backupfile = data
320 parentctxnode, rules, keep, topmost, replacements, backupfile = data
321 else:
321 else:
322 data = pickle.loads(state)
322 data = pickle.loads(state)
323 parentctxnode, rules, keep, topmost, replacements = data
323 parentctxnode, rules, keep, topmost, replacements = data
324 backupfile = None
324 backupfile = None
325
325
326 self.parentctxnode = parentctxnode
326 self.parentctxnode = parentctxnode
327 rules = "\n".join(["%s %s" % (verb, rest) for [verb, rest] in rules])
327 rules = "\n".join(["%s %s" % (verb, rest) for [verb, rest] in rules])
328 actions = parserules(rules, self)
328 actions = parserules(rules, self)
329 self.actions = actions
329 self.actions = actions
330 self.keep = keep
330 self.keep = keep
331 self.topmost = topmost
331 self.topmost = topmost
332 self.replacements = replacements
332 self.replacements = replacements
333 self.backupfile = backupfile
333 self.backupfile = backupfile
334
334
335 def write(self, tr=None):
335 def write(self, tr=None):
336 if tr:
336 if tr:
337 tr.addfilegenerator('histedit-state', ('histedit-state',),
337 tr.addfilegenerator('histedit-state', ('histedit-state',),
338 self._write, location='plain')
338 self._write, location='plain')
339 else:
339 else:
340 with self.repo.vfs("histedit-state", "w") as f:
340 with self.repo.vfs("histedit-state", "w") as f:
341 self._write(f)
341 self._write(f)
342
342
343 def _write(self, fp):
343 def _write(self, fp):
344 fp.write('v1\n')
344 fp.write('v1\n')
345 fp.write('%s\n' % node.hex(self.parentctxnode))
345 fp.write('%s\n' % node.hex(self.parentctxnode))
346 fp.write('%s\n' % node.hex(self.topmost))
346 fp.write('%s\n' % node.hex(self.topmost))
347 fp.write('%s\n' % self.keep)
347 fp.write('%s\n' % self.keep)
348 fp.write('%d\n' % len(self.actions))
348 fp.write('%d\n' % len(self.actions))
349 for action in self.actions:
349 for action in self.actions:
350 fp.write('%s\n' % action.tostate())
350 fp.write('%s\n' % action.tostate())
351 fp.write('%d\n' % len(self.replacements))
351 fp.write('%d\n' % len(self.replacements))
352 for replacement in self.replacements:
352 for replacement in self.replacements:
353 fp.write('%s%s\n' % (node.hex(replacement[0]), ''.join(node.hex(r)
353 fp.write('%s%s\n' % (node.hex(replacement[0]), ''.join(node.hex(r)
354 for r in replacement[1])))
354 for r in replacement[1])))
355 backupfile = self.backupfile
355 backupfile = self.backupfile
356 if not backupfile:
356 if not backupfile:
357 backupfile = ''
357 backupfile = ''
358 fp.write('%s\n' % backupfile)
358 fp.write('%s\n' % backupfile)
359
359
360 def _load(self):
360 def _load(self):
361 fp = self.repo.vfs('histedit-state', 'r')
361 fp = self.repo.vfs('histedit-state', 'r')
362 lines = [l[:-1] for l in fp.readlines()]
362 lines = [l[:-1] for l in fp.readlines()]
363
363
364 index = 0
364 index = 0
365 lines[index] # version number
365 lines[index] # version number
366 index += 1
366 index += 1
367
367
368 parentctxnode = node.bin(lines[index])
368 parentctxnode = node.bin(lines[index])
369 index += 1
369 index += 1
370
370
371 topmost = node.bin(lines[index])
371 topmost = node.bin(lines[index])
372 index += 1
372 index += 1
373
373
374 keep = lines[index] == 'True'
374 keep = lines[index] == 'True'
375 index += 1
375 index += 1
376
376
377 # Rules
377 # Rules
378 rules = []
378 rules = []
379 rulelen = int(lines[index])
379 rulelen = int(lines[index])
380 index += 1
380 index += 1
381 for i in xrange(rulelen):
381 for i in xrange(rulelen):
382 ruleaction = lines[index]
382 ruleaction = lines[index]
383 index += 1
383 index += 1
384 rule = lines[index]
384 rule = lines[index]
385 index += 1
385 index += 1
386 rules.append((ruleaction, rule))
386 rules.append((ruleaction, rule))
387
387
388 # Replacements
388 # Replacements
389 replacements = []
389 replacements = []
390 replacementlen = int(lines[index])
390 replacementlen = int(lines[index])
391 index += 1
391 index += 1
392 for i in xrange(replacementlen):
392 for i in xrange(replacementlen):
393 replacement = lines[index]
393 replacement = lines[index]
394 original = node.bin(replacement[:40])
394 original = node.bin(replacement[:40])
395 succ = [node.bin(replacement[i:i + 40]) for i in
395 succ = [node.bin(replacement[i:i + 40]) for i in
396 range(40, len(replacement), 40)]
396 range(40, len(replacement), 40)]
397 replacements.append((original, succ))
397 replacements.append((original, succ))
398 index += 1
398 index += 1
399
399
400 backupfile = lines[index]
400 backupfile = lines[index]
401 index += 1
401 index += 1
402
402
403 fp.close()
403 fp.close()
404
404
405 return parentctxnode, rules, keep, topmost, replacements, backupfile
405 return parentctxnode, rules, keep, topmost, replacements, backupfile
406
406
407 def clear(self):
407 def clear(self):
408 if self.inprogress():
408 if self.inprogress():
409 self.repo.vfs.unlink('histedit-state')
409 self.repo.vfs.unlink('histedit-state')
410
410
411 def inprogress(self):
411 def inprogress(self):
412 return self.repo.vfs.exists('histedit-state')
412 return self.repo.vfs.exists('histedit-state')
413
413
414
414
415 class histeditaction(object):
415 class histeditaction(object):
416 def __init__(self, state, node):
416 def __init__(self, state, node):
417 self.state = state
417 self.state = state
418 self.repo = state.repo
418 self.repo = state.repo
419 self.node = node
419 self.node = node
420
420
421 @classmethod
421 @classmethod
422 def fromrule(cls, state, rule):
422 def fromrule(cls, state, rule):
423 """Parses the given rule, returning an instance of the histeditaction.
423 """Parses the given rule, returning an instance of the histeditaction.
424 """
424 """
425 rulehash = rule.strip().split(' ', 1)[0]
425 rulehash = rule.strip().split(' ', 1)[0]
426 try:
426 try:
427 rev = node.bin(rulehash)
427 rev = node.bin(rulehash)
428 except TypeError:
428 except TypeError:
429 raise error.ParseError("invalid changeset %s" % rulehash)
429 raise error.ParseError("invalid changeset %s" % rulehash)
430 return cls(state, rev)
430 return cls(state, rev)
431
431
432 def verify(self, prev, expected, seen):
432 def verify(self, prev, expected, seen):
433 """ Verifies semantic correctness of the rule"""
433 """ Verifies semantic correctness of the rule"""
434 repo = self.repo
434 repo = self.repo
435 ha = node.hex(self.node)
435 ha = node.hex(self.node)
436 try:
436 try:
437 self.node = repo[ha].node()
437 self.node = repo[ha].node()
438 except error.RepoError:
438 except error.RepoError:
439 raise error.ParseError(_('unknown changeset %s listed')
439 raise error.ParseError(_('unknown changeset %s listed')
440 % ha[:12])
440 % ha[:12])
441 if self.node is not None:
441 if self.node is not None:
442 self._verifynodeconstraints(prev, expected, seen)
442 self._verifynodeconstraints(prev, expected, seen)
443
443
444 def _verifynodeconstraints(self, prev, expected, seen):
444 def _verifynodeconstraints(self, prev, expected, seen):
445 # by default command need a node in the edited list
445 # by default command need a node in the edited list
446 if self.node not in expected:
446 if self.node not in expected:
447 raise error.ParseError(_('%s "%s" changeset was not a candidate')
447 raise error.ParseError(_('%s "%s" changeset was not a candidate')
448 % (self.verb, node.short(self.node)),
448 % (self.verb, node.short(self.node)),
449 hint=_('only use listed changesets'))
449 hint=_('only use listed changesets'))
450 # and only one command per node
450 # and only one command per node
451 if self.node in seen:
451 if self.node in seen:
452 raise error.ParseError(_('duplicated command for changeset %s') %
452 raise error.ParseError(_('duplicated command for changeset %s') %
453 node.short(self.node))
453 node.short(self.node))
454
454
455 def torule(self):
455 def torule(self):
456 """build a histedit rule line for an action
456 """build a histedit rule line for an action
457
457
458 by default lines are in the form:
458 by default lines are in the form:
459 <hash> <rev> <summary>
459 <hash> <rev> <summary>
460 """
460 """
461 ctx = self.repo[self.node]
461 ctx = self.repo[self.node]
462 summary = _getsummary(ctx)
462 summary = _getsummary(ctx)
463 line = '%s %s %d %s' % (self.verb, ctx, ctx.rev(), summary)
463 line = '%s %s %d %s' % (self.verb, ctx, ctx.rev(), summary)
464 # trim to 75 columns by default so it's not stupidly wide in my editor
464 # trim to 75 columns by default so it's not stupidly wide in my editor
465 # (the 5 more are left for verb)
465 # (the 5 more are left for verb)
466 maxlen = self.repo.ui.configint('histedit', 'linelen')
466 maxlen = self.repo.ui.configint('histedit', 'linelen')
467 maxlen = max(maxlen, 22) # avoid truncating hash
467 maxlen = max(maxlen, 22) # avoid truncating hash
468 return util.ellipsis(line, maxlen)
468 return util.ellipsis(line, maxlen)
469
469
470 def tostate(self):
470 def tostate(self):
471 """Print an action in format used by histedit state files
471 """Print an action in format used by histedit state files
472 (the first line is a verb, the remainder is the second)
472 (the first line is a verb, the remainder is the second)
473 """
473 """
474 return "%s\n%s" % (self.verb, node.hex(self.node))
474 return "%s\n%s" % (self.verb, node.hex(self.node))
475
475
476 def run(self):
476 def run(self):
477 """Runs the action. The default behavior is simply apply the action's
477 """Runs the action. The default behavior is simply apply the action's
478 rulectx onto the current parentctx."""
478 rulectx onto the current parentctx."""
479 self.applychange()
479 self.applychange()
480 self.continuedirty()
480 self.continuedirty()
481 return self.continueclean()
481 return self.continueclean()
482
482
483 def applychange(self):
483 def applychange(self):
484 """Applies the changes from this action's rulectx onto the current
484 """Applies the changes from this action's rulectx onto the current
485 parentctx, but does not commit them."""
485 parentctx, but does not commit them."""
486 repo = self.repo
486 repo = self.repo
487 rulectx = repo[self.node]
487 rulectx = repo[self.node]
488 repo.ui.pushbuffer(error=True, labeled=True)
488 repo.ui.pushbuffer(error=True, labeled=True)
489 hg.update(repo, self.state.parentctxnode, quietempty=True)
489 hg.update(repo, self.state.parentctxnode, quietempty=True)
490 stats = applychanges(repo.ui, repo, rulectx, {})
490 stats = applychanges(repo.ui, repo, rulectx, {})
491 if stats and stats[3] > 0:
491 if stats and stats[3] > 0:
492 buf = repo.ui.popbuffer()
492 buf = repo.ui.popbuffer()
493 repo.ui.write(*buf)
493 repo.ui.write(*buf)
494 raise error.InterventionRequired(
494 raise error.InterventionRequired(
495 _('Fix up the change (%s %s)') %
495 _('Fix up the change (%s %s)') %
496 (self.verb, node.short(self.node)),
496 (self.verb, node.short(self.node)),
497 hint=_('hg histedit --continue to resume'))
497 hint=_('hg histedit --continue to resume'))
498 else:
498 else:
499 repo.ui.popbuffer()
499 repo.ui.popbuffer()
500
500
501 def continuedirty(self):
501 def continuedirty(self):
502 """Continues the action when changes have been applied to the working
502 """Continues the action when changes have been applied to the working
503 copy. The default behavior is to commit the dirty changes."""
503 copy. The default behavior is to commit the dirty changes."""
504 repo = self.repo
504 repo = self.repo
505 rulectx = repo[self.node]
505 rulectx = repo[self.node]
506
506
507 editor = self.commiteditor()
507 editor = self.commiteditor()
508 commit = commitfuncfor(repo, rulectx)
508 commit = commitfuncfor(repo, rulectx)
509
509
510 commit(text=rulectx.description(), user=rulectx.user(),
510 commit(text=rulectx.description(), user=rulectx.user(),
511 date=rulectx.date(), extra=rulectx.extra(), editor=editor)
511 date=rulectx.date(), extra=rulectx.extra(), editor=editor)
512
512
513 def commiteditor(self):
513 def commiteditor(self):
514 """The editor to be used to edit the commit message."""
514 """The editor to be used to edit the commit message."""
515 return False
515 return False
516
516
517 def continueclean(self):
517 def continueclean(self):
518 """Continues the action when the working copy is clean. The default
518 """Continues the action when the working copy is clean. The default
519 behavior is to accept the current commit as the new version of the
519 behavior is to accept the current commit as the new version of the
520 rulectx."""
520 rulectx."""
521 ctx = self.repo['.']
521 ctx = self.repo['.']
522 if ctx.node() == self.state.parentctxnode:
522 if ctx.node() == self.state.parentctxnode:
523 self.repo.ui.warn(_('%s: skipping changeset (no changes)\n') %
523 self.repo.ui.warn(_('%s: skipping changeset (no changes)\n') %
524 node.short(self.node))
524 node.short(self.node))
525 return ctx, [(self.node, tuple())]
525 return ctx, [(self.node, tuple())]
526 if ctx.node() == self.node:
526 if ctx.node() == self.node:
527 # Nothing changed
527 # Nothing changed
528 return ctx, []
528 return ctx, []
529 return ctx, [(self.node, (ctx.node(),))]
529 return ctx, [(self.node, (ctx.node(),))]
530
530
531 def commitfuncfor(repo, src):
531 def commitfuncfor(repo, src):
532 """Build a commit function for the replacement of <src>
532 """Build a commit function for the replacement of <src>
533
533
534 This function ensure we apply the same treatment to all changesets.
534 This function ensure we apply the same treatment to all changesets.
535
535
536 - Add a 'histedit_source' entry in extra.
536 - Add a 'histedit_source' entry in extra.
537
537
538 Note that fold has its own separated logic because its handling is a bit
538 Note that fold has its own separated logic because its handling is a bit
539 different and not easily factored out of the fold method.
539 different and not easily factored out of the fold method.
540 """
540 """
541 phasemin = src.phase()
541 phasemin = src.phase()
542 def commitfunc(**kwargs):
542 def commitfunc(**kwargs):
543 overrides = {('phases', 'new-commit'): phasemin}
543 overrides = {('phases', 'new-commit'): phasemin}
544 with repo.ui.configoverride(overrides, 'histedit'):
544 with repo.ui.configoverride(overrides, 'histedit'):
545 extra = kwargs.get(r'extra', {}).copy()
545 extra = kwargs.get(r'extra', {}).copy()
546 extra['histedit_source'] = src.hex()
546 extra['histedit_source'] = src.hex()
547 kwargs[r'extra'] = extra
547 kwargs[r'extra'] = extra
548 return repo.commit(**kwargs)
548 return repo.commit(**kwargs)
549 return commitfunc
549 return commitfunc
550
550
551 def applychanges(ui, repo, ctx, opts):
551 def applychanges(ui, repo, ctx, opts):
552 """Merge changeset from ctx (only) in the current working directory"""
552 """Merge changeset from ctx (only) in the current working directory"""
553 wcpar = repo.dirstate.parents()[0]
553 wcpar = repo.dirstate.parents()[0]
554 if ctx.p1().node() == wcpar:
554 if ctx.p1().node() == wcpar:
555 # edits are "in place" we do not need to make any merge,
555 # edits are "in place" we do not need to make any merge,
556 # just applies changes on parent for editing
556 # just applies changes on parent for editing
557 cmdutil.revert(ui, repo, ctx, (wcpar, node.nullid), all=True)
557 cmdutil.revert(ui, repo, ctx, (wcpar, node.nullid), all=True)
558 stats = None
558 stats = None
559 else:
559 else:
560 try:
560 try:
561 # ui.forcemerge is an internal variable, do not document
561 # ui.forcemerge is an internal variable, do not document
562 repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
562 repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
563 'histedit')
563 'histedit')
564 stats = mergemod.graft(repo, ctx, ctx.p1(), ['local', 'histedit'])
564 stats = mergemod.graft(repo, ctx, ctx.p1(), ['local', 'histedit'])
565 finally:
565 finally:
566 repo.ui.setconfig('ui', 'forcemerge', '', 'histedit')
566 repo.ui.setconfig('ui', 'forcemerge', '', 'histedit')
567 return stats
567 return stats
568
568
569 def collapse(repo, first, last, commitopts, skipprompt=False):
569 def collapse(repo, first, last, commitopts, skipprompt=False):
570 """collapse the set of revisions from first to last as new one.
570 """collapse the set of revisions from first to last as new one.
571
571
572 Expected commit options are:
572 Expected commit options are:
573 - message
573 - message
574 - date
574 - date
575 - username
575 - username
576 Commit message is edited in all cases.
576 Commit message is edited in all cases.
577
577
578 This function works in memory."""
578 This function works in memory."""
579 ctxs = list(repo.set('%d::%d', first, last))
579 ctxs = list(repo.set('%d::%d', first, last))
580 if not ctxs:
580 if not ctxs:
581 return None
581 return None
582 for c in ctxs:
582 for c in ctxs:
583 if not c.mutable():
583 if not c.mutable():
584 raise error.ParseError(
584 raise error.ParseError(
585 _("cannot fold into public change %s") % node.short(c.node()))
585 _("cannot fold into public change %s") % node.short(c.node()))
586 base = first.parents()[0]
586 base = first.parents()[0]
587
587
588 # commit a new version of the old changeset, including the update
588 # commit a new version of the old changeset, including the update
589 # collect all files which might be affected
589 # collect all files which might be affected
590 files = set()
590 files = set()
591 for ctx in ctxs:
591 for ctx in ctxs:
592 files.update(ctx.files())
592 files.update(ctx.files())
593
593
594 # Recompute copies (avoid recording a -> b -> a)
594 # Recompute copies (avoid recording a -> b -> a)
595 copied = copies.pathcopies(base, last)
595 copied = copies.pathcopies(base, last)
596
596
597 # prune files which were reverted by the updates
597 # prune files which were reverted by the updates
598 files = [f for f in files if not cmdutil.samefile(f, last, base)]
598 files = [f for f in files if not cmdutil.samefile(f, last, base)]
599 # commit version of these files as defined by head
599 # commit version of these files as defined by head
600 headmf = last.manifest()
600 headmf = last.manifest()
601 def filectxfn(repo, ctx, path):
601 def filectxfn(repo, ctx, path):
602 if path in headmf:
602 if path in headmf:
603 fctx = last[path]
603 fctx = last[path]
604 flags = fctx.flags()
604 flags = fctx.flags()
605 mctx = context.memfilectx(repo,
605 mctx = context.memfilectx(repo, ctx,
606 fctx.path(), fctx.data(),
606 fctx.path(), fctx.data(),
607 islink='l' in flags,
607 islink='l' in flags,
608 isexec='x' in flags,
608 isexec='x' in flags,
609 copied=copied.get(path))
609 copied=copied.get(path))
610 return mctx
610 return mctx
611 return None
611 return None
612
612
613 if commitopts.get('message'):
613 if commitopts.get('message'):
614 message = commitopts['message']
614 message = commitopts['message']
615 else:
615 else:
616 message = first.description()
616 message = first.description()
617 user = commitopts.get('user')
617 user = commitopts.get('user')
618 date = commitopts.get('date')
618 date = commitopts.get('date')
619 extra = commitopts.get('extra')
619 extra = commitopts.get('extra')
620
620
621 parents = (first.p1().node(), first.p2().node())
621 parents = (first.p1().node(), first.p2().node())
622 editor = None
622 editor = None
623 if not skipprompt:
623 if not skipprompt:
624 editor = cmdutil.getcommiteditor(edit=True, editform='histedit.fold')
624 editor = cmdutil.getcommiteditor(edit=True, editform='histedit.fold')
625 new = context.memctx(repo,
625 new = context.memctx(repo,
626 parents=parents,
626 parents=parents,
627 text=message,
627 text=message,
628 files=files,
628 files=files,
629 filectxfn=filectxfn,
629 filectxfn=filectxfn,
630 user=user,
630 user=user,
631 date=date,
631 date=date,
632 extra=extra,
632 extra=extra,
633 editor=editor)
633 editor=editor)
634 return repo.commitctx(new)
634 return repo.commitctx(new)
635
635
636 def _isdirtywc(repo):
636 def _isdirtywc(repo):
637 return repo[None].dirty(missing=True)
637 return repo[None].dirty(missing=True)
638
638
639 def abortdirty():
639 def abortdirty():
640 raise error.Abort(_('working copy has pending changes'),
640 raise error.Abort(_('working copy has pending changes'),
641 hint=_('amend, commit, or revert them and run histedit '
641 hint=_('amend, commit, or revert them and run histedit '
642 '--continue, or abort with histedit --abort'))
642 '--continue, or abort with histedit --abort'))
643
643
644 def action(verbs, message, priority=False, internal=False):
644 def action(verbs, message, priority=False, internal=False):
645 def wrap(cls):
645 def wrap(cls):
646 assert not priority or not internal
646 assert not priority or not internal
647 verb = verbs[0]
647 verb = verbs[0]
648 if priority:
648 if priority:
649 primaryactions.add(verb)
649 primaryactions.add(verb)
650 elif internal:
650 elif internal:
651 internalactions.add(verb)
651 internalactions.add(verb)
652 elif len(verbs) > 1:
652 elif len(verbs) > 1:
653 secondaryactions.add(verb)
653 secondaryactions.add(verb)
654 else:
654 else:
655 tertiaryactions.add(verb)
655 tertiaryactions.add(verb)
656
656
657 cls.verb = verb
657 cls.verb = verb
658 cls.verbs = verbs
658 cls.verbs = verbs
659 cls.message = message
659 cls.message = message
660 for verb in verbs:
660 for verb in verbs:
661 actiontable[verb] = cls
661 actiontable[verb] = cls
662 return cls
662 return cls
663 return wrap
663 return wrap
664
664
665 @action(['pick', 'p'],
665 @action(['pick', 'p'],
666 _('use commit'),
666 _('use commit'),
667 priority=True)
667 priority=True)
668 class pick(histeditaction):
668 class pick(histeditaction):
669 def run(self):
669 def run(self):
670 rulectx = self.repo[self.node]
670 rulectx = self.repo[self.node]
671 if rulectx.parents()[0].node() == self.state.parentctxnode:
671 if rulectx.parents()[0].node() == self.state.parentctxnode:
672 self.repo.ui.debug('node %s unchanged\n' % node.short(self.node))
672 self.repo.ui.debug('node %s unchanged\n' % node.short(self.node))
673 return rulectx, []
673 return rulectx, []
674
674
675 return super(pick, self).run()
675 return super(pick, self).run()
676
676
677 @action(['edit', 'e'],
677 @action(['edit', 'e'],
678 _('use commit, but stop for amending'),
678 _('use commit, but stop for amending'),
679 priority=True)
679 priority=True)
680 class edit(histeditaction):
680 class edit(histeditaction):
681 def run(self):
681 def run(self):
682 repo = self.repo
682 repo = self.repo
683 rulectx = repo[self.node]
683 rulectx = repo[self.node]
684 hg.update(repo, self.state.parentctxnode, quietempty=True)
684 hg.update(repo, self.state.parentctxnode, quietempty=True)
685 applychanges(repo.ui, repo, rulectx, {})
685 applychanges(repo.ui, repo, rulectx, {})
686 raise error.InterventionRequired(
686 raise error.InterventionRequired(
687 _('Editing (%s), you may commit or record as needed now.')
687 _('Editing (%s), you may commit or record as needed now.')
688 % node.short(self.node),
688 % node.short(self.node),
689 hint=_('hg histedit --continue to resume'))
689 hint=_('hg histedit --continue to resume'))
690
690
691 def commiteditor(self):
691 def commiteditor(self):
692 return cmdutil.getcommiteditor(edit=True, editform='histedit.edit')
692 return cmdutil.getcommiteditor(edit=True, editform='histedit.edit')
693
693
694 @action(['fold', 'f'],
694 @action(['fold', 'f'],
695 _('use commit, but combine it with the one above'))
695 _('use commit, but combine it with the one above'))
696 class fold(histeditaction):
696 class fold(histeditaction):
697 def verify(self, prev, expected, seen):
697 def verify(self, prev, expected, seen):
698 """ Verifies semantic correctness of the fold rule"""
698 """ Verifies semantic correctness of the fold rule"""
699 super(fold, self).verify(prev, expected, seen)
699 super(fold, self).verify(prev, expected, seen)
700 repo = self.repo
700 repo = self.repo
701 if not prev:
701 if not prev:
702 c = repo[self.node].parents()[0]
702 c = repo[self.node].parents()[0]
703 elif not prev.verb in ('pick', 'base'):
703 elif not prev.verb in ('pick', 'base'):
704 return
704 return
705 else:
705 else:
706 c = repo[prev.node]
706 c = repo[prev.node]
707 if not c.mutable():
707 if not c.mutable():
708 raise error.ParseError(
708 raise error.ParseError(
709 _("cannot fold into public change %s") % node.short(c.node()))
709 _("cannot fold into public change %s") % node.short(c.node()))
710
710
711
711
712 def continuedirty(self):
712 def continuedirty(self):
713 repo = self.repo
713 repo = self.repo
714 rulectx = repo[self.node]
714 rulectx = repo[self.node]
715
715
716 commit = commitfuncfor(repo, rulectx)
716 commit = commitfuncfor(repo, rulectx)
717 commit(text='fold-temp-revision %s' % node.short(self.node),
717 commit(text='fold-temp-revision %s' % node.short(self.node),
718 user=rulectx.user(), date=rulectx.date(),
718 user=rulectx.user(), date=rulectx.date(),
719 extra=rulectx.extra())
719 extra=rulectx.extra())
720
720
721 def continueclean(self):
721 def continueclean(self):
722 repo = self.repo
722 repo = self.repo
723 ctx = repo['.']
723 ctx = repo['.']
724 rulectx = repo[self.node]
724 rulectx = repo[self.node]
725 parentctxnode = self.state.parentctxnode
725 parentctxnode = self.state.parentctxnode
726 if ctx.node() == parentctxnode:
726 if ctx.node() == parentctxnode:
727 repo.ui.warn(_('%s: empty changeset\n') %
727 repo.ui.warn(_('%s: empty changeset\n') %
728 node.short(self.node))
728 node.short(self.node))
729 return ctx, [(self.node, (parentctxnode,))]
729 return ctx, [(self.node, (parentctxnode,))]
730
730
731 parentctx = repo[parentctxnode]
731 parentctx = repo[parentctxnode]
732 newcommits = set(c.node() for c in repo.set('(%d::. - %d)', parentctx,
732 newcommits = set(c.node() for c in repo.set('(%d::. - %d)', parentctx,
733 parentctx))
733 parentctx))
734 if not newcommits:
734 if not newcommits:
735 repo.ui.warn(_('%s: cannot fold - working copy is not a '
735 repo.ui.warn(_('%s: cannot fold - working copy is not a '
736 'descendant of previous commit %s\n') %
736 'descendant of previous commit %s\n') %
737 (node.short(self.node), node.short(parentctxnode)))
737 (node.short(self.node), node.short(parentctxnode)))
738 return ctx, [(self.node, (ctx.node(),))]
738 return ctx, [(self.node, (ctx.node(),))]
739
739
740 middlecommits = newcommits.copy()
740 middlecommits = newcommits.copy()
741 middlecommits.discard(ctx.node())
741 middlecommits.discard(ctx.node())
742
742
743 return self.finishfold(repo.ui, repo, parentctx, rulectx, ctx.node(),
743 return self.finishfold(repo.ui, repo, parentctx, rulectx, ctx.node(),
744 middlecommits)
744 middlecommits)
745
745
746 def skipprompt(self):
746 def skipprompt(self):
747 """Returns true if the rule should skip the message editor.
747 """Returns true if the rule should skip the message editor.
748
748
749 For example, 'fold' wants to show an editor, but 'rollup'
749 For example, 'fold' wants to show an editor, but 'rollup'
750 doesn't want to.
750 doesn't want to.
751 """
751 """
752 return False
752 return False
753
753
754 def mergedescs(self):
754 def mergedescs(self):
755 """Returns true if the rule should merge messages of multiple changes.
755 """Returns true if the rule should merge messages of multiple changes.
756
756
757 This exists mainly so that 'rollup' rules can be a subclass of
757 This exists mainly so that 'rollup' rules can be a subclass of
758 'fold'.
758 'fold'.
759 """
759 """
760 return True
760 return True
761
761
762 def firstdate(self):
762 def firstdate(self):
763 """Returns true if the rule should preserve the date of the first
763 """Returns true if the rule should preserve the date of the first
764 change.
764 change.
765
765
766 This exists mainly so that 'rollup' rules can be a subclass of
766 This exists mainly so that 'rollup' rules can be a subclass of
767 'fold'.
767 'fold'.
768 """
768 """
769 return False
769 return False
770
770
771 def finishfold(self, ui, repo, ctx, oldctx, newnode, internalchanges):
771 def finishfold(self, ui, repo, ctx, oldctx, newnode, internalchanges):
772 parent = ctx.parents()[0].node()
772 parent = ctx.parents()[0].node()
773 repo.ui.pushbuffer()
773 repo.ui.pushbuffer()
774 hg.update(repo, parent)
774 hg.update(repo, parent)
775 repo.ui.popbuffer()
775 repo.ui.popbuffer()
776 ### prepare new commit data
776 ### prepare new commit data
777 commitopts = {}
777 commitopts = {}
778 commitopts['user'] = ctx.user()
778 commitopts['user'] = ctx.user()
779 # commit message
779 # commit message
780 if not self.mergedescs():
780 if not self.mergedescs():
781 newmessage = ctx.description()
781 newmessage = ctx.description()
782 else:
782 else:
783 newmessage = '\n***\n'.join(
783 newmessage = '\n***\n'.join(
784 [ctx.description()] +
784 [ctx.description()] +
785 [repo[r].description() for r in internalchanges] +
785 [repo[r].description() for r in internalchanges] +
786 [oldctx.description()]) + '\n'
786 [oldctx.description()]) + '\n'
787 commitopts['message'] = newmessage
787 commitopts['message'] = newmessage
788 # date
788 # date
789 if self.firstdate():
789 if self.firstdate():
790 commitopts['date'] = ctx.date()
790 commitopts['date'] = ctx.date()
791 else:
791 else:
792 commitopts['date'] = max(ctx.date(), oldctx.date())
792 commitopts['date'] = max(ctx.date(), oldctx.date())
793 extra = ctx.extra().copy()
793 extra = ctx.extra().copy()
794 # histedit_source
794 # histedit_source
795 # note: ctx is likely a temporary commit but that the best we can do
795 # note: ctx is likely a temporary commit but that the best we can do
796 # here. This is sufficient to solve issue3681 anyway.
796 # here. This is sufficient to solve issue3681 anyway.
797 extra['histedit_source'] = '%s,%s' % (ctx.hex(), oldctx.hex())
797 extra['histedit_source'] = '%s,%s' % (ctx.hex(), oldctx.hex())
798 commitopts['extra'] = extra
798 commitopts['extra'] = extra
799 phasemin = max(ctx.phase(), oldctx.phase())
799 phasemin = max(ctx.phase(), oldctx.phase())
800 overrides = {('phases', 'new-commit'): phasemin}
800 overrides = {('phases', 'new-commit'): phasemin}
801 with repo.ui.configoverride(overrides, 'histedit'):
801 with repo.ui.configoverride(overrides, 'histedit'):
802 n = collapse(repo, ctx, repo[newnode], commitopts,
802 n = collapse(repo, ctx, repo[newnode], commitopts,
803 skipprompt=self.skipprompt())
803 skipprompt=self.skipprompt())
804 if n is None:
804 if n is None:
805 return ctx, []
805 return ctx, []
806 repo.ui.pushbuffer()
806 repo.ui.pushbuffer()
807 hg.update(repo, n)
807 hg.update(repo, n)
808 repo.ui.popbuffer()
808 repo.ui.popbuffer()
809 replacements = [(oldctx.node(), (newnode,)),
809 replacements = [(oldctx.node(), (newnode,)),
810 (ctx.node(), (n,)),
810 (ctx.node(), (n,)),
811 (newnode, (n,)),
811 (newnode, (n,)),
812 ]
812 ]
813 for ich in internalchanges:
813 for ich in internalchanges:
814 replacements.append((ich, (n,)))
814 replacements.append((ich, (n,)))
815 return repo[n], replacements
815 return repo[n], replacements
816
816
817 @action(['base', 'b'],
817 @action(['base', 'b'],
818 _('checkout changeset and apply further changesets from there'))
818 _('checkout changeset and apply further changesets from there'))
819 class base(histeditaction):
819 class base(histeditaction):
820
820
821 def run(self):
821 def run(self):
822 if self.repo['.'].node() != self.node:
822 if self.repo['.'].node() != self.node:
823 mergemod.update(self.repo, self.node, False, True)
823 mergemod.update(self.repo, self.node, False, True)
824 # branchmerge, force)
824 # branchmerge, force)
825 return self.continueclean()
825 return self.continueclean()
826
826
827 def continuedirty(self):
827 def continuedirty(self):
828 abortdirty()
828 abortdirty()
829
829
830 def continueclean(self):
830 def continueclean(self):
831 basectx = self.repo['.']
831 basectx = self.repo['.']
832 return basectx, []
832 return basectx, []
833
833
834 def _verifynodeconstraints(self, prev, expected, seen):
834 def _verifynodeconstraints(self, prev, expected, seen):
835 # base can only be use with a node not in the edited set
835 # base can only be use with a node not in the edited set
836 if self.node in expected:
836 if self.node in expected:
837 msg = _('%s "%s" changeset was an edited list candidate')
837 msg = _('%s "%s" changeset was an edited list candidate')
838 raise error.ParseError(
838 raise error.ParseError(
839 msg % (self.verb, node.short(self.node)),
839 msg % (self.verb, node.short(self.node)),
840 hint=_('base must only use unlisted changesets'))
840 hint=_('base must only use unlisted changesets'))
841
841
842 @action(['_multifold'],
842 @action(['_multifold'],
843 _(
843 _(
844 """fold subclass used for when multiple folds happen in a row
844 """fold subclass used for when multiple folds happen in a row
845
845
846 We only want to fire the editor for the folded message once when
846 We only want to fire the editor for the folded message once when
847 (say) four changes are folded down into a single change. This is
847 (say) four changes are folded down into a single change. This is
848 similar to rollup, but we should preserve both messages so that
848 similar to rollup, but we should preserve both messages so that
849 when the last fold operation runs we can show the user all the
849 when the last fold operation runs we can show the user all the
850 commit messages in their editor.
850 commit messages in their editor.
851 """),
851 """),
852 internal=True)
852 internal=True)
853 class _multifold(fold):
853 class _multifold(fold):
854 def skipprompt(self):
854 def skipprompt(self):
855 return True
855 return True
856
856
857 @action(["roll", "r"],
857 @action(["roll", "r"],
858 _("like fold, but discard this commit's description and date"))
858 _("like fold, but discard this commit's description and date"))
859 class rollup(fold):
859 class rollup(fold):
860 def mergedescs(self):
860 def mergedescs(self):
861 return False
861 return False
862
862
863 def skipprompt(self):
863 def skipprompt(self):
864 return True
864 return True
865
865
866 def firstdate(self):
866 def firstdate(self):
867 return True
867 return True
868
868
869 @action(["drop", "d"],
869 @action(["drop", "d"],
870 _('remove commit from history'))
870 _('remove commit from history'))
871 class drop(histeditaction):
871 class drop(histeditaction):
872 def run(self):
872 def run(self):
873 parentctx = self.repo[self.state.parentctxnode]
873 parentctx = self.repo[self.state.parentctxnode]
874 return parentctx, [(self.node, tuple())]
874 return parentctx, [(self.node, tuple())]
875
875
876 @action(["mess", "m"],
876 @action(["mess", "m"],
877 _('edit commit message without changing commit content'),
877 _('edit commit message without changing commit content'),
878 priority=True)
878 priority=True)
879 class message(histeditaction):
879 class message(histeditaction):
880 def commiteditor(self):
880 def commiteditor(self):
881 return cmdutil.getcommiteditor(edit=True, editform='histedit.mess')
881 return cmdutil.getcommiteditor(edit=True, editform='histedit.mess')
882
882
def findoutgoing(ui, repo, remote=None, force=False, opts=None):
    """utility function to find the first outgoing changeset

    Used by initialization code

    Resolves *remote* (or the 'default-push'/'default' path), finds the
    changesets missing from that destination, and returns the node of
    their unique root.  Raises error.Abort when there is nothing
    outgoing or when several unrelated roots make the choice ambiguous.
    """
    if opts is None:
        opts = {}
    dest = ui.expandpath(remote or 'default-push', remote or 'default')
    dest, revs = hg.parseurl(dest, None)[:2]
    ui.status(_('comparing with %s\n') % util.hidepassword(dest))

    revs, checkout = hg.addbranchrevs(repo, repo, revs, None)
    other = hg.peer(repo, opts, dest)

    if revs:
        # resolve symbolic revisions to nodes before discovery
        revs = [repo.lookup(rev) for rev in revs]

    outgoing = discovery.findcommonoutgoing(repo, other, revs, force=force)
    if not outgoing.missing:
        raise error.Abort(_('no outgoing ancestors'))
    roots = list(repo.revs("roots(%ln)", outgoing.missing))
    if 1 < len(roots):
        # several disjoint outgoing stacks: refuse to guess which one
        msg = _('there are ambiguous outgoing revisions')
        hint = _("see 'hg help histedit' for more detail")
        raise error.Abort(msg, hint=hint)
    return repo.lookup(roots[0])
908
908
@command('histedit',
    [('', 'commands', '',
      _('read history edits from the specified file'), _('FILE')),
     ('c', 'continue', False, _('continue an edit already in progress')),
     ('', 'edit-plan', False, _('edit remaining actions list')),
     ('k', 'keep', False,
      _("don't strip old nodes after edit is complete")),
     ('', 'abort', False, _('abort an edit in progress')),
     ('o', 'outgoing', False, _('changesets not found in destination')),
     ('f', 'force', False,
      _('force outgoing even for unrelated repositories')),
     ('r', 'rev', [], _('first revision to be edited'), _('REV'))] +
    cmdutil.formatteropts,
     _("[OPTIONS] ([ANCESTOR] | --outgoing [URL])"))
def histedit(ui, repo, *freeargs, **opts):
    """interactively edit changeset history

    This command lets you edit a linear series of changesets (up to
    and including the working directory, which should be clean).
    You can:

    - `pick` to [re]order a changeset

    - `drop` to omit changeset

    - `mess` to reword the changeset commit message

    - `fold` to combine it with the preceding changeset (using the later date)

    - `roll` like fold, but discarding this commit's description and date

    - `edit` to edit this changeset (preserving date)

    - `base` to checkout changeset and apply further changesets from there

    There are a number of ways to select the root changeset:

    - Specify ANCESTOR directly

    - Use --outgoing -- it will be the first linear changeset not
      included in destination. (See :hg:`help config.paths.default-push`)

    - Otherwise, the value from the "histedit.defaultrev" config option
      is used as a revset to select the base revision when ANCESTOR is not
      specified. The first revision returned by the revset is used. By
      default, this selects the editable history that is unique to the
      ancestry of the working directory.

    .. container:: verbose

       If you use --outgoing, this command will abort if there are ambiguous
       outgoing revisions. For example, if there are multiple branches
       containing outgoing revisions.

       Use "min(outgoing() and ::.)" or similar revset specification
       instead of --outgoing to specify edit target revision exactly in
       such ambiguous situation. See :hg:`help revsets` for detail about
       selecting revisions.

    .. container:: verbose

       Examples:

         - A number of changes have been made.
           Revision 3 is no longer needed.

           Start history editing from revision 3::

             hg histedit -r 3

           An editor opens, containing the list of revisions,
           with specific actions specified::

             pick 5339bf82f0ca 3 Zworgle the foobar
             pick 8ef592ce7cc4 4 Bedazzle the zerlog
             pick 0a9639fcda9d 5 Morgify the cromulancy

           Additional information about the possible actions
           to take appears below the list of revisions.

           To remove revision 3 from the history,
           its action (at the beginning of the relevant line)
           is changed to 'drop'::

             drop 5339bf82f0ca 3 Zworgle the foobar
             pick 8ef592ce7cc4 4 Bedazzle the zerlog
             pick 0a9639fcda9d 5 Morgify the cromulancy

         - A number of changes have been made.
           Revision 2 and 4 need to be swapped.

           Start history editing from revision 2::

             hg histedit -r 2

           An editor opens, containing the list of revisions,
           with specific actions specified::

             pick 252a1af424ad 2 Blorb a morgwazzle
             pick 5339bf82f0ca 3 Zworgle the foobar
             pick 8ef592ce7cc4 4 Bedazzle the zerlog

           To swap revision 2 and 4, its lines are swapped
           in the editor::

             pick 8ef592ce7cc4 4 Bedazzle the zerlog
             pick 5339bf82f0ca 3 Zworgle the foobar
             pick 252a1af424ad 2 Blorb a morgwazzle

    Returns 0 on success, 1 if user intervention is required (not only
    for intentional "edit" command, but also for resolving unexpected
    conflicts).
    """
    state = histeditstate(repo)
    try:
        # hold both the working-copy lock and the store lock for the
        # whole session; _histedit does the actual work
        state.wlock = repo.wlock()
        state.lock = repo.lock()
        _histedit(ui, repo, state, *freeargs, **opts)
    finally:
        # release() tolerates locks that were never acquired
        release(state.lock, state.wlock)
1029
1029
1030 goalcontinue = 'continue'
1030 goalcontinue = 'continue'
1031 goalabort = 'abort'
1031 goalabort = 'abort'
1032 goaleditplan = 'edit-plan'
1032 goaleditplan = 'edit-plan'
1033 goalnew = 'new'
1033 goalnew = 'new'
1034
1034
1035 def _getgoal(opts):
1035 def _getgoal(opts):
1036 if opts.get('continue'):
1036 if opts.get('continue'):
1037 return goalcontinue
1037 return goalcontinue
1038 if opts.get('abort'):
1038 if opts.get('abort'):
1039 return goalabort
1039 return goalabort
1040 if opts.get('edit_plan'):
1040 if opts.get('edit_plan'):
1041 return goaleditplan
1041 return goaleditplan
1042 return goalnew
1042 return goalnew
1043
1043
1044 def _readfile(ui, path):
1044 def _readfile(ui, path):
1045 if path == '-':
1045 if path == '-':
1046 with ui.timeblockedsection('histedit'):
1046 with ui.timeblockedsection('histedit'):
1047 return ui.fin.read()
1047 return ui.fin.read()
1048 else:
1048 else:
1049 with open(path, 'rb') as f:
1049 with open(path, 'rb') as f:
1050 return f.read()
1050 return f.read()
1051
1051
def _validateargs(ui, repo, state, freeargs, opts, goal, rules, revs):
    """Validate the argument combination for the requested *goal*.

    Raises error.Abort on any invalid combination.  For a new edit,
    *revs* may be extended in place with the positional arguments
    and/or the configured default revision.
    """
    # TODO only abort if we try to histedit mq patches, not just
    # blanket if mq patches are applied somewhere
    mq = getattr(repo, 'mq', None)
    if mq and mq.applied:
        raise error.Abort(_('source has mq patches applied'))

    # basic argument incompatibility processing
    outg = opts.get('outgoing')
    editplan = opts.get('edit_plan')
    abort = opts.get('abort')
    force = opts.get('force')
    if force and not outg:
        raise error.Abort(_('--force only allowed with --outgoing'))
    # compare against the shared goal constants rather than duplicating
    # the string literals (keeps this in sync with _getgoal/_histedit)
    if goal == goalcontinue:
        if any((outg, abort, revs, freeargs, rules, editplan)):
            raise error.Abort(_('no arguments allowed with --continue'))
    elif goal == goalabort:
        if any((outg, revs, freeargs, rules, editplan)):
            raise error.Abort(_('no arguments allowed with --abort'))
    elif goal == goaleditplan:
        if any((outg, revs, freeargs)):
            raise error.Abort(_('only --commands argument allowed with '
                                '--edit-plan'))
    else:
        # goal == goalnew: refuse to start over an unfinished session
        if os.path.exists(os.path.join(repo.path, 'histedit-state')):
            raise error.Abort(_('history edit already in progress, try '
                                '--continue or --abort'))
        if outg:
            if revs:
                raise error.Abort(_('no revisions allowed with --outgoing'))
            if len(freeargs) > 1:
                raise error.Abort(
                    _('only one repo argument allowed with --outgoing'))
        else:
            # positional arguments are extra revisions
            revs.extend(freeargs)
            if not revs:
                # fall back to the configured default revset
                defaultrev = destutil.desthistedit(ui, repo)
                if defaultrev is not None:
                    revs.append(defaultrev)

            if len(revs) != 1:
                raise error.Abort(
                    _('histedit requires exactly one ancestor revision'))
1096
1096
def _histedit(ui, repo, state, *freeargs, **opts):
    """Dispatch the histedit command (called with locks already held).

    Determines the goal from *opts*, validates the argument
    combination, then either resumes/aborts/re-plans an existing
    session or starts a new one and runs it to completion.
    """
    opts = pycompat.byteskwargs(opts)
    fm = ui.formatter('histedit', opts)
    fm.startitem()
    goal = _getgoal(opts)
    revs = opts.get('rev', [])
    rules = opts.get('commands', '')
    state.keep = opts.get('keep', False)

    _validateargs(ui, repo, state, freeargs, opts, goal, rules, revs)

    # rebuild state
    if goal == goalcontinue:
        state.read()
        state = bootstrapcontinue(ui, state, opts)
    elif goal == goaleditplan:
        # re-plan and abort both return early: no actions are executed now
        _edithisteditplan(ui, repo, state, rules)
        return
    elif goal == goalabort:
        _aborthistedit(ui, repo, state)
        return
    else:
        # goal == goalnew
        _newhistedit(ui, repo, state, revs, freeargs, opts)

    _continuehistedit(ui, repo, state)
    _finishhistedit(ui, repo, state, fm)
    fm.end()
1125
1125
def _continuehistedit(ui, repo, state):
    """This function runs after either:
    - bootstrapcontinue (if the goal is 'continue')
    - _newhistedit (if the goal is 'new')

    It executes the remaining actions one by one, persisting the state
    file before each action so the session can always be resumed.
    """
    # preprocess rules so that we can hide inner folds from the user
    # and only show one editor
    actions = state.actions[:]
    for idx, (action, nextact) in enumerate(
            zip(actions, actions[1:] + [None])):
        if action.verb == 'fold' and nextact and nextact.verb == 'fold':
            state.actions[idx].__class__ = _multifold

    # Force an initial state file write, so the user can run --abort/continue
    # even if there's an exception before the first transaction serialize.
    state.write()

    total = len(state.actions)
    pos = 0
    tr = None
    # Don't use singletransaction by default since it rolls the entire
    # transaction back if an unexpected exception happens (like a
    # pretxncommit hook throws, or the user aborts the commit msg editor).
    if ui.configbool("histedit", "singletransaction"):
        # Don't use a 'with' for the transaction, since actions may close
        # and reopen a transaction. For example, if the action executes an
        # external process it may choose to commit the transaction first.
        tr = repo.transaction('histedit')
    with util.acceptintervention(tr):
        while state.actions:
            state.write(tr=tr)
            actobj = state.actions[0]
            pos += 1
            ui.progress(_("editing"), pos, actobj.torule(),
                        _('changes'), total)
            ui.debug('histedit: processing %s %s\n' % (actobj.verb,\
                                                       actobj.torule()))
            # the action is popped only after run() succeeds, so an
            # interrupted action is retried on --continue
            parentctx, replacement_ = actobj.run()
            state.parentctxnode = parentctx.node()
            state.replacements.extend(replacement_)
            state.actions.pop(0)

    state.write()
    ui.progress(_("editing"), None)
1170
1170
def _finishhistedit(ui, repo, state, fm):
    """This action runs when histedit is finishing its session

    Updates the working copy to the new parent, retires the replaced
    and temporary nodes, reports the old->new node mapping through the
    formatter *fm*, and clears the session state.
    """
    repo.ui.pushbuffer()
    hg.update(repo, state.parentctxnode, quietempty=True)
    repo.ui.popbuffer()

    mapping, tmpnodes, created, ntm = processreplacement(state)
    if mapping:
        for prec, succs in mapping.iteritems():
            if not succs:
                ui.debug('histedit: %s is dropped\n' % node.short(prec))
            else:
                ui.debug('histedit: %s is replaced by %s\n' % (
                    node.short(prec), node.short(succs[0])))
                if len(succs) > 1:
                    m = 'histedit: %s'
                    for n in succs[1:]:
                        ui.debug(m % node.short(n))

    if not state.keep:
        if mapping:
            movetopmostbookmarks(repo, state.topmost, ntm)
            # TODO update mq state
    else:
        # --keep: retire only the temporary nodes, not the originals
        mapping = {}

    for n in tmpnodes:
        mapping[n] = ()

    # remove entries about unknown nodes
    nodemap = repo.unfiltered().changelog.nodemap
    mapping = {k: v for k, v in mapping.items()
               if k in nodemap and all(n in nodemap for n in v)}
    scmutil.cleanupnodes(repo, mapping, 'histedit')
    hf = fm.hexfunc
    fl = fm.formatlist
    fd = fm.formatdict
    nodechanges = fd({hf(oldn): fl([hf(n) for n in newn], name='node')
                      for oldn, newn in mapping.iteritems()},
                     key="oldnode", value="newnodes")
    fm.data(nodechanges=nodechanges)

    state.clear()
    if os.path.exists(repo.sjoin('undo')):
        os.unlink(repo.sjoin('undo'))
    if repo.vfs.exists('histedit-last-edit.txt'):
        repo.vfs.unlink('histedit-last-edit.txt')
1218
1218
def _aborthistedit(ui, repo, state):
    """Abort an in-progress histedit session.

    Re-applies the backup bundle when the original topmost changeset is
    no longer in the repo, moves the working copy back if it sits on an
    edited changeset, strips the temporary/leaf nodes, and clears the
    session state (always, via the ``finally``).
    """
    try:
        state.read()
        __, leafs, tmpnodes, __ = processreplacement(state)
        ui.debug('restore wc to old parent %s\n'
                 % node.short(state.topmost))

        # Recover our old commits if necessary
        # (idiomatic 'not in' membership test; same semantics as
        # 'not state.topmost in repo')
        if state.topmost not in repo and state.backupfile:
            backupfile = repo.vfs.join(state.backupfile)
            f = hg.openpath(ui, backupfile)
            gen = exchange.readbundle(ui, f, backupfile)
            with repo.transaction('histedit.abort') as tr:
                bundle2.applybundle(repo, gen, tr, source='histedit',
                                    url='bundle:' + backupfile)

            os.remove(backupfile)

        # check whether we should update away
        if repo.unfiltered().revs('parents() and (%n or %ln::)',
                                  state.parentctxnode, leafs | tmpnodes):
            hg.clean(repo, state.topmost, show_stats=True, quietempty=True)
        cleanupnode(ui, repo, tmpnodes)
        cleanupnode(ui, repo, leafs)
    except Exception:
        if state.inprogress():
            ui.warn(_('warning: encountered an exception during histedit '
                      '--abort; the repository may not have been completely '
                      'cleaned up\n'))
        raise
    finally:
        state.clear()
1251
1251
def _edithisteditplan(ui, repo, state, rules):
    """Replace the remaining action list of an in-progress session.

    The new plan comes from *rules* (a file path, or '-') when given,
    otherwise from an interactive editor pre-filled with the current
    actions.  The parsed plan is verified before being persisted.
    """
    state.read()
    if rules:
        newrules = _readfile(ui, rules)
    else:
        comment = geteditcomment(ui,
                                 node.short(state.parentctxnode),
                                 node.short(state.topmost))
        newrules = ruleeditor(repo, ui, state.actions, comment)
    parsed = parserules(newrules, state)
    remaining = [repo[act.node]
                 for act in state.actions if act.node]
    warnverifyactions(ui, repo, parsed, state, remaining)
    state.actions = parsed
    state.write()
1267
1267
def _newhistedit(ui, repo, state, revs, freeargs, opts):
    """Start a new histedit session.

    Determines the root changeset (from --outgoing or *revs*), obtains
    the action plan (from --commands or an interactive editor),
    initializes *state*, and writes a backup bundle so the session can
    be aborted completely.
    """
    outg = opts.get('outgoing')
    rules = opts.get('commands', '')
    force = opts.get('force')

    cmdutil.checkunfinished(repo)
    cmdutil.bailifchanged(repo)

    topmost, empty = repo.dirstate.parents()
    if outg:
        if freeargs:
            remote = freeargs[0]
        else:
            remote = None
        root = findoutgoing(ui, repo, remote, force, opts)
    else:
        rr = list(repo.set('roots(%ld)', scmutil.revrange(repo, revs)))
        if len(rr) != 1:
            raise error.Abort(_('The specified revisions must have '
                                'exactly one common root'))
        root = rr[0].node()

    revs = between(repo, root, topmost, state.keep)
    if not revs:
        raise error.Abort(_('%s is not an ancestor of working directory') %
                          node.short(root))

    ctxs = [repo[r] for r in revs]
    if not rules:
        # no --commands file: build a default all-'pick' plan and let
        # the user edit it interactively
        comment = geteditcomment(ui, node.short(root), node.short(topmost))
        actions = [pick(state, r) for r in revs]
        rules = ruleeditor(repo, ui, actions, comment)
    else:
        rules = _readfile(ui, rules)
    actions = parserules(rules, state)
    warnverifyactions(ui, repo, actions, state, ctxs)

    parentctxnode = repo[root].parents()[0].node()

    state.parentctxnode = parentctxnode
    state.actions = actions
    state.topmost = topmost
    state.replacements = []

    # Create a backup so we can always abort completely.
    backupfile = None
    if not obsolete.isenabled(repo, obsolete.createmarkersopt):
        backupfile = repair._bundle(repo, [parentctxnode], [topmost], root,
                                    'histedit')
    state.backupfile = backupfile
1318
1318
1319 def _getsummary(ctx):
1319 def _getsummary(ctx):
1320 # a common pattern is to extract the summary but default to the empty
1320 # a common pattern is to extract the summary but default to the empty
1321 # string
1321 # string
1322 summary = ctx.description() or ''
1322 summary = ctx.description() or ''
1323 if summary:
1323 if summary:
1324 summary = summary.splitlines()[0]
1324 summary = summary.splitlines()[0]
1325 return summary
1325 return summary
1326
1326
def bootstrapcontinue(ui, state, opts):
    """Resume an interrupted session: finish the pending action.

    Refuses to continue while merge conflicts are unresolved.  The
    popped action is given a chance to commit a dirty working copy
    (``continuedirty``) before ``continueclean`` produces the new
    parent and replacements.  Returns the updated *state*.
    """
    repo = state.repo

    ms = mergemod.mergestate.read(repo)
    mergeutil.checkunresolved(ms)

    if state.actions:
        actobj = state.actions.pop(0)

        if _isdirtywc(repo):
            actobj.continuedirty()
            # continuedirty must have committed everything; a still-dirty
            # working copy at this point is fatal
            if _isdirtywc(repo):
                abortdirty()

        parentctx, replacements = actobj.continueclean()

        state.parentctxnode = parentctx.node()
        state.replacements.extend(replacements)

    return state
1347
1347
def between(repo, old, new, keep):
    """select and validate the set of revision to edit

    When keep is false, the specified set can't have children.

    Returns the nodes of ``old::new``.  Raises error.Abort when the set
    has descendants outside itself (without obsolescence support),
    contains merges, or is rooted at an immutable changeset.
    """
    ctxs = list(repo.set('%n::%n', old, new))
    if ctxs and not keep:
        if (not obsolete.isenabled(repo, obsolete.allowunstableopt) and
            repo.revs('(%ld::) - (%ld)', ctxs, ctxs)):
            raise error.Abort(_('can only histedit a changeset together '
                                'with all its descendants'))
        if repo.revs('(%ld) and merge()', ctxs):
            raise error.Abort(_('cannot edit history that contains merges'))
        root = ctxs[0] # list is already sorted by repo.set
        if not root.mutable():
            raise error.Abort(_('cannot edit public changeset: %s') % root,
                              hint=_("see 'hg help phases' for details"))
    return [c.node() for c in ctxs]
1365
1365
def ruleeditor(repo, ui, actions, editcomment=""):
    """open an editor to edit rules

    rules are in the format [ [act, ctx], ...] like in state.rules

    When ``experimental.histedit.autoverb`` is set, a commit summary that
    starts with a known verb followed by '!' (e.g. "fold! original summary")
    pre-selects that verb and reorders the action next to the commit whose
    summary matches the remainder.  The edited rules are returned and also
    saved to .hg/histedit-last-edit.txt for recovery.
    """
    if repo.ui.configbool("experimental", "histedit.autoverb"):
        newact = util.sortdict()
        for act in actions:
            ctx = repo[act.node]
            summary = _getsummary(ctx)
            fword = summary.split(' ', 1)[0].lower()
            added = False

            # if it doesn't end with the special character '!' just skip this
            if fword.endswith('!'):
                fword = fword[:-1]
                if fword in primaryactions | secondaryactions | tertiaryactions:
                    act.verb = fword
                    # get the target summary
                    tsum = summary[len(fword) + 1:].lstrip()
                    # safe but slow: reverse iterate over the actions so we
                    # don't clash on two commits having the same summary
                    for na, l in reversed(list(newact.iteritems())):
                        actx = repo[na.node]
                        asum = _getsummary(actx)
                        if asum == tsum:
                            added = True
                            l.append(act)
                            break

            if not added:
                newact[act] = []

        # copy over and flatten the new list
        actions = []
        for na, l in newact.iteritems():
            actions.append(na)
            actions += l

    rules = '\n'.join([act.torule() for act in actions])
    rules += '\n\n'
    rules += editcomment
    rules = ui.edit(rules, ui.username(), {'prefix': 'histedit'},
                    repopath=repo.path, action='histedit')

    # Save edit rules in .hg/histedit-last-edit.txt in case
    # the user needs to ask for help after something
    # surprising happens.
    # Use a context manager so the file handle is closed even if the
    # write fails (the original open/write/close leaked it on error).
    with open(repo.vfs.join('histedit-last-edit.txt'), 'w') as f:
        f.write(rules)

    return rules
1419
1419
def parserules(rules, state):
    """Read the histedit rules string and return list of action objects """
    actions = []
    for line in rules.splitlines():
        line = line.strip()
        # blank lines and comment lines carry no rule
        if not line or line.startswith('#'):
            continue
        if ' ' not in line:
            raise error.ParseError(_('malformed line "%s"') % line)
        verb, rest = line.split(' ', 1)

        if verb not in actiontable:
            raise error.ParseError(_('unknown action "%s"') % verb)

        actions.append(actiontable[verb].fromrule(state, rest))
    return actions
1436
1436
def warnverifyactions(ui, repo, actions, state, ctxs):
    """Run verifyactions, pointing at the saved rules file on parse errors."""
    try:
        verifyactions(actions, state, ctxs)
    except error.ParseError:
        # remind the user where the last edited plan was saved so they
        # can recover it after fixing the problem
        if repo.vfs.exists('histedit-last-edit.txt'):
            ui.warn(_('warning: histedit rules saved '
                      'to: .hg/histedit-last-edit.txt\n'))
        raise
1445
1445
def verifyactions(actions, state, ctxs):
    """Verify that there exists exactly one action per given changeset and
    other constraints.

    Will abort if there are too many or too few rules, a malformed rule,
    or a rule on a changeset outside of the user-given range.
    """
    expected = set(c.node() for c in ctxs)
    seen = set()
    prev = None

    if actions and actions[0].verb in ['roll', 'fold']:
        raise error.ParseError(_('first changeset cannot use verb "%s"') %
                               actions[0].verb)

    for action in actions:
        action.verify(prev, expected, seen)
        prev = action
        if action.node is not None:
            seen.add(action.node)
    missing = sorted(expected - seen)  # sort to stabilize output

    if not state.repo.ui.configbool('histedit', 'dropmissing'):
        if missing:
            raise error.ParseError(_('missing rules for changeset %s') %
                    node.short(missing[0]),
                    hint=_('use "drop %s" to discard, see also: '
                           "'hg help -e histedit.config'")
                    % node.short(missing[0]))
        return

    if len(actions) == 0:
        raise error.ParseError(_('no rules provided'),
                hint=_('use strip extension to remove commits'))

    # prepend the drops so they execute immediately and never show up
    # in a future edit plan
    actions[:0] = [drop(state, n) for n in missing]
1483
1483
def adjustreplacementsfrommarkers(repo, oldreplacements):
    """Adjust replacements from obsolescence markers

    The replacements structure is generated from histedit's own state and
    misses rewrites recorded only as obsolescence markers.  This walks the
    markers reachable from the known successors and folds the additional
    (precursor, successors) pairs into the returned list."""
    if not obsolete.isenabled(repo, obsolete.createmarkersopt):
        return oldreplacements

    unfi = repo.unfiltered()
    nodemap = unfi.changelog.nodemap
    obsstore = repo.obsstore
    newreplacements = list(oldreplacements)
    oldsuccs = [r[1] for r in oldreplacements]
    # successors that have already been queued for inspection once
    seensuccs = set().union(*oldsuccs)  # flatten the successor tuples
    worklist = list(seensuccs)
    while worklist:
        current = worklist.pop()
        unknown = nodemap.get(current) is None
        markers = obsstore.successors.get(current, ())
        if unknown and not markers:
            # dead end, mark it as such
            newreplacements.append((current, ()))
        for marker in markers:
            nsuccs = marker[1]
            newreplacements.append((current, nsuccs))
            for nsucc in nsuccs:
                if nsucc not in seensuccs:
                    seensuccs.add(nsucc)
                    worklist.append(nsucc)

    return newreplacements
1518
1518
def processreplacement(state):
    """process the list of replacements to return

    1) the final mapping between original and created nodes
    2) the list of temporary node created by histedit
    3) the list of new commit created by histedit"""
    replacements = adjustreplacementsfrommarkers(state.repo, state.replacements)
    allsuccs = set()
    replaced = set()
    # fullmapping records every (precursor -> successors) operation seen
    fullmapping = {}
    for prec, succs in replacements:
        allsuccs.update(succs)
        replaced.add(prec)
        fullmapping.setdefault(prec, set()).update(succs)
    new = allsuccs - replaced
    tmpnodes = allsuccs & replaced
    # Reduce fullmapping to a direct relation between original nodes and
    # the final nodes created during history editing; dropped changesets
    # end up mapped to an empty collection.  Iterate to a fixed point,
    # resolving a node only once all of its successors are resolved.
    pending = set(fullmapping)
    final = {}
    while pending:
        for candidate in list(pending):
            succs = fullmapping[candidate]
            blocked = False
            for s in list(succs):
                if s in pending:
                    # this successor's own closure is still unknown, so
                    # the candidate cannot be resolved yet
                    blocked = True
                    break
                if s in final:
                    # intermediate node: splice its closure in place
                    succs.remove(s)
                    succs.update(final[s])
            if not blocked:
                final[candidate] = succs
                pending.remove(candidate)
    # temporary nodes do not belong in the final mapping
    for n in tmpnodes:
        del final[n]
    # every changeset in final is expected to exist in the repo; turn each
    # successor set into a list sorted by revision number (topological)
    nm = state.repo.changelog.nodemap
    for prec, succs in final.items():
        final[prec] = sorted(succs, key=nm.get)

    # compute the topmost element (necessary for bookmark handling)
    if new:
        newtopmost = sorted(new, key=state.repo.changelog.rev)[-1]
    elif not final:
        # Nothing was rewritten at all; we won't need `newtopmost` since
        # it is the same as `oldtopmost` and `processreplacement` knows it
        newtopmost = None
    else:
        # everybody died: the new topmost is the parent of the root
        getrev = state.repo.changelog.rev
        newtopmost = state.repo[sorted(final, key=getrev)[0]].p1().node()

    return final, tmpnodes, new, newtopmost
1579
1579
def movetopmostbookmarks(repo, oldtopmost, newtopmost):
    """Move bookmark from oldtopmost to newly created topmost

    This is arguably a feature and we may only want that for the active
    bookmark. But the behavior is kept compatible with the old version for now.
    """
    if not oldtopmost or not newtopmost:
        return
    oldbmarks = repo.nodebookmarks(oldtopmost)
    if not oldbmarks:
        return
    with repo.lock(), repo.transaction('histedit') as tr:
        # retarget every bookmark on the old topmost in one batch
        changes = [(name, newtopmost) for name in oldbmarks]
        repo._bookmarks.applychanges(repo, tr, changes)
1596
1596
def cleanupnode(ui, repo, nodes):
    """strip a group of nodes from the repository

    The set of nodes to strip may contain unknown nodes."""
    with repo.lock():
        # do not let filtering get in the way of the cleanse
        # we should probably get rid of obsolescence markers created during
        # the histedit, but we currently do not have such information.
        repo = repo.unfiltered()
        # keep only the nodes the repository actually knows about
        # (we use %lr instead of %ln to silently ignore unknown items)
        nodemap = repo.changelog.nodemap
        known = sorted(n for n in nodes if n in nodemap)
        roots = [c.node() for c in repo.set("roots(%ln)", known)]
        if roots:
            repair.strip(ui, repo, roots)
1613
1613
def stripwrapper(orig, ui, repo, nodelist, *args, **kwargs):
    """Wrap repair.strip to refuse stripping nodes a histedit still needs."""
    if isinstance(nodelist, str):
        nodelist = [nodelist]
    if os.path.exists(os.path.join(repo.path, 'histedit-state')):
        state = histeditstate(repo)
        state.read()
        # nodes the in-progress histedit still plans to act on
        inuse = set(a.node for a in state.actions if a.node)
        overlap = inuse.intersection(nodelist)
        if overlap:
            raise error.Abort(_("histedit in progress, can't strip %s")
                              % ', '.join(node.short(x) for x in overlap))
    return orig(ui, repo, nodelist, *args, **kwargs)

extensions.wrapfunction(repair, 'strip', stripwrapper)
1629
1629
def summaryhook(ui, repo):
    """'hg summary' hook: report how many histedit actions remain."""
    if not os.path.exists(repo.vfs.join('histedit-state')):
        return
    state = histeditstate(repo)
    state.read()
    if not state.actions:
        return
    # i18n: column positioning for "hg summary"
    remaining = (ui.label(_('%d remaining'), 'histedit.remaining')
                 % len(state.actions))
    ui.write(_('hist: %s (histedit --continue)\n') % remaining)
1640
1640
def extsetup(ui):
    """Register histedit's summary hook and its unfinished-state entries."""
    cmdutil.summaryhooks.add('histedit', summaryhook)
    unfinished = ['histedit-state', False, True, _('histedit in progress'),
                  _("use 'hg histedit --continue' or 'hg histedit --abort'")]
    cmdutil.unfinishedstates.append(unfinished)
    cmdutil.afterresolvedstates.append(
        ['histedit-state', _('hg histedit --continue')])
@@ -1,594 +1,595 b''
1 # Copyright 2009-2010 Gregory P. Ward
1 # Copyright 2009-2010 Gregory P. Ward
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 # Copyright 2010-2011 Fog Creek Software
3 # Copyright 2010-2011 Fog Creek Software
4 # Copyright 2010-2011 Unity Technologies
4 # Copyright 2010-2011 Unity Technologies
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 '''High-level command function for lfconvert, plus the cmdtable.'''
9 '''High-level command function for lfconvert, plus the cmdtable.'''
10 from __future__ import absolute_import
10 from __future__ import absolute_import
11
11
12 import errno
12 import errno
13 import hashlib
13 import hashlib
14 import os
14 import os
15 import shutil
15 import shutil
16
16
17 from mercurial.i18n import _
17 from mercurial.i18n import _
18
18
19 from mercurial import (
19 from mercurial import (
20 cmdutil,
20 cmdutil,
21 context,
21 context,
22 error,
22 error,
23 hg,
23 hg,
24 lock,
24 lock,
25 match as matchmod,
25 match as matchmod,
26 node,
26 node,
27 pycompat,
27 pycompat,
28 registrar,
28 registrar,
29 scmutil,
29 scmutil,
30 util,
30 util,
31 )
31 )
32
32
33 from ..convert import (
33 from ..convert import (
34 convcmd,
34 convcmd,
35 filemap,
35 filemap,
36 )
36 )
37
37
38 from . import (
38 from . import (
39 lfutil,
39 lfutil,
40 storefactory
40 storefactory
41 )
41 )
42
42
43 release = lock.release
43 release = lock.release
44
44
45 # -- Commands ----------------------------------------------------------
45 # -- Commands ----------------------------------------------------------
46
46
47 cmdtable = {}
47 cmdtable = {}
48 command = registrar.command(cmdtable)
48 command = registrar.command(cmdtable)
49
49
@command('lfconvert',
    [('s', 'size', '',
      _('minimum size (MB) for files to be converted as largefiles'), 'SIZE'),
    ('', 'to-normal', False,
     _('convert from a largefiles repo to a normal repo')),
    ],
    _('hg lfconvert SOURCE DEST [FILE ...]'),
    norepo=True,
    inferrepo=True)
def lfconvert(ui, src, dest, *pats, **opts):
    '''convert a normal repository to a largefiles repository

    Convert repository SOURCE to a new repository DEST, identical to
    SOURCE except that certain files will be converted as largefiles:
    specifically, any file that matches any PATTERN *or* whose size is
    above the minimum size threshold is converted as a largefile. The
    size used to determine whether or not to track a file as a
    largefile is the size of the first version of the file. The
    minimum size can be specified either with --size or in
    configuration as ``largefiles.size``.

    After running this command you will need to make sure that
    largefiles is enabled anywhere you intend to push the new
    repository.

    Use --to-normal to convert largefiles back to normal files; after
    this, the DEST repository can be used without largefiles at all.'''

    opts = pycompat.byteskwargs(opts)
    tolfile = not opts['to_normal']
    if tolfile:
        size = lfutil.getminsize(ui, True, opts.get('size'), default=None)

    if not hg.islocal(src):
        raise error.Abort(_('%s is not a local Mercurial repo') % src)
    if not hg.islocal(dest):
        raise error.Abort(_('%s is not a local Mercurial repo') % dest)

    rsrc = hg.repository(ui, src)
    ui.status(_('initializing destination %s\n') % dest)
    rdst = hg.repository(ui, dest, create=True)

    success = False
    dstwlock = dstlock = None
    try:
        # Walk every changeset in the source via changelog.nodesbetween()
        # (see mercurial/revlog.py for details), using a generator instead
        # of a list to keep memory usage down.
        ctxs = (rsrc[ctx] for ctx in rsrc.changelog.nodesbetween(None,
            rsrc.heads())[0])
        revmap = {node.nullid: node.nullid}
        if tolfile:
            # Only the destination needs locking while it is converted to;
            # the source history is immutable while we read it.
            dstwlock = rdst.wlock()
            dstlock = rdst.lock()

            lfiles = set()
            normalfiles = set()
            if not pats:
                pats = ui.configlist(lfutil.longname, 'patterns')
            if pats:
                matcher = matchmod.match(rsrc.root, '', list(pats))
            else:
                matcher = None

            lfiletohash = {}
            for ctx in ctxs:
                ui.progress(_('converting revisions'), ctx.rev(),
                            unit=_('revisions'), total=rsrc['tip'].rev())
                _lfconvert_addchangeset(rsrc, rdst, ctx, revmap,
                    lfiles, normalfiles, matcher, size, lfiletohash)
            ui.progress(_('converting revisions'), None)

            # Remove the working-copy largefile store and any converted
            # files left behind, pruning directories emptied on the way.
            if rdst.wvfs.exists(lfutil.shortname):
                rdst.wvfs.rmtree(lfutil.shortname)

            for f in lfiletohash:
                if rdst.wvfs.isfile(f):
                    rdst.wvfs.unlink(f)
                try:
                    rdst.wvfs.removedirs(rdst.wvfs.dirname(f))
                except OSError:
                    pass

            # If there were any files converted to largefiles, add
            # largefiles to the destination repository's requirements.
            if lfiles:
                rdst.requirements.add('largefiles')
                rdst._writerequirements()
        else:
            class lfsource(filemap.filemap_source):
                def __init__(self, ui, source):
                    super(lfsource, self).__init__(ui, source, None)
                    self.filemapper.rename[lfutil.shortname] = '.'

                def getfile(self, name, rev):
                    realname, realrev = rev
                    f = super(lfsource, self).getfile(name, rev)

                    if (not realname.startswith(lfutil.shortnameslash)
                            or f[0] is None):
                        return f

                    # swap the standin's hash for the real file contents
                    hash = f[0].strip()
                    path = lfutil.findfile(rsrc, hash)

                    if path is None:
                        raise error.Abort(_("missing largefile for '%s' in %s")
                                          % (realname, realrev))
                    return util.readfile(path), f[1]

            class converter(convcmd.converter):
                def __init__(self, ui, source, dest, revmapfile, opts):
                    super(converter, self).__init__(ui, lfsource(ui, source),
                                                    dest, revmapfile, opts)

            found, missing = downloadlfiles(ui, rsrc)
            if missing != 0:
                raise error.Abort(_("all largefiles must be present locally"))

            orig = convcmd.converter
            convcmd.converter = converter

            try:
                convcmd.convert(ui, src, dest, source_type='hg', dest_type='hg')
            finally:
                convcmd.converter = orig
        success = True
    finally:
        if tolfile:
            rdst.dirstate.clear()
            release(dstlock, dstwlock)
        if not success:
            # the conversion failed: discard the partially built target
            shutil.rmtree(rdst.root)
193
193
194 def _lfconvert_addchangeset(rsrc, rdst, ctx, revmap, lfiles, normalfiles,
194 def _lfconvert_addchangeset(rsrc, rdst, ctx, revmap, lfiles, normalfiles,
195 matcher, size, lfiletohash):
195 matcher, size, lfiletohash):
196 # Convert src parents to dst parents
196 # Convert src parents to dst parents
197 parents = _convertparents(ctx, revmap)
197 parents = _convertparents(ctx, revmap)
198
198
199 # Generate list of changed files
199 # Generate list of changed files
200 files = _getchangedfiles(ctx, parents)
200 files = _getchangedfiles(ctx, parents)
201
201
202 dstfiles = []
202 dstfiles = []
203 for f in files:
203 for f in files:
204 if f not in lfiles and f not in normalfiles:
204 if f not in lfiles and f not in normalfiles:
205 islfile = _islfile(f, ctx, matcher, size)
205 islfile = _islfile(f, ctx, matcher, size)
206 # If this file was renamed or copied then copy
206 # If this file was renamed or copied then copy
207 # the largefile-ness of its predecessor
207 # the largefile-ness of its predecessor
208 if f in ctx.manifest():
208 if f in ctx.manifest():
209 fctx = ctx.filectx(f)
209 fctx = ctx.filectx(f)
210 renamed = fctx.renamed()
210 renamed = fctx.renamed()
211 renamedlfile = renamed and renamed[0] in lfiles
211 renamedlfile = renamed and renamed[0] in lfiles
212 islfile |= renamedlfile
212 islfile |= renamedlfile
213 if 'l' in fctx.flags():
213 if 'l' in fctx.flags():
214 if renamedlfile:
214 if renamedlfile:
215 raise error.Abort(
215 raise error.Abort(
216 _('renamed/copied largefile %s becomes symlink')
216 _('renamed/copied largefile %s becomes symlink')
217 % f)
217 % f)
218 islfile = False
218 islfile = False
219 if islfile:
219 if islfile:
220 lfiles.add(f)
220 lfiles.add(f)
221 else:
221 else:
222 normalfiles.add(f)
222 normalfiles.add(f)
223
223
224 if f in lfiles:
224 if f in lfiles:
225 fstandin = lfutil.standin(f)
225 fstandin = lfutil.standin(f)
226 dstfiles.append(fstandin)
226 dstfiles.append(fstandin)
227 # largefile in manifest if it has not been removed/renamed
227 # largefile in manifest if it has not been removed/renamed
228 if f in ctx.manifest():
228 if f in ctx.manifest():
229 fctx = ctx.filectx(f)
229 fctx = ctx.filectx(f)
230 if 'l' in fctx.flags():
230 if 'l' in fctx.flags():
231 renamed = fctx.renamed()
231 renamed = fctx.renamed()
232 if renamed and renamed[0] in lfiles:
232 if renamed and renamed[0] in lfiles:
233 raise error.Abort(_('largefile %s becomes symlink') % f)
233 raise error.Abort(_('largefile %s becomes symlink') % f)
234
234
235 # largefile was modified, update standins
235 # largefile was modified, update standins
236 m = hashlib.sha1('')
236 m = hashlib.sha1('')
237 m.update(ctx[f].data())
237 m.update(ctx[f].data())
238 hash = m.hexdigest()
238 hash = m.hexdigest()
239 if f not in lfiletohash or lfiletohash[f] != hash:
239 if f not in lfiletohash or lfiletohash[f] != hash:
240 rdst.wwrite(f, ctx[f].data(), ctx[f].flags())
240 rdst.wwrite(f, ctx[f].data(), ctx[f].flags())
241 executable = 'x' in ctx[f].flags()
241 executable = 'x' in ctx[f].flags()
242 lfutil.writestandin(rdst, fstandin, hash,
242 lfutil.writestandin(rdst, fstandin, hash,
243 executable)
243 executable)
244 lfiletohash[f] = hash
244 lfiletohash[f] = hash
245 else:
245 else:
246 # normal file
246 # normal file
247 dstfiles.append(f)
247 dstfiles.append(f)
248
248
249 def getfilectx(repo, memctx, f):
249 def getfilectx(repo, memctx, f):
250 srcfname = lfutil.splitstandin(f)
250 srcfname = lfutil.splitstandin(f)
251 if srcfname is not None:
251 if srcfname is not None:
252 # if the file isn't in the manifest then it was removed
252 # if the file isn't in the manifest then it was removed
253 # or renamed, return None to indicate this
253 # or renamed, return None to indicate this
254 try:
254 try:
255 fctx = ctx.filectx(srcfname)
255 fctx = ctx.filectx(srcfname)
256 except error.LookupError:
256 except error.LookupError:
257 return None
257 return None
258 renamed = fctx.renamed()
258 renamed = fctx.renamed()
259 if renamed:
259 if renamed:
260 # standin is always a largefile because largefile-ness
260 # standin is always a largefile because largefile-ness
261 # doesn't change after rename or copy
261 # doesn't change after rename or copy
262 renamed = lfutil.standin(renamed[0])
262 renamed = lfutil.standin(renamed[0])
263
263
264 return context.memfilectx(repo, f, lfiletohash[srcfname] + '\n',
264 return context.memfilectx(repo, memctx, f,
265 lfiletohash[srcfname] + '\n',
265 'l' in fctx.flags(), 'x' in fctx.flags(),
266 'l' in fctx.flags(), 'x' in fctx.flags(),
266 renamed)
267 renamed)
267 else:
268 else:
268 return _getnormalcontext(repo, ctx, f, revmap)
269 return _getnormalcontext(repo, ctx, f, revmap)
269
270
270 # Commit
271 # Commit
271 _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap)
272 _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap)
272
273
273 def _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap):
274 def _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap):
274 mctx = context.memctx(rdst, parents, ctx.description(), dstfiles,
275 mctx = context.memctx(rdst, parents, ctx.description(), dstfiles,
275 getfilectx, ctx.user(), ctx.date(), ctx.extra())
276 getfilectx, ctx.user(), ctx.date(), ctx.extra())
276 ret = rdst.commitctx(mctx)
277 ret = rdst.commitctx(mctx)
277 lfutil.copyalltostore(rdst, ret)
278 lfutil.copyalltostore(rdst, ret)
278 rdst.setparents(ret)
279 rdst.setparents(ret)
279 revmap[ctx.node()] = rdst.changelog.tip()
280 revmap[ctx.node()] = rdst.changelog.tip()
280
281
281 # Generate list of changed files
282 # Generate list of changed files
282 def _getchangedfiles(ctx, parents):
283 def _getchangedfiles(ctx, parents):
283 files = set(ctx.files())
284 files = set(ctx.files())
284 if node.nullid not in parents:
285 if node.nullid not in parents:
285 mc = ctx.manifest()
286 mc = ctx.manifest()
286 mp1 = ctx.parents()[0].manifest()
287 mp1 = ctx.parents()[0].manifest()
287 mp2 = ctx.parents()[1].manifest()
288 mp2 = ctx.parents()[1].manifest()
288 files |= (set(mp1) | set(mp2)) - set(mc)
289 files |= (set(mp1) | set(mp2)) - set(mc)
289 for f in mc:
290 for f in mc:
290 if mc[f] != mp1.get(f, None) or mc[f] != mp2.get(f, None):
291 if mc[f] != mp1.get(f, None) or mc[f] != mp2.get(f, None):
291 files.add(f)
292 files.add(f)
292 return files
293 return files
293
294
294 # Convert src parents to dst parents
295 # Convert src parents to dst parents
295 def _convertparents(ctx, revmap):
296 def _convertparents(ctx, revmap):
296 parents = []
297 parents = []
297 for p in ctx.parents():
298 for p in ctx.parents():
298 parents.append(revmap[p.node()])
299 parents.append(revmap[p.node()])
299 while len(parents) < 2:
300 while len(parents) < 2:
300 parents.append(node.nullid)
301 parents.append(node.nullid)
301 return parents
302 return parents
302
303
303 # Get memfilectx for a normal file
304 # Get memfilectx for a normal file
304 def _getnormalcontext(repo, ctx, f, revmap):
305 def _getnormalcontext(repo, ctx, f, revmap):
305 try:
306 try:
306 fctx = ctx.filectx(f)
307 fctx = ctx.filectx(f)
307 except error.LookupError:
308 except error.LookupError:
308 return None
309 return None
309 renamed = fctx.renamed()
310 renamed = fctx.renamed()
310 if renamed:
311 if renamed:
311 renamed = renamed[0]
312 renamed = renamed[0]
312
313
313 data = fctx.data()
314 data = fctx.data()
314 if f == '.hgtags':
315 if f == '.hgtags':
315 data = _converttags (repo.ui, revmap, data)
316 data = _converttags (repo.ui, revmap, data)
316 return context.memfilectx(repo, f, data, 'l' in fctx.flags(),
317 return context.memfilectx(repo, ctx, f, data, 'l' in fctx.flags(),
317 'x' in fctx.flags(), renamed)
318 'x' in fctx.flags(), renamed)
318
319
319 # Remap tag data using a revision map
320 # Remap tag data using a revision map
320 def _converttags(ui, revmap, data):
321 def _converttags(ui, revmap, data):
321 newdata = []
322 newdata = []
322 for line in data.splitlines():
323 for line in data.splitlines():
323 try:
324 try:
324 id, name = line.split(' ', 1)
325 id, name = line.split(' ', 1)
325 except ValueError:
326 except ValueError:
326 ui.warn(_('skipping incorrectly formatted tag %s\n')
327 ui.warn(_('skipping incorrectly formatted tag %s\n')
327 % line)
328 % line)
328 continue
329 continue
329 try:
330 try:
330 newid = node.bin(id)
331 newid = node.bin(id)
331 except TypeError:
332 except TypeError:
332 ui.warn(_('skipping incorrectly formatted id %s\n')
333 ui.warn(_('skipping incorrectly formatted id %s\n')
333 % id)
334 % id)
334 continue
335 continue
335 try:
336 try:
336 newdata.append('%s %s\n' % (node.hex(revmap[newid]),
337 newdata.append('%s %s\n' % (node.hex(revmap[newid]),
337 name))
338 name))
338 except KeyError:
339 except KeyError:
339 ui.warn(_('no mapping for id %s\n') % id)
340 ui.warn(_('no mapping for id %s\n') % id)
340 continue
341 continue
341 return ''.join(newdata)
342 return ''.join(newdata)
342
343
343 def _islfile(file, ctx, matcher, size):
344 def _islfile(file, ctx, matcher, size):
344 '''Return true if file should be considered a largefile, i.e.
345 '''Return true if file should be considered a largefile, i.e.
345 matcher matches it or it is larger than size.'''
346 matcher matches it or it is larger than size.'''
346 # never store special .hg* files as largefiles
347 # never store special .hg* files as largefiles
347 if file == '.hgtags' or file == '.hgignore' or file == '.hgsigs':
348 if file == '.hgtags' or file == '.hgignore' or file == '.hgsigs':
348 return False
349 return False
349 if matcher and matcher(file):
350 if matcher and matcher(file):
350 return True
351 return True
351 try:
352 try:
352 return ctx.filectx(file).size() >= size * 1024 * 1024
353 return ctx.filectx(file).size() >= size * 1024 * 1024
353 except error.LookupError:
354 except error.LookupError:
354 return False
355 return False
355
356
356 def uploadlfiles(ui, rsrc, rdst, files):
357 def uploadlfiles(ui, rsrc, rdst, files):
357 '''upload largefiles to the central store'''
358 '''upload largefiles to the central store'''
358
359
359 if not files:
360 if not files:
360 return
361 return
361
362
362 store = storefactory.openstore(rsrc, rdst, put=True)
363 store = storefactory.openstore(rsrc, rdst, put=True)
363
364
364 at = 0
365 at = 0
365 ui.debug("sending statlfile command for %d largefiles\n" % len(files))
366 ui.debug("sending statlfile command for %d largefiles\n" % len(files))
366 retval = store.exists(files)
367 retval = store.exists(files)
367 files = filter(lambda h: not retval[h], files)
368 files = filter(lambda h: not retval[h], files)
368 ui.debug("%d largefiles need to be uploaded\n" % len(files))
369 ui.debug("%d largefiles need to be uploaded\n" % len(files))
369
370
370 for hash in files:
371 for hash in files:
371 ui.progress(_('uploading largefiles'), at, unit=_('files'),
372 ui.progress(_('uploading largefiles'), at, unit=_('files'),
372 total=len(files))
373 total=len(files))
373 source = lfutil.findfile(rsrc, hash)
374 source = lfutil.findfile(rsrc, hash)
374 if not source:
375 if not source:
375 raise error.Abort(_('largefile %s missing from store'
376 raise error.Abort(_('largefile %s missing from store'
376 ' (needs to be uploaded)') % hash)
377 ' (needs to be uploaded)') % hash)
377 # XXX check for errors here
378 # XXX check for errors here
378 store.put(source, hash)
379 store.put(source, hash)
379 at += 1
380 at += 1
380 ui.progress(_('uploading largefiles'), None)
381 ui.progress(_('uploading largefiles'), None)
381
382
382 def verifylfiles(ui, repo, all=False, contents=False):
383 def verifylfiles(ui, repo, all=False, contents=False):
383 '''Verify that every largefile revision in the current changeset
384 '''Verify that every largefile revision in the current changeset
384 exists in the central store. With --contents, also verify that
385 exists in the central store. With --contents, also verify that
385 the contents of each local largefile file revision are correct (SHA-1 hash
386 the contents of each local largefile file revision are correct (SHA-1 hash
386 matches the revision ID). With --all, check every changeset in
387 matches the revision ID). With --all, check every changeset in
387 this repository.'''
388 this repository.'''
388 if all:
389 if all:
389 revs = repo.revs('all()')
390 revs = repo.revs('all()')
390 else:
391 else:
391 revs = ['.']
392 revs = ['.']
392
393
393 store = storefactory.openstore(repo)
394 store = storefactory.openstore(repo)
394 return store.verify(revs, contents=contents)
395 return store.verify(revs, contents=contents)
395
396
396 def cachelfiles(ui, repo, node, filelist=None):
397 def cachelfiles(ui, repo, node, filelist=None):
397 '''cachelfiles ensures that all largefiles needed by the specified revision
398 '''cachelfiles ensures that all largefiles needed by the specified revision
398 are present in the repository's largefile cache.
399 are present in the repository's largefile cache.
399
400
400 returns a tuple (cached, missing). cached is the list of files downloaded
401 returns a tuple (cached, missing). cached is the list of files downloaded
401 by this operation; missing is the list of files that were needed but could
402 by this operation; missing is the list of files that were needed but could
402 not be found.'''
403 not be found.'''
403 lfiles = lfutil.listlfiles(repo, node)
404 lfiles = lfutil.listlfiles(repo, node)
404 if filelist:
405 if filelist:
405 lfiles = set(lfiles) & set(filelist)
406 lfiles = set(lfiles) & set(filelist)
406 toget = []
407 toget = []
407
408
408 ctx = repo[node]
409 ctx = repo[node]
409 for lfile in lfiles:
410 for lfile in lfiles:
410 try:
411 try:
411 expectedhash = lfutil.readasstandin(ctx[lfutil.standin(lfile)])
412 expectedhash = lfutil.readasstandin(ctx[lfutil.standin(lfile)])
412 except IOError as err:
413 except IOError as err:
413 if err.errno == errno.ENOENT:
414 if err.errno == errno.ENOENT:
414 continue # node must be None and standin wasn't found in wctx
415 continue # node must be None and standin wasn't found in wctx
415 raise
416 raise
416 if not lfutil.findfile(repo, expectedhash):
417 if not lfutil.findfile(repo, expectedhash):
417 toget.append((lfile, expectedhash))
418 toget.append((lfile, expectedhash))
418
419
419 if toget:
420 if toget:
420 store = storefactory.openstore(repo)
421 store = storefactory.openstore(repo)
421 ret = store.get(toget)
422 ret = store.get(toget)
422 return ret
423 return ret
423
424
424 return ([], [])
425 return ([], [])
425
426
426 def downloadlfiles(ui, repo, rev=None):
427 def downloadlfiles(ui, repo, rev=None):
427 match = scmutil.match(repo[None], [repo.wjoin(lfutil.shortname)], {})
428 match = scmutil.match(repo[None], [repo.wjoin(lfutil.shortname)], {})
428 def prepare(ctx, fns):
429 def prepare(ctx, fns):
429 pass
430 pass
430 totalsuccess = 0
431 totalsuccess = 0
431 totalmissing = 0
432 totalmissing = 0
432 if rev != []: # walkchangerevs on empty list would return all revs
433 if rev != []: # walkchangerevs on empty list would return all revs
433 for ctx in cmdutil.walkchangerevs(repo, match, {'rev' : rev},
434 for ctx in cmdutil.walkchangerevs(repo, match, {'rev' : rev},
434 prepare):
435 prepare):
435 success, missing = cachelfiles(ui, repo, ctx.node())
436 success, missing = cachelfiles(ui, repo, ctx.node())
436 totalsuccess += len(success)
437 totalsuccess += len(success)
437 totalmissing += len(missing)
438 totalmissing += len(missing)
438 ui.status(_("%d additional largefiles cached\n") % totalsuccess)
439 ui.status(_("%d additional largefiles cached\n") % totalsuccess)
439 if totalmissing > 0:
440 if totalmissing > 0:
440 ui.status(_("%d largefiles failed to download\n") % totalmissing)
441 ui.status(_("%d largefiles failed to download\n") % totalmissing)
441 return totalsuccess, totalmissing
442 return totalsuccess, totalmissing
442
443
443 def updatelfiles(ui, repo, filelist=None, printmessage=None,
444 def updatelfiles(ui, repo, filelist=None, printmessage=None,
444 normallookup=False):
445 normallookup=False):
445 '''Update largefiles according to standins in the working directory
446 '''Update largefiles according to standins in the working directory
446
447
447 If ``printmessage`` is other than ``None``, it means "print (or
448 If ``printmessage`` is other than ``None``, it means "print (or
448 ignore, for false) message forcibly".
449 ignore, for false) message forcibly".
449 '''
450 '''
450 statuswriter = lfutil.getstatuswriter(ui, repo, printmessage)
451 statuswriter = lfutil.getstatuswriter(ui, repo, printmessage)
451 with repo.wlock():
452 with repo.wlock():
452 lfdirstate = lfutil.openlfdirstate(ui, repo)
453 lfdirstate = lfutil.openlfdirstate(ui, repo)
453 lfiles = set(lfutil.listlfiles(repo)) | set(lfdirstate)
454 lfiles = set(lfutil.listlfiles(repo)) | set(lfdirstate)
454
455
455 if filelist is not None:
456 if filelist is not None:
456 filelist = set(filelist)
457 filelist = set(filelist)
457 lfiles = [f for f in lfiles if f in filelist]
458 lfiles = [f for f in lfiles if f in filelist]
458
459
459 update = {}
460 update = {}
460 dropped = set()
461 dropped = set()
461 updated, removed = 0, 0
462 updated, removed = 0, 0
462 wvfs = repo.wvfs
463 wvfs = repo.wvfs
463 wctx = repo[None]
464 wctx = repo[None]
464 for lfile in lfiles:
465 for lfile in lfiles:
465 rellfile = lfile
466 rellfile = lfile
466 rellfileorig = os.path.relpath(
467 rellfileorig = os.path.relpath(
467 scmutil.origpath(ui, repo, wvfs.join(rellfile)),
468 scmutil.origpath(ui, repo, wvfs.join(rellfile)),
468 start=repo.root)
469 start=repo.root)
469 relstandin = lfutil.standin(lfile)
470 relstandin = lfutil.standin(lfile)
470 relstandinorig = os.path.relpath(
471 relstandinorig = os.path.relpath(
471 scmutil.origpath(ui, repo, wvfs.join(relstandin)),
472 scmutil.origpath(ui, repo, wvfs.join(relstandin)),
472 start=repo.root)
473 start=repo.root)
473 if wvfs.exists(relstandin):
474 if wvfs.exists(relstandin):
474 if (wvfs.exists(relstandinorig) and
475 if (wvfs.exists(relstandinorig) and
475 wvfs.exists(rellfile)):
476 wvfs.exists(rellfile)):
476 shutil.copyfile(wvfs.join(rellfile),
477 shutil.copyfile(wvfs.join(rellfile),
477 wvfs.join(rellfileorig))
478 wvfs.join(rellfileorig))
478 wvfs.unlinkpath(relstandinorig)
479 wvfs.unlinkpath(relstandinorig)
479 expecthash = lfutil.readasstandin(wctx[relstandin])
480 expecthash = lfutil.readasstandin(wctx[relstandin])
480 if expecthash != '':
481 if expecthash != '':
481 if lfile not in wctx: # not switched to normal file
482 if lfile not in wctx: # not switched to normal file
482 if repo.dirstate[relstandin] != '?':
483 if repo.dirstate[relstandin] != '?':
483 wvfs.unlinkpath(rellfile, ignoremissing=True)
484 wvfs.unlinkpath(rellfile, ignoremissing=True)
484 else:
485 else:
485 dropped.add(rellfile)
486 dropped.add(rellfile)
486
487
487 # use normallookup() to allocate an entry in largefiles
488 # use normallookup() to allocate an entry in largefiles
488 # dirstate to prevent lfilesrepo.status() from reporting
489 # dirstate to prevent lfilesrepo.status() from reporting
489 # missing files as removed.
490 # missing files as removed.
490 lfdirstate.normallookup(lfile)
491 lfdirstate.normallookup(lfile)
491 update[lfile] = expecthash
492 update[lfile] = expecthash
492 else:
493 else:
493 # Remove lfiles for which the standin is deleted, unless the
494 # Remove lfiles for which the standin is deleted, unless the
494 # lfile is added to the repository again. This happens when a
495 # lfile is added to the repository again. This happens when a
495 # largefile is converted back to a normal file: the standin
496 # largefile is converted back to a normal file: the standin
496 # disappears, but a new (normal) file appears as the lfile.
497 # disappears, but a new (normal) file appears as the lfile.
497 if (wvfs.exists(rellfile) and
498 if (wvfs.exists(rellfile) and
498 repo.dirstate.normalize(lfile) not in wctx):
499 repo.dirstate.normalize(lfile) not in wctx):
499 wvfs.unlinkpath(rellfile)
500 wvfs.unlinkpath(rellfile)
500 removed += 1
501 removed += 1
501
502
502 # largefile processing might be slow and be interrupted - be prepared
503 # largefile processing might be slow and be interrupted - be prepared
503 lfdirstate.write()
504 lfdirstate.write()
504
505
505 if lfiles:
506 if lfiles:
506 lfiles = [f for f in lfiles if f not in dropped]
507 lfiles = [f for f in lfiles if f not in dropped]
507
508
508 for f in dropped:
509 for f in dropped:
509 repo.wvfs.unlinkpath(lfutil.standin(f))
510 repo.wvfs.unlinkpath(lfutil.standin(f))
510
511
511 # This needs to happen for dropped files, otherwise they stay in
512 # This needs to happen for dropped files, otherwise they stay in
512 # the M state.
513 # the M state.
513 lfutil.synclfdirstate(repo, lfdirstate, f, normallookup)
514 lfutil.synclfdirstate(repo, lfdirstate, f, normallookup)
514
515
515 statuswriter(_('getting changed largefiles\n'))
516 statuswriter(_('getting changed largefiles\n'))
516 cachelfiles(ui, repo, None, lfiles)
517 cachelfiles(ui, repo, None, lfiles)
517
518
518 for lfile in lfiles:
519 for lfile in lfiles:
519 update1 = 0
520 update1 = 0
520
521
521 expecthash = update.get(lfile)
522 expecthash = update.get(lfile)
522 if expecthash:
523 if expecthash:
523 if not lfutil.copyfromcache(repo, expecthash, lfile):
524 if not lfutil.copyfromcache(repo, expecthash, lfile):
524 # failed ... but already removed and set to normallookup
525 # failed ... but already removed and set to normallookup
525 continue
526 continue
526 # Synchronize largefile dirstate to the last modified
527 # Synchronize largefile dirstate to the last modified
527 # time of the file
528 # time of the file
528 lfdirstate.normal(lfile)
529 lfdirstate.normal(lfile)
529 update1 = 1
530 update1 = 1
530
531
531 # copy the exec mode of largefile standin from the repository's
532 # copy the exec mode of largefile standin from the repository's
532 # dirstate to its state in the lfdirstate.
533 # dirstate to its state in the lfdirstate.
533 rellfile = lfile
534 rellfile = lfile
534 relstandin = lfutil.standin(lfile)
535 relstandin = lfutil.standin(lfile)
535 if wvfs.exists(relstandin):
536 if wvfs.exists(relstandin):
536 # exec is decided by the users permissions using mask 0o100
537 # exec is decided by the users permissions using mask 0o100
537 standinexec = wvfs.stat(relstandin).st_mode & 0o100
538 standinexec = wvfs.stat(relstandin).st_mode & 0o100
538 st = wvfs.stat(rellfile)
539 st = wvfs.stat(rellfile)
539 mode = st.st_mode
540 mode = st.st_mode
540 if standinexec != mode & 0o100:
541 if standinexec != mode & 0o100:
541 # first remove all X bits, then shift all R bits to X
542 # first remove all X bits, then shift all R bits to X
542 mode &= ~0o111
543 mode &= ~0o111
543 if standinexec:
544 if standinexec:
544 mode |= (mode >> 2) & 0o111 & ~util.umask
545 mode |= (mode >> 2) & 0o111 & ~util.umask
545 wvfs.chmod(rellfile, mode)
546 wvfs.chmod(rellfile, mode)
546 update1 = 1
547 update1 = 1
547
548
548 updated += update1
549 updated += update1
549
550
550 lfutil.synclfdirstate(repo, lfdirstate, lfile, normallookup)
551 lfutil.synclfdirstate(repo, lfdirstate, lfile, normallookup)
551
552
552 lfdirstate.write()
553 lfdirstate.write()
553 if lfiles:
554 if lfiles:
554 statuswriter(_('%d largefiles updated, %d removed\n') % (updated,
555 statuswriter(_('%d largefiles updated, %d removed\n') % (updated,
555 removed))
556 removed))
556
557
557 @command('lfpull',
558 @command('lfpull',
558 [('r', 'rev', [], _('pull largefiles for these revisions'))
559 [('r', 'rev', [], _('pull largefiles for these revisions'))
559 ] + cmdutil.remoteopts,
560 ] + cmdutil.remoteopts,
560 _('-r REV... [-e CMD] [--remotecmd CMD] [SOURCE]'))
561 _('-r REV... [-e CMD] [--remotecmd CMD] [SOURCE]'))
561 def lfpull(ui, repo, source="default", **opts):
562 def lfpull(ui, repo, source="default", **opts):
562 """pull largefiles for the specified revisions from the specified source
563 """pull largefiles for the specified revisions from the specified source
563
564
564 Pull largefiles that are referenced from local changesets but missing
565 Pull largefiles that are referenced from local changesets but missing
565 locally, pulling from a remote repository to the local cache.
566 locally, pulling from a remote repository to the local cache.
566
567
567 If SOURCE is omitted, the 'default' path will be used.
568 If SOURCE is omitted, the 'default' path will be used.
568 See :hg:`help urls` for more information.
569 See :hg:`help urls` for more information.
569
570
570 .. container:: verbose
571 .. container:: verbose
571
572
572 Some examples:
573 Some examples:
573
574
574 - pull largefiles for all branch heads::
575 - pull largefiles for all branch heads::
575
576
576 hg lfpull -r "head() and not closed()"
577 hg lfpull -r "head() and not closed()"
577
578
578 - pull largefiles on the default branch::
579 - pull largefiles on the default branch::
579
580
580 hg lfpull -r "branch(default)"
581 hg lfpull -r "branch(default)"
581 """
582 """
582 repo.lfpullsource = source
583 repo.lfpullsource = source
583
584
584 revs = opts.get(r'rev', [])
585 revs = opts.get(r'rev', [])
585 if not revs:
586 if not revs:
586 raise error.Abort(_('no revisions specified'))
587 raise error.Abort(_('no revisions specified'))
587 revs = scmutil.revrange(repo, revs)
588 revs = scmutil.revrange(repo, revs)
588
589
589 numcached = 0
590 numcached = 0
590 for rev in revs:
591 for rev in revs:
591 ui.note(_('pulling largefiles for revision %s\n') % rev)
592 ui.note(_('pulling largefiles for revision %s\n') % rev)
592 (cached, missing) = cachelfiles(ui, repo, rev)
593 (cached, missing) = cachelfiles(ui, repo, rev)
593 numcached += len(cached)
594 numcached += len(cached)
594 ui.status(_("%d largefiles cached\n") % numcached)
595 ui.status(_("%d largefiles cached\n") % numcached)
@@ -1,261 +1,261 b''
1 # uncommit - undo the actions of a commit
1 # uncommit - undo the actions of a commit
2 #
2 #
3 # Copyright 2011 Peter Arrenbrecht <peter.arrenbrecht@gmail.com>
3 # Copyright 2011 Peter Arrenbrecht <peter.arrenbrecht@gmail.com>
4 # Logilab SA <contact@logilab.fr>
4 # Logilab SA <contact@logilab.fr>
5 # Pierre-Yves David <pierre-yves.david@ens-lyon.org>
5 # Pierre-Yves David <pierre-yves.david@ens-lyon.org>
6 # Patrick Mezard <patrick@mezard.eu>
6 # Patrick Mezard <patrick@mezard.eu>
7 # Copyright 2016 Facebook, Inc.
7 # Copyright 2016 Facebook, Inc.
8 #
8 #
9 # This software may be used and distributed according to the terms of the
9 # This software may be used and distributed according to the terms of the
10 # GNU General Public License version 2 or any later version.
10 # GNU General Public License version 2 or any later version.
11
11
12 """uncommit part or all of a local changeset (EXPERIMENTAL)
12 """uncommit part or all of a local changeset (EXPERIMENTAL)
13
13
14 This command undoes the effect of a local commit, returning the affected
14 This command undoes the effect of a local commit, returning the affected
15 files to their uncommitted state. This means that files modified, added or
15 files to their uncommitted state. This means that files modified, added or
16 removed in the changeset will be left unchanged, and so will remain modified,
16 removed in the changeset will be left unchanged, and so will remain modified,
17 added and removed in the working directory.
17 added and removed in the working directory.
18 """
18 """
19
19
20 from __future__ import absolute_import
20 from __future__ import absolute_import
21
21
22 from mercurial.i18n import _
22 from mercurial.i18n import _
23
23
24 from mercurial import (
24 from mercurial import (
25 cmdutil,
25 cmdutil,
26 commands,
26 commands,
27 context,
27 context,
28 copies,
28 copies,
29 error,
29 error,
30 node,
30 node,
31 obsutil,
31 obsutil,
32 pycompat,
32 pycompat,
33 registrar,
33 registrar,
34 rewriteutil,
34 rewriteutil,
35 scmutil,
35 scmutil,
36 )
36 )
37
37
38 cmdtable = {}
38 cmdtable = {}
39 command = registrar.command(cmdtable)
39 command = registrar.command(cmdtable)
40
40
41 configtable = {}
41 configtable = {}
42 configitem = registrar.configitem(configtable)
42 configitem = registrar.configitem(configtable)
43
43
44 configitem('experimental', 'uncommitondirtywdir',
44 configitem('experimental', 'uncommitondirtywdir',
45 default=False,
45 default=False,
46 )
46 )
47
47
48 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
48 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
49 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
49 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
50 # be specifying the version(s) of Mercurial they are tested with, or
50 # be specifying the version(s) of Mercurial they are tested with, or
51 # leave the attribute unspecified.
51 # leave the attribute unspecified.
52 testedwith = 'ships-with-hg-core'
52 testedwith = 'ships-with-hg-core'
53
53
54 def _commitfiltered(repo, ctx, match, allowempty):
54 def _commitfiltered(repo, ctx, match, allowempty):
55 """Recommit ctx with changed files not in match. Return the new
55 """Recommit ctx with changed files not in match. Return the new
56 node identifier, or None if nothing changed.
56 node identifier, or None if nothing changed.
57 """
57 """
58 base = ctx.p1()
58 base = ctx.p1()
59 # ctx
59 # ctx
60 initialfiles = set(ctx.files())
60 initialfiles = set(ctx.files())
61 exclude = set(f for f in initialfiles if match(f))
61 exclude = set(f for f in initialfiles if match(f))
62
62
63 # No files matched commit, so nothing excluded
63 # No files matched commit, so nothing excluded
64 if not exclude:
64 if not exclude:
65 return None
65 return None
66
66
67 files = (initialfiles - exclude)
67 files = (initialfiles - exclude)
68 # return the p1 so that we don't create an obsmarker later
68 # return the p1 so that we don't create an obsmarker later
69 if not files and not allowempty:
69 if not files and not allowempty:
70 return ctx.parents()[0].node()
70 return ctx.parents()[0].node()
71
71
72 # Filter copies
72 # Filter copies
73 copied = copies.pathcopies(base, ctx)
73 copied = copies.pathcopies(base, ctx)
74 copied = dict((dst, src) for dst, src in copied.iteritems()
74 copied = dict((dst, src) for dst, src in copied.iteritems()
75 if dst in files)
75 if dst in files)
76 def filectxfn(repo, memctx, path, contentctx=ctx, redirect=()):
76 def filectxfn(repo, memctx, path, contentctx=ctx, redirect=()):
77 if path not in contentctx:
77 if path not in contentctx:
78 return None
78 return None
79 fctx = contentctx[path]
79 fctx = contentctx[path]
80 mctx = context.memfilectx(repo, fctx.path(), fctx.data(),
80 mctx = context.memfilectx(repo, memctx, fctx.path(), fctx.data(),
81 fctx.islink(),
81 fctx.islink(),
82 fctx.isexec(),
82 fctx.isexec(),
83 copied=copied.get(path))
83 copied=copied.get(path))
84 return mctx
84 return mctx
85
85
86 new = context.memctx(repo,
86 new = context.memctx(repo,
87 parents=[base.node(), node.nullid],
87 parents=[base.node(), node.nullid],
88 text=ctx.description(),
88 text=ctx.description(),
89 files=files,
89 files=files,
90 filectxfn=filectxfn,
90 filectxfn=filectxfn,
91 user=ctx.user(),
91 user=ctx.user(),
92 date=ctx.date(),
92 date=ctx.date(),
93 extra=ctx.extra())
93 extra=ctx.extra())
94 # phase handling
94 # phase handling
95 commitphase = ctx.phase()
95 commitphase = ctx.phase()
96 overrides = {('phases', 'new-commit'): commitphase}
96 overrides = {('phases', 'new-commit'): commitphase}
97 with repo.ui.configoverride(overrides, 'uncommit'):
97 with repo.ui.configoverride(overrides, 'uncommit'):
98 newid = repo.commitctx(new)
98 newid = repo.commitctx(new)
99 return newid
99 return newid
100
100
101 def _fixdirstate(repo, oldctx, newctx, status):
101 def _fixdirstate(repo, oldctx, newctx, status):
102 """ fix the dirstate after switching the working directory from oldctx to
102 """ fix the dirstate after switching the working directory from oldctx to
103 newctx which can be result of either unamend or uncommit.
103 newctx which can be result of either unamend or uncommit.
104 """
104 """
105 ds = repo.dirstate
105 ds = repo.dirstate
106 copies = dict(ds.copies())
106 copies = dict(ds.copies())
107 s = status
107 s = status
108 for f in s.modified:
108 for f in s.modified:
109 if ds[f] == 'r':
109 if ds[f] == 'r':
110 # modified + removed -> removed
110 # modified + removed -> removed
111 continue
111 continue
112 ds.normallookup(f)
112 ds.normallookup(f)
113
113
114 for f in s.added:
114 for f in s.added:
115 if ds[f] == 'r':
115 if ds[f] == 'r':
116 # added + removed -> unknown
116 # added + removed -> unknown
117 ds.drop(f)
117 ds.drop(f)
118 elif ds[f] != 'a':
118 elif ds[f] != 'a':
119 ds.add(f)
119 ds.add(f)
120
120
121 for f in s.removed:
121 for f in s.removed:
122 if ds[f] == 'a':
122 if ds[f] == 'a':
123 # removed + added -> normal
123 # removed + added -> normal
124 ds.normallookup(f)
124 ds.normallookup(f)
125 elif ds[f] != 'r':
125 elif ds[f] != 'r':
126 ds.remove(f)
126 ds.remove(f)
127
127
128 # Merge old parent and old working dir copies
128 # Merge old parent and old working dir copies
129 oldcopies = {}
129 oldcopies = {}
130 for f in (s.modified + s.added):
130 for f in (s.modified + s.added):
131 src = oldctx[f].renamed()
131 src = oldctx[f].renamed()
132 if src:
132 if src:
133 oldcopies[f] = src[0]
133 oldcopies[f] = src[0]
134 oldcopies.update(copies)
134 oldcopies.update(copies)
135 copies = dict((dst, oldcopies.get(src, src))
135 copies = dict((dst, oldcopies.get(src, src))
136 for dst, src in oldcopies.iteritems())
136 for dst, src in oldcopies.iteritems())
137 # Adjust the dirstate copies
137 # Adjust the dirstate copies
138 for dst, src in copies.iteritems():
138 for dst, src in copies.iteritems():
139 if (src not in newctx or dst in newctx or ds[dst] != 'a'):
139 if (src not in newctx or dst in newctx or ds[dst] != 'a'):
140 src = None
140 src = None
141 ds.copy(src, dst)
141 ds.copy(src, dst)
142
142
143 @command('uncommit',
143 @command('uncommit',
144 [('', 'keep', False, _('allow an empty commit after uncommiting')),
144 [('', 'keep', False, _('allow an empty commit after uncommiting')),
145 ] + commands.walkopts,
145 ] + commands.walkopts,
146 _('[OPTION]... [FILE]...'))
146 _('[OPTION]... [FILE]...'))
147 def uncommit(ui, repo, *pats, **opts):
147 def uncommit(ui, repo, *pats, **opts):
148 """uncommit part or all of a local changeset
148 """uncommit part or all of a local changeset
149
149
150 This command undoes the effect of a local commit, returning the affected
150 This command undoes the effect of a local commit, returning the affected
151 files to their uncommitted state. This means that files modified or
151 files to their uncommitted state. This means that files modified or
152 deleted in the changeset will be left unchanged, and so will remain
152 deleted in the changeset will be left unchanged, and so will remain
153 modified in the working directory.
153 modified in the working directory.
154 """
154 """
155 opts = pycompat.byteskwargs(opts)
155 opts = pycompat.byteskwargs(opts)
156
156
157 with repo.wlock(), repo.lock():
157 with repo.wlock(), repo.lock():
158
158
159 if not pats and not repo.ui.configbool('experimental',
159 if not pats and not repo.ui.configbool('experimental',
160 'uncommitondirtywdir'):
160 'uncommitondirtywdir'):
161 cmdutil.bailifchanged(repo)
161 cmdutil.bailifchanged(repo)
162 old = repo['.']
162 old = repo['.']
163 rewriteutil.precheck(repo, [old.rev()], 'uncommit')
163 rewriteutil.precheck(repo, [old.rev()], 'uncommit')
164 if len(old.parents()) > 1:
164 if len(old.parents()) > 1:
165 raise error.Abort(_("cannot uncommit merge changeset"))
165 raise error.Abort(_("cannot uncommit merge changeset"))
166
166
167 with repo.transaction('uncommit'):
167 with repo.transaction('uncommit'):
168 match = scmutil.match(old, pats, opts)
168 match = scmutil.match(old, pats, opts)
169 newid = _commitfiltered(repo, old, match, opts.get('keep'))
169 newid = _commitfiltered(repo, old, match, opts.get('keep'))
170 if newid is None:
170 if newid is None:
171 ui.status(_("nothing to uncommit\n"))
171 ui.status(_("nothing to uncommit\n"))
172 return 1
172 return 1
173
173
174 mapping = {}
174 mapping = {}
175 if newid != old.p1().node():
175 if newid != old.p1().node():
176 # Move local changes on filtered changeset
176 # Move local changes on filtered changeset
177 mapping[old.node()] = (newid,)
177 mapping[old.node()] = (newid,)
178 else:
178 else:
179 # Fully removed the old commit
179 # Fully removed the old commit
180 mapping[old.node()] = ()
180 mapping[old.node()] = ()
181
181
182 scmutil.cleanupnodes(repo, mapping, 'uncommit')
182 scmutil.cleanupnodes(repo, mapping, 'uncommit')
183
183
184 with repo.dirstate.parentchange():
184 with repo.dirstate.parentchange():
185 repo.dirstate.setparents(newid, node.nullid)
185 repo.dirstate.setparents(newid, node.nullid)
186 s = repo.status(old.p1(), old, match=match)
186 s = repo.status(old.p1(), old, match=match)
187 _fixdirstate(repo, old, repo[newid], s)
187 _fixdirstate(repo, old, repo[newid], s)
188
188
189 def predecessormarkers(ctx):
189 def predecessormarkers(ctx):
190 """yields the obsolete markers marking the given changeset as a successor"""
190 """yields the obsolete markers marking the given changeset as a successor"""
191 for data in ctx.repo().obsstore.predecessors.get(ctx.node(), ()):
191 for data in ctx.repo().obsstore.predecessors.get(ctx.node(), ()):
192 yield obsutil.marker(ctx.repo(), data)
192 yield obsutil.marker(ctx.repo(), data)
193
193
194 @command('^unamend', [])
194 @command('^unamend', [])
195 def unamend(ui, repo, **opts):
195 def unamend(ui, repo, **opts):
196 """
196 """
197 undo the most recent amend operation on a current changeset
197 undo the most recent amend operation on a current changeset
198
198
199 This command will roll back to the previous version of a changeset,
199 This command will roll back to the previous version of a changeset,
200 leaving working directory in state in which it was before running
200 leaving working directory in state in which it was before running
201 `hg amend` (e.g. files modified as part of an amend will be
201 `hg amend` (e.g. files modified as part of an amend will be
202 marked as modified `hg status`)
202 marked as modified `hg status`)
203 """
203 """
204
204
205 unfi = repo.unfiltered()
205 unfi = repo.unfiltered()
206 with repo.wlock(), repo.lock(), repo.transaction('unamend'):
206 with repo.wlock(), repo.lock(), repo.transaction('unamend'):
207
207
208 # identify the commit from which to unamend
208 # identify the commit from which to unamend
209 curctx = repo['.']
209 curctx = repo['.']
210
210
211 if not curctx.mutable():
211 if not curctx.mutable():
212 raise error.Abort(_('cannot unamend public changesets'))
212 raise error.Abort(_('cannot unamend public changesets'))
213
213
214 # identify the commit to which to unamend
214 # identify the commit to which to unamend
215 markers = list(predecessormarkers(curctx))
215 markers = list(predecessormarkers(curctx))
216 if len(markers) != 1:
216 if len(markers) != 1:
217 e = _("changeset must have one predecessor, found %i predecessors")
217 e = _("changeset must have one predecessor, found %i predecessors")
218 raise error.Abort(e % len(markers))
218 raise error.Abort(e % len(markers))
219
219
220 prednode = markers[0].prednode()
220 prednode = markers[0].prednode()
221 predctx = unfi[prednode]
221 predctx = unfi[prednode]
222
222
223 if curctx.children():
223 if curctx.children():
224 raise error.Abort(_("cannot unamend a changeset with children"))
224 raise error.Abort(_("cannot unamend a changeset with children"))
225
225
226 # add an extra so that we get a new hash
226 # add an extra so that we get a new hash
227 # note: allowing unamend to undo an unamend is an intentional feature
227 # note: allowing unamend to undo an unamend is an intentional feature
228 extras = predctx.extra()
228 extras = predctx.extra()
229 extras['unamend_source'] = curctx.hex()
229 extras['unamend_source'] = curctx.hex()
230
230
231 def filectxfn(repo, ctx_, path):
231 def filectxfn(repo, ctx_, path):
232 try:
232 try:
233 return predctx.filectx(path)
233 return predctx.filectx(path)
234 except KeyError:
234 except KeyError:
235 return None
235 return None
236
236
237 # Make a new commit same as predctx
237 # Make a new commit same as predctx
238 newctx = context.memctx(repo,
238 newctx = context.memctx(repo,
239 parents=(predctx.p1(), predctx.p2()),
239 parents=(predctx.p1(), predctx.p2()),
240 text=predctx.description(),
240 text=predctx.description(),
241 files=predctx.files(),
241 files=predctx.files(),
242 filectxfn=filectxfn,
242 filectxfn=filectxfn,
243 user=predctx.user(),
243 user=predctx.user(),
244 date=predctx.date(),
244 date=predctx.date(),
245 extra=extras)
245 extra=extras)
246 # phase handling
246 # phase handling
247 commitphase = curctx.phase()
247 commitphase = curctx.phase()
248 overrides = {('phases', 'new-commit'): commitphase}
248 overrides = {('phases', 'new-commit'): commitphase}
249 with repo.ui.configoverride(overrides, 'uncommit'):
249 with repo.ui.configoverride(overrides, 'uncommit'):
250 newprednode = repo.commitctx(newctx)
250 newprednode = repo.commitctx(newctx)
251
251
252 newpredctx = repo[newprednode]
252 newpredctx = repo[newprednode]
253 dirstate = repo.dirstate
253 dirstate = repo.dirstate
254
254
255 with dirstate.parentchange():
255 with dirstate.parentchange():
256 dirstate.setparents(newprednode, node.nullid)
256 dirstate.setparents(newprednode, node.nullid)
257 s = repo.status(predctx, curctx)
257 s = repo.status(predctx, curctx)
258 _fixdirstate(repo, curctx, newpredctx, s)
258 _fixdirstate(repo, curctx, newpredctx, s)
259
259
260 mapping = {curctx.node(): (newprednode,)}
260 mapping = {curctx.node(): (newprednode,)}
261 scmutil.cleanupnodes(repo, mapping, 'unamend')
261 scmutil.cleanupnodes(repo, mapping, 'unamend')
@@ -1,3975 +1,3975 b''
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import itertools
11 import itertools
12 import os
12 import os
13 import re
13 import re
14 import tempfile
14 import tempfile
15
15
16 from .i18n import _
16 from .i18n import _
17 from .node import (
17 from .node import (
18 hex,
18 hex,
19 nullid,
19 nullid,
20 nullrev,
20 nullrev,
21 short,
21 short,
22 )
22 )
23
23
24 from . import (
24 from . import (
25 bookmarks,
25 bookmarks,
26 changelog,
26 changelog,
27 copies,
27 copies,
28 crecord as crecordmod,
28 crecord as crecordmod,
29 dagop,
29 dagop,
30 dirstateguard,
30 dirstateguard,
31 encoding,
31 encoding,
32 error,
32 error,
33 formatter,
33 formatter,
34 graphmod,
34 graphmod,
35 match as matchmod,
35 match as matchmod,
36 mdiff,
36 mdiff,
37 obsolete,
37 obsolete,
38 patch,
38 patch,
39 pathutil,
39 pathutil,
40 pycompat,
40 pycompat,
41 registrar,
41 registrar,
42 revlog,
42 revlog,
43 revset,
43 revset,
44 scmutil,
44 scmutil,
45 smartset,
45 smartset,
46 templatekw,
46 templatekw,
47 templater,
47 templater,
48 util,
48 util,
49 vfs as vfsmod,
49 vfs as vfsmod,
50 )
50 )
51 stringio = util.stringio
51 stringio = util.stringio
52
52
53 # templates of common command options
53 # templates of common command options
54
54
55 dryrunopts = [
55 dryrunopts = [
56 ('n', 'dry-run', None,
56 ('n', 'dry-run', None,
57 _('do not perform actions, just print output')),
57 _('do not perform actions, just print output')),
58 ]
58 ]
59
59
60 remoteopts = [
60 remoteopts = [
61 ('e', 'ssh', '',
61 ('e', 'ssh', '',
62 _('specify ssh command to use'), _('CMD')),
62 _('specify ssh command to use'), _('CMD')),
63 ('', 'remotecmd', '',
63 ('', 'remotecmd', '',
64 _('specify hg command to run on the remote side'), _('CMD')),
64 _('specify hg command to run on the remote side'), _('CMD')),
65 ('', 'insecure', None,
65 ('', 'insecure', None,
66 _('do not verify server certificate (ignoring web.cacerts config)')),
66 _('do not verify server certificate (ignoring web.cacerts config)')),
67 ]
67 ]
68
68
69 walkopts = [
69 walkopts = [
70 ('I', 'include', [],
70 ('I', 'include', [],
71 _('include names matching the given patterns'), _('PATTERN')),
71 _('include names matching the given patterns'), _('PATTERN')),
72 ('X', 'exclude', [],
72 ('X', 'exclude', [],
73 _('exclude names matching the given patterns'), _('PATTERN')),
73 _('exclude names matching the given patterns'), _('PATTERN')),
74 ]
74 ]
75
75
76 commitopts = [
76 commitopts = [
77 ('m', 'message', '',
77 ('m', 'message', '',
78 _('use text as commit message'), _('TEXT')),
78 _('use text as commit message'), _('TEXT')),
79 ('l', 'logfile', '',
79 ('l', 'logfile', '',
80 _('read commit message from file'), _('FILE')),
80 _('read commit message from file'), _('FILE')),
81 ]
81 ]
82
82
83 commitopts2 = [
83 commitopts2 = [
84 ('d', 'date', '',
84 ('d', 'date', '',
85 _('record the specified date as commit date'), _('DATE')),
85 _('record the specified date as commit date'), _('DATE')),
86 ('u', 'user', '',
86 ('u', 'user', '',
87 _('record the specified user as committer'), _('USER')),
87 _('record the specified user as committer'), _('USER')),
88 ]
88 ]
89
89
90 # hidden for now
90 # hidden for now
91 formatteropts = [
91 formatteropts = [
92 ('T', 'template', '',
92 ('T', 'template', '',
93 _('display with template (EXPERIMENTAL)'), _('TEMPLATE')),
93 _('display with template (EXPERIMENTAL)'), _('TEMPLATE')),
94 ]
94 ]
95
95
96 templateopts = [
96 templateopts = [
97 ('', 'style', '',
97 ('', 'style', '',
98 _('display using template map file (DEPRECATED)'), _('STYLE')),
98 _('display using template map file (DEPRECATED)'), _('STYLE')),
99 ('T', 'template', '',
99 ('T', 'template', '',
100 _('display with template'), _('TEMPLATE')),
100 _('display with template'), _('TEMPLATE')),
101 ]
101 ]
102
102
103 logopts = [
103 logopts = [
104 ('p', 'patch', None, _('show patch')),
104 ('p', 'patch', None, _('show patch')),
105 ('g', 'git', None, _('use git extended diff format')),
105 ('g', 'git', None, _('use git extended diff format')),
106 ('l', 'limit', '',
106 ('l', 'limit', '',
107 _('limit number of changes displayed'), _('NUM')),
107 _('limit number of changes displayed'), _('NUM')),
108 ('M', 'no-merges', None, _('do not show merges')),
108 ('M', 'no-merges', None, _('do not show merges')),
109 ('', 'stat', None, _('output diffstat-style summary of changes')),
109 ('', 'stat', None, _('output diffstat-style summary of changes')),
110 ('G', 'graph', None, _("show the revision DAG")),
110 ('G', 'graph', None, _("show the revision DAG")),
111 ] + templateopts
111 ] + templateopts
112
112
113 diffopts = [
113 diffopts = [
114 ('a', 'text', None, _('treat all files as text')),
114 ('a', 'text', None, _('treat all files as text')),
115 ('g', 'git', None, _('use git extended diff format')),
115 ('g', 'git', None, _('use git extended diff format')),
116 ('', 'binary', None, _('generate binary diffs in git mode (default)')),
116 ('', 'binary', None, _('generate binary diffs in git mode (default)')),
117 ('', 'nodates', None, _('omit dates from diff headers'))
117 ('', 'nodates', None, _('omit dates from diff headers'))
118 ]
118 ]
119
119
120 diffwsopts = [
120 diffwsopts = [
121 ('w', 'ignore-all-space', None,
121 ('w', 'ignore-all-space', None,
122 _('ignore white space when comparing lines')),
122 _('ignore white space when comparing lines')),
123 ('b', 'ignore-space-change', None,
123 ('b', 'ignore-space-change', None,
124 _('ignore changes in the amount of white space')),
124 _('ignore changes in the amount of white space')),
125 ('B', 'ignore-blank-lines', None,
125 ('B', 'ignore-blank-lines', None,
126 _('ignore changes whose lines are all blank')),
126 _('ignore changes whose lines are all blank')),
127 ('Z', 'ignore-space-at-eol', None,
127 ('Z', 'ignore-space-at-eol', None,
128 _('ignore changes in whitespace at EOL')),
128 _('ignore changes in whitespace at EOL')),
129 ]
129 ]
130
130
131 diffopts2 = [
131 diffopts2 = [
132 ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
132 ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
133 ('p', 'show-function', None, _('show which function each change is in')),
133 ('p', 'show-function', None, _('show which function each change is in')),
134 ('', 'reverse', None, _('produce a diff that undoes the changes')),
134 ('', 'reverse', None, _('produce a diff that undoes the changes')),
135 ] + diffwsopts + [
135 ] + diffwsopts + [
136 ('U', 'unified', '',
136 ('U', 'unified', '',
137 _('number of lines of context to show'), _('NUM')),
137 _('number of lines of context to show'), _('NUM')),
138 ('', 'stat', None, _('output diffstat-style summary of changes')),
138 ('', 'stat', None, _('output diffstat-style summary of changes')),
139 ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
139 ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
140 ]
140 ]
141
141
142 mergetoolopts = [
142 mergetoolopts = [
143 ('t', 'tool', '', _('specify merge tool')),
143 ('t', 'tool', '', _('specify merge tool')),
144 ]
144 ]
145
145
146 similarityopts = [
146 similarityopts = [
147 ('s', 'similarity', '',
147 ('s', 'similarity', '',
148 _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
148 _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
149 ]
149 ]
150
150
151 subrepoopts = [
151 subrepoopts = [
152 ('S', 'subrepos', None,
152 ('S', 'subrepos', None,
153 _('recurse into subrepositories'))
153 _('recurse into subrepositories'))
154 ]
154 ]
155
155
156 debugrevlogopts = [
156 debugrevlogopts = [
157 ('c', 'changelog', False, _('open changelog')),
157 ('c', 'changelog', False, _('open changelog')),
158 ('m', 'manifest', False, _('open manifest')),
158 ('m', 'manifest', False, _('open manifest')),
159 ('', 'dir', '', _('open directory manifest')),
159 ('', 'dir', '', _('open directory manifest')),
160 ]
160 ]
161
161
162 # special string such that everything below this line will be ingored in the
162 # special string such that everything below this line will be ingored in the
163 # editor text
163 # editor text
164 _linebelow = "^HG: ------------------------ >8 ------------------------$"
164 _linebelow = "^HG: ------------------------ >8 ------------------------$"
165
165
166 def ishunk(x):
166 def ishunk(x):
167 hunkclasses = (crecordmod.uihunk, patch.recordhunk)
167 hunkclasses = (crecordmod.uihunk, patch.recordhunk)
168 return isinstance(x, hunkclasses)
168 return isinstance(x, hunkclasses)
169
169
170 def newandmodified(chunks, originalchunks):
170 def newandmodified(chunks, originalchunks):
171 newlyaddedandmodifiedfiles = set()
171 newlyaddedandmodifiedfiles = set()
172 for chunk in chunks:
172 for chunk in chunks:
173 if ishunk(chunk) and chunk.header.isnewfile() and chunk not in \
173 if ishunk(chunk) and chunk.header.isnewfile() and chunk not in \
174 originalchunks:
174 originalchunks:
175 newlyaddedandmodifiedfiles.add(chunk.header.filename())
175 newlyaddedandmodifiedfiles.add(chunk.header.filename())
176 return newlyaddedandmodifiedfiles
176 return newlyaddedandmodifiedfiles
177
177
178 def parsealiases(cmd):
178 def parsealiases(cmd):
179 return cmd.lstrip("^").split("|")
179 return cmd.lstrip("^").split("|")
180
180
181 def setupwrapcolorwrite(ui):
181 def setupwrapcolorwrite(ui):
182 # wrap ui.write so diff output can be labeled/colorized
182 # wrap ui.write so diff output can be labeled/colorized
183 def wrapwrite(orig, *args, **kw):
183 def wrapwrite(orig, *args, **kw):
184 label = kw.pop(r'label', '')
184 label = kw.pop(r'label', '')
185 for chunk, l in patch.difflabel(lambda: args):
185 for chunk, l in patch.difflabel(lambda: args):
186 orig(chunk, label=label + l)
186 orig(chunk, label=label + l)
187
187
188 oldwrite = ui.write
188 oldwrite = ui.write
189 def wrap(*args, **kwargs):
189 def wrap(*args, **kwargs):
190 return wrapwrite(oldwrite, *args, **kwargs)
190 return wrapwrite(oldwrite, *args, **kwargs)
191 setattr(ui, 'write', wrap)
191 setattr(ui, 'write', wrap)
192 return oldwrite
192 return oldwrite
193
193
194 def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
194 def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
195 if usecurses:
195 if usecurses:
196 if testfile:
196 if testfile:
197 recordfn = crecordmod.testdecorator(testfile,
197 recordfn = crecordmod.testdecorator(testfile,
198 crecordmod.testchunkselector)
198 crecordmod.testchunkselector)
199 else:
199 else:
200 recordfn = crecordmod.chunkselector
200 recordfn = crecordmod.chunkselector
201
201
202 return crecordmod.filterpatch(ui, originalhunks, recordfn, operation)
202 return crecordmod.filterpatch(ui, originalhunks, recordfn, operation)
203
203
204 else:
204 else:
205 return patch.filterpatch(ui, originalhunks, operation)
205 return patch.filterpatch(ui, originalhunks, operation)
206
206
207 def recordfilter(ui, originalhunks, operation=None):
207 def recordfilter(ui, originalhunks, operation=None):
208 """ Prompts the user to filter the originalhunks and return a list of
208 """ Prompts the user to filter the originalhunks and return a list of
209 selected hunks.
209 selected hunks.
210 *operation* is used for to build ui messages to indicate the user what
210 *operation* is used for to build ui messages to indicate the user what
211 kind of filtering they are doing: reverting, committing, shelving, etc.
211 kind of filtering they are doing: reverting, committing, shelving, etc.
212 (see patch.filterpatch).
212 (see patch.filterpatch).
213 """
213 """
214 usecurses = crecordmod.checkcurses(ui)
214 usecurses = crecordmod.checkcurses(ui)
215 testfile = ui.config('experimental', 'crecordtest')
215 testfile = ui.config('experimental', 'crecordtest')
216 oldwrite = setupwrapcolorwrite(ui)
216 oldwrite = setupwrapcolorwrite(ui)
217 try:
217 try:
218 newchunks, newopts = filterchunks(ui, originalhunks, usecurses,
218 newchunks, newopts = filterchunks(ui, originalhunks, usecurses,
219 testfile, operation)
219 testfile, operation)
220 finally:
220 finally:
221 ui.write = oldwrite
221 ui.write = oldwrite
222 return newchunks, newopts
222 return newchunks, newopts
223
223
224 def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
224 def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
225 filterfn, *pats, **opts):
225 filterfn, *pats, **opts):
226 from . import merge as mergemod
226 from . import merge as mergemod
227 opts = pycompat.byteskwargs(opts)
227 opts = pycompat.byteskwargs(opts)
228 if not ui.interactive():
228 if not ui.interactive():
229 if cmdsuggest:
229 if cmdsuggest:
230 msg = _('running non-interactively, use %s instead') % cmdsuggest
230 msg = _('running non-interactively, use %s instead') % cmdsuggest
231 else:
231 else:
232 msg = _('running non-interactively')
232 msg = _('running non-interactively')
233 raise error.Abort(msg)
233 raise error.Abort(msg)
234
234
235 # make sure username is set before going interactive
235 # make sure username is set before going interactive
236 if not opts.get('user'):
236 if not opts.get('user'):
237 ui.username() # raise exception, username not provided
237 ui.username() # raise exception, username not provided
238
238
    def recordfunc(ui, repo, message, match, opts):
        """This is generic record driver.

        Its job is to interactively filter local changes, and
        accordingly prepare working directory into a state in which the
        job can be delegated to a non-interactive commit command such as
        'commit' or 'qrefresh'.

        After the actual job is done by non-interactive command, the
        working directory is restored to its original state.

        In the end we'll record interesting changes, and everything else
        will be left in place, so the user can continue working.

        NOTE(review): this is a closure; 'filterfn', 'backupall',
        'commitfunc' and 'pats' come from the enclosing scope, which is
        not visible in this chunk -- confirm against the enclosing
        definition.
        """

        checkunfinished(repo, commit=True)
        wctx = repo[None]
        merge = len(wctx.parents()) > 1
        if merge:
            raise error.Abort(_('cannot partially commit a merge '
                               '(use "hg commit" instead)'))

        def fail(f, msg):
            raise error.Abort('%s: %s' % (f, msg))

        force = opts.get('force')
        if not force:
            vdirs = []
            match.explicitdir = vdirs.append
            match.bad = fail

        status = repo.status(match=match)
        if not force:
            repo.checkcommitpatterns(wctx, vdirs, match, status, fail)
        # git-style diffs with no dates give the interactive filter stable,
        # parseable hunks
        diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
        diffopts.nodates = True
        diffopts.git = True
        diffopts.showfunc = True
        originaldiff = patch.diff(repo, changes=status, opts=diffopts)
        originalchunks = patch.parsepatch(originaldiff)

        # 1. filter patch, since we are intending to apply subset of it
        try:
            chunks, newopts = filterfn(ui, originalchunks)
        except error.PatchError as err:
            raise error.Abort(_('error parsing patch: %s') % err)
        opts.update(newopts)

        # We need to keep a backup of files that have been newly added and
        # modified during the recording process because there is a previous
        # version without the edit in the workdir
        newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
        # every file any selected hunk touches; headers without files()
        # (e.g. bare hunk objects) are skipped
        contenders = set()
        for h in chunks:
            try:
                contenders.update(set(h.files()))
            except AttributeError:
                pass

        changed = status.modified + status.added + status.removed
        newfiles = [f for f in changed if f in contenders]
        if not newfiles:
            ui.status(_('no changes to record\n'))
            return 0

        modified = set(status.modified)

        # 2. backup changed files, so we can restore them in the end

        if backupall:
            tobackup = changed
        else:
            tobackup = [f for f in newfiles if f in modified or f in \
                    newlyaddedandmodifiedfiles]
        backups = {}
        if tobackup:
            backupdir = repo.vfs.join('record-backups')
            try:
                os.mkdir(backupdir)
            except OSError as err:
                # the backup directory may survive from a previous run
                if err.errno != errno.EEXIST:
                    raise
        try:
            # backup continues
            for f in tobackup:
                fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
                                               dir=backupdir)
                os.close(fd)
                ui.debug('backup %r as %r\n' % (f, tmpname))
                util.copyfile(repo.wjoin(f), tmpname, copystat=True)
                backups[f] = tmpname

            # assemble the filtered patch text; dopatch doubles as a
            # "anything selected?" flag (byte count written)
            fp = stringio()
            for c in chunks:
                fname = c.filename()
                if fname in backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            # 2.5 optionally review / modify patch in text editor
            if opts.get('review', False):
                patchtext = (crecordmod.diffhelptext
                             + crecordmod.patchhelptext
                             + fp.read())
                reviewedpatch = ui.edit(patchtext, "",
                                        action="diff",
                                        repopath=repo.path)
                fp.truncate(0)
                fp.write(reviewedpatch)
                fp.seek(0)

            [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
            # 3a. apply filtered patch to clean repo (clean)
            if backups:
                # Equivalent to hg.revert
                m = scmutil.matchfiles(repo, backups.keys())
                mergemod.update(repo, repo.dirstate.p1(),
                                False, True, matcher=m)

            # 3b. (apply)
            if dopatch:
                try:
                    ui.debug('applying patch\n')
                    ui.debug(fp.getvalue())
                    patch.internalpatch(ui, repo, fp, 1, eolmode=None)
                except error.PatchError as err:
                    raise error.Abort(str(err))
            del fp

            # 4. We prepared working directory according to filtered
            # patch. Now is the time to delegate the job to
            # commit/qrefresh or the like!

            # Make all of the pathnames absolute.
            newfiles = [repo.wjoin(nf) for nf in newfiles]
            return commitfunc(ui, repo, *newfiles, **pycompat.strkwargs(opts))
        finally:
            # 5. finally restore backed-up files
            try:
                dirstate = repo.dirstate
                for realname, tmpname in backups.iteritems():
                    ui.debug('restoring %r to %r\n' % (tmpname, realname))

                    if dirstate[realname] == 'n':
                        # without normallookup, restoring timestamp
                        # may cause partially committed files
                        # to be treated as unmodified
                        dirstate.normallookup(realname)

                    # copystat=True here and above are a hack to trick any
                    # editors that have f open that we haven't modified them.
                    #
                    # Also note that this racy as an editor could notice the
                    # file's mtime before we've finished writing it.
                    util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
                    os.unlink(tmpname)
                if tobackup:
                    os.rmdir(backupdir)
            except OSError:
                pass
400
400
401 def recordinwlock(ui, repo, message, match, opts):
401 def recordinwlock(ui, repo, message, match, opts):
402 with repo.wlock():
402 with repo.wlock():
403 return recordfunc(ui, repo, message, match, opts)
403 return recordfunc(ui, repo, message, match, opts)
404
404
405 return commit(ui, repo, recordinwlock, pats, opts)
405 return commit(ui, repo, recordinwlock, pats, opts)
406
406
class dirnode(object):
    """
    A node in a tree of working-copy directories, carrying the data needed
    to decide whether this directory's status output can be tersed.

    path is the path to the directory

    statuses is a set of statuses of all files in this directory (this
    includes all the files in all the subdirectories too)

    files is a list of files which are direct child of this directory

    subdirs is a dictionary of sub-directory name as the key and it's own
    dirnode object as the value
    """

    def __init__(self, dirpath):
        self.path = dirpath
        self.statuses = set()
        self.files = []
        self.subdirs = {}

    def _addfileindir(self, filename, status):
        """Record a file that lives directly in this directory."""
        self.files.append((filename, status))

    def addfile(self, filename, status):
        """
        Add a file somewhere below this directory.

        A name containing a path separator belongs to a subdirectory; it is
        forwarded (creating intermediate dirnodes lazily) until it reaches
        the directory of which it is a direct child.
        """
        if '/' not in filename:
            # direct child of this directory
            self._addfileindir(filename, status)
        else:
            head, rest = filename.split('/', 1)

            # lazily create the dirnode object for the subdirectory
            if head not in self.subdirs:
                self.subdirs[head] = dirnode(os.path.join(self.path, head))

            # delegate the rest of the path to the subdirectory
            self.subdirs[head].addfile(rest, status)

        # every status seen below this node is remembered here too
        if status not in self.statuses:
            self.statuses.add(status)

    def iterfilepaths(self):
        """Yield (status, path) for files directly under this directory."""
        for name, st in self.files:
            yield st, os.path.join(self.path, name)

    def tersewalk(self, terseargs):
        """
        Yield (status, path) pairs obtained by processing the status of
        this dirnode.

        terseargs is the string of arguments passed by the user with
        `--terse` flag.

        If every file below this directory (recursively) shares a single
        status and that status was requested for tersing, a lone
        (status, dirpath + separator) pair is produced. Otherwise the
        direct child files are yielded individually and each subdirectory
        is walked recursively.
        """
        if len(self.statuses) == 1:
            # NOTE: pop() empties self.statuses, so a dirnode tree is
            # effectively single-use for tersewalk
            onlyst = self.statuses.pop()

            # terse only when this status abbreviation was requested
            if onlyst in terseargs:
                yield onlyst, self.path + pycompat.ossep
                return

        # files that are direct children of this directory
        for entry in self.iterfilepaths():
            yield entry

        # recurse into every subdirectory
        for sub in self.subdirs.values():
            for entry in sub.tersewalk(terseargs):
                yield entry
506
506
def tersedir(statuslist, terseargs):
    """
    Terse the status if all the files in a directory shares the same status.

    statuslist is scmutil.status() object which contains a list of files for
    each status.
    terseargs is string which is passed by the user as the argument to
    `--terse` flag.

    Builds a dirnode tree for the whole working copy, then walks it,
    collapsing any directory whose files all share a single requested
    status.
    """
    # the order matters here as that is used to produce final list
    allst = ('m', 'a', 'r', 'd', 'u', 'i', 'c')

    # checking the argument validity
    for flag in pycompat.bytestr(terseargs):
        if flag not in allst:
            raise error.Abort(_("'%s' not recognized") % flag)

    # creating a dirnode object for the root of the repo
    rootobj = dirnode('')
    pstatus = ('modified', 'added', 'deleted', 'clean', 'unknown',
               'ignored', 'removed')

    # feed every file into the tree, keyed by its one-letter status
    tersedict = {}
    for attrname in pstatus:
        abbrev = attrname[0:1]
        tersedict[abbrev] = []
        for f in getattr(statuslist, attrname):
            rootobj.addfile(f, abbrev)

    # we won't be tersing the root dir, so add files in it
    for st, fpath in rootobj.iterfilepaths():
        tersedict[st].append(fpath)

    # process each sub-directory and build tersedict
    for subdir in rootobj.subdirs.values():
        for st, f in subdir.tersewalk(terseargs):
            tersedict[st].append(f)

    # one sorted list per status, in canonical 'marduic' order
    return [sorted(tersedict[st]) for st in allst]
555
555
556 def _commentlines(raw):
556 def _commentlines(raw):
557 '''Surround lineswith a comment char and a new line'''
557 '''Surround lineswith a comment char and a new line'''
558 lines = raw.splitlines()
558 lines = raw.splitlines()
559 commentedlines = ['# %s' % line for line in lines]
559 commentedlines = ['# %s' % line for line in lines]
560 return '\n'.join(commentedlines) + '\n'
560 return '\n'.join(commentedlines) + '\n'
561
561
def _conflictsmsg(repo):
    """Return a commented summary of unresolved merge conflicts.

    Returns None when no merge is in progress.
    """
    # avoid merge cycle
    from . import merge as mergemod
    mergestate = mergemod.mergestate.read(repo)
    if not mergestate.active():
        return

    # restrict to files matched by the default match of the working copy
    m = scmutil.match(repo[None])
    unresolvedlist = [f for f in mergestate.unresolved() if m(f)]
    if not unresolvedlist:
        msg = _('No unresolved merge conflicts.')
    else:
        paths = [' %s' % util.pathto(repo.root, pycompat.getcwd(), path)
                 for path in unresolvedlist]
        msg = _('''Unresolved merge conflicts:

%s

To mark files as resolved: hg resolve --mark FILE''') % '\n'.join(paths)

    return _commentlines(msg)
584
584
def _helpmessage(continuecmd, abortcmd):
    """Return commented continue/abort instructions."""
    text = _('To continue: %s\n'
             'To abort: %s') % (continuecmd, abortcmd)
    return _commentlines(text)
589
589
def _rebasemsg():
    # continue/abort instructions for an interrupted rebase
    return _helpmessage('hg rebase --continue', 'hg rebase --abort')
592
592
def _histeditmsg():
    # continue/abort instructions for an interrupted histedit
    return _helpmessage('hg histedit --continue', 'hg histedit --abort')
595
595
def _unshelvemsg():
    # continue/abort instructions for an interrupted unshelve
    return _helpmessage('hg unshelve --continue', 'hg unshelve --abort')
598
598
def _updatecleanmsg(dest=None):
    """Return the 'hg update --clean' suggestion with a discard warning."""
    target = dest or '.'
    warning = _('warning: this will discard uncommitted changes')
    return 'hg update --clean %s (%s)' % (target, warning)
602
602
def _graftmsg():
    # tweakdefaults requires `update` to have a rev hence the `.`
    return _helpmessage('hg graft --continue', _updatecleanmsg())
606
606
def _mergemsg():
    # tweakdefaults requires `update` to have a rev hence the `.`
    return _helpmessage('hg commit', _updatecleanmsg())
610
610
def _bisectmsg():
    """Return commented instructions for an in-progress bisection."""
    text = _('To mark the changeset good: hg bisect --good\n'
             'To mark the changeset bad: hg bisect --bad\n'
             'To abort: hg bisect --reset\n')
    return _commentlines(text)
616
616
def fileexistspredicate(filename):
    """Return a predicate testing whether filename exists in repo.vfs."""
    def predicate(repo):
        return repo.vfs.exists(filename)
    return predicate
619
619
def _mergepredicate(repo):
    # an uncommitted merge has two working-directory parents
    return len(repo[None].parents()) > 1
622
622
# Detectable unfinished operation states, checked in order by _getrepostate().
STATES = (
    # (state, predicate to detect states, helpful message function)
    ('histedit', fileexistspredicate('histedit-state'), _histeditmsg),
    ('bisect', fileexistspredicate('bisect.state'), _bisectmsg),
    ('graft', fileexistspredicate('graftstate'), _graftmsg),
    ('unshelve', fileexistspredicate('unshelverebasestate'), _unshelvemsg),
    ('rebase', fileexistspredicate('rebasestate'), _rebasemsg),
    # The merge state is part of a list that will be iterated over.
    # They need to be last because some of the other unfinished states may also
    # be in a merge or update state (eg. rebase, histedit, graft, etc).
    # We want those to have priority.
    ('merge', _mergepredicate, _mergemsg),
)
636
636
def _getrepostate(repo):
    """Return the first matching (state, predicate, msgfn) tuple from STATES,
    or None if no unfinished state is detected (or all matches are skipped).
    """
    # experimental config: commands.status.skipstates
    skip = set(repo.ui.configlist('commands', 'status.skipstates'))
    for candidate in STATES:
        state, predicate, msgfn = candidate
        if state in skip:
            continue
        if predicate(repo):
            return candidate
645
645
def morestatus(repo, fm):
    """Write extra information about any unfinished repo state to fm."""
    statetuple = _getrepostate(repo)
    if not statetuple:
        return

    label = 'status.morestatus'
    fm.startitem()
    state, statedetectionpredicate, helpfulmsg = statetuple
    statemsg = _('The repository is in an unfinished *%s* state.') % state
    fm.write('statemsg', '%s\n', _commentlines(statemsg), label=label)
    conmsg = _conflictsmsg(repo)
    if conmsg:
        fm.write('conflictsmsg', '%s\n', conmsg, label=label)
    if helpfulmsg:
        fm.write('helpmsg', '%s\n', helpfulmsg(), label=label)
660
660
def findpossible(cmd, table, strict=False):
    """
    Return cmd -> (aliases, command table entry)
    for each matching command.
    Return debug commands (or their aliases) only if no normal command matches.
    """
    choice = {}
    debugchoice = {}

    if cmd in table:
        # short-circuit exact matches, "log" alias beats "^log|history"
        keys = [cmd]
    else:
        keys = table.keys()

    allcmds = []
    for entry in keys:
        aliases = parsealiases(entry)
        allcmds.extend(aliases)

        # exact alias match wins; otherwise (non-strict) take the first
        # alias that cmd is a prefix of
        found = None
        if cmd in aliases:
            found = cmd
        elif not strict:
            for alias in aliases:
                if alias.startswith(cmd):
                    found = alias
                    break

        if found is None:
            continue
        if aliases[0].startswith("debug") or found.startswith("debug"):
            debugchoice[found] = (aliases, table[entry])
        else:
            choice[found] = (aliases, table[entry])

    # debug commands surface only when nothing else matched
    if not choice and debugchoice:
        choice = debugchoice

    return choice, allcmds
698
698
def findcmd(cmd, table, strict=True):
    """Return (aliases, command table entry) for command string."""
    choice, allcmds = findpossible(cmd, table, strict)

    # exact match beats any prefix matches
    if cmd in choice:
        return choice[cmd]

    if len(choice) > 1:
        raise error.AmbiguousCommand(cmd, sorted(choice))

    if choice:
        return list(choice.values())[0]

    raise error.UnknownCommand(cmd, allcmds)
714
714
def findrepo(p):
    """Walk upwards from p looking for a directory containing '.hg'.

    Returns the repository root, or None when the filesystem root is
    reached without finding one.
    """
    while not os.path.isdir(os.path.join(p, ".hg")):
        parent = os.path.dirname(p)
        if parent == p:
            # reached the filesystem root
            return None
        p = parent

    return p
722
722
def bailifchanged(repo, merge=True, hint=None):
    """ enforce the precondition that working directory must be clean.

    'merge' can be set to false if a pending uncommitted merge should be
    ignored (such as when 'update --check' runs).

    'hint' is the usual hint given to Abort exception.
    """

    if merge and repo.dirstate.p2() != nullid:
        raise error.Abort(_('outstanding uncommitted merge'), hint=hint)

    # any modified, added, removed or deleted file makes the tree dirty
    if any(repo.status()[:4]):
        raise error.Abort(_('uncommitted changes'), hint=hint)

    # subrepos must be clean too
    ctx = repo[None]
    for s in sorted(ctx.substate):
        ctx.sub(s).bailifchanged(hint=hint)
740
740
def logmessage(ui, opts):
    """ get the log message according to -m and -l option """
    message = opts.get('message')
    logfile = opts.get('logfile')

    if message and logfile:
        raise error.Abort(_('options --message and --logfile are mutually '
                            'exclusive'))

    if logfile and not message:
        try:
            if isstdiofilename(logfile):
                # '-' means read the message from stdin
                message = ui.fin.read()
            else:
                message = '\n'.join(util.readfile(logfile).splitlines())
        except IOError as inst:
            raise error.Abort(_("can't read commit message '%s': %s") %
                              (logfile, encoding.strtolocal(inst.strerror)))

    return message
759
759
def mergeeditform(ctxorbool, baseformname):
    """return appropriate editform name (referencing a committemplate)

    'ctxorbool' is either a ctx to be committed, or a bool indicating whether
    merging is committed.

    This returns baseformname with '.merge' appended if it is a merge,
    otherwise '.normal' is appended.
    """
    if isinstance(ctxorbool, bool):
        ismerge = ctxorbool
    else:
        # a merge changeset has more than one parent
        ismerge = len(ctxorbool.parents()) > 1

    suffix = ".merge" if ismerge else ".normal"
    return baseformname + suffix
776
776
def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
                    editform='', **opts):
    """get appropriate commit message editor according to '--edit' option

    'finishdesc' is a function to be called with edited commit message
    (= 'description' of the new changeset) just after editing, but
    before checking empty-ness. It should return actual text to be
    stored into history. This allows to change description before
    storing.

    'extramsg' is a extra message to be shown in the editor instead of
    'Leave message empty to abort commit' line. 'HG: ' prefix and EOL
    is automatically added.

    'editform' is a dot-separated list of names, to distinguish
    the purpose of commit text editing.

    'getcommiteditor' returns 'commitforceeditor' regardless of
    'edit', if one of 'finishdesc' or 'extramsg' is specified, because
    they are specific for usage in MQ.
    """
    if edit or finishdesc or extramsg:
        # force the interactive editor path, forwarding the MQ hooks
        return lambda r, c, s: commitforceeditor(r, c, s,
                                                 finishdesc=finishdesc,
                                                 extramsg=extramsg,
                                                 editform=editform)
    elif editform:
        return lambda r, c, s: commiteditor(r, c, s, editform=editform)
    else:
        return commiteditor
807
807
def loglimit(opts):
    """get the log limit according to option -l/--limit

    Returns the limit as a positive int, or None when no limit was
    requested.  Aborts on a non-integer or non-positive value.
    """
    limit = opts.get('limit')
    if limit:
        try:
            limit = int(limit)
        except ValueError:
            raise error.Abort(_('limit must be a positive integer'))
        if limit <= 0:
            raise error.Abort(_('limit must be positive'))
    else:
        # missing, None, '' and 0 all mean "no limit"
        limit = None
    return limit
821
821
def makefilename(repo, pat, node, desc=None,
                 total=None, seqno=None, revwidth=None, pathname=None):
    """expand %-specifiers in an output-filename pattern

    Supported specifiers (each only when the corresponding argument is
    given): %H/%h/%R/%r/%m (node-derived), %N/%n (total/seqno),
    %s/%d/%p (pathname-derived), %b (repo basename) and %% (literal %).
    Aborts on a specifier that is not applicable in this call.
    """
    # expanders are lambdas so that unused specifiers cost nothing
    node_expander = {
        'H': lambda: hex(node),
        'R': lambda: '%d' % repo.changelog.rev(node),
        'h': lambda: short(node),
        'm': lambda: re.sub('[^\w]', '_', desc or '')
        }
    expander = {
        '%': lambda: '%',
        'b': lambda: os.path.basename(repo.root),
        }

    try:
        if node:
            expander.update(node_expander)
        if node:
            expander['r'] = (lambda:
                    ('%d' % repo.changelog.rev(node)).zfill(revwidth or 0))
        if total is not None:
            expander['N'] = lambda: '%d' % total
        if seqno is not None:
            expander['n'] = lambda: '%d' % seqno
        if total is not None and seqno is not None:
            # zero-pad seqno to the width of total when both are known
            expander['n'] = (lambda: ('%d' % seqno).zfill(len('%d' % total)))
        if pathname is not None:
            expander['s'] = lambda: os.path.basename(pathname)
            expander['d'] = lambda: os.path.dirname(pathname) or '.'
            expander['p'] = lambda: pathname

        newname = []
        patlen = len(pat)
        i = 0
        while i < patlen:
            c = pat[i:i + 1]
            if c == '%':
                i += 1
                c = pat[i:i + 1]
                c = expander[c]()
            newname.append(c)
            i += 1
        return ''.join(newname)
    except KeyError as inst:
        raise error.Abort(_("invalid format spec '%%%s' in output filename") %
                          inst.args[0])
867
867
def isstdiofilename(pat):
    """True if the given pat looks like a filename denoting stdin/stdout"""
    # an empty pattern and '-' both select the standard streams
    return not pat or pat == '-'
871
871
872 class _unclosablefile(object):
872 class _unclosablefile(object):
873 def __init__(self, fp):
873 def __init__(self, fp):
874 self._fp = fp
874 self._fp = fp
875
875
876 def close(self):
876 def close(self):
877 pass
877 pass
878
878
879 def __iter__(self):
879 def __iter__(self):
880 return iter(self._fp)
880 return iter(self._fp)
881
881
882 def __getattr__(self, attr):
882 def __getattr__(self, attr):
883 return getattr(self._fp, attr)
883 return getattr(self._fp, attr)
884
884
885 def __enter__(self):
885 def __enter__(self):
886 return self
886 return self
887
887
888 def __exit__(self, exc_type, exc_value, exc_tb):
888 def __exit__(self, exc_type, exc_value, exc_tb):
889 pass
889 pass
890
890
def makefileobj(repo, pat, node=None, desc=None, total=None,
                seqno=None, revwidth=None, mode='wb', modemap=None,
                pathname=None):
    """open an output file derived from a %-specifier pattern

    For stdio patterns ('' or '-') returns the appropriate ui stream
    wrapped in _unclosablefile; otherwise expands the pattern with
    makefilename() and opens the resulting path.  'modemap' tracks
    already-opened names so later writes to the same file append
    instead of truncating.
    """
    writable = mode not in ('r', 'rb')

    if isstdiofilename(pat):
        if writable:
            fp = repo.ui.fout
        else:
            fp = repo.ui.fin
        # wrap so that callers cannot close the real stdio stream
        return _unclosablefile(fp)
    fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
    if modemap is not None:
        mode = modemap.get(fn, mode)
        if mode == 'wb':
            # first writer truncates, subsequent writers append
            modemap[fn] = 'ab'
    return open(fn, mode)
909
909
def openrevlog(repo, cmd, file_, opts):
    """opens the changelog, manifest, a filelog or a given revlog

    The revlog is selected by the mutually-constrained --changelog,
    --manifest and --dir options in 'opts', or by 'file_'.  Aborts on
    conflicting or insufficient options.
    """
    cl = opts['changelog']
    mf = opts['manifest']
    dir = opts['dir']
    msg = None
    # validate the option combination before touching the repo
    if cl and mf:
        msg = _('cannot specify --changelog and --manifest at the same time')
    elif cl and dir:
        msg = _('cannot specify --changelog and --dir at the same time')
    elif cl or mf or dir:
        if file_:
            msg = _('cannot specify filename with --changelog or --manifest')
        elif not repo:
            msg = _('cannot specify --changelog or --manifest or --dir '
                    'without a repository')
    if msg:
        raise error.Abort(msg)

    r = None
    if repo:
        if cl:
            r = repo.unfiltered().changelog
        elif dir:
            if 'treemanifest' not in repo.requirements:
                raise error.Abort(_("--dir can only be used on repos with "
                                    "treemanifest enabled"))
            dirlog = repo.manifestlog._revlog.dirlog(dir)
            if len(dirlog):
                r = dirlog
        elif mf:
            r = repo.manifestlog._revlog
        elif file_:
            filelog = repo.file(file_)
            if len(filelog):
                r = filelog
    if not r:
        if not file_:
            raise error.CommandError(cmd, _('invalid arguments'))
        if not os.path.isfile(file_):
            raise error.Abort(_("revlog '%s' not found") % file_)
        # fall back to opening the .i file directly, outside any repo
        r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
                          file_[:-2] + ".i")
    return r
954
954
def copy(ui, repo, pats, opts, rename=False):
    """copy or rename the files matched by pats[:-1] to pats[-1]

    Implements the shared logic of 'hg copy' and 'hg rename' (selected
    by the 'rename' flag).  Returns True if at least one file could not
    be copied, False otherwise.
    """
    # called with the repo lock held
    #
    # hgsep => pathname that uses "/" to separate directories
    # ossep => pathname that uses os.sep to separate directories
    cwd = repo.getcwd()
    targets = {}
    after = opts.get("after")
    dryrun = opts.get("dry_run")
    wctx = repo[None]

    def walkpat(pat):
        # return [(abs, rel, exact)] for tracked files matching pat,
        # warning about (and skipping) unmanaged or removed files
        srcs = []
        if after:
            badstates = '?'
        else:
            badstates = '?r'
        m = scmutil.match(wctx, [pat], opts, globbed=True)
        for abs in wctx.walk(m):
            state = repo.dirstate[abs]
            rel = m.rel(abs)
            exact = m.exact(abs)
            if state in badstates:
                if exact and state == '?':
                    ui.warn(_('%s: not copying - file is not managed\n') % rel)
                if exact and state == 'r':
                    ui.warn(_('%s: not copying - file has been marked for'
                              ' remove\n') % rel)
                continue
            # abs: hgsep
            # rel: ossep
            srcs.append((abs, rel, exact))
        return srcs

    # abssrc: hgsep
    # relsrc: ossep
    # otarget: ossep
    def copyfile(abssrc, relsrc, otarget, exact):
        # copy one file; returns True on failure
        abstarget = pathutil.canonpath(repo.root, cwd, otarget)
        if '/' in abstarget:
            # We cannot normalize abstarget itself, this would prevent
            # case only renames, like a => A.
            abspath, absname = abstarget.rsplit('/', 1)
            abstarget = repo.dirstate.normalize(abspath) + '/' + absname
        reltarget = repo.pathto(abstarget, cwd)
        target = repo.wjoin(abstarget)
        src = repo.wjoin(abssrc)
        state = repo.dirstate[abstarget]

        scmutil.checkportable(ui, abstarget)

        # check for collisions
        prevsrc = targets.get(abstarget)
        if prevsrc is not None:
            ui.warn(_('%s: not overwriting - %s collides with %s\n') %
                    (reltarget, repo.pathto(abssrc, cwd),
                     repo.pathto(prevsrc, cwd)))
            return

        # check for overwrites
        exists = os.path.lexists(target)
        samefile = False
        if exists and abssrc != abstarget:
            if (repo.dirstate.normalize(abssrc) ==
                repo.dirstate.normalize(abstarget)):
                if not rename:
                    ui.warn(_("%s: can't copy - same file\n") % reltarget)
                    return
                exists = False
                samefile = True

        if not after and exists or after and state in 'mn':
            if not opts['force']:
                if state in 'mn':
                    msg = _('%s: not overwriting - file already committed\n')
                    if after:
                        flags = '--after --force'
                    else:
                        flags = '--force'
                    if rename:
                        hint = _('(hg rename %s to replace the file by '
                                 'recording a rename)\n') % flags
                    else:
                        hint = _('(hg copy %s to replace the file by '
                                 'recording a copy)\n') % flags
                else:
                    msg = _('%s: not overwriting - file exists\n')
                    if rename:
                        hint = _('(hg rename --after to record the rename)\n')
                    else:
                        hint = _('(hg copy --after to record the copy)\n')
                ui.warn(msg % reltarget)
                ui.warn(hint)
                return

        if after:
            if not exists:
                if rename:
                    ui.warn(_('%s: not recording move - %s does not exist\n') %
                            (relsrc, reltarget))
                else:
                    ui.warn(_('%s: not recording copy - %s does not exist\n') %
                            (relsrc, reltarget))
                return
        elif not dryrun:
            try:
                if exists:
                    os.unlink(target)
                targetdir = os.path.dirname(target) or '.'
                if not os.path.isdir(targetdir):
                    os.makedirs(targetdir)
                if samefile:
                    # case-only rename: go through a temporary name so the
                    # filesystem actually records the new casing
                    tmp = target + "~hgrename"
                    os.rename(src, tmp)
                    os.rename(tmp, target)
                else:
                    util.copyfile(src, target)
                srcexists = True
            except IOError as inst:
                if inst.errno == errno.ENOENT:
                    ui.warn(_('%s: deleted in working directory\n') % relsrc)
                    srcexists = False
                else:
                    ui.warn(_('%s: cannot copy - %s\n') %
                            (relsrc, encoding.strtolocal(inst.strerror)))
                    return True # report a failure

        if ui.verbose or not exact:
            if rename:
                ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
            else:
                ui.status(_('copying %s to %s\n') % (relsrc, reltarget))

        targets[abstarget] = abssrc

        # fix up dirstate
        scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
                             dryrun=dryrun, cwd=cwd)
        if rename and not dryrun:
            if not after and srcexists and not samefile:
                repo.wvfs.unlinkpath(abssrc)
            wctx.forget([abssrc])

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathfn(pat, dest, srcs):
        if os.path.isdir(pat):
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            abspfx = util.localpath(abspfx)
            if destdirexists:
                striplen = len(os.path.split(abspfx)[0])
            else:
                striplen = len(abspfx)
            if striplen:
                striplen += len(pycompat.ossep)
            res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
        elif destdirexists:
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            res = lambda p: dest
        return res

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathafterfn(pat, dest, srcs):
        if matchmod.patkind(pat):
            # a mercurial pattern
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            if len(abspfx) < len(srcs[0][0]):
                # A directory. Either the target path contains the last
                # component of the source path or it does not.
                def evalpath(striplen):
                    # count how many sources already exist under dest when
                    # stripped at 'striplen' - used to pick the likelier layout
                    score = 0
                    for s in srcs:
                        t = os.path.join(dest, util.localpath(s[0])[striplen:])
                        if os.path.lexists(t):
                            score += 1
                    return score

                abspfx = util.localpath(abspfx)
                striplen = len(abspfx)
                if striplen:
                    striplen += len(pycompat.ossep)
                if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
                    score = evalpath(striplen)
                    striplen1 = len(os.path.split(abspfx)[0])
                    if striplen1:
                        striplen1 += len(pycompat.ossep)
                    if evalpath(striplen1) > score:
                        striplen = striplen1
                res = lambda p: os.path.join(dest,
                                             util.localpath(p)[striplen:])
            else:
                # a file
                if destdirexists:
                    res = lambda p: os.path.join(dest,
                                        os.path.basename(util.localpath(p)))
                else:
                    res = lambda p: dest
        return res

    pats = scmutil.expandpats(pats)
    if not pats:
        raise error.Abort(_('no source or destination specified'))
    if len(pats) == 1:
        raise error.Abort(_('no destination specified'))
    dest = pats.pop()
    destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
    if not destdirexists:
        if len(pats) > 1 or matchmod.patkind(pats[0]):
            raise error.Abort(_('with multiple sources, destination must be an '
                                'existing directory'))
        if util.endswithsep(dest):
            raise error.Abort(_('destination %s is not a directory') % dest)

    tfn = targetpathfn
    if after:
        tfn = targetpathafterfn
    copylist = []
    for pat in pats:
        srcs = walkpat(pat)
        if not srcs:
            continue
        copylist.append((tfn(pat, dest, srcs), srcs))
    if not copylist:
        raise error.Abort(_('no files to copy'))

    errors = 0
    for targetpath, srcs in copylist:
        for abssrc, relsrc, exact in srcs:
            if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
                errors += 1

    if errors:
        ui.warn(_('(consider using --after)\n'))

    return errors != 0
1200
1200
## facility to let extension process additional data into an import patch
# list of identifier to be executed in order
extrapreimport = [] # run before commit
extrapostimport = [] # run after commit
# mapping from identifier to actual import function
#
# 'preimport' are run before the commit is made and are provided the following
# arguments:
# - repo: the localrepository instance,
# - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
# - extra: the future extra dictionary of the changeset, please mutate it,
# - opts: the import options.
# XXX ideally, we would just pass an ctx ready to be computed, that would allow
# mutation of in memory commit and more. Feel free to rework the code to get
# there.
extrapreimportmap = {}
# 'postimport' are run after the commit is made and are provided the following
# argument:
# - ctx: the changectx created by import.
extrapostimportmap = {}
1221
1221
1222 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
1222 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
1223 """Utility function used by commands.import to import a single patch
1223 """Utility function used by commands.import to import a single patch
1224
1224
1225 This function is explicitly defined here to help the evolve extension to
1225 This function is explicitly defined here to help the evolve extension to
1226 wrap this part of the import logic.
1226 wrap this part of the import logic.
1227
1227
1228 The API is currently a bit ugly because it a simple code translation from
1228 The API is currently a bit ugly because it a simple code translation from
1229 the import command. Feel free to make it better.
1229 the import command. Feel free to make it better.
1230
1230
1231 :hunk: a patch (as a binary string)
1231 :hunk: a patch (as a binary string)
1232 :parents: nodes that will be parent of the created commit
1232 :parents: nodes that will be parent of the created commit
1233 :opts: the full dict of option passed to the import command
1233 :opts: the full dict of option passed to the import command
1234 :msgs: list to save commit message to.
1234 :msgs: list to save commit message to.
1235 (used in case we need to save it when failing)
1235 (used in case we need to save it when failing)
1236 :updatefunc: a function that update a repo to a given node
1236 :updatefunc: a function that update a repo to a given node
1237 updatefunc(<repo>, <node>)
1237 updatefunc(<repo>, <node>)
1238 """
1238 """
1239 # avoid cycle context -> subrepo -> cmdutil
1239 # avoid cycle context -> subrepo -> cmdutil
1240 from . import context
1240 from . import context
1241 extractdata = patch.extract(ui, hunk)
1241 extractdata = patch.extract(ui, hunk)
1242 tmpname = extractdata.get('filename')
1242 tmpname = extractdata.get('filename')
1243 message = extractdata.get('message')
1243 message = extractdata.get('message')
1244 user = opts.get('user') or extractdata.get('user')
1244 user = opts.get('user') or extractdata.get('user')
1245 date = opts.get('date') or extractdata.get('date')
1245 date = opts.get('date') or extractdata.get('date')
1246 branch = extractdata.get('branch')
1246 branch = extractdata.get('branch')
1247 nodeid = extractdata.get('nodeid')
1247 nodeid = extractdata.get('nodeid')
1248 p1 = extractdata.get('p1')
1248 p1 = extractdata.get('p1')
1249 p2 = extractdata.get('p2')
1249 p2 = extractdata.get('p2')
1250
1250
1251 nocommit = opts.get('no_commit')
1251 nocommit = opts.get('no_commit')
1252 importbranch = opts.get('import_branch')
1252 importbranch = opts.get('import_branch')
1253 update = not opts.get('bypass')
1253 update = not opts.get('bypass')
1254 strip = opts["strip"]
1254 strip = opts["strip"]
1255 prefix = opts["prefix"]
1255 prefix = opts["prefix"]
1256 sim = float(opts.get('similarity') or 0)
1256 sim = float(opts.get('similarity') or 0)
1257 if not tmpname:
1257 if not tmpname:
1258 return (None, None, False)
1258 return (None, None, False)
1259
1259
1260 rejects = False
1260 rejects = False
1261
1261
1262 try:
1262 try:
1263 cmdline_message = logmessage(ui, opts)
1263 cmdline_message = logmessage(ui, opts)
1264 if cmdline_message:
1264 if cmdline_message:
1265 # pickup the cmdline msg
1265 # pickup the cmdline msg
1266 message = cmdline_message
1266 message = cmdline_message
1267 elif message:
1267 elif message:
1268 # pickup the patch msg
1268 # pickup the patch msg
1269 message = message.strip()
1269 message = message.strip()
1270 else:
1270 else:
1271 # launch the editor
1271 # launch the editor
1272 message = None
1272 message = None
1273 ui.debug('message:\n%s\n' % message)
1273 ui.debug('message:\n%s\n' % message)
1274
1274
1275 if len(parents) == 1:
1275 if len(parents) == 1:
1276 parents.append(repo[nullid])
1276 parents.append(repo[nullid])
1277 if opts.get('exact'):
1277 if opts.get('exact'):
1278 if not nodeid or not p1:
1278 if not nodeid or not p1:
1279 raise error.Abort(_('not a Mercurial patch'))
1279 raise error.Abort(_('not a Mercurial patch'))
1280 p1 = repo[p1]
1280 p1 = repo[p1]
1281 p2 = repo[p2 or nullid]
1281 p2 = repo[p2 or nullid]
1282 elif p2:
1282 elif p2:
1283 try:
1283 try:
1284 p1 = repo[p1]
1284 p1 = repo[p1]
1285 p2 = repo[p2]
1285 p2 = repo[p2]
1286 # Without any options, consider p2 only if the
1286 # Without any options, consider p2 only if the
1287 # patch is being applied on top of the recorded
1287 # patch is being applied on top of the recorded
1288 # first parent.
1288 # first parent.
1289 if p1 != parents[0]:
1289 if p1 != parents[0]:
1290 p1 = parents[0]
1290 p1 = parents[0]
1291 p2 = repo[nullid]
1291 p2 = repo[nullid]
1292 except error.RepoError:
1292 except error.RepoError:
1293 p1, p2 = parents
1293 p1, p2 = parents
1294 if p2.node() == nullid:
1294 if p2.node() == nullid:
1295 ui.warn(_("warning: import the patch as a normal revision\n"
1295 ui.warn(_("warning: import the patch as a normal revision\n"
1296 "(use --exact to import the patch as a merge)\n"))
1296 "(use --exact to import the patch as a merge)\n"))
1297 else:
1297 else:
1298 p1, p2 = parents
1298 p1, p2 = parents
1299
1299
1300 n = None
1300 n = None
1301 if update:
1301 if update:
1302 if p1 != parents[0]:
1302 if p1 != parents[0]:
1303 updatefunc(repo, p1.node())
1303 updatefunc(repo, p1.node())
1304 if p2 != parents[1]:
1304 if p2 != parents[1]:
1305 repo.setparents(p1.node(), p2.node())
1305 repo.setparents(p1.node(), p2.node())
1306
1306
1307 if opts.get('exact') or importbranch:
1307 if opts.get('exact') or importbranch:
1308 repo.dirstate.setbranch(branch or 'default')
1308 repo.dirstate.setbranch(branch or 'default')
1309
1309
1310 partial = opts.get('partial', False)
1310 partial = opts.get('partial', False)
1311 files = set()
1311 files = set()
1312 try:
1312 try:
1313 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
1313 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
1314 files=files, eolmode=None, similarity=sim / 100.0)
1314 files=files, eolmode=None, similarity=sim / 100.0)
1315 except error.PatchError as e:
1315 except error.PatchError as e:
1316 if not partial:
1316 if not partial:
1317 raise error.Abort(str(e))
1317 raise error.Abort(str(e))
1318 if partial:
1318 if partial:
1319 rejects = True
1319 rejects = True
1320
1320
1321 files = list(files)
1321 files = list(files)
1322 if nocommit:
1322 if nocommit:
1323 if message:
1323 if message:
1324 msgs.append(message)
1324 msgs.append(message)
1325 else:
1325 else:
1326 if opts.get('exact') or p2:
1326 if opts.get('exact') or p2:
1327 # If you got here, you either use --force and know what
1327 # If you got here, you either use --force and know what
1328 # you are doing or used --exact or a merge patch while
1328 # you are doing or used --exact or a merge patch while
1329 # being updated to its first parent.
1329 # being updated to its first parent.
1330 m = None
1330 m = None
1331 else:
1331 else:
1332 m = scmutil.matchfiles(repo, files or [])
1332 m = scmutil.matchfiles(repo, files or [])
1333 editform = mergeeditform(repo[None], 'import.normal')
1333 editform = mergeeditform(repo[None], 'import.normal')
1334 if opts.get('exact'):
1334 if opts.get('exact'):
1335 editor = None
1335 editor = None
1336 else:
1336 else:
1337 editor = getcommiteditor(editform=editform,
1337 editor = getcommiteditor(editform=editform,
1338 **pycompat.strkwargs(opts))
1338 **pycompat.strkwargs(opts))
1339 extra = {}
1339 extra = {}
1340 for idfunc in extrapreimport:
1340 for idfunc in extrapreimport:
1341 extrapreimportmap[idfunc](repo, extractdata, extra, opts)
1341 extrapreimportmap[idfunc](repo, extractdata, extra, opts)
1342 overrides = {}
1342 overrides = {}
1343 if partial:
1343 if partial:
1344 overrides[('ui', 'allowemptycommit')] = True
1344 overrides[('ui', 'allowemptycommit')] = True
1345 with repo.ui.configoverride(overrides, 'import'):
1345 with repo.ui.configoverride(overrides, 'import'):
1346 n = repo.commit(message, user,
1346 n = repo.commit(message, user,
1347 date, match=m,
1347 date, match=m,
1348 editor=editor, extra=extra)
1348 editor=editor, extra=extra)
1349 for idfunc in extrapostimport:
1349 for idfunc in extrapostimport:
1350 extrapostimportmap[idfunc](repo[n])
1350 extrapostimportmap[idfunc](repo[n])
1351 else:
1351 else:
1352 if opts.get('exact') or importbranch:
1352 if opts.get('exact') or importbranch:
1353 branch = branch or 'default'
1353 branch = branch or 'default'
1354 else:
1354 else:
1355 branch = p1.branch()
1355 branch = p1.branch()
1356 store = patch.filestore()
1356 store = patch.filestore()
1357 try:
1357 try:
1358 files = set()
1358 files = set()
1359 try:
1359 try:
1360 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
1360 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
1361 files, eolmode=None)
1361 files, eolmode=None)
1362 except error.PatchError as e:
1362 except error.PatchError as e:
1363 raise error.Abort(str(e))
1363 raise error.Abort(str(e))
1364 if opts.get('exact'):
1364 if opts.get('exact'):
1365 editor = None
1365 editor = None
1366 else:
1366 else:
1367 editor = getcommiteditor(editform='import.bypass')
1367 editor = getcommiteditor(editform='import.bypass')
1368 memctx = context.memctx(repo, (p1.node(), p2.node()),
1368 memctx = context.memctx(repo, (p1.node(), p2.node()),
1369 message,
1369 message,
1370 files=files,
1370 files=files,
1371 filectxfn=store,
1371 filectxfn=store,
1372 user=user,
1372 user=user,
1373 date=date,
1373 date=date,
1374 branch=branch,
1374 branch=branch,
1375 editor=editor)
1375 editor=editor)
1376 n = memctx.commit()
1376 n = memctx.commit()
1377 finally:
1377 finally:
1378 store.close()
1378 store.close()
1379 if opts.get('exact') and nocommit:
1379 if opts.get('exact') and nocommit:
1380 # --exact with --no-commit is still useful in that it does merge
1380 # --exact with --no-commit is still useful in that it does merge
1381 # and branch bits
1381 # and branch bits
1382 ui.warn(_("warning: can't check exact import with --no-commit\n"))
1382 ui.warn(_("warning: can't check exact import with --no-commit\n"))
1383 elif opts.get('exact') and hex(n) != nodeid:
1383 elif opts.get('exact') and hex(n) != nodeid:
1384 raise error.Abort(_('patch is damaged or loses information'))
1384 raise error.Abort(_('patch is damaged or loses information'))
1385 msg = _('applied to working directory')
1385 msg = _('applied to working directory')
1386 if n:
1386 if n:
1387 # i18n: refers to a short changeset id
1387 # i18n: refers to a short changeset id
1388 msg = _('created %s') % short(n)
1388 msg = _('created %s') % short(n)
1389 return (msg, n, rejects)
1389 return (msg, n, rejects)
1390 finally:
1390 finally:
1391 os.unlink(tmpname)
1391 os.unlink(tmpname)
1392
1392
# Facility to let extensions include additional data in an exported patch.
# List of extension identifiers, executed in registration order.
extraexport = []
# Mapping from identifier to the actual export function. The function is
# given two arguments (sequencenumber, changectx) and has to return a
# string to be added to the patch header, or None to add nothing.
extraexportmap = {}
1400
1400
def _exportsingle(repo, ctx, match, switch_parent, rev, seqno, write, diffopts):
    """Emit one changeset as "HG changeset patch" text via *write*.

    Writes the patch header (user, date, branch, node, parent(s), plus any
    extension-provided headers from ``extraexport``), then the changeset
    description, then the diff against the selected parent.
    """
    binnode = scmutil.binnode(ctx)
    realparents = [p.node() for p in ctx.parents() if p]
    if switch_parent:
        realparents.reverse()
    # Diff against the first (possibly switched) parent; null for a root.
    prev = realparents[0] if realparents else nullid

    write("# HG changeset patch\n")
    write("# User %s\n" % ctx.user())
    write("# Date %d %d\n" % ctx.date())
    write("# %s\n" % util.datestr(ctx.date()))
    branch = ctx.branch()
    if branch and branch != 'default':
        write("# Branch %s\n" % branch)
    write("# Node ID %s\n" % hex(binnode))
    write("# Parent %s\n" % hex(prev))
    if len(realparents) > 1:
        write("# Parent %s\n" % hex(realparents[1]))

    # Extension hook point: extra header lines, in registration order.
    for headerid in extraexport:
        header = extraexportmap[headerid](seqno, ctx)
        if header is not None:
            write('# %s\n' % header)

    write(ctx.description().rstrip())
    write("\n\n")

    for chunk, label in patch.diffui(repo, prev, binnode, match, opts=diffopts):
        write(chunk, label=label)
1433
1433
def export(repo, revs, fntemplate='hg-%h.patch', fp=None, switch_parent=False,
           opts=None, match=None):
    '''export changesets as hg patches

    Args:
      repo: The repository from which we're exporting revisions.
      revs: A list of revisions to export as revision numbers.
      fntemplate: An optional string to use for generating patch file names.
      fp: An optional file-like object to which patches should be written.
      switch_parent: If True, show diffs against second parent when not nullid.
                     Default is false, which always shows diff against p1.
      opts: diff options to use for generating the patch.
      match: If specified, only export changes to files matching this matcher.

    Returns:
      Nothing.

    Side Effect:
      "HG Changeset Patch" data is emitted to one of the following
      destinations:
        fp is specified: All revs are written to the specified
                         file-like object.
        fntemplate specified: Each rev is written to a unique file named using
                              the given template.
        Neither fp nor template specified: All revs written to repo.ui.write()
    '''

    total = len(revs)
    # Width used when %r appears in fntemplate (zero-padded rev numbers).
    revwidth = max(len(str(rev)) for rev in revs)
    filemode = {}

    # `write` is selected per destination: a closure over fp, ui.write, or
    # (in the fntemplate case) a fresh closure bound inside the loop below.
    write = None
    dest = '<unnamed>'
    if fp:
        dest = getattr(fp, 'name', dest)
        def write(s, **kw):
            fp.write(s)
    elif not fntemplate:
        write = repo.ui.write

    for seqno, rev in enumerate(revs, 1):
        ctx = repo[rev]
        fo = None
        if not fp and fntemplate:
            desc_lines = ctx.description().rstrip().split('\n')
            desc = desc_lines[0] #Commit always has a first line.
            fo = makefileobj(repo, fntemplate, ctx.node(), desc=desc,
                             total=total, seqno=seqno, revwidth=revwidth,
                             mode='wb', modemap=filemode)
            dest = fo.name
            # NOTE: redefined each iteration so the closure captures the
            # current per-rev file object.
            def write(s, **kw):
                fo.write(s)
        if not dest.startswith('<'):
            # Only note real file names, not placeholder '<...>' destinations.
            repo.ui.note("%s\n" % dest)
        _exportsingle(
            repo, ctx, match, switch_parent, rev, seqno, write, opts)
        if fo is not None:
            fo.close()
1492
1492
def diffordiffstat(ui, repo, diffopts, node1, node2, match,
                   changes=None, stat=False, fp=None, prefix='',
                   root='', listsubrepos=False, hunksfilterfn=None):
    '''show diff or diffstat.

    Emits either a diffstat (stat=True) or a full diff between node1 and
    node2, to fp if given, otherwise via ui.write. With listsubrepos,
    recurses into subrepositories as well. `root` restricts output to a
    subdirectory, rewritten relative to that root.
    '''
    if fp is None:
        write = ui.write
    else:
        def write(s, **kw):
            fp.write(s)

    if root:
        relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
    else:
        relroot = ''
    if relroot != '':
        # XXX relative roots currently don't work if the root is within a
        # subrepo
        uirelroot = match.uipath(relroot)
        relroot += '/'
        # Warn about match patterns that fall outside the relative root;
        # they cannot produce any output below.
        for matchroot in match.files():
            if not matchroot.startswith(relroot):
                ui.warn(_('warning: %s not inside relative root %s\n') % (
                    match.uipath(matchroot), uirelroot))

    if stat:
        # diffstat only needs the changed lines, not context lines.
        diffopts = diffopts.copy(context=0)
        width = 80
        if not ui.plain():
            width = ui.termwidth()
        chunks = patch.diff(repo, node1, node2, match, changes, opts=diffopts,
                            prefix=prefix, relroot=relroot,
                            hunksfilterfn=hunksfilterfn)
        for chunk, label in patch.diffstatui(util.iterlines(chunks),
                                             width=width):
            write(chunk, label=label)
    else:
        for chunk, label in patch.diffui(repo, node1, node2, match,
                                         changes, opts=diffopts, prefix=prefix,
                                         relroot=relroot,
                                         hunksfilterfn=hunksfilterfn):
            write(chunk, label=label)

    if listsubrepos:
        ctx1 = repo[node1]
        ctx2 = repo[node2]
        for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
            tempnode2 = node2
            try:
                if node2 is not None:
                    tempnode2 = ctx2.substate[subpath][1]
            except KeyError:
                # A subrepo that existed in node1 was deleted between node1 and
                # node2 (inclusive). Thus, ctx2's substate won't contain that
                # subpath. The best we can do is to ignore it.
                tempnode2 = None
            submatch = matchmod.subdirmatcher(subpath, match)
            sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
                     stat=stat, fp=fp, prefix=prefix)
1551
1551
1552 def _changesetlabels(ctx):
1552 def _changesetlabels(ctx):
1553 labels = ['log.changeset', 'changeset.%s' % ctx.phasestr()]
1553 labels = ['log.changeset', 'changeset.%s' % ctx.phasestr()]
1554 if ctx.obsolete():
1554 if ctx.obsolete():
1555 labels.append('changeset.obsolete')
1555 labels.append('changeset.obsolete')
1556 if ctx.isunstable():
1556 if ctx.isunstable():
1557 labels.append('changeset.unstable')
1557 labels.append('changeset.unstable')
1558 for instability in ctx.instabilities():
1558 for instability in ctx.instabilities():
1559 labels.append('instability.%s' % instability)
1559 labels.append('instability.%s' % instability)
1560 return ' '.join(labels)
1560 return ' '.join(labels)
1561
1561
class changeset_printer(object):
    '''show changeset information when templating not requested.'''

    def __init__(self, ui, repo, matchfn, diffopts, buffered):
        self.ui = ui
        self.repo = repo
        self.buffered = buffered    # if True, show() buffers output per rev
        self.matchfn = matchfn
        self.diffopts = diffopts
        self.header = {}            # rev -> header text, flushed by flush()
        self.hunk = {}              # rev -> buffered changeset text
        self.lastheader = None      # last header written, to suppress repeats
        self.footer = None          # optional text written by close()
        self._columns = templatekw.getlogcolumns()

    def flush(self, ctx):
        # Write any buffered header/hunk for ctx's rev; return 1 if a hunk
        # was written, 0 otherwise.
        rev = ctx.rev()
        if rev in self.header:
            h = self.header[rev]
            if h != self.lastheader:
                self.lastheader = h
                self.ui.write(h)
            del self.header[rev]
        if rev in self.hunk:
            self.ui.write(self.hunk[rev])
            del self.hunk[rev]
            return 1
        return 0

    def close(self):
        # Emit the footer, if any subclass/extension set one.
        if self.footer:
            self.ui.write(self.footer)

    def show(self, ctx, copies=None, matchfn=None, hunksfilterfn=None,
             **props):
        # Public entry point: render ctx, either buffered (stored in
        # self.hunk keyed by rev, written later by flush()) or directly.
        props = pycompat.byteskwargs(props)
        if self.buffered:
            self.ui.pushbuffer(labeled=True)
            self._show(ctx, copies, matchfn, hunksfilterfn, props)
            self.hunk[ctx.rev()] = self.ui.popbuffer()
        else:
            self._show(ctx, copies, matchfn, hunksfilterfn, props)

    def _show(self, ctx, copies, matchfn, hunksfilterfn, props):
        '''show a single changeset or file revision'''
        changenode = ctx.node()
        rev = ctx.rev()

        # Quiet mode: just the changeset id, nothing else.
        if self.ui.quiet:
            self.ui.write("%s\n" % scmutil.formatchangeid(ctx),
                          label='log.node')
            return

        columns = self._columns
        self.ui.write(columns['changeset'] % scmutil.formatchangeid(ctx),
                      label=_changesetlabels(ctx))

        # branches are shown first before any other names due to backwards
        # compatibility
        branch = ctx.branch()
        # don't show the default branch name
        if branch != 'default':
            self.ui.write(columns['branch'] % branch, label='log.branch')

        for nsname, ns in self.repo.names.iteritems():
            # branches has special logic already handled above, so here we just
            # skip it
            if nsname == 'branches':
                continue
            # we will use the templatename as the color name since those two
            # should be the same
            for name in ns.names(self.repo, changenode):
                self.ui.write(ns.logfmt % name,
                              label='log.%s' % ns.colorname)
        if self.ui.debugflag:
            self.ui.write(columns['phase'] % ctx.phasestr(), label='log.phase')
        for pctx in scmutil.meaningfulparents(self.repo, ctx):
            label = 'log.parent changeset.%s' % pctx.phasestr()
            self.ui.write(columns['parent'] % scmutil.formatchangeid(pctx),
                          label=label)

        # Manifest line only makes sense for committed revisions (rev is
        # None for the working directory).
        if self.ui.debugflag and rev is not None:
            mnode = ctx.manifestnode()
            mrev = self.repo.manifestlog._revlog.rev(mnode)
            self.ui.write(columns['manifest']
                          % scmutil.formatrevnode(self.ui, mrev, mnode),
                          label='ui.debug log.manifest')
        self.ui.write(columns['user'] % ctx.user(), label='log.user')
        self.ui.write(columns['date'] % util.datestr(ctx.date()),
                      label='log.date')

        if ctx.isunstable():
            instabilities = ctx.instabilities()
            self.ui.write(columns['instability'] % ', '.join(instabilities),
                          label='log.instability')

        elif ctx.obsolete():
            self._showobsfate(ctx)

        # Extension hook point (no-op by default).
        self._exthook(ctx)

        if self.ui.debugflag:
            # Debug mode: modified/added/removed file lists, separately.
            files = ctx.p1().status(ctx)[:3]
            for key, value in zip(['files', 'files+', 'files-'], files):
                if value:
                    self.ui.write(columns[key] % " ".join(value),
                                  label='ui.debug log.files')
        elif ctx.files() and self.ui.verbose:
            self.ui.write(columns['files'] % " ".join(ctx.files()),
                          label='ui.note log.files')
        if copies and self.ui.verbose:
            # copies is a list of (dest, source) pairs.
            copies = ['%s (%s)' % c for c in copies]
            self.ui.write(columns['copies'] % ' '.join(copies),
                          label='ui.note log.copies')

        extra = ctx.extra()
        if extra and self.ui.debugflag:
            for key, value in sorted(extra.items()):
                self.ui.write(columns['extra'] % (key, util.escapestr(value)),
                              label='ui.debug log.extra')

        description = ctx.description().strip()
        if description:
            if self.ui.verbose:
                self.ui.write(_("description:\n"),
                              label='ui.note log.description')
                self.ui.write(description,
                              label='ui.note log.description')
                self.ui.write("\n\n")
            else:
                self.ui.write(columns['summary'] % description.splitlines()[0],
                              label='log.summary')
        self.ui.write("\n")

        self.showpatch(ctx, matchfn, hunksfilterfn=hunksfilterfn)

    def _showobsfate(self, ctx):
        # Render the successor/fate information for an obsolete changeset.
        obsfate = templatekw.showobsfate(repo=self.repo, ctx=ctx, ui=self.ui)

        if obsfate:
            for obsfateline in obsfate:
                self.ui.write(self._columns['obsolete'] % obsfateline,
                              label='log.obsfate')

    def _exthook(self, ctx):
        '''empty method used by extension as a hook point
        '''

    def showpatch(self, ctx, matchfn, hunksfilterfn=None):
        # Emit diffstat and/or diff for ctx against its first parent,
        # depending on the 'stat'/'patch' diff options.
        if not matchfn:
            matchfn = self.matchfn
        if matchfn:
            stat = self.diffopts.get('stat')
            diff = self.diffopts.get('patch')
            diffopts = patch.diffallopts(self.ui, self.diffopts)
            node = ctx.node()
            prev = ctx.p1().node()
            if stat:
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=True,
                               hunksfilterfn=hunksfilterfn)
            if diff:
                if stat:
                    self.ui.write("\n")
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=False,
                               hunksfilterfn=hunksfilterfn)
            self.ui.write("\n")
1730
1730
1731 class jsonchangeset(changeset_printer):
1731 class jsonchangeset(changeset_printer):
1732 '''format changeset information.'''
1732 '''format changeset information.'''
1733
1733
    def __init__(self, ui, repo, matchfn, diffopts, buffered):
        # Delegate the common printer setup to the base class.
        changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
        self.cache = {}
        # True until the first changeset is emitted; controls whether the
        # opening "[" or a separating "," is written for the JSON list.
        self._first = True
1738
1738
1739 def close(self):
1739 def close(self):
1740 if not self._first:
1740 if not self._first:
1741 self.ui.write("\n]\n")
1741 self.ui.write("\n]\n")
1742 else:
1742 else:
1743 self.ui.write("[]\n")
1743 self.ui.write("[]\n")
1744
1744
    def _show(self, ctx, copies, matchfn, hunksfilterfn, props):
        '''show a single changeset or file revision'''
        rev = ctx.rev()
        # the working directory context has no revision number or node;
        # both are represented as JSON null
        if rev is None:
            jrev = jnode = 'null'
        else:
            jrev = '%d' % rev
            jnode = '"%s"' % hex(ctx.node())
        # shorthand for escaping arbitrary byte strings for JSON output
        j = encoding.jsonescape

        # open the JSON list for the first item; otherwise separate this
        # object from the previous one with a comma
        if self._first:
            self.ui.write("[\n {")
            self._first = False
        else:
            self.ui.write(",\n {")

        # quiet mode emits only the revision number and node
        if self.ui.quiet:
            self.ui.write(('\n "rev": %s') % jrev)
            self.ui.write((',\n "node": %s') % jnode)
            self.ui.write('\n }')
            return

        self.ui.write(('\n "rev": %s') % jrev)
        self.ui.write((',\n "node": %s') % jnode)
        self.ui.write((',\n "branch": "%s"') % j(ctx.branch()))
        self.ui.write((',\n "phase": "%s"') % ctx.phasestr())
        self.ui.write((',\n "user": "%s"') % j(ctx.user()))
        self.ui.write((',\n "date": [%d, %d]') % ctx.date())
        self.ui.write((',\n "desc": "%s"') % j(ctx.description()))

        self.ui.write((',\n "bookmarks": [%s]') %
                      ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
        self.ui.write((',\n "tags": [%s]') %
                      ", ".join('"%s"' % j(t) for t in ctx.tags()))
        self.ui.write((',\n "parents": [%s]') %
                      ", ".join('"%s"' % c.hex() for c in ctx.parents()))

        if self.ui.debugflag:
            # debug mode additionally reports the manifest node, extras,
            # and per-category file lists computed against the first parent
            if rev is None:
                jmanifestnode = 'null'
            else:
                jmanifestnode = '"%s"' % hex(ctx.manifestnode())
            self.ui.write((',\n "manifest": %s') % jmanifestnode)

            self.ui.write((',\n "extra": {%s}') %
                          ", ".join('"%s": "%s"' % (j(k), j(v))
                                    for k, v in ctx.extra().items()))

            files = ctx.p1().status(ctx)
            self.ui.write((',\n "modified": [%s]') %
                          ", ".join('"%s"' % j(f) for f in files[0]))
            self.ui.write((',\n "added": [%s]') %
                          ", ".join('"%s"' % j(f) for f in files[1]))
            self.ui.write((',\n "removed": [%s]') %
                          ", ".join('"%s"' % j(f) for f in files[2]))

        elif self.ui.verbose:
            # verbose mode shows the flat list of touched files instead
            self.ui.write((',\n "files": [%s]') %
                          ", ".join('"%s"' % j(f) for f in ctx.files()))

            if copies:
                self.ui.write((',\n "copies": {%s}') %
                              ", ".join('"%s": "%s"' % (j(k), j(v))
                                        for k, v in copies))

        matchfn = self.matchfn
        if matchfn:
            # --patch/--stat requested: capture the diff output in a ui
            # buffer so it can be JSON-escaped before being written
            stat = self.diffopts.get('stat')
            diff = self.diffopts.get('patch')
            diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
            node, prev = ctx.node(), ctx.p1().node()
            if stat:
                self.ui.pushbuffer()
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=True)
                self.ui.write((',\n "diffstat": "%s"')
                              % j(self.ui.popbuffer()))
            if diff:
                self.ui.pushbuffer()
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=False)
                self.ui.write((',\n "diff": "%s"') % j(self.ui.popbuffer()))

        self.ui.write("\n }")
1829
1829
class changeset_templater(changeset_printer):
    '''format changeset information.

    Note: there are a variety of convenience functions to build a
    changeset_templater for common cases. See functions such as:
    makelogtemplater, show_changeset, buildcommittemplate, or other
    functions that use changesest_templater.
    '''

    # Arguments before "buffered" used to be positional. Consider not
    # adding/removing arguments before "buffered" to not break callers.
    def __init__(self, ui, repo, tmplspec, matchfn=None, diffopts=None,
                 buffered=False):
        diffopts = diffopts or {}

        changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
        # the loaded templater; defaulttempl provides the built-in
        # fallback templates
        self.t = formatter.loadtemplater(ui, tmplspec,
                                         cache=templatekw.defaulttempl)
        # monotonically increasing item index, exposed to templates as
        # {index} and used to decide when to emit the separator part
        self._counter = itertools.count()
        self.cache = {}

        # name of the main changeset template within self.t
        self._tref = tmplspec.ref
        # mapping of logical part name -> actual template name; empty
        # string means "this part is absent"
        self._parts = {'header': '', 'footer': '',
                       tmplspec.ref: tmplspec.ref,
                       'docheader': '', 'docfooter': '',
                       'separator': ''}
        if tmplspec.mapfile:
            # find correct templates for current mode, for backward
            # compatibility with 'log -v/-q/--debug' using a mapfile
            tmplmodes = [
                (True, ''),
                (self.ui.verbose, '_verbose'),
                (self.ui.quiet, '_quiet'),
                (self.ui.debugflag, '_debug'),
            ]
            # later entries win, so e.g. 'changeset_debug' overrides
            # 'changeset_verbose' when both modes are active
            for mode, postfix in tmplmodes:
                for t in self._parts:
                    cur = t + postfix
                    if mode and cur in self.t:
                        self._parts[t] = cur
        else:
            partnames = [p for p in self._parts.keys() if p != tmplspec.ref]
            m = formatter.templatepartsmap(tmplspec, self.t, partnames)
            self._parts.update(m)

        # the document header is written immediately, before any item
        if self._parts['docheader']:
            self.ui.write(templater.stringify(self.t(self._parts['docheader'])))

    def close(self):
        # append the document footer (if any) to the buffered footer so
        # the base class flushes it after the last changeset
        if self._parts['docfooter']:
            if not self.footer:
                self.footer = ""
            self.footer += templater.stringify(self.t(self._parts['docfooter']))
        return super(changeset_templater, self).close()

    def _show(self, ctx, copies, matchfn, hunksfilterfn, props):
        '''show a single changeset or file revision'''
        # build the property dict handed to the template engine: the
        # standard keyword functions plus per-invocation context objects
        props = props.copy()
        props.update(templatekw.keywords)
        props['templ'] = self.t
        props['ctx'] = ctx
        props['repo'] = self.repo
        props['ui'] = self.repo.ui
        props['index'] = index = next(self._counter)
        props['revcache'] = {'copies': copies}
        props['cache'] = self.cache
        props = pycompat.strkwargs(props)

        # write separator, which wouldn't work well with the header part below
        # since there's inherently a conflict between header (across items) and
        # separator (per item)
        if self._parts['separator'] and index > 0:
            self.ui.write(templater.stringify(self.t(self._parts['separator'])))

        # write header
        if self._parts['header']:
            h = templater.stringify(self.t(self._parts['header'], **props))
            if self.buffered:
                self.header[ctx.rev()] = h
            else:
                # repeated identical headers are collapsed to one
                if self.lastheader != h:
                    self.lastheader = h
                    self.ui.write(h)

        # write changeset metadata, then patch if requested
        key = self._parts[self._tref]
        self.ui.write(templater.stringify(self.t(key, **props)))
        self.showpatch(ctx, matchfn, hunksfilterfn=hunksfilterfn)

        if self._parts['footer']:
            if not self.footer:
                self.footer = templater.stringify(
                    self.t(self._parts['footer'], **props))
1923
1923
def logtemplatespec(tmpl, mapfile):
    """Build a formatter templatespec for log-style output.

    A map file implies the standard 'changeset' topic; a literal
    template gets an anonymous spec with no map file.
    """
    if not mapfile:
        return formatter.templatespec('', tmpl, None)
    return formatter.templatespec('changeset', tmpl, mapfile)
1929
1929
def _lookuplogtemplate(ui, tmpl, style):
    """Find the template matching the given template spec or style

    See formatter.lookuptemplate() for details.
    """
    # a literal template is stronger than a style; when neither is
    # given, fall back to the [ui] configuration
    if not (tmpl or style):
        tmpl = ui.config('ui', 'logtemplate')
        if tmpl:
            return logtemplatespec(templater.unquotestring(tmpl), None)
        style = util.expandpath(ui.config('ui', 'style'))

    if style and not tmpl:
        # a bare style name is resolved against the shipped
        # map-cmdline.* files first, then as a plain template path
        mapfile = style
        if not os.path.split(mapfile)[0]:
            mapname = (templater.templatepath('map-cmdline.' + mapfile)
                       or templater.templatepath(mapfile))
            if mapname:
                mapfile = mapname
        return logtemplatespec(None, mapfile)

    if tmpl:
        return formatter.lookuptemplate(ui, 'changeset', tmpl)
    return logtemplatespec(None, None)
1957
1957
def makelogtemplater(ui, repo, tmpl, buffered=False):
    """Create a changeset_templater from a literal template 'tmpl'
    byte-string."""
    # wrap the literal template in an anonymous spec (no map file)
    return changeset_templater(ui, repo, logtemplatespec(tmpl, None),
                               buffered=buffered)
1963
1963
def show_changeset(ui, repo, opts, buffered=False):
    """show one changeset using template or regular display.

    Display format will be the first non-empty hit of:
    1. option 'template'
    2. option 'style'
    3. [ui] setting 'logtemplate'
    4. [ui] setting 'style'
    If all of these values are either the unset or the empty string,
    regular display via changeset_printer() is done.
    """
    # a matcher is only needed when a diff or diffstat will be shown
    if opts.get('patch') or opts.get('stat'):
        match = scmutil.matchall(repo)
    else:
        match = None

    # the built-in JSON style bypasses the template engine entirely
    if opts.get('template') == 'json':
        return jsonchangeset(ui, repo, match, opts, buffered)

    spec = _lookuplogtemplate(ui, opts.get('template'), opts.get('style'))

    if spec.ref or spec.tmpl or spec.mapfile:
        return changeset_templater(ui, repo, spec, match, opts, buffered)
    # no template resolved anywhere: plain display
    return changeset_printer(ui, repo, match, opts, buffered)
1989
1989
def showmarker(fm, marker, index=None):
    """utility function to display obsolescence marker in a readable way

    To be used by debug function."""
    # optional leading position of the marker in the listing
    if index is not None:
        fm.write('index', '%i ', index)
    fm.write('prednode', '%s ', hex(marker.prednode()))
    succs = marker.succnodes()
    # successors may be empty (pruned changeset); condwrite skips the
    # field in that case
    fm.condwrite(succs, 'succnodes', '%s ',
                 fm.formatlist(map(hex, succs), name='node'))
    fm.write('flag', '%X ', marker.flags())
    parents = marker.parentnodes()
    if parents is not None:
        fm.write('parentnodes', '{%s} ',
                 fm.formatlist(map(hex, parents), name='node', sep=', '))
    fm.write('date', '(%s) ', fm.formatdate(marker.date()))
    # the date is shown separately above, so drop it from the metadata
    # dict (copy first to avoid mutating the marker's own mapping)
    meta = marker.metadata().copy()
    meta.pop('date', None)
    fm.write('metadata', '{%s}', fm.formatdict(meta, fmt='%r: %r', sep=', '))
    fm.plain('\n')
2010
2010
def finddate(ui, repo, date):
    """Find the tipmost changeset that matches the given date spec"""

    datematch = util.matchdate(date)
    matcher = scmutil.matchall(repo)
    hits = {}

    def prep(ctx, fns):
        # record the date of every changeset matching the date spec
        when = ctx.date()
        if datematch(when[0]):
            hits[ctx.rev()] = when

    # walkchangerevs calls prep() on each window before yielding, so
    # the first yielded context already present in `hits` is the answer
    for ctx in walkchangerevs(repo, matcher, {'rev': None}, prep):
        rev = ctx.rev()
        if rev not in hits:
            continue
        ui.status(_("found revision %s from %s\n") %
                  (rev, util.datestr(hits[rev])))
        return '%d' % rev

    raise error.Abort(_("revision matching date not found"))
2031
2031
def increasingwindows(windowsize=8, sizelimit=512):
    """Yield an endless sequence of revision-window sizes.

    Starts at *windowsize* and doubles after each value until the
    result would exceed *sizelimit*; from then on the last reached
    size is repeated forever.
    """
    size = windowsize
    while True:
        yield size
        if size < sizelimit:
            size *= 2
2037
2037
class FileWalkError(Exception):
    # raised by walkfilerevs() when the file history cannot be walked
    # using filelogs alone, signalling the caller to fall back to the
    # slow path that scans the full changelog
    pass
2040
2040
def walkfilerevs(repo, match, follow, revs, fncache):
    '''Walks the file history for the matched files.

    Returns the changeset revs that are involved in the file history.

    Throws FileWalkError if the file history can't be walked using
    filelogs alone.
    '''
    wanted = set()
    # rename sources discovered while following; processed after the
    # explicitly matched files
    copies = []
    minrev, maxrev = min(revs), max(revs)
    def filerevgen(filelog, last):
        """
        Only files, no patterns. Check the history of each file.

        Examines filelog entries within minrev, maxrev linkrev range
        Returns an iterator yielding (linkrev, parentlinkrevs, copied)
        tuples in backwards order
        """
        cl_count = len(repo)
        revs = []
        for j in xrange(0, last + 1):
            linkrev = filelog.linkrev(j)
            if linkrev < minrev:
                continue
            # only yield rev for which we have the changelog, it can
            # happen while doing "hg log" during a pull or commit
            if linkrev >= cl_count:
                break

            parentlinkrevs = []
            for p in filelog.parentrevs(j):
                if p != nullrev:
                    parentlinkrevs.append(filelog.linkrev(p))
            n = filelog.node(j)
            revs.append((linkrev, parentlinkrevs,
                         follow and filelog.renamed(n)))

        return reversed(revs)
    def iterfiles():
        # yield (filename, filenode) pairs: filenode pins the starting
        # point when following, and is None otherwise
        pctx = repo['.']
        for filename in match.files():
            if follow:
                if filename not in pctx:
                    raise error.Abort(_('cannot follow file not in parent '
                                        'revision: "%s"') % filename)
                yield filename, pctx[filename].filenode()
            else:
                yield filename, None
        # copies grows while the loop below runs, so rename sources are
        # picked up and walked as well
        for filename_node in copies:
            yield filename_node

    for file_, node in iterfiles():
        filelog = repo.file(file_)
        if not len(filelog):
            if node is None:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise error.Abort(
                        _('cannot follow nonexistent file: "%s"') % file_)
                raise FileWalkError("Cannot walk via filelog")
            else:
                continue

        if node is None:
            last = len(filelog) - 1
        else:
            last = filelog.rev(node)

        # keep track of all ancestors of the file
        ancestors = {filelog.linkrev(last)}

        # iterate from latest to oldest revision
        for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
            if not follow:
                if rev > maxrev:
                    continue
            else:
                # Note that last might not be the first interesting
                # rev to us:
                # if the file has been changed after maxrev, we'll
                # have linkrev(last) > maxrev, and we still need
                # to explore the file graph
                if rev not in ancestors:
                    continue
                # XXX insert 1327 fix here
                if flparentlinkrevs:
                    ancestors.update(flparentlinkrevs)

            # record which matched files changed in this rev
            fncache.setdefault(rev, []).append(file_)
            wanted.add(rev)
            if copied:
                copies.append(copied)

    return wanted
2137
2137
class _followfilter(object):
    # Incremental filter that decides whether each revision handed to
    # match() belongs to the ancestor/descendant chain of the first
    # revision it saw.  Revisions must be fed in a consistent direction
    # (all ascending or all descending from the start revision).
    def __init__(self, repo, onlyfirst=False):
        self.repo = repo
        # first revision passed to match(); nullrev until then
        self.startrev = nullrev
        # frontier of revisions known to be connected to startrev
        self.roots = set()
        # when True, only first parents are followed
        self.onlyfirst = onlyfirst

    def match(self, rev):
        def realparents(rev):
            if self.onlyfirst:
                return self.repo.changelog.parentrevs(rev)[0:1]
            else:
                # drop nullrev placeholders from the parent list
                return filter(lambda x: x != nullrev,
                              self.repo.changelog.parentrevs(rev))

        # first call: remember the anchor revision and accept it
        if self.startrev == nullrev:
            self.startrev = rev
            return True

        if rev > self.startrev:
            # forward: all descendants
            if not self.roots:
                self.roots.add(self.startrev)
            for parent in realparents(rev):
                if parent in self.roots:
                    self.roots.add(rev)
                    return True
        else:
            # backwards: all parents
            if not self.roots:
                self.roots.update(realparents(self.startrev))
            if rev in self.roots:
                # replace the matched root by its own parents so the
                # frontier keeps moving backwards
                self.roots.remove(rev)
                self.roots.update(realparents(rev))
                return True

        return False
2175
2175
2176 def walkchangerevs(repo, match, opts, prepare):
2176 def walkchangerevs(repo, match, opts, prepare):
2177 '''Iterate over files and the revs in which they changed.
2177 '''Iterate over files and the revs in which they changed.
2178
2178
2179 Callers most commonly need to iterate backwards over the history
2179 Callers most commonly need to iterate backwards over the history
2180 in which they are interested. Doing so has awful (quadratic-looking)
2180 in which they are interested. Doing so has awful (quadratic-looking)
2181 performance, so we use iterators in a "windowed" way.
2181 performance, so we use iterators in a "windowed" way.
2182
2182
2183 We walk a window of revisions in the desired order. Within the
2183 We walk a window of revisions in the desired order. Within the
2184 window, we first walk forwards to gather data, then in the desired
2184 window, we first walk forwards to gather data, then in the desired
2185 order (usually backwards) to display it.
2185 order (usually backwards) to display it.
2186
2186
2187 This function returns an iterator yielding contexts. Before
2187 This function returns an iterator yielding contexts. Before
2188 yielding each context, the iterator will first call the prepare
2188 yielding each context, the iterator will first call the prepare
2189 function on each context in the window in forward order.'''
2189 function on each context in the window in forward order.'''
2190
2190
2191 follow = opts.get('follow') or opts.get('follow_first')
2191 follow = opts.get('follow') or opts.get('follow_first')
2192 revs = _logrevs(repo, opts)
2192 revs = _logrevs(repo, opts)
2193 if not revs:
2193 if not revs:
2194 return []
2194 return []
2195 wanted = set()
2195 wanted = set()
2196 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
2196 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
2197 opts.get('removed'))
2197 opts.get('removed'))
2198 fncache = {}
2198 fncache = {}
2199 change = repo.changectx
2199 change = repo.changectx
2200
2200
2201 # First step is to fill wanted, the set of revisions that we want to yield.
2201 # First step is to fill wanted, the set of revisions that we want to yield.
2202 # When it does not induce extra cost, we also fill fncache for revisions in
2202 # When it does not induce extra cost, we also fill fncache for revisions in
2203 # wanted: a cache of filenames that were changed (ctx.files()) and that
2203 # wanted: a cache of filenames that were changed (ctx.files()) and that
2204 # match the file filtering conditions.
2204 # match the file filtering conditions.
2205
2205
2206 if match.always():
2206 if match.always():
2207 # No files, no patterns. Display all revs.
2207 # No files, no patterns. Display all revs.
2208 wanted = revs
2208 wanted = revs
2209 elif not slowpath:
2209 elif not slowpath:
2210 # We only have to read through the filelog to find wanted revisions
2210 # We only have to read through the filelog to find wanted revisions
2211
2211
2212 try:
2212 try:
2213 wanted = walkfilerevs(repo, match, follow, revs, fncache)
2213 wanted = walkfilerevs(repo, match, follow, revs, fncache)
2214 except FileWalkError:
2214 except FileWalkError:
2215 slowpath = True
2215 slowpath = True
2216
2216
2217 # We decided to fall back to the slowpath because at least one
2217 # We decided to fall back to the slowpath because at least one
2218 # of the paths was not a file. Check to see if at least one of them
2218 # of the paths was not a file. Check to see if at least one of them
2219 # existed in history, otherwise simply return
2219 # existed in history, otherwise simply return
2220 for path in match.files():
2220 for path in match.files():
2221 if path == '.' or path in repo.store:
2221 if path == '.' or path in repo.store:
2222 break
2222 break
2223 else:
2223 else:
2224 return []
2224 return []
2225
2225
2226 if slowpath:
2226 if slowpath:
2227 # We have to read the changelog to match filenames against
2227 # We have to read the changelog to match filenames against
2228 # changed files
2228 # changed files
2229
2229
2230 if follow:
2230 if follow:
2231 raise error.Abort(_('can only follow copies/renames for explicit '
2231 raise error.Abort(_('can only follow copies/renames for explicit '
2232 'filenames'))
2232 'filenames'))
2233
2233
2234 # The slow path checks files modified in every changeset.
2234 # The slow path checks files modified in every changeset.
2235 # This is really slow on large repos, so compute the set lazily.
2235 # This is really slow on large repos, so compute the set lazily.
2236 class lazywantedset(object):
2236 class lazywantedset(object):
2237 def __init__(self):
2237 def __init__(self):
2238 self.set = set()
2238 self.set = set()
2239 self.revs = set(revs)
2239 self.revs = set(revs)
2240
2240
2241 # No need to worry about locality here because it will be accessed
2241 # No need to worry about locality here because it will be accessed
2242 # in the same order as the increasing window below.
2242 # in the same order as the increasing window below.
2243 def __contains__(self, value):
2243 def __contains__(self, value):
2244 if value in self.set:
2244 if value in self.set:
2245 return True
2245 return True
2246 elif not value in self.revs:
2246 elif not value in self.revs:
2247 return False
2247 return False
2248 else:
2248 else:
2249 self.revs.discard(value)
2249 self.revs.discard(value)
2250 ctx = change(value)
2250 ctx = change(value)
2251 matches = filter(match, ctx.files())
2251 matches = filter(match, ctx.files())
2252 if matches:
2252 if matches:
2253 fncache[value] = matches
2253 fncache[value] = matches
2254 self.set.add(value)
2254 self.set.add(value)
2255 return True
2255 return True
2256 return False
2256 return False
2257
2257
2258 def discard(self, value):
2258 def discard(self, value):
2259 self.revs.discard(value)
2259 self.revs.discard(value)
2260 self.set.discard(value)
2260 self.set.discard(value)
2261
2261
2262 wanted = lazywantedset()
2262 wanted = lazywantedset()
2263
2263
2264 # it might be worthwhile to do this in the iterator if the rev range
2264 # it might be worthwhile to do this in the iterator if the rev range
2265 # is descending and the prune args are all within that range
2265 # is descending and the prune args are all within that range
2266 for rev in opts.get('prune', ()):
2266 for rev in opts.get('prune', ()):
2267 rev = repo[rev].rev()
2267 rev = repo[rev].rev()
2268 ff = _followfilter(repo)
2268 ff = _followfilter(repo)
2269 stop = min(revs[0], revs[-1])
2269 stop = min(revs[0], revs[-1])
2270 for x in xrange(rev, stop - 1, -1):
2270 for x in xrange(rev, stop - 1, -1):
2271 if ff.match(x):
2271 if ff.match(x):
2272 wanted = wanted - [x]
2272 wanted = wanted - [x]
2273
2273
2274 # Now that wanted is correctly initialized, we can iterate over the
2274 # Now that wanted is correctly initialized, we can iterate over the
2275 # revision range, yielding only revisions in wanted.
2275 # revision range, yielding only revisions in wanted.
2276 def iterate():
2276 def iterate():
2277 if follow and match.always():
2277 if follow and match.always():
2278 ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
2278 ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
2279 def want(rev):
2279 def want(rev):
2280 return ff.match(rev) and rev in wanted
2280 return ff.match(rev) and rev in wanted
2281 else:
2281 else:
2282 def want(rev):
2282 def want(rev):
2283 return rev in wanted
2283 return rev in wanted
2284
2284
2285 it = iter(revs)
2285 it = iter(revs)
2286 stopiteration = False
2286 stopiteration = False
2287 for windowsize in increasingwindows():
2287 for windowsize in increasingwindows():
2288 nrevs = []
2288 nrevs = []
2289 for i in xrange(windowsize):
2289 for i in xrange(windowsize):
2290 rev = next(it, None)
2290 rev = next(it, None)
2291 if rev is None:
2291 if rev is None:
2292 stopiteration = True
2292 stopiteration = True
2293 break
2293 break
2294 elif want(rev):
2294 elif want(rev):
2295 nrevs.append(rev)
2295 nrevs.append(rev)
2296 for rev in sorted(nrevs):
2296 for rev in sorted(nrevs):
2297 fns = fncache.get(rev)
2297 fns = fncache.get(rev)
2298 ctx = change(rev)
2298 ctx = change(rev)
2299 if not fns:
2299 if not fns:
2300 def fns_generator():
2300 def fns_generator():
2301 for f in ctx.files():
2301 for f in ctx.files():
2302 if match(f):
2302 if match(f):
2303 yield f
2303 yield f
2304 fns = fns_generator()
2304 fns = fns_generator()
2305 prepare(ctx, fns)
2305 prepare(ctx, fns)
2306 for rev in nrevs:
2306 for rev in nrevs:
2307 yield change(rev)
2307 yield change(rev)
2308
2308
2309 if stopiteration:
2309 if stopiteration:
2310 break
2310 break
2311
2311
2312 return iterate()
2312 return iterate()
2313
2313
def _makefollowlogfilematcher(repo, files, followfirst):
    # When displaying a revision with --patch --follow FILE, we have
    # to know which file of the revision must be diffed. With
    # --follow, we want the names of the ancestors of FILE in the
    # revision, stored in "namesbyrev". The map is built lazily, on
    # first use, by reproducing the graph traversal already done by
    # the --follow revset and relating revs to file names (which is
    # not "correct" but good enough).
    namesbyrev = {}
    populated = [False]  # one-slot list: closure-writable flag (no py2 nonlocal)
    parentctx = repo['.']

    def _fill():
        # One-time pass: record, for every introducing/ancestor revision,
        # which of the followed files it carries.
        for name in files:
            fctx = parentctx[name]
            namesbyrev.setdefault(fctx.introrev(), set()).add(fctx.path())
            for anc in fctx.ancestors(followfirst=followfirst):
                namesbyrev.setdefault(anc.rev(), set()).add(anc.path())

    def filematcher(rev):
        if not populated[0]:
            # Lazy initialization on the first lookup.
            populated[0] = True
            _fill()
        return scmutil.matchfiles(repo, namesbyrev.get(rev, []))

    return filematcher
2341
2341
2342 def _makenofollowlogfilematcher(repo, pats, opts):
2342 def _makenofollowlogfilematcher(repo, pats, opts):
2343 '''hook for extensions to override the filematcher for non-follow cases'''
2343 '''hook for extensions to override the filematcher for non-follow cases'''
2344 return None
2344 return None
2345
2345
def _makelogrevset(repo, pats, opts, revs):
    """Return (expr, filematcher) where expr is a revset string built
    from log options and file patterns or None. If --stat or --patch
    are not passed filematcher is None. Otherwise it is a callable
    taking a revision number and returning a match objects filtering
    the files to be detailed when displaying the revision.
    """
    # Map each log option to (revset template, joiner). The joiner is
    # used when the option value is a list; None means the option never
    # carries a list. Keys starting with '_' are pseudo-options injected
    # into the opts copy below, not real CLI flags.
    opt2revset = {
        'no_merges': ('not merge()', None),
        'only_merges': ('merge()', None),
        '_ancestors': ('ancestors(%(val)s)', None),
        '_fancestors': ('_firstancestors(%(val)s)', None),
        '_descendants': ('descendants(%(val)s)', None),
        '_fdescendants': ('_firstdescendants(%(val)s)', None),
        '_matchfiles': ('_matchfiles(%(val)s)', None),
        'date': ('date(%(val)r)', None),
        'branch': ('branch(%(val)r)', ' or '),
        '_patslog': ('filelog(%(val)r)', ' or '),
        '_patsfollow': ('follow(%(val)r)', ' or '),
        '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
        'keyword': ('keyword(%(val)r)', ' or '),
        'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
        'user': ('user(%(val)r)', ' or '),
    }

    # Work on a copy: pseudo-options are added to it further down.
    opts = dict(opts)
    # follow or not follow?
    follow = opts.get('follow') or opts.get('follow_first')
    if opts.get('follow_first'):
        followfirst = 1
    else:
        followfirst = 0
    # --follow with FILE behavior depends on revs...
    it = iter(revs)
    startrev = next(it)
    # Ascending revs (second rev larger) => follow descendants of startrev.
    followdescendants = startrev < next(it, startrev)

    # branch and only_branch are really aliases and must be handled at
    # the same time
    opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
    opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
    # pats/include/exclude are passed to match.match() directly in
    # _matchfiles() revset but walkchangerevs() builds its matcher with
    # scmutil.match(). The difference is input pats are globbed on
    # platforms without shell expansion (windows).
    wctx = repo[None]
    match, pats = scmutil.matchandpats(wctx, pats, opts)
    slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
                                   opts.get('removed'))
    if not slowpath:
        for f in match.files():
            if follow and f not in wctx:
                # If the file exists, it may be a directory, so let it
                # take the slow path.
                if os.path.exists(repo.wjoin(f)):
                    slowpath = True
                    continue
                else:
                    raise error.Abort(_('cannot follow file not in parent '
                                        'revision: "%s"') % f)
            filelog = repo.file(f)
            if not filelog:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise error.Abort(
                        _('cannot follow nonexistent file: "%s"') % f)
                slowpath = True

    # We decided to fall back to the slowpath because at least one
    # of the paths was not a file. Check to see if at least one of them
    # existed in history - in that case, we'll continue down the
    # slowpath; otherwise, we can turn off the slowpath
    if slowpath:
        for path in match.files():
            if path == '.' or path in repo.store:
                break
        else:
            slowpath = False

    # Pseudo-option names, indexed by followfirst (fpats) and by
    # (followdescendants, followfirst) (fnopats).
    fpats = ('_patsfollow', '_patsfollowfirst')
    fnopats = (('_ancestors', '_fancestors'),
               ('_descendants', '_fdescendants'))
    if slowpath:
        # See walkchangerevs() slow path.
        #
        # pats/include/exclude cannot be represented as separate
        # revset expressions as their filtering logic applies at file
        # level. For instance "-I a -X a" matches a revision touching
        # "a" and "b" while "file(a) and not file(b)" does
        # not. Besides, filesets are evaluated against the working
        # directory.
        matchargs = ['r:', 'd:relpath']
        for p in pats:
            matchargs.append('p:' + p)
        for p in opts.get('include', []):
            matchargs.append('i:' + p)
        for p in opts.get('exclude', []):
            matchargs.append('x:' + p)
        matchargs = ','.join(('%r' % p) for p in matchargs)
        opts['_matchfiles'] = matchargs
        if follow:
            opts[fnopats[0][followfirst]] = '.'
    else:
        if follow:
            if pats:
                # follow() revset interprets its file argument as a
                # manifest entry, so use match.files(), not pats.
                opts[fpats[followfirst]] = list(match.files())
            else:
                op = fnopats[followdescendants][followfirst]
                opts[op] = 'rev(%d)' % startrev
        else:
            opts['_patslog'] = list(pats)

    filematcher = None
    if opts.get('patch') or opts.get('stat'):
        # When following files, track renames via a special matcher.
        # If we're forced to take the slowpath it means we're following
        # at least one pattern/directory, so don't bother with rename tracking.
        if follow and not match.always() and not slowpath:
            # _makefollowlogfilematcher expects its files argument to be
            # relative to the repo root, so use match.files(), not pats.
            filematcher = _makefollowlogfilematcher(repo, match.files(),
                                                    followfirst)
        else:
            filematcher = _makenofollowlogfilematcher(repo, pats, opts)
            if filematcher is None:
                # Fall back to the single matcher built above.
                filematcher = lambda rev: match

    # Assemble the final revset: one clause per active option, ANDed
    # together. Options not in opt2revset (e.g. plain flags handled
    # elsewhere) are skipped.
    expr = []
    for op, val in sorted(opts.iteritems()):
        if not val:
            continue
        if op not in opt2revset:
            continue
        revop, andor = opt2revset[op]
        if '%(val)' not in revop:
            # Template takes no value (e.g. 'not merge()').
            expr.append(revop)
        else:
            if not isinstance(val, list):
                e = revop % {'val': val}
            else:
                # List-valued option: join per-value clauses with the
                # option's own connective (' or ' / ' and ').
                e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
            expr.append(e)

    if expr:
        expr = '(' + ' and '.join(expr) + ')'
    else:
        expr = None
    return expr, filematcher
2497
2497
def _logrevs(repo, opts):
    """Resolve the revisions 'log' should visit by default.

    The default --rev value depends on --follow, while --follow behavior
    depends on the revisions resolved from --rev, hence the ordering of
    the checks below.
    """
    follow = opts.get('follow') or opts.get('follow_first')
    if opts.get('rev'):
        # Explicit --rev wins.
        return scmutil.revrange(repo, opts['rev'])
    if follow and repo.dirstate.p1() == nullid:
        # Following from an unborn working-directory parent: nothing.
        return smartset.baseset()
    if follow:
        # Ancestry of the working directory parent, newest first.
        return repo.revs('reverse(:.)')
    # No --rev, no --follow: every revision, newest first.
    allrevs = smartset.spanset(repo)
    allrevs.reverse()
    return allrevs
2512
2512
def getgraphlogrevs(repo, pats, opts):
    """Compute the revisions 'log -G' should display.

    Returns (revs, expr, filematcher): revs is an iterable of revision
    numbers; expr is the revset string derived from the log options and
    file patterns (or None) used to filter revs; filematcher is None
    unless --stat/--patch was passed, in which case it maps a revision
    number to a match object selecting the files to detail for that
    revision.
    """
    maxcount = loglimit(opts)
    revs = _logrevs(repo, opts)
    if not revs:
        return smartset.baseset(), None, None
    expr, filematcher = _makelogrevset(repo, pats, opts, revs)
    if opts.get('rev') and not (revs.isdescending() or revs.istopo()):
        # User-specified revs might be unsorted; sorting must wait until
        # after _makelogrevset, which may depend on the original order.
        revs.sort(reverse=True)
    if expr:
        revs = revset.match(repo.ui, expr)(repo, revs)
    if maxcount is not None:
        # Honour --limit by keeping only the first maxcount revisions.
        revs = smartset.baseset(list(itertools.islice(revs, maxcount)))
    return revs, expr, filematcher
2543
2543
def getlogrevs(repo, pats, opts):
    """Compute the revisions 'log' should display.

    Returns (revs, expr, filematcher): revs is an iterable of revision
    numbers; expr is the revset string derived from the log options and
    file patterns (or None) used to filter revs; filematcher is None
    unless --stat/--patch was passed, in which case it maps a revision
    number to a match object selecting the files to detail for that
    revision.
    """
    maxcount = loglimit(opts)
    revs = _logrevs(repo, opts)
    if not revs:
        return smartset.baseset([]), None, None
    expr, filematcher = _makelogrevset(repo, pats, opts, revs)
    if expr:
        revs = revset.match(repo.ui, expr)(repo, revs)
    if maxcount is not None:
        # Honour --limit by keeping only the first maxcount revisions.
        taken = []
        for rev in revs:
            if len(taken) >= maxcount:
                break
            taken.append(rev)
        revs = smartset.baseset(taken)
    return revs, expr, filematcher
2569
2569
def _parselinerangelogopt(repo, opts):
    """Parse the --line-range log options.

    Each value has the form "PATTERN,FROM:TO". Returns a list of
    (filename, (fromline, toline)) tuples; aborts on malformed values
    or on a pattern that does not match exactly one file.
    """
    parsed = []
    for rawpat in opts.get('line_range', []):
        # Split off the trailing ",FROM:TO" range specification.
        try:
            filepat, rangespec = rawpat.rsplit(',', 1)
        except ValueError:
            raise error.Abort(_('malformatted line-range pattern %s') % rawpat)
        try:
            fromline, toline = map(int, rangespec.split(':'))
        except ValueError:
            raise error.Abort(_("invalid line range for %s") % filepat)
        msg = _("line range pattern '%s' must match exactly one file") % filepat
        fname = scmutil.parsefollowlinespattern(repo, None, filepat, msg)
        parsed.append((fname, util.processlinerange(fromline, toline)))
    return parsed
2589
2589
def getloglinerangerevs(repo, userrevs, opts):
    """Return (revs, filematcher, hunksfilter).

    "revs" are revisions obtained by processing "line-range" log options and
    walking block ancestors of each specified file/line-range.

    "filematcher(rev) -> match" is a factory function returning a match object
    for a given revision for file patterns specified in --line-range option.
    If neither --stat nor --patch options are passed, "filematcher" is None.

    "hunksfilter(rev) -> filterfn(fctx, hunks)" is a factory function
    returning a hunks filtering function.
    If neither --stat nor --patch options are passed, "filterhunks" is None.
    """
    wctx = repo[None]

    # Two-levels map of "rev -> file ctx -> [line range]".
    linerangesbyrev = {}
    for fname, (fromline, toline) in _parselinerangelogopt(repo, opts):
        if fname not in wctx:
            raise error.Abort(_('cannot follow file not in parent '
                                'revision: "%s"') % fname)
        fctx = wctx.filectx(fname)
        # Walk ancestors of the requested line block; keep only those
        # introduced in a revision the user asked for.
        for fctx, linerange in dagop.blockancestors(fctx, fromline, toline):
            rev = fctx.introrev()
            if rev not in userrevs:
                continue
            linerangesbyrev.setdefault(
                rev, {}).setdefault(
                    fctx.path(), []).append(linerange)

    filematcher = None
    hunksfilter = None
    if opts.get('patch') or opts.get('stat'):

        def nofilterhunksfn(fctx, hunks):
            # Pass-through filter for revisions with no recorded ranges.
            return hunks

        # NOTE: this def rebinds the 'hunksfilter' local initialized to
        # None above, so the None value is only returned when neither
        # --patch nor --stat was given.
        def hunksfilter(rev):
            fctxlineranges = linerangesbyrev.get(rev)
            if fctxlineranges is None:
                return nofilterhunksfn

            def filterfn(fctx, hunks):
                # Keep only hunks overlapping one of the requested line
                # ranges for this file; binary hunks (hr is None) always
                # pass through. Files with no recorded ranges are kept
                # unfiltered.
                lineranges = fctxlineranges.get(fctx.path())
                if lineranges is not None:
                    for hr, lines in hunks:
                        if hr is None: # binary
                            yield hr, lines
                            continue
                        if any(mdiff.hunkinrange(hr[2:], lr)
                               for lr in lineranges):
                            yield hr, lines
                else:
                    for hunk in hunks:
                        yield hunk

            return filterfn

        def filematcher(rev):
            # Match exactly the files that have line ranges at this rev.
            files = list(linerangesbyrev.get(rev, []))
            return scmutil.matchfiles(repo, files)

    # Revision numbers in descending order.
    revs = sorted(linerangesbyrev, reverse=True)

    return revs, filematcher, hunksfilter
2656
2656
def _graphnodeformatter(ui, displayer):
    """Return a callable (repo, ctx) -> rendered graph node string.

    Honors the ui.graphnodetemplate config; without it, the stock
    {graphnode} keyword implementation is returned directly.
    """
    spec = ui.config('ui', 'graphnodetemplate')
    if not spec:
        return templatekw.showgraphnode  # fast path for "{graphnode}"

    spec = templater.unquotestring(spec)
    templ = formatter.maketemplater(ui, spec)
    cache = {}
    if isinstance(displayer, changeset_templater):
        cache = displayer.cache # reuse cache of slow templates
    props = templatekw.keywords.copy()
    props['templ'] = templ
    props['cache'] = cache
    def formatnode(repo, ctx):
        # props is shared across calls and mutated in place; revcache is
        # reset per revision so per-rev template keywords don't leak.
        props['ctx'] = ctx
        props['repo'] = repo
        props['ui'] = repo.ui
        props['revcache'] = {}
        return templ.render(props)
    return formatnode
2677
2677
def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None,
                 filematcher=None, props=None):
    """Render each (rev, type, ctx, parents) item of 'dag' through
    'displayer', drawing ASCII graph edges computed by 'edgefn'.

    getrenamed, if given, is used to annotate copies/renames per file;
    filematcher, if given, restricts which files are detailed per rev;
    props are extra template properties forwarded to displayer.show().
    """
    props = props or {}
    formatnode = _graphnodeformatter(ui, displayer)
    state = graphmod.asciistate()
    styles = state['styles']

    # only set graph styling if HGPLAIN is not set.
    if ui.plain('graph'):
        # set all edge styles to |, the default pre-3.8 behaviour
        styles.update(dict.fromkeys(styles, '|'))
    else:
        edgetypes = {
            'parent': graphmod.PARENT,
            'grandparent': graphmod.GRANDPARENT,
            'missing': graphmod.MISSINGPARENT
        }
        for name, key in edgetypes.items():
            # experimental config: experimental.graphstyle.*
            styles[key] = ui.config('experimental', 'graphstyle.%s' % name,
                                    styles[key])
            if not styles[key]:
                styles[key] = None

    # experimental config: experimental.graphshorten
    state['graphshorten'] = ui.configbool('experimental', 'graphshorten')

    for rev, type, ctx, parents in dag:
        char = formatnode(repo, ctx)
        copies = None
        # NOTE(review): ctx.rev() is falsy for None (workingctx) and
        # also for rev 0, so rename annotation is skipped for both.
        if getrenamed and ctx.rev():
            copies = []
            for fn in ctx.files():
                rename = getrenamed(fn, ctx.rev())
                if rename:
                    copies.append((fn, rename[0]))
        revmatchfn = None
        if filematcher is not None:
            revmatchfn = filematcher(ctx.rev())
        # Pull the first edge eagerly: its width is needed before
        # displayer.show(), the rest are consumed in the chain below.
        edges = edgefn(type, char, state, rev, parents)
        firstedge = next(edges)
        width = firstedge[2]
        displayer.show(ctx, copies=copies, matchfn=revmatchfn,
                       _graphwidth=width, **pycompat.strkwargs(props))
        # Buffered output for this rev becomes the text placed next to
        # the graph column(s); drop a trailing empty line from split().
        lines = displayer.hunk.pop(rev).split('\n')
        if not lines[-1]:
            del lines[-1]
        displayer.flush(ctx)
        for type, char, width, coldata in itertools.chain([firstedge], edges):
            graphmod.ascii(ui, state, type, char, lines, coldata)
            # Text lines are emitted only once, alongside the first edge.
            lines = []
    displayer.close()
2730
2730
2731 def graphlog(ui, repo, pats, opts):
2731 def graphlog(ui, repo, pats, opts):
2732 # Parameters are identical to log command ones
2732 # Parameters are identical to log command ones
2733 revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
2733 revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
2734 revdag = graphmod.dagwalker(repo, revs)
2734 revdag = graphmod.dagwalker(repo, revs)
2735
2735
2736 getrenamed = None
2736 getrenamed = None
2737 if opts.get('copies'):
2737 if opts.get('copies'):
2738 endrev = None
2738 endrev = None
2739 if opts.get('rev'):
2739 if opts.get('rev'):
2740 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
2740 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
2741 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
2741 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
2742
2742
2743 ui.pager('log')
2743 ui.pager('log')
2744 displayer = show_changeset(ui, repo, opts, buffered=True)
2744 displayer = show_changeset(ui, repo, opts, buffered=True)
2745 displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges, getrenamed,
2745 displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges, getrenamed,
2746 filematcher)
2746 filematcher)
2747
2747
2748 def checkunsupportedgraphflags(pats, opts):
2748 def checkunsupportedgraphflags(pats, opts):
2749 for op in ["newest_first"]:
2749 for op in ["newest_first"]:
2750 if op in opts and opts[op]:
2750 if op in opts and opts[op]:
2751 raise error.Abort(_("-G/--graph option is incompatible with --%s")
2751 raise error.Abort(_("-G/--graph option is incompatible with --%s")
2752 % op.replace("_", "-"))
2752 % op.replace("_", "-"))
2753
2753
2754 def graphrevs(repo, nodes, opts):
2754 def graphrevs(repo, nodes, opts):
2755 limit = loglimit(opts)
2755 limit = loglimit(opts)
2756 nodes.reverse()
2756 nodes.reverse()
2757 if limit is not None:
2757 if limit is not None:
2758 nodes = nodes[:limit]
2758 nodes = nodes[:limit]
2759 return graphmod.nodes(repo, nodes)
2759 return graphmod.nodes(repo, nodes)
2760
2760
2761 def add(ui, repo, match, prefix, explicitonly, **opts):
2761 def add(ui, repo, match, prefix, explicitonly, **opts):
2762 join = lambda f: os.path.join(prefix, f)
2762 join = lambda f: os.path.join(prefix, f)
2763 bad = []
2763 bad = []
2764
2764
2765 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2765 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2766 names = []
2766 names = []
2767 wctx = repo[None]
2767 wctx = repo[None]
2768 cca = None
2768 cca = None
2769 abort, warn = scmutil.checkportabilityalert(ui)
2769 abort, warn = scmutil.checkportabilityalert(ui)
2770 if abort or warn:
2770 if abort or warn:
2771 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
2771 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
2772
2772
2773 badmatch = matchmod.badmatch(match, badfn)
2773 badmatch = matchmod.badmatch(match, badfn)
2774 dirstate = repo.dirstate
2774 dirstate = repo.dirstate
2775 # We don't want to just call wctx.walk here, since it would return a lot of
2775 # We don't want to just call wctx.walk here, since it would return a lot of
2776 # clean files, which we aren't interested in and takes time.
2776 # clean files, which we aren't interested in and takes time.
2777 for f in sorted(dirstate.walk(badmatch, subrepos=sorted(wctx.substate),
2777 for f in sorted(dirstate.walk(badmatch, subrepos=sorted(wctx.substate),
2778 unknown=True, ignored=False, full=False)):
2778 unknown=True, ignored=False, full=False)):
2779 exact = match.exact(f)
2779 exact = match.exact(f)
2780 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
2780 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
2781 if cca:
2781 if cca:
2782 cca(f)
2782 cca(f)
2783 names.append(f)
2783 names.append(f)
2784 if ui.verbose or not exact:
2784 if ui.verbose or not exact:
2785 ui.status(_('adding %s\n') % match.rel(f))
2785 ui.status(_('adding %s\n') % match.rel(f))
2786
2786
2787 for subpath in sorted(wctx.substate):
2787 for subpath in sorted(wctx.substate):
2788 sub = wctx.sub(subpath)
2788 sub = wctx.sub(subpath)
2789 try:
2789 try:
2790 submatch = matchmod.subdirmatcher(subpath, match)
2790 submatch = matchmod.subdirmatcher(subpath, match)
2791 if opts.get(r'subrepos'):
2791 if opts.get(r'subrepos'):
2792 bad.extend(sub.add(ui, submatch, prefix, False, **opts))
2792 bad.extend(sub.add(ui, submatch, prefix, False, **opts))
2793 else:
2793 else:
2794 bad.extend(sub.add(ui, submatch, prefix, True, **opts))
2794 bad.extend(sub.add(ui, submatch, prefix, True, **opts))
2795 except error.LookupError:
2795 except error.LookupError:
2796 ui.status(_("skipping missing subrepository: %s\n")
2796 ui.status(_("skipping missing subrepository: %s\n")
2797 % join(subpath))
2797 % join(subpath))
2798
2798
2799 if not opts.get(r'dry_run'):
2799 if not opts.get(r'dry_run'):
2800 rejected = wctx.add(names, prefix)
2800 rejected = wctx.add(names, prefix)
2801 bad.extend(f for f in rejected if f in match.files())
2801 bad.extend(f for f in rejected if f in match.files())
2802 return bad
2802 return bad
2803
2803
2804 def addwebdirpath(repo, serverpath, webconf):
2804 def addwebdirpath(repo, serverpath, webconf):
2805 webconf[serverpath] = repo.root
2805 webconf[serverpath] = repo.root
2806 repo.ui.debug('adding %s = %s\n' % (serverpath, repo.root))
2806 repo.ui.debug('adding %s = %s\n' % (serverpath, repo.root))
2807
2807
2808 for r in repo.revs('filelog("path:.hgsub")'):
2808 for r in repo.revs('filelog("path:.hgsub")'):
2809 ctx = repo[r]
2809 ctx = repo[r]
2810 for subpath in ctx.substate:
2810 for subpath in ctx.substate:
2811 ctx.sub(subpath).addwebdirpath(serverpath, webconf)
2811 ctx.sub(subpath).addwebdirpath(serverpath, webconf)
2812
2812
2813 def forget(ui, repo, match, prefix, explicitonly):
2813 def forget(ui, repo, match, prefix, explicitonly):
2814 join = lambda f: os.path.join(prefix, f)
2814 join = lambda f: os.path.join(prefix, f)
2815 bad = []
2815 bad = []
2816 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2816 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2817 wctx = repo[None]
2817 wctx = repo[None]
2818 forgot = []
2818 forgot = []
2819
2819
2820 s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
2820 s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
2821 forget = sorted(s.modified + s.added + s.deleted + s.clean)
2821 forget = sorted(s.modified + s.added + s.deleted + s.clean)
2822 if explicitonly:
2822 if explicitonly:
2823 forget = [f for f in forget if match.exact(f)]
2823 forget = [f for f in forget if match.exact(f)]
2824
2824
2825 for subpath in sorted(wctx.substate):
2825 for subpath in sorted(wctx.substate):
2826 sub = wctx.sub(subpath)
2826 sub = wctx.sub(subpath)
2827 try:
2827 try:
2828 submatch = matchmod.subdirmatcher(subpath, match)
2828 submatch = matchmod.subdirmatcher(subpath, match)
2829 subbad, subforgot = sub.forget(submatch, prefix)
2829 subbad, subforgot = sub.forget(submatch, prefix)
2830 bad.extend([subpath + '/' + f for f in subbad])
2830 bad.extend([subpath + '/' + f for f in subbad])
2831 forgot.extend([subpath + '/' + f for f in subforgot])
2831 forgot.extend([subpath + '/' + f for f in subforgot])
2832 except error.LookupError:
2832 except error.LookupError:
2833 ui.status(_("skipping missing subrepository: %s\n")
2833 ui.status(_("skipping missing subrepository: %s\n")
2834 % join(subpath))
2834 % join(subpath))
2835
2835
2836 if not explicitonly:
2836 if not explicitonly:
2837 for f in match.files():
2837 for f in match.files():
2838 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2838 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2839 if f not in forgot:
2839 if f not in forgot:
2840 if repo.wvfs.exists(f):
2840 if repo.wvfs.exists(f):
2841 # Don't complain if the exact case match wasn't given.
2841 # Don't complain if the exact case match wasn't given.
2842 # But don't do this until after checking 'forgot', so
2842 # But don't do this until after checking 'forgot', so
2843 # that subrepo files aren't normalized, and this op is
2843 # that subrepo files aren't normalized, and this op is
2844 # purely from data cached by the status walk above.
2844 # purely from data cached by the status walk above.
2845 if repo.dirstate.normalize(f) in repo.dirstate:
2845 if repo.dirstate.normalize(f) in repo.dirstate:
2846 continue
2846 continue
2847 ui.warn(_('not removing %s: '
2847 ui.warn(_('not removing %s: '
2848 'file is already untracked\n')
2848 'file is already untracked\n')
2849 % match.rel(f))
2849 % match.rel(f))
2850 bad.append(f)
2850 bad.append(f)
2851
2851
2852 for f in forget:
2852 for f in forget:
2853 if ui.verbose or not match.exact(f):
2853 if ui.verbose or not match.exact(f):
2854 ui.status(_('removing %s\n') % match.rel(f))
2854 ui.status(_('removing %s\n') % match.rel(f))
2855
2855
2856 rejected = wctx.forget(forget, prefix)
2856 rejected = wctx.forget(forget, prefix)
2857 bad.extend(f for f in rejected if f in match.files())
2857 bad.extend(f for f in rejected if f in match.files())
2858 forgot.extend(f for f in forget if f not in rejected)
2858 forgot.extend(f for f in forget if f not in rejected)
2859 return bad, forgot
2859 return bad, forgot
2860
2860
2861 def files(ui, ctx, m, fm, fmt, subrepos):
2861 def files(ui, ctx, m, fm, fmt, subrepos):
2862 rev = ctx.rev()
2862 rev = ctx.rev()
2863 ret = 1
2863 ret = 1
2864 ds = ctx.repo().dirstate
2864 ds = ctx.repo().dirstate
2865
2865
2866 for f in ctx.matches(m):
2866 for f in ctx.matches(m):
2867 if rev is None and ds[f] == 'r':
2867 if rev is None and ds[f] == 'r':
2868 continue
2868 continue
2869 fm.startitem()
2869 fm.startitem()
2870 if ui.verbose:
2870 if ui.verbose:
2871 fc = ctx[f]
2871 fc = ctx[f]
2872 fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
2872 fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
2873 fm.data(abspath=f)
2873 fm.data(abspath=f)
2874 fm.write('path', fmt, m.rel(f))
2874 fm.write('path', fmt, m.rel(f))
2875 ret = 0
2875 ret = 0
2876
2876
2877 for subpath in sorted(ctx.substate):
2877 for subpath in sorted(ctx.substate):
2878 submatch = matchmod.subdirmatcher(subpath, m)
2878 submatch = matchmod.subdirmatcher(subpath, m)
2879 if (subrepos or m.exact(subpath) or any(submatch.files())):
2879 if (subrepos or m.exact(subpath) or any(submatch.files())):
2880 sub = ctx.sub(subpath)
2880 sub = ctx.sub(subpath)
2881 try:
2881 try:
2882 recurse = m.exact(subpath) or subrepos
2882 recurse = m.exact(subpath) or subrepos
2883 if sub.printfiles(ui, submatch, fm, fmt, recurse) == 0:
2883 if sub.printfiles(ui, submatch, fm, fmt, recurse) == 0:
2884 ret = 0
2884 ret = 0
2885 except error.LookupError:
2885 except error.LookupError:
2886 ui.status(_("skipping missing subrepository: %s\n")
2886 ui.status(_("skipping missing subrepository: %s\n")
2887 % m.abs(subpath))
2887 % m.abs(subpath))
2888
2888
2889 return ret
2889 return ret
2890
2890
2891 def remove(ui, repo, m, prefix, after, force, subrepos, warnings=None):
2891 def remove(ui, repo, m, prefix, after, force, subrepos, warnings=None):
2892 join = lambda f: os.path.join(prefix, f)
2892 join = lambda f: os.path.join(prefix, f)
2893 ret = 0
2893 ret = 0
2894 s = repo.status(match=m, clean=True)
2894 s = repo.status(match=m, clean=True)
2895 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2895 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2896
2896
2897 wctx = repo[None]
2897 wctx = repo[None]
2898
2898
2899 if warnings is None:
2899 if warnings is None:
2900 warnings = []
2900 warnings = []
2901 warn = True
2901 warn = True
2902 else:
2902 else:
2903 warn = False
2903 warn = False
2904
2904
2905 subs = sorted(wctx.substate)
2905 subs = sorted(wctx.substate)
2906 total = len(subs)
2906 total = len(subs)
2907 count = 0
2907 count = 0
2908 for subpath in subs:
2908 for subpath in subs:
2909 count += 1
2909 count += 1
2910 submatch = matchmod.subdirmatcher(subpath, m)
2910 submatch = matchmod.subdirmatcher(subpath, m)
2911 if subrepos or m.exact(subpath) or any(submatch.files()):
2911 if subrepos or m.exact(subpath) or any(submatch.files()):
2912 ui.progress(_('searching'), count, total=total, unit=_('subrepos'))
2912 ui.progress(_('searching'), count, total=total, unit=_('subrepos'))
2913 sub = wctx.sub(subpath)
2913 sub = wctx.sub(subpath)
2914 try:
2914 try:
2915 if sub.removefiles(submatch, prefix, after, force, subrepos,
2915 if sub.removefiles(submatch, prefix, after, force, subrepos,
2916 warnings):
2916 warnings):
2917 ret = 1
2917 ret = 1
2918 except error.LookupError:
2918 except error.LookupError:
2919 warnings.append(_("skipping missing subrepository: %s\n")
2919 warnings.append(_("skipping missing subrepository: %s\n")
2920 % join(subpath))
2920 % join(subpath))
2921 ui.progress(_('searching'), None)
2921 ui.progress(_('searching'), None)
2922
2922
2923 # warn about failure to delete explicit files/dirs
2923 # warn about failure to delete explicit files/dirs
2924 deleteddirs = util.dirs(deleted)
2924 deleteddirs = util.dirs(deleted)
2925 files = m.files()
2925 files = m.files()
2926 total = len(files)
2926 total = len(files)
2927 count = 0
2927 count = 0
2928 for f in files:
2928 for f in files:
2929 def insubrepo():
2929 def insubrepo():
2930 for subpath in wctx.substate:
2930 for subpath in wctx.substate:
2931 if f.startswith(subpath + '/'):
2931 if f.startswith(subpath + '/'):
2932 return True
2932 return True
2933 return False
2933 return False
2934
2934
2935 count += 1
2935 count += 1
2936 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2936 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2937 isdir = f in deleteddirs or wctx.hasdir(f)
2937 isdir = f in deleteddirs or wctx.hasdir(f)
2938 if (f in repo.dirstate or isdir or f == '.'
2938 if (f in repo.dirstate or isdir or f == '.'
2939 or insubrepo() or f in subs):
2939 or insubrepo() or f in subs):
2940 continue
2940 continue
2941
2941
2942 if repo.wvfs.exists(f):
2942 if repo.wvfs.exists(f):
2943 if repo.wvfs.isdir(f):
2943 if repo.wvfs.isdir(f):
2944 warnings.append(_('not removing %s: no tracked files\n')
2944 warnings.append(_('not removing %s: no tracked files\n')
2945 % m.rel(f))
2945 % m.rel(f))
2946 else:
2946 else:
2947 warnings.append(_('not removing %s: file is untracked\n')
2947 warnings.append(_('not removing %s: file is untracked\n')
2948 % m.rel(f))
2948 % m.rel(f))
2949 # missing files will generate a warning elsewhere
2949 # missing files will generate a warning elsewhere
2950 ret = 1
2950 ret = 1
2951 ui.progress(_('deleting'), None)
2951 ui.progress(_('deleting'), None)
2952
2952
2953 if force:
2953 if force:
2954 list = modified + deleted + clean + added
2954 list = modified + deleted + clean + added
2955 elif after:
2955 elif after:
2956 list = deleted
2956 list = deleted
2957 remaining = modified + added + clean
2957 remaining = modified + added + clean
2958 total = len(remaining)
2958 total = len(remaining)
2959 count = 0
2959 count = 0
2960 for f in remaining:
2960 for f in remaining:
2961 count += 1
2961 count += 1
2962 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2962 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2963 if ui.verbose or (f in files):
2963 if ui.verbose or (f in files):
2964 warnings.append(_('not removing %s: file still exists\n')
2964 warnings.append(_('not removing %s: file still exists\n')
2965 % m.rel(f))
2965 % m.rel(f))
2966 ret = 1
2966 ret = 1
2967 ui.progress(_('skipping'), None)
2967 ui.progress(_('skipping'), None)
2968 else:
2968 else:
2969 list = deleted + clean
2969 list = deleted + clean
2970 total = len(modified) + len(added)
2970 total = len(modified) + len(added)
2971 count = 0
2971 count = 0
2972 for f in modified:
2972 for f in modified:
2973 count += 1
2973 count += 1
2974 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2974 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2975 warnings.append(_('not removing %s: file is modified (use -f'
2975 warnings.append(_('not removing %s: file is modified (use -f'
2976 ' to force removal)\n') % m.rel(f))
2976 ' to force removal)\n') % m.rel(f))
2977 ret = 1
2977 ret = 1
2978 for f in added:
2978 for f in added:
2979 count += 1
2979 count += 1
2980 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2980 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2981 warnings.append(_("not removing %s: file has been marked for add"
2981 warnings.append(_("not removing %s: file has been marked for add"
2982 " (use 'hg forget' to undo add)\n") % m.rel(f))
2982 " (use 'hg forget' to undo add)\n") % m.rel(f))
2983 ret = 1
2983 ret = 1
2984 ui.progress(_('skipping'), None)
2984 ui.progress(_('skipping'), None)
2985
2985
2986 list = sorted(list)
2986 list = sorted(list)
2987 total = len(list)
2987 total = len(list)
2988 count = 0
2988 count = 0
2989 for f in list:
2989 for f in list:
2990 count += 1
2990 count += 1
2991 if ui.verbose or not m.exact(f):
2991 if ui.verbose or not m.exact(f):
2992 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2992 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2993 ui.status(_('removing %s\n') % m.rel(f))
2993 ui.status(_('removing %s\n') % m.rel(f))
2994 ui.progress(_('deleting'), None)
2994 ui.progress(_('deleting'), None)
2995
2995
2996 with repo.wlock():
2996 with repo.wlock():
2997 if not after:
2997 if not after:
2998 for f in list:
2998 for f in list:
2999 if f in added:
2999 if f in added:
3000 continue # we never unlink added files on remove
3000 continue # we never unlink added files on remove
3001 repo.wvfs.unlinkpath(f, ignoremissing=True)
3001 repo.wvfs.unlinkpath(f, ignoremissing=True)
3002 repo[None].forget(list)
3002 repo[None].forget(list)
3003
3003
3004 if warn:
3004 if warn:
3005 for warning in warnings:
3005 for warning in warnings:
3006 ui.warn(warning)
3006 ui.warn(warning)
3007
3007
3008 return ret
3008 return ret
3009
3009
3010 def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
3010 def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
3011 err = 1
3011 err = 1
3012 opts = pycompat.byteskwargs(opts)
3012 opts = pycompat.byteskwargs(opts)
3013
3013
3014 def write(path):
3014 def write(path):
3015 filename = None
3015 filename = None
3016 if fntemplate:
3016 if fntemplate:
3017 filename = makefilename(repo, fntemplate, ctx.node(),
3017 filename = makefilename(repo, fntemplate, ctx.node(),
3018 pathname=os.path.join(prefix, path))
3018 pathname=os.path.join(prefix, path))
3019 # attempt to create the directory if it does not already exist
3019 # attempt to create the directory if it does not already exist
3020 try:
3020 try:
3021 os.makedirs(os.path.dirname(filename))
3021 os.makedirs(os.path.dirname(filename))
3022 except OSError:
3022 except OSError:
3023 pass
3023 pass
3024 with formatter.maybereopen(basefm, filename, opts) as fm:
3024 with formatter.maybereopen(basefm, filename, opts) as fm:
3025 data = ctx[path].data()
3025 data = ctx[path].data()
3026 if opts.get('decode'):
3026 if opts.get('decode'):
3027 data = repo.wwritedata(path, data)
3027 data = repo.wwritedata(path, data)
3028 fm.startitem()
3028 fm.startitem()
3029 fm.write('data', '%s', data)
3029 fm.write('data', '%s', data)
3030 fm.data(abspath=path, path=matcher.rel(path))
3030 fm.data(abspath=path, path=matcher.rel(path))
3031
3031
3032 # Automation often uses hg cat on single files, so special case it
3032 # Automation often uses hg cat on single files, so special case it
3033 # for performance to avoid the cost of parsing the manifest.
3033 # for performance to avoid the cost of parsing the manifest.
3034 if len(matcher.files()) == 1 and not matcher.anypats():
3034 if len(matcher.files()) == 1 and not matcher.anypats():
3035 file = matcher.files()[0]
3035 file = matcher.files()[0]
3036 mfl = repo.manifestlog
3036 mfl = repo.manifestlog
3037 mfnode = ctx.manifestnode()
3037 mfnode = ctx.manifestnode()
3038 try:
3038 try:
3039 if mfnode and mfl[mfnode].find(file)[0]:
3039 if mfnode and mfl[mfnode].find(file)[0]:
3040 write(file)
3040 write(file)
3041 return 0
3041 return 0
3042 except KeyError:
3042 except KeyError:
3043 pass
3043 pass
3044
3044
3045 for abs in ctx.walk(matcher):
3045 for abs in ctx.walk(matcher):
3046 write(abs)
3046 write(abs)
3047 err = 0
3047 err = 0
3048
3048
3049 for subpath in sorted(ctx.substate):
3049 for subpath in sorted(ctx.substate):
3050 sub = ctx.sub(subpath)
3050 sub = ctx.sub(subpath)
3051 try:
3051 try:
3052 submatch = matchmod.subdirmatcher(subpath, matcher)
3052 submatch = matchmod.subdirmatcher(subpath, matcher)
3053
3053
3054 if not sub.cat(submatch, basefm, fntemplate,
3054 if not sub.cat(submatch, basefm, fntemplate,
3055 os.path.join(prefix, sub._path),
3055 os.path.join(prefix, sub._path),
3056 **pycompat.strkwargs(opts)):
3056 **pycompat.strkwargs(opts)):
3057 err = 0
3057 err = 0
3058 except error.RepoLookupError:
3058 except error.RepoLookupError:
3059 ui.status(_("skipping missing subrepository: %s\n")
3059 ui.status(_("skipping missing subrepository: %s\n")
3060 % os.path.join(prefix, subpath))
3060 % os.path.join(prefix, subpath))
3061
3061
3062 return err
3062 return err
3063
3063
3064 def commit(ui, repo, commitfunc, pats, opts):
3064 def commit(ui, repo, commitfunc, pats, opts):
3065 '''commit the specified files or all outstanding changes'''
3065 '''commit the specified files or all outstanding changes'''
3066 date = opts.get('date')
3066 date = opts.get('date')
3067 if date:
3067 if date:
3068 opts['date'] = util.parsedate(date)
3068 opts['date'] = util.parsedate(date)
3069 message = logmessage(ui, opts)
3069 message = logmessage(ui, opts)
3070 matcher = scmutil.match(repo[None], pats, opts)
3070 matcher = scmutil.match(repo[None], pats, opts)
3071
3071
3072 dsguard = None
3072 dsguard = None
3073 # extract addremove carefully -- this function can be called from a command
3073 # extract addremove carefully -- this function can be called from a command
3074 # that doesn't support addremove
3074 # that doesn't support addremove
3075 if opts.get('addremove'):
3075 if opts.get('addremove'):
3076 dsguard = dirstateguard.dirstateguard(repo, 'commit')
3076 dsguard = dirstateguard.dirstateguard(repo, 'commit')
3077 with dsguard or util.nullcontextmanager():
3077 with dsguard or util.nullcontextmanager():
3078 if dsguard:
3078 if dsguard:
3079 if scmutil.addremove(repo, matcher, "", opts) != 0:
3079 if scmutil.addremove(repo, matcher, "", opts) != 0:
3080 raise error.Abort(
3080 raise error.Abort(
3081 _("failed to mark all new/missing files as added/removed"))
3081 _("failed to mark all new/missing files as added/removed"))
3082
3082
3083 return commitfunc(ui, repo, message, matcher, opts)
3083 return commitfunc(ui, repo, message, matcher, opts)
3084
3084
3085 def samefile(f, ctx1, ctx2):
3085 def samefile(f, ctx1, ctx2):
3086 if f in ctx1.manifest():
3086 if f in ctx1.manifest():
3087 a = ctx1.filectx(f)
3087 a = ctx1.filectx(f)
3088 if f in ctx2.manifest():
3088 if f in ctx2.manifest():
3089 b = ctx2.filectx(f)
3089 b = ctx2.filectx(f)
3090 return (not a.cmp(b)
3090 return (not a.cmp(b)
3091 and a.flags() == b.flags())
3091 and a.flags() == b.flags())
3092 else:
3092 else:
3093 return False
3093 return False
3094 else:
3094 else:
3095 return f not in ctx2.manifest()
3095 return f not in ctx2.manifest()
3096
3096
3097 def amend(ui, repo, old, extra, pats, opts):
3097 def amend(ui, repo, old, extra, pats, opts):
3098 # avoid cycle context -> subrepo -> cmdutil
3098 # avoid cycle context -> subrepo -> cmdutil
3099 from . import context
3099 from . import context
3100
3100
3101 # amend will reuse the existing user if not specified, but the obsolete
3101 # amend will reuse the existing user if not specified, but the obsolete
3102 # marker creation requires that the current user's name is specified.
3102 # marker creation requires that the current user's name is specified.
3103 if obsolete.isenabled(repo, obsolete.createmarkersopt):
3103 if obsolete.isenabled(repo, obsolete.createmarkersopt):
3104 ui.username() # raise exception if username not set
3104 ui.username() # raise exception if username not set
3105
3105
3106 ui.note(_('amending changeset %s\n') % old)
3106 ui.note(_('amending changeset %s\n') % old)
3107 base = old.p1()
3107 base = old.p1()
3108
3108
3109 with repo.wlock(), repo.lock(), repo.transaction('amend'):
3109 with repo.wlock(), repo.lock(), repo.transaction('amend'):
3110 # Participating changesets:
3110 # Participating changesets:
3111 #
3111 #
3112 # wctx o - workingctx that contains changes from working copy
3112 # wctx o - workingctx that contains changes from working copy
3113 # | to go into amending commit
3113 # | to go into amending commit
3114 # |
3114 # |
3115 # old o - changeset to amend
3115 # old o - changeset to amend
3116 # |
3116 # |
3117 # base o - first parent of the changeset to amend
3117 # base o - first parent of the changeset to amend
3118 wctx = repo[None]
3118 wctx = repo[None]
3119
3119
3120 # Copy to avoid mutating input
3120 # Copy to avoid mutating input
3121 extra = extra.copy()
3121 extra = extra.copy()
3122 # Update extra dict from amended commit (e.g. to preserve graft
3122 # Update extra dict from amended commit (e.g. to preserve graft
3123 # source)
3123 # source)
3124 extra.update(old.extra())
3124 extra.update(old.extra())
3125
3125
3126 # Also update it from the from the wctx
3126 # Also update it from the from the wctx
3127 extra.update(wctx.extra())
3127 extra.update(wctx.extra())
3128
3128
3129 user = opts.get('user') or old.user()
3129 user = opts.get('user') or old.user()
3130 date = opts.get('date') or old.date()
3130 date = opts.get('date') or old.date()
3131
3131
3132 # Parse the date to allow comparison between date and old.date()
3132 # Parse the date to allow comparison between date and old.date()
3133 date = util.parsedate(date)
3133 date = util.parsedate(date)
3134
3134
3135 if len(old.parents()) > 1:
3135 if len(old.parents()) > 1:
3136 # ctx.files() isn't reliable for merges, so fall back to the
3136 # ctx.files() isn't reliable for merges, so fall back to the
3137 # slower repo.status() method
3137 # slower repo.status() method
3138 files = set([fn for st in repo.status(base, old)[:3]
3138 files = set([fn for st in repo.status(base, old)[:3]
3139 for fn in st])
3139 for fn in st])
3140 else:
3140 else:
3141 files = set(old.files())
3141 files = set(old.files())
3142
3142
3143 # add/remove the files to the working copy if the "addremove" option
3143 # add/remove the files to the working copy if the "addremove" option
3144 # was specified.
3144 # was specified.
3145 matcher = scmutil.match(wctx, pats, opts)
3145 matcher = scmutil.match(wctx, pats, opts)
3146 if (opts.get('addremove')
3146 if (opts.get('addremove')
3147 and scmutil.addremove(repo, matcher, "", opts)):
3147 and scmutil.addremove(repo, matcher, "", opts)):
3148 raise error.Abort(
3148 raise error.Abort(
3149 _("failed to mark all new/missing files as added/removed"))
3149 _("failed to mark all new/missing files as added/removed"))
3150
3150
3151 # Check subrepos. This depends on in-place wctx._status update in
3151 # Check subrepos. This depends on in-place wctx._status update in
3152 # subrepo.precommit(). To minimize the risk of this hack, we do
3152 # subrepo.precommit(). To minimize the risk of this hack, we do
3153 # nothing if .hgsub does not exist.
3153 # nothing if .hgsub does not exist.
3154 if '.hgsub' in wctx or '.hgsub' in old:
3154 if '.hgsub' in wctx or '.hgsub' in old:
3155 from . import subrepo # avoid cycle: cmdutil -> subrepo -> cmdutil
3155 from . import subrepo # avoid cycle: cmdutil -> subrepo -> cmdutil
3156 subs, commitsubs, newsubstate = subrepo.precommit(
3156 subs, commitsubs, newsubstate = subrepo.precommit(
3157 ui, wctx, wctx._status, matcher)
3157 ui, wctx, wctx._status, matcher)
3158 # amend should abort if commitsubrepos is enabled
3158 # amend should abort if commitsubrepos is enabled
3159 assert not commitsubs
3159 assert not commitsubs
3160 if subs:
3160 if subs:
3161 subrepo.writestate(repo, newsubstate)
3161 subrepo.writestate(repo, newsubstate)
3162
3162
3163 filestoamend = set(f for f in wctx.files() if matcher(f))
3163 filestoamend = set(f for f in wctx.files() if matcher(f))
3164
3164
3165 changes = (len(filestoamend) > 0)
3165 changes = (len(filestoamend) > 0)
3166 if changes:
3166 if changes:
3167 # Recompute copies (avoid recording a -> b -> a)
3167 # Recompute copies (avoid recording a -> b -> a)
3168 copied = copies.pathcopies(base, wctx, matcher)
3168 copied = copies.pathcopies(base, wctx, matcher)
3169 if old.p2:
3169 if old.p2:
3170 copied.update(copies.pathcopies(old.p2(), wctx, matcher))
3170 copied.update(copies.pathcopies(old.p2(), wctx, matcher))
3171
3171
3172 # Prune files which were reverted by the updates: if old
3172 # Prune files which were reverted by the updates: if old
3173 # introduced file X and the file was renamed in the working
3173 # introduced file X and the file was renamed in the working
3174 # copy, then those two files are the same and
3174 # copy, then those two files are the same and
3175 # we can discard X from our list of files. Likewise if X
3175 # we can discard X from our list of files. Likewise if X
3176 # was removed, it's no longer relevant. If X is missing (aka
3176 # was removed, it's no longer relevant. If X is missing (aka
3177 # deleted), old X must be preserved.
3177 # deleted), old X must be preserved.
3178 files.update(filestoamend)
3178 files.update(filestoamend)
3179 files = [f for f in files if (not samefile(f, wctx, base)
3179 files = [f for f in files if (not samefile(f, wctx, base)
3180 or f in wctx.deleted())]
3180 or f in wctx.deleted())]
3181
3181
3182 def filectxfn(repo, ctx_, path):
3182 def filectxfn(repo, ctx_, path):
3183 try:
3183 try:
3184 # If the file being considered is not amongst the files
3184 # If the file being considered is not amongst the files
3185 # to be amended, we should return the file context from the
3185 # to be amended, we should return the file context from the
3186 # old changeset. This avoids issues when only some files in
3186 # old changeset. This avoids issues when only some files in
3187 # the working copy are being amended but there are also
3187 # the working copy are being amended but there are also
3188 # changes to other files from the old changeset.
3188 # changes to other files from the old changeset.
3189 if path not in filestoamend:
3189 if path not in filestoamend:
3190 return old.filectx(path)
3190 return old.filectx(path)
3191
3191
3192 # Return None for removed files.
3192 # Return None for removed files.
3193 if path in wctx.removed():
3193 if path in wctx.removed():
3194 return None
3194 return None
3195
3195
3196 fctx = wctx[path]
3196 fctx = wctx[path]
3197 flags = fctx.flags()
3197 flags = fctx.flags()
3198 mctx = context.memfilectx(repo,
3198 mctx = context.memfilectx(repo, ctx_,
3199 fctx.path(), fctx.data(),
3199 fctx.path(), fctx.data(),
3200 islink='l' in flags,
3200 islink='l' in flags,
3201 isexec='x' in flags,
3201 isexec='x' in flags,
3202 copied=copied.get(path))
3202 copied=copied.get(path))
3203 return mctx
3203 return mctx
3204 except KeyError:
3204 except KeyError:
3205 return None
3205 return None
3206 else:
3206 else:
3207 ui.note(_('copying changeset %s to %s\n') % (old, base))
3207 ui.note(_('copying changeset %s to %s\n') % (old, base))
3208
3208
3209 # Use version of files as in the old cset
3209 # Use version of files as in the old cset
3210 def filectxfn(repo, ctx_, path):
3210 def filectxfn(repo, ctx_, path):
3211 try:
3211 try:
3212 return old.filectx(path)
3212 return old.filectx(path)
3213 except KeyError:
3213 except KeyError:
3214 return None
3214 return None
3215
3215
3216 # See if we got a message from -m or -l, if not, open the editor with
3216 # See if we got a message from -m or -l, if not, open the editor with
3217 # the message of the changeset to amend.
3217 # the message of the changeset to amend.
3218 message = logmessage(ui, opts)
3218 message = logmessage(ui, opts)
3219
3219
3220 editform = mergeeditform(old, 'commit.amend')
3220 editform = mergeeditform(old, 'commit.amend')
3221 editor = getcommiteditor(editform=editform,
3221 editor = getcommiteditor(editform=editform,
3222 **pycompat.strkwargs(opts))
3222 **pycompat.strkwargs(opts))
3223
3223
3224 if not message:
3224 if not message:
3225 editor = getcommiteditor(edit=True, editform=editform)
3225 editor = getcommiteditor(edit=True, editform=editform)
3226 message = old.description()
3226 message = old.description()
3227
3227
3228 pureextra = extra.copy()
3228 pureextra = extra.copy()
3229 extra['amend_source'] = old.hex()
3229 extra['amend_source'] = old.hex()
3230
3230
3231 new = context.memctx(repo,
3231 new = context.memctx(repo,
3232 parents=[base.node(), old.p2().node()],
3232 parents=[base.node(), old.p2().node()],
3233 text=message,
3233 text=message,
3234 files=files,
3234 files=files,
3235 filectxfn=filectxfn,
3235 filectxfn=filectxfn,
3236 user=user,
3236 user=user,
3237 date=date,
3237 date=date,
3238 extra=extra,
3238 extra=extra,
3239 editor=editor)
3239 editor=editor)
3240
3240
3241 newdesc = changelog.stripdesc(new.description())
3241 newdesc = changelog.stripdesc(new.description())
3242 if ((not changes)
3242 if ((not changes)
3243 and newdesc == old.description()
3243 and newdesc == old.description()
3244 and user == old.user()
3244 and user == old.user()
3245 and date == old.date()
3245 and date == old.date()
3246 and pureextra == old.extra()):
3246 and pureextra == old.extra()):
3247 # nothing changed. continuing here would create a new node
3247 # nothing changed. continuing here would create a new node
3248 # anyway because of the amend_source noise.
3248 # anyway because of the amend_source noise.
3249 #
3249 #
3250 # This not what we expect from amend.
3250 # This not what we expect from amend.
3251 return old.node()
3251 return old.node()
3252
3252
3253 if opts.get('secret'):
3253 if opts.get('secret'):
3254 commitphase = 'secret'
3254 commitphase = 'secret'
3255 else:
3255 else:
3256 commitphase = old.phase()
3256 commitphase = old.phase()
3257 overrides = {('phases', 'new-commit'): commitphase}
3257 overrides = {('phases', 'new-commit'): commitphase}
3258 with ui.configoverride(overrides, 'amend'):
3258 with ui.configoverride(overrides, 'amend'):
3259 newid = repo.commitctx(new)
3259 newid = repo.commitctx(new)
3260
3260
3261 # Reroute the working copy parent to the new changeset
3261 # Reroute the working copy parent to the new changeset
3262 repo.setparents(newid, nullid)
3262 repo.setparents(newid, nullid)
3263 mapping = {old.node(): (newid,)}
3263 mapping = {old.node(): (newid,)}
3264 obsmetadata = None
3264 obsmetadata = None
3265 if opts.get('note'):
3265 if opts.get('note'):
3266 obsmetadata = {'note': opts['note']}
3266 obsmetadata = {'note': opts['note']}
3267 scmutil.cleanupnodes(repo, mapping, 'amend', metadata=obsmetadata)
3267 scmutil.cleanupnodes(repo, mapping, 'amend', metadata=obsmetadata)
3268
3268
3269 # Fixing the dirstate because localrepo.commitctx does not update
3269 # Fixing the dirstate because localrepo.commitctx does not update
3270 # it. This is rather convenient because we did not need to update
3270 # it. This is rather convenient because we did not need to update
3271 # the dirstate for all the files in the new commit which commitctx
3271 # the dirstate for all the files in the new commit which commitctx
3272 # could have done if it updated the dirstate. Now, we can
3272 # could have done if it updated the dirstate. Now, we can
3273 # selectively update the dirstate only for the amended files.
3273 # selectively update the dirstate only for the amended files.
3274 dirstate = repo.dirstate
3274 dirstate = repo.dirstate
3275
3275
3276 # Update the state of the files which were added and
3276 # Update the state of the files which were added and
3277 # and modified in the amend to "normal" in the dirstate.
3277 # and modified in the amend to "normal" in the dirstate.
3278 normalfiles = set(wctx.modified() + wctx.added()) & filestoamend
3278 normalfiles = set(wctx.modified() + wctx.added()) & filestoamend
3279 for f in normalfiles:
3279 for f in normalfiles:
3280 dirstate.normal(f)
3280 dirstate.normal(f)
3281
3281
3282 # Update the state of files which were removed in the amend
3282 # Update the state of files which were removed in the amend
3283 # to "removed" in the dirstate.
3283 # to "removed" in the dirstate.
3284 removedfiles = set(wctx.removed()) & filestoamend
3284 removedfiles = set(wctx.removed()) & filestoamend
3285 for f in removedfiles:
3285 for f in removedfiles:
3286 dirstate.drop(f)
3286 dirstate.drop(f)
3287
3287
3288 return newid
3288 return newid
3289
3289
3290 def commiteditor(repo, ctx, subs, editform=''):
3290 def commiteditor(repo, ctx, subs, editform=''):
3291 if ctx.description():
3291 if ctx.description():
3292 return ctx.description()
3292 return ctx.description()
3293 return commitforceeditor(repo, ctx, subs, editform=editform,
3293 return commitforceeditor(repo, ctx, subs, editform=editform,
3294 unchangedmessagedetection=True)
3294 unchangedmessagedetection=True)
3295
3295
3296 def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
3296 def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
3297 editform='', unchangedmessagedetection=False):
3297 editform='', unchangedmessagedetection=False):
3298 if not extramsg:
3298 if not extramsg:
3299 extramsg = _("Leave message empty to abort commit.")
3299 extramsg = _("Leave message empty to abort commit.")
3300
3300
3301 forms = [e for e in editform.split('.') if e]
3301 forms = [e for e in editform.split('.') if e]
3302 forms.insert(0, 'changeset')
3302 forms.insert(0, 'changeset')
3303 templatetext = None
3303 templatetext = None
3304 while forms:
3304 while forms:
3305 ref = '.'.join(forms)
3305 ref = '.'.join(forms)
3306 if repo.ui.config('committemplate', ref):
3306 if repo.ui.config('committemplate', ref):
3307 templatetext = committext = buildcommittemplate(
3307 templatetext = committext = buildcommittemplate(
3308 repo, ctx, subs, extramsg, ref)
3308 repo, ctx, subs, extramsg, ref)
3309 break
3309 break
3310 forms.pop()
3310 forms.pop()
3311 else:
3311 else:
3312 committext = buildcommittext(repo, ctx, subs, extramsg)
3312 committext = buildcommittext(repo, ctx, subs, extramsg)
3313
3313
3314 # run editor in the repository root
3314 # run editor in the repository root
3315 olddir = pycompat.getcwd()
3315 olddir = pycompat.getcwd()
3316 os.chdir(repo.root)
3316 os.chdir(repo.root)
3317
3317
3318 # make in-memory changes visible to external process
3318 # make in-memory changes visible to external process
3319 tr = repo.currenttransaction()
3319 tr = repo.currenttransaction()
3320 repo.dirstate.write(tr)
3320 repo.dirstate.write(tr)
3321 pending = tr and tr.writepending() and repo.root
3321 pending = tr and tr.writepending() and repo.root
3322
3322
3323 editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
3323 editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
3324 editform=editform, pending=pending,
3324 editform=editform, pending=pending,
3325 repopath=repo.path, action='commit')
3325 repopath=repo.path, action='commit')
3326 text = editortext
3326 text = editortext
3327
3327
3328 # strip away anything below this special string (used for editors that want
3328 # strip away anything below this special string (used for editors that want
3329 # to display the diff)
3329 # to display the diff)
3330 stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
3330 stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
3331 if stripbelow:
3331 if stripbelow:
3332 text = text[:stripbelow.start()]
3332 text = text[:stripbelow.start()]
3333
3333
3334 text = re.sub("(?m)^HG:.*(\n|$)", "", text)
3334 text = re.sub("(?m)^HG:.*(\n|$)", "", text)
3335 os.chdir(olddir)
3335 os.chdir(olddir)
3336
3336
3337 if finishdesc:
3337 if finishdesc:
3338 text = finishdesc(text)
3338 text = finishdesc(text)
3339 if not text.strip():
3339 if not text.strip():
3340 raise error.Abort(_("empty commit message"))
3340 raise error.Abort(_("empty commit message"))
3341 if unchangedmessagedetection and editortext == templatetext:
3341 if unchangedmessagedetection and editortext == templatetext:
3342 raise error.Abort(_("commit message unchanged"))
3342 raise error.Abort(_("commit message unchanged"))
3343
3343
3344 return text
3344 return text
3345
3345
3346 def buildcommittemplate(repo, ctx, subs, extramsg, ref):
3346 def buildcommittemplate(repo, ctx, subs, extramsg, ref):
3347 ui = repo.ui
3347 ui = repo.ui
3348 spec = formatter.templatespec(ref, None, None)
3348 spec = formatter.templatespec(ref, None, None)
3349 t = changeset_templater(ui, repo, spec, None, {}, False)
3349 t = changeset_templater(ui, repo, spec, None, {}, False)
3350 t.t.cache.update((k, templater.unquotestring(v))
3350 t.t.cache.update((k, templater.unquotestring(v))
3351 for k, v in repo.ui.configitems('committemplate'))
3351 for k, v in repo.ui.configitems('committemplate'))
3352
3352
3353 if not extramsg:
3353 if not extramsg:
3354 extramsg = '' # ensure that extramsg is string
3354 extramsg = '' # ensure that extramsg is string
3355
3355
3356 ui.pushbuffer()
3356 ui.pushbuffer()
3357 t.show(ctx, extramsg=extramsg)
3357 t.show(ctx, extramsg=extramsg)
3358 return ui.popbuffer()
3358 return ui.popbuffer()
3359
3359
3360 def hgprefix(msg):
3360 def hgprefix(msg):
3361 return "\n".join(["HG: %s" % a for a in msg.split("\n") if a])
3361 return "\n".join(["HG: %s" % a for a in msg.split("\n") if a])
3362
3362
3363 def buildcommittext(repo, ctx, subs, extramsg):
3363 def buildcommittext(repo, ctx, subs, extramsg):
3364 edittext = []
3364 edittext = []
3365 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
3365 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
3366 if ctx.description():
3366 if ctx.description():
3367 edittext.append(ctx.description())
3367 edittext.append(ctx.description())
3368 edittext.append("")
3368 edittext.append("")
3369 edittext.append("") # Empty line between message and comments.
3369 edittext.append("") # Empty line between message and comments.
3370 edittext.append(hgprefix(_("Enter commit message."
3370 edittext.append(hgprefix(_("Enter commit message."
3371 " Lines beginning with 'HG:' are removed.")))
3371 " Lines beginning with 'HG:' are removed.")))
3372 edittext.append(hgprefix(extramsg))
3372 edittext.append(hgprefix(extramsg))
3373 edittext.append("HG: --")
3373 edittext.append("HG: --")
3374 edittext.append(hgprefix(_("user: %s") % ctx.user()))
3374 edittext.append(hgprefix(_("user: %s") % ctx.user()))
3375 if ctx.p2():
3375 if ctx.p2():
3376 edittext.append(hgprefix(_("branch merge")))
3376 edittext.append(hgprefix(_("branch merge")))
3377 if ctx.branch():
3377 if ctx.branch():
3378 edittext.append(hgprefix(_("branch '%s'") % ctx.branch()))
3378 edittext.append(hgprefix(_("branch '%s'") % ctx.branch()))
3379 if bookmarks.isactivewdirparent(repo):
3379 if bookmarks.isactivewdirparent(repo):
3380 edittext.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
3380 edittext.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
3381 edittext.extend([hgprefix(_("subrepo %s") % s) for s in subs])
3381 edittext.extend([hgprefix(_("subrepo %s") % s) for s in subs])
3382 edittext.extend([hgprefix(_("added %s") % f) for f in added])
3382 edittext.extend([hgprefix(_("added %s") % f) for f in added])
3383 edittext.extend([hgprefix(_("changed %s") % f) for f in modified])
3383 edittext.extend([hgprefix(_("changed %s") % f) for f in modified])
3384 edittext.extend([hgprefix(_("removed %s") % f) for f in removed])
3384 edittext.extend([hgprefix(_("removed %s") % f) for f in removed])
3385 if not added and not modified and not removed:
3385 if not added and not modified and not removed:
3386 edittext.append(hgprefix(_("no files changed")))
3386 edittext.append(hgprefix(_("no files changed")))
3387 edittext.append("")
3387 edittext.append("")
3388
3388
3389 return "\n".join(edittext)
3389 return "\n".join(edittext)
3390
3390
3391 def commitstatus(repo, node, branch, bheads=None, opts=None):
3391 def commitstatus(repo, node, branch, bheads=None, opts=None):
3392 if opts is None:
3392 if opts is None:
3393 opts = {}
3393 opts = {}
3394 ctx = repo[node]
3394 ctx = repo[node]
3395 parents = ctx.parents()
3395 parents = ctx.parents()
3396
3396
3397 if (not opts.get('amend') and bheads and node not in bheads and not
3397 if (not opts.get('amend') and bheads and node not in bheads and not
3398 [x for x in parents if x.node() in bheads and x.branch() == branch]):
3398 [x for x in parents if x.node() in bheads and x.branch() == branch]):
3399 repo.ui.status(_('created new head\n'))
3399 repo.ui.status(_('created new head\n'))
3400 # The message is not printed for initial roots. For the other
3400 # The message is not printed for initial roots. For the other
3401 # changesets, it is printed in the following situations:
3401 # changesets, it is printed in the following situations:
3402 #
3402 #
3403 # Par column: for the 2 parents with ...
3403 # Par column: for the 2 parents with ...
3404 # N: null or no parent
3404 # N: null or no parent
3405 # B: parent is on another named branch
3405 # B: parent is on another named branch
3406 # C: parent is a regular non head changeset
3406 # C: parent is a regular non head changeset
3407 # H: parent was a branch head of the current branch
3407 # H: parent was a branch head of the current branch
3408 # Msg column: whether we print "created new head" message
3408 # Msg column: whether we print "created new head" message
3409 # In the following, it is assumed that there already exists some
3409 # In the following, it is assumed that there already exists some
3410 # initial branch heads of the current branch, otherwise nothing is
3410 # initial branch heads of the current branch, otherwise nothing is
3411 # printed anyway.
3411 # printed anyway.
3412 #
3412 #
3413 # Par Msg Comment
3413 # Par Msg Comment
3414 # N N y additional topo root
3414 # N N y additional topo root
3415 #
3415 #
3416 # B N y additional branch root
3416 # B N y additional branch root
3417 # C N y additional topo head
3417 # C N y additional topo head
3418 # H N n usual case
3418 # H N n usual case
3419 #
3419 #
3420 # B B y weird additional branch root
3420 # B B y weird additional branch root
3421 # C B y branch merge
3421 # C B y branch merge
3422 # H B n merge with named branch
3422 # H B n merge with named branch
3423 #
3423 #
3424 # C C y additional head from merge
3424 # C C y additional head from merge
3425 # C H n merge with a head
3425 # C H n merge with a head
3426 #
3426 #
3427 # H H n head merge: head count decreases
3427 # H H n head merge: head count decreases
3428
3428
3429 if not opts.get('close_branch'):
3429 if not opts.get('close_branch'):
3430 for r in parents:
3430 for r in parents:
3431 if r.closesbranch() and r.branch() == branch:
3431 if r.closesbranch() and r.branch() == branch:
3432 repo.ui.status(_('reopening closed branch head %d\n') % r)
3432 repo.ui.status(_('reopening closed branch head %d\n') % r)
3433
3433
3434 if repo.ui.debugflag:
3434 if repo.ui.debugflag:
3435 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
3435 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
3436 elif repo.ui.verbose:
3436 elif repo.ui.verbose:
3437 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
3437 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
3438
3438
3439 def postcommitstatus(repo, pats, opts):
3439 def postcommitstatus(repo, pats, opts):
3440 return repo.status(match=scmutil.match(repo[None], pats, opts))
3440 return repo.status(match=scmutil.match(repo[None], pats, opts))
3441
3441
3442 def revert(ui, repo, ctx, parents, *pats, **opts):
3442 def revert(ui, repo, ctx, parents, *pats, **opts):
3443 opts = pycompat.byteskwargs(opts)
3443 opts = pycompat.byteskwargs(opts)
3444 parent, p2 = parents
3444 parent, p2 = parents
3445 node = ctx.node()
3445 node = ctx.node()
3446
3446
3447 mf = ctx.manifest()
3447 mf = ctx.manifest()
3448 if node == p2:
3448 if node == p2:
3449 parent = p2
3449 parent = p2
3450
3450
3451 # need all matching names in dirstate and manifest of target rev,
3451 # need all matching names in dirstate and manifest of target rev,
3452 # so have to walk both. do not print errors if files exist in one
3452 # so have to walk both. do not print errors if files exist in one
3453 # but not other. in both cases, filesets should be evaluated against
3453 # but not other. in both cases, filesets should be evaluated against
3454 # workingctx to get consistent result (issue4497). this means 'set:**'
3454 # workingctx to get consistent result (issue4497). this means 'set:**'
3455 # cannot be used to select missing files from target rev.
3455 # cannot be used to select missing files from target rev.
3456
3456
3457 # `names` is a mapping for all elements in working copy and target revision
3457 # `names` is a mapping for all elements in working copy and target revision
3458 # The mapping is in the form:
3458 # The mapping is in the form:
3459 # <asb path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
3459 # <asb path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
3460 names = {}
3460 names = {}
3461
3461
3462 with repo.wlock():
3462 with repo.wlock():
3463 ## filling of the `names` mapping
3463 ## filling of the `names` mapping
3464 # walk dirstate to fill `names`
3464 # walk dirstate to fill `names`
3465
3465
3466 interactive = opts.get('interactive', False)
3466 interactive = opts.get('interactive', False)
3467 wctx = repo[None]
3467 wctx = repo[None]
3468 m = scmutil.match(wctx, pats, opts)
3468 m = scmutil.match(wctx, pats, opts)
3469
3469
3470 # we'll need this later
3470 # we'll need this later
3471 targetsubs = sorted(s for s in wctx.substate if m(s))
3471 targetsubs = sorted(s for s in wctx.substate if m(s))
3472
3472
3473 if not m.always():
3473 if not m.always():
3474 matcher = matchmod.badmatch(m, lambda x, y: False)
3474 matcher = matchmod.badmatch(m, lambda x, y: False)
3475 for abs in wctx.walk(matcher):
3475 for abs in wctx.walk(matcher):
3476 names[abs] = m.rel(abs), m.exact(abs)
3476 names[abs] = m.rel(abs), m.exact(abs)
3477
3477
3478 # walk target manifest to fill `names`
3478 # walk target manifest to fill `names`
3479
3479
3480 def badfn(path, msg):
3480 def badfn(path, msg):
3481 if path in names:
3481 if path in names:
3482 return
3482 return
3483 if path in ctx.substate:
3483 if path in ctx.substate:
3484 return
3484 return
3485 path_ = path + '/'
3485 path_ = path + '/'
3486 for f in names:
3486 for f in names:
3487 if f.startswith(path_):
3487 if f.startswith(path_):
3488 return
3488 return
3489 ui.warn("%s: %s\n" % (m.rel(path), msg))
3489 ui.warn("%s: %s\n" % (m.rel(path), msg))
3490
3490
3491 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
3491 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
3492 if abs not in names:
3492 if abs not in names:
3493 names[abs] = m.rel(abs), m.exact(abs)
3493 names[abs] = m.rel(abs), m.exact(abs)
3494
3494
3495 # Find status of all file in `names`.
3495 # Find status of all file in `names`.
3496 m = scmutil.matchfiles(repo, names)
3496 m = scmutil.matchfiles(repo, names)
3497
3497
3498 changes = repo.status(node1=node, match=m,
3498 changes = repo.status(node1=node, match=m,
3499 unknown=True, ignored=True, clean=True)
3499 unknown=True, ignored=True, clean=True)
3500 else:
3500 else:
3501 changes = repo.status(node1=node, match=m)
3501 changes = repo.status(node1=node, match=m)
3502 for kind in changes:
3502 for kind in changes:
3503 for abs in kind:
3503 for abs in kind:
3504 names[abs] = m.rel(abs), m.exact(abs)
3504 names[abs] = m.rel(abs), m.exact(abs)
3505
3505
3506 m = scmutil.matchfiles(repo, names)
3506 m = scmutil.matchfiles(repo, names)
3507
3507
3508 modified = set(changes.modified)
3508 modified = set(changes.modified)
3509 added = set(changes.added)
3509 added = set(changes.added)
3510 removed = set(changes.removed)
3510 removed = set(changes.removed)
3511 _deleted = set(changes.deleted)
3511 _deleted = set(changes.deleted)
3512 unknown = set(changes.unknown)
3512 unknown = set(changes.unknown)
3513 unknown.update(changes.ignored)
3513 unknown.update(changes.ignored)
3514 clean = set(changes.clean)
3514 clean = set(changes.clean)
3515 modadded = set()
3515 modadded = set()
3516
3516
3517 # We need to account for the state of the file in the dirstate,
3517 # We need to account for the state of the file in the dirstate,
3518 # even when we revert against something else than parent. This will
3518 # even when we revert against something else than parent. This will
3519 # slightly alter the behavior of revert (doing back up or not, delete
3519 # slightly alter the behavior of revert (doing back up or not, delete
3520 # or just forget etc).
3520 # or just forget etc).
3521 if parent == node:
3521 if parent == node:
3522 dsmodified = modified
3522 dsmodified = modified
3523 dsadded = added
3523 dsadded = added
3524 dsremoved = removed
3524 dsremoved = removed
3525 # store all local modifications, useful later for rename detection
3525 # store all local modifications, useful later for rename detection
3526 localchanges = dsmodified | dsadded
3526 localchanges = dsmodified | dsadded
3527 modified, added, removed = set(), set(), set()
3527 modified, added, removed = set(), set(), set()
3528 else:
3528 else:
3529 changes = repo.status(node1=parent, match=m)
3529 changes = repo.status(node1=parent, match=m)
3530 dsmodified = set(changes.modified)
3530 dsmodified = set(changes.modified)
3531 dsadded = set(changes.added)
3531 dsadded = set(changes.added)
3532 dsremoved = set(changes.removed)
3532 dsremoved = set(changes.removed)
3533 # store all local modifications, useful later for rename detection
3533 # store all local modifications, useful later for rename detection
3534 localchanges = dsmodified | dsadded
3534 localchanges = dsmodified | dsadded
3535
3535
3536 # only take into account for removes between wc and target
3536 # only take into account for removes between wc and target
3537 clean |= dsremoved - removed
3537 clean |= dsremoved - removed
3538 dsremoved &= removed
3538 dsremoved &= removed
3539 # distinct between dirstate remove and other
3539 # distinct between dirstate remove and other
3540 removed -= dsremoved
3540 removed -= dsremoved
3541
3541
3542 modadded = added & dsmodified
3542 modadded = added & dsmodified
3543 added -= modadded
3543 added -= modadded
3544
3544
3545 # tell newly modified apart.
3545 # tell newly modified apart.
3546 dsmodified &= modified
3546 dsmodified &= modified
3547 dsmodified |= modified & dsadded # dirstate added may need backup
3547 dsmodified |= modified & dsadded # dirstate added may need backup
3548 modified -= dsmodified
3548 modified -= dsmodified
3549
3549
3550 # We need to wait for some post-processing to update this set
3550 # We need to wait for some post-processing to update this set
3551 # before making the distinction. The dirstate will be used for
3551 # before making the distinction. The dirstate will be used for
3552 # that purpose.
3552 # that purpose.
3553 dsadded = added
3553 dsadded = added
3554
3554
3555 # in case of merge, files that are actually added can be reported as
3555 # in case of merge, files that are actually added can be reported as
3556 # modified, we need to post process the result
3556 # modified, we need to post process the result
3557 if p2 != nullid:
3557 if p2 != nullid:
3558 mergeadd = set(dsmodified)
3558 mergeadd = set(dsmodified)
3559 for path in dsmodified:
3559 for path in dsmodified:
3560 if path in mf:
3560 if path in mf:
3561 mergeadd.remove(path)
3561 mergeadd.remove(path)
3562 dsadded |= mergeadd
3562 dsadded |= mergeadd
3563 dsmodified -= mergeadd
3563 dsmodified -= mergeadd
3564
3564
3565 # if f is a rename, update `names` to also revert the source
3565 # if f is a rename, update `names` to also revert the source
3566 cwd = repo.getcwd()
3566 cwd = repo.getcwd()
3567 for f in localchanges:
3567 for f in localchanges:
3568 src = repo.dirstate.copied(f)
3568 src = repo.dirstate.copied(f)
3569 # XXX should we check for rename down to target node?
3569 # XXX should we check for rename down to target node?
3570 if src and src not in names and repo.dirstate[src] == 'r':
3570 if src and src not in names and repo.dirstate[src] == 'r':
3571 dsremoved.add(src)
3571 dsremoved.add(src)
3572 names[src] = (repo.pathto(src, cwd), True)
3572 names[src] = (repo.pathto(src, cwd), True)
3573
3573
3574 # determine the exact nature of the deleted changesets
3574 # determine the exact nature of the deleted changesets
3575 deladded = set(_deleted)
3575 deladded = set(_deleted)
3576 for path in _deleted:
3576 for path in _deleted:
3577 if path in mf:
3577 if path in mf:
3578 deladded.remove(path)
3578 deladded.remove(path)
3579 deleted = _deleted - deladded
3579 deleted = _deleted - deladded
3580
3580
3581 # distinguish between file to forget and the other
3581 # distinguish between file to forget and the other
3582 added = set()
3582 added = set()
3583 for abs in dsadded:
3583 for abs in dsadded:
3584 if repo.dirstate[abs] != 'a':
3584 if repo.dirstate[abs] != 'a':
3585 added.add(abs)
3585 added.add(abs)
3586 dsadded -= added
3586 dsadded -= added
3587
3587
3588 for abs in deladded:
3588 for abs in deladded:
3589 if repo.dirstate[abs] == 'a':
3589 if repo.dirstate[abs] == 'a':
3590 dsadded.add(abs)
3590 dsadded.add(abs)
3591 deladded -= dsadded
3591 deladded -= dsadded
3592
3592
3593 # For files marked as removed, we check if an unknown file is present at
3593 # For files marked as removed, we check if an unknown file is present at
3594 # the same path. If a such file exists it may need to be backed up.
3594 # the same path. If a such file exists it may need to be backed up.
3595 # Making the distinction at this stage helps have simpler backup
3595 # Making the distinction at this stage helps have simpler backup
3596 # logic.
3596 # logic.
3597 removunk = set()
3597 removunk = set()
3598 for abs in removed:
3598 for abs in removed:
3599 target = repo.wjoin(abs)
3599 target = repo.wjoin(abs)
3600 if os.path.lexists(target):
3600 if os.path.lexists(target):
3601 removunk.add(abs)
3601 removunk.add(abs)
3602 removed -= removunk
3602 removed -= removunk
3603
3603
3604 dsremovunk = set()
3604 dsremovunk = set()
3605 for abs in dsremoved:
3605 for abs in dsremoved:
3606 target = repo.wjoin(abs)
3606 target = repo.wjoin(abs)
3607 if os.path.lexists(target):
3607 if os.path.lexists(target):
3608 dsremovunk.add(abs)
3608 dsremovunk.add(abs)
3609 dsremoved -= dsremovunk
3609 dsremoved -= dsremovunk
3610
3610
3611 # action to be actually performed by revert
3611 # action to be actually performed by revert
3612 # (<list of file>, message>) tuple
3612 # (<list of file>, message>) tuple
3613 actions = {'revert': ([], _('reverting %s\n')),
3613 actions = {'revert': ([], _('reverting %s\n')),
3614 'add': ([], _('adding %s\n')),
3614 'add': ([], _('adding %s\n')),
3615 'remove': ([], _('removing %s\n')),
3615 'remove': ([], _('removing %s\n')),
3616 'drop': ([], _('removing %s\n')),
3616 'drop': ([], _('removing %s\n')),
3617 'forget': ([], _('forgetting %s\n')),
3617 'forget': ([], _('forgetting %s\n')),
3618 'undelete': ([], _('undeleting %s\n')),
3618 'undelete': ([], _('undeleting %s\n')),
3619 'noop': (None, _('no changes needed to %s\n')),
3619 'noop': (None, _('no changes needed to %s\n')),
3620 'unknown': (None, _('file not managed: %s\n')),
3620 'unknown': (None, _('file not managed: %s\n')),
3621 }
3621 }
3622
3622
3623 # "constant" that convey the backup strategy.
3623 # "constant" that convey the backup strategy.
3624 # All set to `discard` if `no-backup` is set do avoid checking
3624 # All set to `discard` if `no-backup` is set do avoid checking
3625 # no_backup lower in the code.
3625 # no_backup lower in the code.
3626 # These values are ordered for comparison purposes
3626 # These values are ordered for comparison purposes
3627 backupinteractive = 3 # do backup if interactively modified
3627 backupinteractive = 3 # do backup if interactively modified
3628 backup = 2 # unconditionally do backup
3628 backup = 2 # unconditionally do backup
3629 check = 1 # check if the existing file differs from target
3629 check = 1 # check if the existing file differs from target
3630 discard = 0 # never do backup
3630 discard = 0 # never do backup
3631 if opts.get('no_backup'):
3631 if opts.get('no_backup'):
3632 backupinteractive = backup = check = discard
3632 backupinteractive = backup = check = discard
3633 if interactive:
3633 if interactive:
3634 dsmodifiedbackup = backupinteractive
3634 dsmodifiedbackup = backupinteractive
3635 else:
3635 else:
3636 dsmodifiedbackup = backup
3636 dsmodifiedbackup = backup
3637 tobackup = set()
3637 tobackup = set()
3638
3638
3639 backupanddel = actions['remove']
3639 backupanddel = actions['remove']
3640 if not opts.get('no_backup'):
3640 if not opts.get('no_backup'):
3641 backupanddel = actions['drop']
3641 backupanddel = actions['drop']
3642
3642
3643 disptable = (
3643 disptable = (
3644 # dispatch table:
3644 # dispatch table:
3645 # file state
3645 # file state
3646 # action
3646 # action
3647 # make backup
3647 # make backup
3648
3648
3649 ## Sets that results that will change file on disk
3649 ## Sets that results that will change file on disk
3650 # Modified compared to target, no local change
3650 # Modified compared to target, no local change
3651 (modified, actions['revert'], discard),
3651 (modified, actions['revert'], discard),
3652 # Modified compared to target, but local file is deleted
3652 # Modified compared to target, but local file is deleted
3653 (deleted, actions['revert'], discard),
3653 (deleted, actions['revert'], discard),
3654 # Modified compared to target, local change
3654 # Modified compared to target, local change
3655 (dsmodified, actions['revert'], dsmodifiedbackup),
3655 (dsmodified, actions['revert'], dsmodifiedbackup),
3656 # Added since target
3656 # Added since target
3657 (added, actions['remove'], discard),
3657 (added, actions['remove'], discard),
3658 # Added in working directory
3658 # Added in working directory
3659 (dsadded, actions['forget'], discard),
3659 (dsadded, actions['forget'], discard),
3660 # Added since target, have local modification
3660 # Added since target, have local modification
3661 (modadded, backupanddel, backup),
3661 (modadded, backupanddel, backup),
3662 # Added since target but file is missing in working directory
3662 # Added since target but file is missing in working directory
3663 (deladded, actions['drop'], discard),
3663 (deladded, actions['drop'], discard),
3664 # Removed since target, before working copy parent
3664 # Removed since target, before working copy parent
3665 (removed, actions['add'], discard),
3665 (removed, actions['add'], discard),
3666 # Same as `removed` but an unknown file exists at the same path
3666 # Same as `removed` but an unknown file exists at the same path
3667 (removunk, actions['add'], check),
3667 (removunk, actions['add'], check),
3668 # Removed since targe, marked as such in working copy parent
3668 # Removed since targe, marked as such in working copy parent
3669 (dsremoved, actions['undelete'], discard),
3669 (dsremoved, actions['undelete'], discard),
3670 # Same as `dsremoved` but an unknown file exists at the same path
3670 # Same as `dsremoved` but an unknown file exists at the same path
3671 (dsremovunk, actions['undelete'], check),
3671 (dsremovunk, actions['undelete'], check),
3672 ## the following sets does not result in any file changes
3672 ## the following sets does not result in any file changes
3673 # File with no modification
3673 # File with no modification
3674 (clean, actions['noop'], discard),
3674 (clean, actions['noop'], discard),
3675 # Existing file, not tracked anywhere
3675 # Existing file, not tracked anywhere
3676 (unknown, actions['unknown'], discard),
3676 (unknown, actions['unknown'], discard),
3677 )
3677 )
3678
3678
3679 for abs, (rel, exact) in sorted(names.items()):
3679 for abs, (rel, exact) in sorted(names.items()):
3680 # target file to be touch on disk (relative to cwd)
3680 # target file to be touch on disk (relative to cwd)
3681 target = repo.wjoin(abs)
3681 target = repo.wjoin(abs)
3682 # search the entry in the dispatch table.
3682 # search the entry in the dispatch table.
3683 # if the file is in any of these sets, it was touched in the working
3683 # if the file is in any of these sets, it was touched in the working
3684 # directory parent and we are sure it needs to be reverted.
3684 # directory parent and we are sure it needs to be reverted.
3685 for table, (xlist, msg), dobackup in disptable:
3685 for table, (xlist, msg), dobackup in disptable:
3686 if abs not in table:
3686 if abs not in table:
3687 continue
3687 continue
3688 if xlist is not None:
3688 if xlist is not None:
3689 xlist.append(abs)
3689 xlist.append(abs)
3690 if dobackup:
3690 if dobackup:
3691 # If in interactive mode, don't automatically create
3691 # If in interactive mode, don't automatically create
3692 # .orig files (issue4793)
3692 # .orig files (issue4793)
3693 if dobackup == backupinteractive:
3693 if dobackup == backupinteractive:
3694 tobackup.add(abs)
3694 tobackup.add(abs)
3695 elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
3695 elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
3696 bakname = scmutil.origpath(ui, repo, rel)
3696 bakname = scmutil.origpath(ui, repo, rel)
3697 ui.note(_('saving current version of %s as %s\n') %
3697 ui.note(_('saving current version of %s as %s\n') %
3698 (rel, bakname))
3698 (rel, bakname))
3699 if not opts.get('dry_run'):
3699 if not opts.get('dry_run'):
3700 if interactive:
3700 if interactive:
3701 util.copyfile(target, bakname)
3701 util.copyfile(target, bakname)
3702 else:
3702 else:
3703 util.rename(target, bakname)
3703 util.rename(target, bakname)
3704 if ui.verbose or not exact:
3704 if ui.verbose or not exact:
3705 if not isinstance(msg, bytes):
3705 if not isinstance(msg, bytes):
3706 msg = msg(abs)
3706 msg = msg(abs)
3707 ui.status(msg % rel)
3707 ui.status(msg % rel)
3708 elif exact:
3708 elif exact:
3709 ui.warn(msg % rel)
3709 ui.warn(msg % rel)
3710 break
3710 break
3711
3711
3712 if not opts.get('dry_run'):
3712 if not opts.get('dry_run'):
3713 needdata = ('revert', 'add', 'undelete')
3713 needdata = ('revert', 'add', 'undelete')
3714 _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
3714 _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
3715 _performrevert(repo, parents, ctx, actions, interactive, tobackup)
3715 _performrevert(repo, parents, ctx, actions, interactive, tobackup)
3716
3716
3717 if targetsubs:
3717 if targetsubs:
3718 # Revert the subrepos on the revert list
3718 # Revert the subrepos on the revert list
3719 for sub in targetsubs:
3719 for sub in targetsubs:
3720 try:
3720 try:
3721 wctx.sub(sub).revert(ctx.substate[sub], *pats,
3721 wctx.sub(sub).revert(ctx.substate[sub], *pats,
3722 **pycompat.strkwargs(opts))
3722 **pycompat.strkwargs(opts))
3723 except KeyError:
3723 except KeyError:
3724 raise error.Abort("subrepository '%s' does not exist in %s!"
3724 raise error.Abort("subrepository '%s' does not exist in %s!"
3725 % (sub, short(ctx.node())))
3725 % (sub, short(ctx.node())))
3726
3726
3727 def _revertprefetch(repo, ctx, *files):
3727 def _revertprefetch(repo, ctx, *files):
3728 """Let extension changing the storage layer prefetch content"""
3728 """Let extension changing the storage layer prefetch content"""
3729
3729
3730 def _performrevert(repo, parents, ctx, actions, interactive=False,
3730 def _performrevert(repo, parents, ctx, actions, interactive=False,
3731 tobackup=None):
3731 tobackup=None):
3732 """function that actually perform all the actions computed for revert
3732 """function that actually perform all the actions computed for revert
3733
3733
3734 This is an independent function to let extension to plug in and react to
3734 This is an independent function to let extension to plug in and react to
3735 the imminent revert.
3735 the imminent revert.
3736
3736
3737 Make sure you have the working directory locked when calling this function.
3737 Make sure you have the working directory locked when calling this function.
3738 """
3738 """
3739 parent, p2 = parents
3739 parent, p2 = parents
3740 node = ctx.node()
3740 node = ctx.node()
3741 excluded_files = []
3741 excluded_files = []
3742 matcher_opts = {"exclude": excluded_files}
3742 matcher_opts = {"exclude": excluded_files}
3743
3743
3744 def checkout(f):
3744 def checkout(f):
3745 fc = ctx[f]
3745 fc = ctx[f]
3746 repo.wwrite(f, fc.data(), fc.flags())
3746 repo.wwrite(f, fc.data(), fc.flags())
3747
3747
3748 def doremove(f):
3748 def doremove(f):
3749 try:
3749 try:
3750 repo.wvfs.unlinkpath(f)
3750 repo.wvfs.unlinkpath(f)
3751 except OSError:
3751 except OSError:
3752 pass
3752 pass
3753 repo.dirstate.remove(f)
3753 repo.dirstate.remove(f)
3754
3754
3755 audit_path = pathutil.pathauditor(repo.root, cached=True)
3755 audit_path = pathutil.pathauditor(repo.root, cached=True)
3756 for f in actions['forget'][0]:
3756 for f in actions['forget'][0]:
3757 if interactive:
3757 if interactive:
3758 choice = repo.ui.promptchoice(
3758 choice = repo.ui.promptchoice(
3759 _("forget added file %s (Yn)?$$ &Yes $$ &No") % f)
3759 _("forget added file %s (Yn)?$$ &Yes $$ &No") % f)
3760 if choice == 0:
3760 if choice == 0:
3761 repo.dirstate.drop(f)
3761 repo.dirstate.drop(f)
3762 else:
3762 else:
3763 excluded_files.append(repo.wjoin(f))
3763 excluded_files.append(repo.wjoin(f))
3764 else:
3764 else:
3765 repo.dirstate.drop(f)
3765 repo.dirstate.drop(f)
3766 for f in actions['remove'][0]:
3766 for f in actions['remove'][0]:
3767 audit_path(f)
3767 audit_path(f)
3768 if interactive:
3768 if interactive:
3769 choice = repo.ui.promptchoice(
3769 choice = repo.ui.promptchoice(
3770 _("remove added file %s (Yn)?$$ &Yes $$ &No") % f)
3770 _("remove added file %s (Yn)?$$ &Yes $$ &No") % f)
3771 if choice == 0:
3771 if choice == 0:
3772 doremove(f)
3772 doremove(f)
3773 else:
3773 else:
3774 excluded_files.append(repo.wjoin(f))
3774 excluded_files.append(repo.wjoin(f))
3775 else:
3775 else:
3776 doremove(f)
3776 doremove(f)
3777 for f in actions['drop'][0]:
3777 for f in actions['drop'][0]:
3778 audit_path(f)
3778 audit_path(f)
3779 repo.dirstate.remove(f)
3779 repo.dirstate.remove(f)
3780
3780
3781 normal = None
3781 normal = None
3782 if node == parent:
3782 if node == parent:
3783 # We're reverting to our parent. If possible, we'd like status
3783 # We're reverting to our parent. If possible, we'd like status
3784 # to report the file as clean. We have to use normallookup for
3784 # to report the file as clean. We have to use normallookup for
3785 # merges to avoid losing information about merged/dirty files.
3785 # merges to avoid losing information about merged/dirty files.
3786 if p2 != nullid:
3786 if p2 != nullid:
3787 normal = repo.dirstate.normallookup
3787 normal = repo.dirstate.normallookup
3788 else:
3788 else:
3789 normal = repo.dirstate.normal
3789 normal = repo.dirstate.normal
3790
3790
3791 newlyaddedandmodifiedfiles = set()
3791 newlyaddedandmodifiedfiles = set()
3792 if interactive:
3792 if interactive:
3793 # Prompt the user for changes to revert
3793 # Prompt the user for changes to revert
3794 torevert = [repo.wjoin(f) for f in actions['revert'][0]]
3794 torevert = [repo.wjoin(f) for f in actions['revert'][0]]
3795 m = scmutil.match(ctx, torevert, matcher_opts)
3795 m = scmutil.match(ctx, torevert, matcher_opts)
3796 diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
3796 diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
3797 diffopts.nodates = True
3797 diffopts.nodates = True
3798 diffopts.git = True
3798 diffopts.git = True
3799 operation = 'discard'
3799 operation = 'discard'
3800 reversehunks = True
3800 reversehunks = True
3801 if node != parent:
3801 if node != parent:
3802 operation = 'apply'
3802 operation = 'apply'
3803 reversehunks = False
3803 reversehunks = False
3804 if reversehunks:
3804 if reversehunks:
3805 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3805 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3806 else:
3806 else:
3807 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3807 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3808 originalchunks = patch.parsepatch(diff)
3808 originalchunks = patch.parsepatch(diff)
3809
3809
3810 try:
3810 try:
3811
3811
3812 chunks, opts = recordfilter(repo.ui, originalchunks,
3812 chunks, opts = recordfilter(repo.ui, originalchunks,
3813 operation=operation)
3813 operation=operation)
3814 if reversehunks:
3814 if reversehunks:
3815 chunks = patch.reversehunks(chunks)
3815 chunks = patch.reversehunks(chunks)
3816
3816
3817 except error.PatchError as err:
3817 except error.PatchError as err:
3818 raise error.Abort(_('error parsing patch: %s') % err)
3818 raise error.Abort(_('error parsing patch: %s') % err)
3819
3819
3820 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
3820 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
3821 if tobackup is None:
3821 if tobackup is None:
3822 tobackup = set()
3822 tobackup = set()
3823 # Apply changes
3823 # Apply changes
3824 fp = stringio()
3824 fp = stringio()
3825 for c in chunks:
3825 for c in chunks:
3826 # Create a backup file only if this hunk should be backed up
3826 # Create a backup file only if this hunk should be backed up
3827 if ishunk(c) and c.header.filename() in tobackup:
3827 if ishunk(c) and c.header.filename() in tobackup:
3828 abs = c.header.filename()
3828 abs = c.header.filename()
3829 target = repo.wjoin(abs)
3829 target = repo.wjoin(abs)
3830 bakname = scmutil.origpath(repo.ui, repo, m.rel(abs))
3830 bakname = scmutil.origpath(repo.ui, repo, m.rel(abs))
3831 util.copyfile(target, bakname)
3831 util.copyfile(target, bakname)
3832 tobackup.remove(abs)
3832 tobackup.remove(abs)
3833 c.write(fp)
3833 c.write(fp)
3834 dopatch = fp.tell()
3834 dopatch = fp.tell()
3835 fp.seek(0)
3835 fp.seek(0)
3836 if dopatch:
3836 if dopatch:
3837 try:
3837 try:
3838 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3838 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3839 except error.PatchError as err:
3839 except error.PatchError as err:
3840 raise error.Abort(str(err))
3840 raise error.Abort(str(err))
3841 del fp
3841 del fp
3842 else:
3842 else:
3843 for f in actions['revert'][0]:
3843 for f in actions['revert'][0]:
3844 checkout(f)
3844 checkout(f)
3845 if normal:
3845 if normal:
3846 normal(f)
3846 normal(f)
3847
3847
3848 for f in actions['add'][0]:
3848 for f in actions['add'][0]:
3849 # Don't checkout modified files, they are already created by the diff
3849 # Don't checkout modified files, they are already created by the diff
3850 if f not in newlyaddedandmodifiedfiles:
3850 if f not in newlyaddedandmodifiedfiles:
3851 checkout(f)
3851 checkout(f)
3852 repo.dirstate.add(f)
3852 repo.dirstate.add(f)
3853
3853
3854 normal = repo.dirstate.normallookup
3854 normal = repo.dirstate.normallookup
3855 if node == parent and p2 == nullid:
3855 if node == parent and p2 == nullid:
3856 normal = repo.dirstate.normal
3856 normal = repo.dirstate.normal
3857 for f in actions['undelete'][0]:
3857 for f in actions['undelete'][0]:
3858 checkout(f)
3858 checkout(f)
3859 normal(f)
3859 normal(f)
3860
3860
3861 copied = copies.pathcopies(repo[parent], ctx)
3861 copied = copies.pathcopies(repo[parent], ctx)
3862
3862
3863 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3863 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3864 if f in copied:
3864 if f in copied:
3865 repo.dirstate.copy(copied[f], f)
3865 repo.dirstate.copy(copied[f], f)
3866
3866
3867 class command(registrar.command):
3867 class command(registrar.command):
3868 """deprecated: used registrar.command instead"""
3868 """deprecated: used registrar.command instead"""
3869 def _doregister(self, func, name, *args, **kwargs):
3869 def _doregister(self, func, name, *args, **kwargs):
3870 func._deprecatedregistrar = True # flag for deprecwarn in extensions.py
3870 func._deprecatedregistrar = True # flag for deprecwarn in extensions.py
3871 return super(command, self)._doregister(func, name, *args, **kwargs)
3871 return super(command, self)._doregister(func, name, *args, **kwargs)
3872
3872
3873 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3873 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3874 # commands.outgoing. "missing" is "missing" of the result of
3874 # commands.outgoing. "missing" is "missing" of the result of
3875 # "findcommonoutgoing()"
3875 # "findcommonoutgoing()"
3876 outgoinghooks = util.hooks()
3876 outgoinghooks = util.hooks()
3877
3877
3878 # a list of (ui, repo) functions called by commands.summary
3878 # a list of (ui, repo) functions called by commands.summary
3879 summaryhooks = util.hooks()
3879 summaryhooks = util.hooks()
3880
3880
3881 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3881 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3882 #
3882 #
3883 # functions should return tuple of booleans below, if 'changes' is None:
3883 # functions should return tuple of booleans below, if 'changes' is None:
3884 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3884 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3885 #
3885 #
3886 # otherwise, 'changes' is a tuple of tuples below:
3886 # otherwise, 'changes' is a tuple of tuples below:
3887 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3887 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3888 # - (desturl, destbranch, destpeer, outgoing)
3888 # - (desturl, destbranch, destpeer, outgoing)
3889 summaryremotehooks = util.hooks()
3889 summaryremotehooks = util.hooks()
3890
3890
3891 # A list of state files kept by multistep operations like graft.
3891 # A list of state files kept by multistep operations like graft.
3892 # Since graft cannot be aborted, it is considered 'clearable' by update.
3892 # Since graft cannot be aborted, it is considered 'clearable' by update.
3893 # note: bisect is intentionally excluded
3893 # note: bisect is intentionally excluded
3894 # (state file, clearable, allowcommit, error, hint)
3894 # (state file, clearable, allowcommit, error, hint)
3895 unfinishedstates = [
3895 unfinishedstates = [
3896 ('graftstate', True, False, _('graft in progress'),
3896 ('graftstate', True, False, _('graft in progress'),
3897 _("use 'hg graft --continue' or 'hg update' to abort")),
3897 _("use 'hg graft --continue' or 'hg update' to abort")),
3898 ('updatestate', True, False, _('last update was interrupted'),
3898 ('updatestate', True, False, _('last update was interrupted'),
3899 _("use 'hg update' to get a consistent checkout"))
3899 _("use 'hg update' to get a consistent checkout"))
3900 ]
3900 ]
3901
3901
3902 def checkunfinished(repo, commit=False):
3902 def checkunfinished(repo, commit=False):
3903 '''Look for an unfinished multistep operation, like graft, and abort
3903 '''Look for an unfinished multistep operation, like graft, and abort
3904 if found. It's probably good to check this right before
3904 if found. It's probably good to check this right before
3905 bailifchanged().
3905 bailifchanged().
3906 '''
3906 '''
3907 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3907 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3908 if commit and allowcommit:
3908 if commit and allowcommit:
3909 continue
3909 continue
3910 if repo.vfs.exists(f):
3910 if repo.vfs.exists(f):
3911 raise error.Abort(msg, hint=hint)
3911 raise error.Abort(msg, hint=hint)
3912
3912
3913 def clearunfinished(repo):
3913 def clearunfinished(repo):
3914 '''Check for unfinished operations (as above), and clear the ones
3914 '''Check for unfinished operations (as above), and clear the ones
3915 that are clearable.
3915 that are clearable.
3916 '''
3916 '''
3917 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3917 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3918 if not clearable and repo.vfs.exists(f):
3918 if not clearable and repo.vfs.exists(f):
3919 raise error.Abort(msg, hint=hint)
3919 raise error.Abort(msg, hint=hint)
3920 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3920 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3921 if clearable and repo.vfs.exists(f):
3921 if clearable and repo.vfs.exists(f):
3922 util.unlink(repo.vfs.join(f))
3922 util.unlink(repo.vfs.join(f))
3923
3923
3924 afterresolvedstates = [
3924 afterresolvedstates = [
3925 ('graftstate',
3925 ('graftstate',
3926 _('hg graft --continue')),
3926 _('hg graft --continue')),
3927 ]
3927 ]
3928
3928
3929 def howtocontinue(repo):
3929 def howtocontinue(repo):
3930 '''Check for an unfinished operation and return the command to finish
3930 '''Check for an unfinished operation and return the command to finish
3931 it.
3931 it.
3932
3932
3933 afterresolvedstates tuples define a .hg/{file} and the corresponding
3933 afterresolvedstates tuples define a .hg/{file} and the corresponding
3934 command needed to finish it.
3934 command needed to finish it.
3935
3935
3936 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3936 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3937 a boolean.
3937 a boolean.
3938 '''
3938 '''
3939 contmsg = _("continue: %s")
3939 contmsg = _("continue: %s")
3940 for f, msg in afterresolvedstates:
3940 for f, msg in afterresolvedstates:
3941 if repo.vfs.exists(f):
3941 if repo.vfs.exists(f):
3942 return contmsg % msg, True
3942 return contmsg % msg, True
3943 if repo[None].dirty(missing=True, merge=False, branch=False):
3943 if repo[None].dirty(missing=True, merge=False, branch=False):
3944 return contmsg % _("hg commit"), False
3944 return contmsg % _("hg commit"), False
3945 return None, None
3945 return None, None
3946
3946
3947 def checkafterresolved(repo):
3947 def checkafterresolved(repo):
3948 '''Inform the user about the next action after completing hg resolve
3948 '''Inform the user about the next action after completing hg resolve
3949
3949
3950 If there's a matching afterresolvedstates, howtocontinue will yield
3950 If there's a matching afterresolvedstates, howtocontinue will yield
3951 repo.ui.warn as the reporter.
3951 repo.ui.warn as the reporter.
3952
3952
3953 Otherwise, it will yield repo.ui.note.
3953 Otherwise, it will yield repo.ui.note.
3954 '''
3954 '''
3955 msg, warning = howtocontinue(repo)
3955 msg, warning = howtocontinue(repo)
3956 if msg is not None:
3956 if msg is not None:
3957 if warning:
3957 if warning:
3958 repo.ui.warn("%s\n" % msg)
3958 repo.ui.warn("%s\n" % msg)
3959 else:
3959 else:
3960 repo.ui.note("%s\n" % msg)
3960 repo.ui.note("%s\n" % msg)
3961
3961
3962 def wrongtooltocontinue(repo, task):
3962 def wrongtooltocontinue(repo, task):
3963 '''Raise an abort suggesting how to properly continue if there is an
3963 '''Raise an abort suggesting how to properly continue if there is an
3964 active task.
3964 active task.
3965
3965
3966 Uses howtocontinue() to find the active task.
3966 Uses howtocontinue() to find the active task.
3967
3967
3968 If there's no task (repo.ui.note for 'hg commit'), it does not offer
3968 If there's no task (repo.ui.note for 'hg commit'), it does not offer
3969 a hint.
3969 a hint.
3970 '''
3970 '''
3971 after = howtocontinue(repo)
3971 after = howtocontinue(repo)
3972 hint = None
3972 hint = None
3973 if after[1]:
3973 if after[1]:
3974 hint = after[0]
3974 hint = after[0]
3975 raise error.Abort(_('no %s in progress') % task, hint=hint)
3975 raise error.Abort(_('no %s in progress') % task, hint=hint)
@@ -1,2778 +1,2776 b''
1 # context.py - changeset and file context objects for mercurial
1 # context.py - changeset and file context objects for mercurial
2 #
2 #
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import filecmp
11 import filecmp
12 import os
12 import os
13 import re
13 import re
14 import stat
14 import stat
15
15
16 from .i18n import _
16 from .i18n import _
17 from .node import (
17 from .node import (
18 addednodeid,
18 addednodeid,
19 bin,
19 bin,
20 hex,
20 hex,
21 modifiednodeid,
21 modifiednodeid,
22 nullid,
22 nullid,
23 nullrev,
23 nullrev,
24 short,
24 short,
25 wdirid,
25 wdirid,
26 wdirnodes,
26 wdirnodes,
27 wdirrev,
27 wdirrev,
28 )
28 )
29 from .thirdparty import (
29 from .thirdparty import (
30 attr,
30 attr,
31 )
31 )
32 from . import (
32 from . import (
33 encoding,
33 encoding,
34 error,
34 error,
35 fileset,
35 fileset,
36 match as matchmod,
36 match as matchmod,
37 mdiff,
37 mdiff,
38 obsolete as obsmod,
38 obsolete as obsmod,
39 patch,
39 patch,
40 pathutil,
40 pathutil,
41 phases,
41 phases,
42 pycompat,
42 pycompat,
43 repoview,
43 repoview,
44 revlog,
44 revlog,
45 scmutil,
45 scmutil,
46 sparse,
46 sparse,
47 subrepo,
47 subrepo,
48 util,
48 util,
49 )
49 )
50
50
51 propertycache = util.propertycache
51 propertycache = util.propertycache
52
52
53 nonascii = re.compile(r'[^\x21-\x7f]').search
53 nonascii = re.compile(r'[^\x21-\x7f]').search
54
54
55 class basectx(object):
55 class basectx(object):
56 """A basectx object represents the common logic for its children:
56 """A basectx object represents the common logic for its children:
57 changectx: read-only context that is already present in the repo,
57 changectx: read-only context that is already present in the repo,
58 workingctx: a context that represents the working directory and can
58 workingctx: a context that represents the working directory and can
59 be committed,
59 be committed,
60 memctx: a context that represents changes in-memory and can also
60 memctx: a context that represents changes in-memory and can also
61 be committed."""
61 be committed."""
62 def __new__(cls, repo, changeid='', *args, **kwargs):
62 def __new__(cls, repo, changeid='', *args, **kwargs):
63 if isinstance(changeid, basectx):
63 if isinstance(changeid, basectx):
64 return changeid
64 return changeid
65
65
66 o = super(basectx, cls).__new__(cls)
66 o = super(basectx, cls).__new__(cls)
67
67
68 o._repo = repo
68 o._repo = repo
69 o._rev = nullrev
69 o._rev = nullrev
70 o._node = nullid
70 o._node = nullid
71
71
72 return o
72 return o
73
73
74 def __bytes__(self):
74 def __bytes__(self):
75 return short(self.node())
75 return short(self.node())
76
76
77 __str__ = encoding.strmethod(__bytes__)
77 __str__ = encoding.strmethod(__bytes__)
78
78
79 def __int__(self):
79 def __int__(self):
80 return self.rev()
80 return self.rev()
81
81
82 def __repr__(self):
82 def __repr__(self):
83 return r"<%s %s>" % (type(self).__name__, str(self))
83 return r"<%s %s>" % (type(self).__name__, str(self))
84
84
85 def __eq__(self, other):
85 def __eq__(self, other):
86 try:
86 try:
87 return type(self) == type(other) and self._rev == other._rev
87 return type(self) == type(other) and self._rev == other._rev
88 except AttributeError:
88 except AttributeError:
89 return False
89 return False
90
90
91 def __ne__(self, other):
91 def __ne__(self, other):
92 return not (self == other)
92 return not (self == other)
93
93
94 def __contains__(self, key):
94 def __contains__(self, key):
95 return key in self._manifest
95 return key in self._manifest
96
96
97 def __getitem__(self, key):
97 def __getitem__(self, key):
98 return self.filectx(key)
98 return self.filectx(key)
99
99
100 def __iter__(self):
100 def __iter__(self):
101 return iter(self._manifest)
101 return iter(self._manifest)
102
102
103 def _buildstatusmanifest(self, status):
103 def _buildstatusmanifest(self, status):
104 """Builds a manifest that includes the given status results, if this is
104 """Builds a manifest that includes the given status results, if this is
105 a working copy context. For non-working copy contexts, it just returns
105 a working copy context. For non-working copy contexts, it just returns
106 the normal manifest."""
106 the normal manifest."""
107 return self.manifest()
107 return self.manifest()
108
108
109 def _matchstatus(self, other, match):
109 def _matchstatus(self, other, match):
110 """This internal method provides a way for child objects to override the
110 """This internal method provides a way for child objects to override the
111 match operator.
111 match operator.
112 """
112 """
113 return match
113 return match
114
114
115 def _buildstatus(self, other, s, match, listignored, listclean,
115 def _buildstatus(self, other, s, match, listignored, listclean,
116 listunknown):
116 listunknown):
117 """build a status with respect to another context"""
117 """build a status with respect to another context"""
118 # Load earliest manifest first for caching reasons. More specifically,
118 # Load earliest manifest first for caching reasons. More specifically,
119 # if you have revisions 1000 and 1001, 1001 is probably stored as a
119 # if you have revisions 1000 and 1001, 1001 is probably stored as a
120 # delta against 1000. Thus, if you read 1000 first, we'll reconstruct
120 # delta against 1000. Thus, if you read 1000 first, we'll reconstruct
121 # 1000 and cache it so that when you read 1001, we just need to apply a
121 # 1000 and cache it so that when you read 1001, we just need to apply a
122 # delta to what's in the cache. So that's one full reconstruction + one
122 # delta to what's in the cache. So that's one full reconstruction + one
123 # delta application.
123 # delta application.
124 mf2 = None
124 mf2 = None
125 if self.rev() is not None and self.rev() < other.rev():
125 if self.rev() is not None and self.rev() < other.rev():
126 mf2 = self._buildstatusmanifest(s)
126 mf2 = self._buildstatusmanifest(s)
127 mf1 = other._buildstatusmanifest(s)
127 mf1 = other._buildstatusmanifest(s)
128 if mf2 is None:
128 if mf2 is None:
129 mf2 = self._buildstatusmanifest(s)
129 mf2 = self._buildstatusmanifest(s)
130
130
131 modified, added = [], []
131 modified, added = [], []
132 removed = []
132 removed = []
133 clean = []
133 clean = []
134 deleted, unknown, ignored = s.deleted, s.unknown, s.ignored
134 deleted, unknown, ignored = s.deleted, s.unknown, s.ignored
135 deletedset = set(deleted)
135 deletedset = set(deleted)
136 d = mf1.diff(mf2, match=match, clean=listclean)
136 d = mf1.diff(mf2, match=match, clean=listclean)
137 for fn, value in d.iteritems():
137 for fn, value in d.iteritems():
138 if fn in deletedset:
138 if fn in deletedset:
139 continue
139 continue
140 if value is None:
140 if value is None:
141 clean.append(fn)
141 clean.append(fn)
142 continue
142 continue
143 (node1, flag1), (node2, flag2) = value
143 (node1, flag1), (node2, flag2) = value
144 if node1 is None:
144 if node1 is None:
145 added.append(fn)
145 added.append(fn)
146 elif node2 is None:
146 elif node2 is None:
147 removed.append(fn)
147 removed.append(fn)
148 elif flag1 != flag2:
148 elif flag1 != flag2:
149 modified.append(fn)
149 modified.append(fn)
150 elif node2 not in wdirnodes:
150 elif node2 not in wdirnodes:
151 # When comparing files between two commits, we save time by
151 # When comparing files between two commits, we save time by
152 # not comparing the file contents when the nodeids differ.
152 # not comparing the file contents when the nodeids differ.
153 # Note that this means we incorrectly report a reverted change
153 # Note that this means we incorrectly report a reverted change
154 # to a file as a modification.
154 # to a file as a modification.
155 modified.append(fn)
155 modified.append(fn)
156 elif self[fn].cmp(other[fn]):
156 elif self[fn].cmp(other[fn]):
157 modified.append(fn)
157 modified.append(fn)
158 else:
158 else:
159 clean.append(fn)
159 clean.append(fn)
160
160
161 if removed:
161 if removed:
162 # need to filter files if they are already reported as removed
162 # need to filter files if they are already reported as removed
163 unknown = [fn for fn in unknown if fn not in mf1 and
163 unknown = [fn for fn in unknown if fn not in mf1 and
164 (not match or match(fn))]
164 (not match or match(fn))]
165 ignored = [fn for fn in ignored if fn not in mf1 and
165 ignored = [fn for fn in ignored if fn not in mf1 and
166 (not match or match(fn))]
166 (not match or match(fn))]
167 # if they're deleted, don't report them as removed
167 # if they're deleted, don't report them as removed
168 removed = [fn for fn in removed if fn not in deletedset]
168 removed = [fn for fn in removed if fn not in deletedset]
169
169
170 return scmutil.status(modified, added, removed, deleted, unknown,
170 return scmutil.status(modified, added, removed, deleted, unknown,
171 ignored, clean)
171 ignored, clean)
172
172
173 @propertycache
173 @propertycache
174 def substate(self):
174 def substate(self):
175 return subrepo.state(self, self._repo.ui)
175 return subrepo.state(self, self._repo.ui)
176
176
177 def subrev(self, subpath):
177 def subrev(self, subpath):
178 return self.substate[subpath][1]
178 return self.substate[subpath][1]
179
179
180 def rev(self):
180 def rev(self):
181 return self._rev
181 return self._rev
182 def node(self):
182 def node(self):
183 return self._node
183 return self._node
184 def hex(self):
184 def hex(self):
185 return hex(self.node())
185 return hex(self.node())
186 def manifest(self):
186 def manifest(self):
187 return self._manifest
187 return self._manifest
188 def manifestctx(self):
188 def manifestctx(self):
189 return self._manifestctx
189 return self._manifestctx
190 def repo(self):
190 def repo(self):
191 return self._repo
191 return self._repo
192 def phasestr(self):
192 def phasestr(self):
193 return phases.phasenames[self.phase()]
193 return phases.phasenames[self.phase()]
194 def mutable(self):
194 def mutable(self):
195 return self.phase() > phases.public
195 return self.phase() > phases.public
196
196
197 def getfileset(self, expr):
197 def getfileset(self, expr):
198 return fileset.getfileset(self, expr)
198 return fileset.getfileset(self, expr)
199
199
200 def obsolete(self):
200 def obsolete(self):
201 """True if the changeset is obsolete"""
201 """True if the changeset is obsolete"""
202 return self.rev() in obsmod.getrevs(self._repo, 'obsolete')
202 return self.rev() in obsmod.getrevs(self._repo, 'obsolete')
203
203
204 def extinct(self):
204 def extinct(self):
205 """True if the changeset is extinct"""
205 """True if the changeset is extinct"""
206 return self.rev() in obsmod.getrevs(self._repo, 'extinct')
206 return self.rev() in obsmod.getrevs(self._repo, 'extinct')
207
207
208 def unstable(self):
208 def unstable(self):
209 msg = ("'context.unstable' is deprecated, "
209 msg = ("'context.unstable' is deprecated, "
210 "use 'context.orphan'")
210 "use 'context.orphan'")
211 self._repo.ui.deprecwarn(msg, '4.4')
211 self._repo.ui.deprecwarn(msg, '4.4')
212 return self.orphan()
212 return self.orphan()
213
213
214 def orphan(self):
214 def orphan(self):
215 """True if the changeset is not obsolete but it's ancestor are"""
215 """True if the changeset is not obsolete but it's ancestor are"""
216 return self.rev() in obsmod.getrevs(self._repo, 'orphan')
216 return self.rev() in obsmod.getrevs(self._repo, 'orphan')
217
217
218 def bumped(self):
218 def bumped(self):
219 msg = ("'context.bumped' is deprecated, "
219 msg = ("'context.bumped' is deprecated, "
220 "use 'context.phasedivergent'")
220 "use 'context.phasedivergent'")
221 self._repo.ui.deprecwarn(msg, '4.4')
221 self._repo.ui.deprecwarn(msg, '4.4')
222 return self.phasedivergent()
222 return self.phasedivergent()
223
223
224 def phasedivergent(self):
224 def phasedivergent(self):
225 """True if the changeset try to be a successor of a public changeset
225 """True if the changeset try to be a successor of a public changeset
226
226
227 Only non-public and non-obsolete changesets may be bumped.
227 Only non-public and non-obsolete changesets may be bumped.
228 """
228 """
229 return self.rev() in obsmod.getrevs(self._repo, 'phasedivergent')
229 return self.rev() in obsmod.getrevs(self._repo, 'phasedivergent')
230
230
231 def divergent(self):
231 def divergent(self):
232 msg = ("'context.divergent' is deprecated, "
232 msg = ("'context.divergent' is deprecated, "
233 "use 'context.contentdivergent'")
233 "use 'context.contentdivergent'")
234 self._repo.ui.deprecwarn(msg, '4.4')
234 self._repo.ui.deprecwarn(msg, '4.4')
235 return self.contentdivergent()
235 return self.contentdivergent()
236
236
237 def contentdivergent(self):
237 def contentdivergent(self):
238 """Is a successors of a changeset with multiple possible successors set
238 """Is a successors of a changeset with multiple possible successors set
239
239
240 Only non-public and non-obsolete changesets may be divergent.
240 Only non-public and non-obsolete changesets may be divergent.
241 """
241 """
242 return self.rev() in obsmod.getrevs(self._repo, 'contentdivergent')
242 return self.rev() in obsmod.getrevs(self._repo, 'contentdivergent')
243
243
244 def troubled(self):
244 def troubled(self):
245 msg = ("'context.troubled' is deprecated, "
245 msg = ("'context.troubled' is deprecated, "
246 "use 'context.isunstable'")
246 "use 'context.isunstable'")
247 self._repo.ui.deprecwarn(msg, '4.4')
247 self._repo.ui.deprecwarn(msg, '4.4')
248 return self.isunstable()
248 return self.isunstable()
249
249
250 def isunstable(self):
250 def isunstable(self):
251 """True if the changeset is either unstable, bumped or divergent"""
251 """True if the changeset is either unstable, bumped or divergent"""
252 return self.orphan() or self.phasedivergent() or self.contentdivergent()
252 return self.orphan() or self.phasedivergent() or self.contentdivergent()
253
253
254 def troubles(self):
254 def troubles(self):
255 """Keep the old version around in order to avoid breaking extensions
255 """Keep the old version around in order to avoid breaking extensions
256 about different return values.
256 about different return values.
257 """
257 """
258 msg = ("'context.troubles' is deprecated, "
258 msg = ("'context.troubles' is deprecated, "
259 "use 'context.instabilities'")
259 "use 'context.instabilities'")
260 self._repo.ui.deprecwarn(msg, '4.4')
260 self._repo.ui.deprecwarn(msg, '4.4')
261
261
262 troubles = []
262 troubles = []
263 if self.orphan():
263 if self.orphan():
264 troubles.append('orphan')
264 troubles.append('orphan')
265 if self.phasedivergent():
265 if self.phasedivergent():
266 troubles.append('bumped')
266 troubles.append('bumped')
267 if self.contentdivergent():
267 if self.contentdivergent():
268 troubles.append('divergent')
268 troubles.append('divergent')
269 return troubles
269 return troubles
270
270
271 def instabilities(self):
271 def instabilities(self):
272 """return the list of instabilities affecting this changeset.
272 """return the list of instabilities affecting this changeset.
273
273
274 Instabilities are returned as strings. possible values are:
274 Instabilities are returned as strings. possible values are:
275 - orphan,
275 - orphan,
276 - phase-divergent,
276 - phase-divergent,
277 - content-divergent.
277 - content-divergent.
278 """
278 """
279 instabilities = []
279 instabilities = []
280 if self.orphan():
280 if self.orphan():
281 instabilities.append('orphan')
281 instabilities.append('orphan')
282 if self.phasedivergent():
282 if self.phasedivergent():
283 instabilities.append('phase-divergent')
283 instabilities.append('phase-divergent')
284 if self.contentdivergent():
284 if self.contentdivergent():
285 instabilities.append('content-divergent')
285 instabilities.append('content-divergent')
286 return instabilities
286 return instabilities
287
287
288 def parents(self):
288 def parents(self):
289 """return contexts for each parent changeset"""
289 """return contexts for each parent changeset"""
290 return self._parents
290 return self._parents
291
291
292 def p1(self):
292 def p1(self):
293 return self._parents[0]
293 return self._parents[0]
294
294
295 def p2(self):
295 def p2(self):
296 parents = self._parents
296 parents = self._parents
297 if len(parents) == 2:
297 if len(parents) == 2:
298 return parents[1]
298 return parents[1]
299 return changectx(self._repo, nullrev)
299 return changectx(self._repo, nullrev)
300
300
301 def _fileinfo(self, path):
301 def _fileinfo(self, path):
302 if r'_manifest' in self.__dict__:
302 if r'_manifest' in self.__dict__:
303 try:
303 try:
304 return self._manifest[path], self._manifest.flags(path)
304 return self._manifest[path], self._manifest.flags(path)
305 except KeyError:
305 except KeyError:
306 raise error.ManifestLookupError(self._node, path,
306 raise error.ManifestLookupError(self._node, path,
307 _('not found in manifest'))
307 _('not found in manifest'))
308 if r'_manifestdelta' in self.__dict__ or path in self.files():
308 if r'_manifestdelta' in self.__dict__ or path in self.files():
309 if path in self._manifestdelta:
309 if path in self._manifestdelta:
310 return (self._manifestdelta[path],
310 return (self._manifestdelta[path],
311 self._manifestdelta.flags(path))
311 self._manifestdelta.flags(path))
312 mfl = self._repo.manifestlog
312 mfl = self._repo.manifestlog
313 try:
313 try:
314 node, flag = mfl[self._changeset.manifest].find(path)
314 node, flag = mfl[self._changeset.manifest].find(path)
315 except KeyError:
315 except KeyError:
316 raise error.ManifestLookupError(self._node, path,
316 raise error.ManifestLookupError(self._node, path,
317 _('not found in manifest'))
317 _('not found in manifest'))
318
318
319 return node, flag
319 return node, flag
320
320
321 def filenode(self, path):
321 def filenode(self, path):
322 return self._fileinfo(path)[0]
322 return self._fileinfo(path)[0]
323
323
324 def flags(self, path):
324 def flags(self, path):
325 try:
325 try:
326 return self._fileinfo(path)[1]
326 return self._fileinfo(path)[1]
327 except error.LookupError:
327 except error.LookupError:
328 return ''
328 return ''
329
329
330 def sub(self, path, allowcreate=True):
330 def sub(self, path, allowcreate=True):
331 '''return a subrepo for the stored revision of path, never wdir()'''
331 '''return a subrepo for the stored revision of path, never wdir()'''
332 return subrepo.subrepo(self, path, allowcreate=allowcreate)
332 return subrepo.subrepo(self, path, allowcreate=allowcreate)
333
333
334 def nullsub(self, path, pctx):
334 def nullsub(self, path, pctx):
335 return subrepo.nullsubrepo(self, path, pctx)
335 return subrepo.nullsubrepo(self, path, pctx)
336
336
337 def workingsub(self, path):
337 def workingsub(self, path):
338 '''return a subrepo for the stored revision, or wdir if this is a wdir
338 '''return a subrepo for the stored revision, or wdir if this is a wdir
339 context.
339 context.
340 '''
340 '''
341 return subrepo.subrepo(self, path, allowwdir=True)
341 return subrepo.subrepo(self, path, allowwdir=True)
342
342
343 def match(self, pats=None, include=None, exclude=None, default='glob',
343 def match(self, pats=None, include=None, exclude=None, default='glob',
344 listsubrepos=False, badfn=None):
344 listsubrepos=False, badfn=None):
345 r = self._repo
345 r = self._repo
346 return matchmod.match(r.root, r.getcwd(), pats,
346 return matchmod.match(r.root, r.getcwd(), pats,
347 include, exclude, default,
347 include, exclude, default,
348 auditor=r.nofsauditor, ctx=self,
348 auditor=r.nofsauditor, ctx=self,
349 listsubrepos=listsubrepos, badfn=badfn)
349 listsubrepos=listsubrepos, badfn=badfn)
350
350
351 def diff(self, ctx2=None, match=None, **opts):
351 def diff(self, ctx2=None, match=None, **opts):
352 """Returns a diff generator for the given contexts and matcher"""
352 """Returns a diff generator for the given contexts and matcher"""
353 if ctx2 is None:
353 if ctx2 is None:
354 ctx2 = self.p1()
354 ctx2 = self.p1()
355 if ctx2 is not None:
355 if ctx2 is not None:
356 ctx2 = self._repo[ctx2]
356 ctx2 = self._repo[ctx2]
357 diffopts = patch.diffopts(self._repo.ui, pycompat.byteskwargs(opts))
357 diffopts = patch.diffopts(self._repo.ui, pycompat.byteskwargs(opts))
358 return patch.diff(self._repo, ctx2, self, match=match, opts=diffopts)
358 return patch.diff(self._repo, ctx2, self, match=match, opts=diffopts)
359
359
360 def dirs(self):
360 def dirs(self):
361 return self._manifest.dirs()
361 return self._manifest.dirs()
362
362
363 def hasdir(self, dir):
363 def hasdir(self, dir):
364 return self._manifest.hasdir(dir)
364 return self._manifest.hasdir(dir)
365
365
366 def status(self, other=None, match=None, listignored=False,
366 def status(self, other=None, match=None, listignored=False,
367 listclean=False, listunknown=False, listsubrepos=False):
367 listclean=False, listunknown=False, listsubrepos=False):
368 """return status of files between two nodes or node and working
368 """return status of files between two nodes or node and working
369 directory.
369 directory.
370
370
371 If other is None, compare this node with working directory.
371 If other is None, compare this node with working directory.
372
372
373 returns (modified, added, removed, deleted, unknown, ignored, clean)
373 returns (modified, added, removed, deleted, unknown, ignored, clean)
374 """
374 """
375
375
376 ctx1 = self
376 ctx1 = self
377 ctx2 = self._repo[other]
377 ctx2 = self._repo[other]
378
378
379 # This next code block is, admittedly, fragile logic that tests for
379 # This next code block is, admittedly, fragile logic that tests for
380 # reversing the contexts and wouldn't need to exist if it weren't for
380 # reversing the contexts and wouldn't need to exist if it weren't for
381 # the fast (and common) code path of comparing the working directory
381 # the fast (and common) code path of comparing the working directory
382 # with its first parent.
382 # with its first parent.
383 #
383 #
384 # What we're aiming for here is the ability to call:
384 # What we're aiming for here is the ability to call:
385 #
385 #
386 # workingctx.status(parentctx)
386 # workingctx.status(parentctx)
387 #
387 #
388 # If we always built the manifest for each context and compared those,
388 # If we always built the manifest for each context and compared those,
389 # then we'd be done. But the special case of the above call means we
389 # then we'd be done. But the special case of the above call means we
390 # just copy the manifest of the parent.
390 # just copy the manifest of the parent.
391 reversed = False
391 reversed = False
392 if (not isinstance(ctx1, changectx)
392 if (not isinstance(ctx1, changectx)
393 and isinstance(ctx2, changectx)):
393 and isinstance(ctx2, changectx)):
394 reversed = True
394 reversed = True
395 ctx1, ctx2 = ctx2, ctx1
395 ctx1, ctx2 = ctx2, ctx1
396
396
397 match = match or matchmod.always(self._repo.root, self._repo.getcwd())
397 match = match or matchmod.always(self._repo.root, self._repo.getcwd())
398 match = ctx2._matchstatus(ctx1, match)
398 match = ctx2._matchstatus(ctx1, match)
399 r = scmutil.status([], [], [], [], [], [], [])
399 r = scmutil.status([], [], [], [], [], [], [])
400 r = ctx2._buildstatus(ctx1, r, match, listignored, listclean,
400 r = ctx2._buildstatus(ctx1, r, match, listignored, listclean,
401 listunknown)
401 listunknown)
402
402
403 if reversed:
403 if reversed:
404 # Reverse added and removed. Clear deleted, unknown and ignored as
404 # Reverse added and removed. Clear deleted, unknown and ignored as
405 # these make no sense to reverse.
405 # these make no sense to reverse.
406 r = scmutil.status(r.modified, r.removed, r.added, [], [], [],
406 r = scmutil.status(r.modified, r.removed, r.added, [], [], [],
407 r.clean)
407 r.clean)
408
408
409 if listsubrepos:
409 if listsubrepos:
410 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
410 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
411 try:
411 try:
412 rev2 = ctx2.subrev(subpath)
412 rev2 = ctx2.subrev(subpath)
413 except KeyError:
413 except KeyError:
414 # A subrepo that existed in node1 was deleted between
414 # A subrepo that existed in node1 was deleted between
415 # node1 and node2 (inclusive). Thus, ctx2's substate
415 # node1 and node2 (inclusive). Thus, ctx2's substate
416 # won't contain that subpath. The best we can do ignore it.
416 # won't contain that subpath. The best we can do ignore it.
417 rev2 = None
417 rev2 = None
418 submatch = matchmod.subdirmatcher(subpath, match)
418 submatch = matchmod.subdirmatcher(subpath, match)
419 s = sub.status(rev2, match=submatch, ignored=listignored,
419 s = sub.status(rev2, match=submatch, ignored=listignored,
420 clean=listclean, unknown=listunknown,
420 clean=listclean, unknown=listunknown,
421 listsubrepos=True)
421 listsubrepos=True)
422 for rfiles, sfiles in zip(r, s):
422 for rfiles, sfiles in zip(r, s):
423 rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
423 rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
424
424
425 for l in r:
425 for l in r:
426 l.sort()
426 l.sort()
427
427
428 return r
428 return r
429
429
430 def _filterederror(repo, changeid):
430 def _filterederror(repo, changeid):
431 """build an exception to be raised about a filtered changeid
431 """build an exception to be raised about a filtered changeid
432
432
433 This is extracted in a function to help extensions (eg: evolve) to
433 This is extracted in a function to help extensions (eg: evolve) to
434 experiment with various message variants."""
434 experiment with various message variants."""
435 if repo.filtername.startswith('visible'):
435 if repo.filtername.startswith('visible'):
436 msg = _("hidden revision '%s'") % changeid
436 msg = _("hidden revision '%s'") % changeid
437 hint = _('use --hidden to access hidden revisions')
437 hint = _('use --hidden to access hidden revisions')
438 return error.FilteredRepoLookupError(msg, hint=hint)
438 return error.FilteredRepoLookupError(msg, hint=hint)
439 msg = _("filtered revision '%s' (not in '%s' subset)")
439 msg = _("filtered revision '%s' (not in '%s' subset)")
440 msg %= (changeid, repo.filtername)
440 msg %= (changeid, repo.filtername)
441 return error.FilteredRepoLookupError(msg)
441 return error.FilteredRepoLookupError(msg)
442
442
443 class changectx(basectx):
443 class changectx(basectx):
444 """A changecontext object makes access to data related to a particular
444 """A changecontext object makes access to data related to a particular
445 changeset convenient. It represents a read-only context already present in
445 changeset convenient. It represents a read-only context already present in
446 the repo."""
446 the repo."""
447 def __init__(self, repo, changeid=''):
447 def __init__(self, repo, changeid=''):
448 """changeid is a revision number, node, or tag"""
448 """changeid is a revision number, node, or tag"""
449
449
450 # since basectx.__new__ already took care of copying the object, we
450 # since basectx.__new__ already took care of copying the object, we
451 # don't need to do anything in __init__, so we just exit here
451 # don't need to do anything in __init__, so we just exit here
452 if isinstance(changeid, basectx):
452 if isinstance(changeid, basectx):
453 return
453 return
454
454
455 if changeid == '':
455 if changeid == '':
456 changeid = '.'
456 changeid = '.'
457 self._repo = repo
457 self._repo = repo
458
458
459 try:
459 try:
460 if isinstance(changeid, int):
460 if isinstance(changeid, int):
461 self._node = repo.changelog.node(changeid)
461 self._node = repo.changelog.node(changeid)
462 self._rev = changeid
462 self._rev = changeid
463 return
463 return
464 if not pycompat.ispy3 and isinstance(changeid, long):
464 if not pycompat.ispy3 and isinstance(changeid, long):
465 changeid = str(changeid)
465 changeid = str(changeid)
466 if changeid == 'null':
466 if changeid == 'null':
467 self._node = nullid
467 self._node = nullid
468 self._rev = nullrev
468 self._rev = nullrev
469 return
469 return
470 if changeid == 'tip':
470 if changeid == 'tip':
471 self._node = repo.changelog.tip()
471 self._node = repo.changelog.tip()
472 self._rev = repo.changelog.rev(self._node)
472 self._rev = repo.changelog.rev(self._node)
473 return
473 return
474 if (changeid == '.'
474 if (changeid == '.'
475 or repo.local() and changeid == repo.dirstate.p1()):
475 or repo.local() and changeid == repo.dirstate.p1()):
476 # this is a hack to delay/avoid loading obsmarkers
476 # this is a hack to delay/avoid loading obsmarkers
477 # when we know that '.' won't be hidden
477 # when we know that '.' won't be hidden
478 self._node = repo.dirstate.p1()
478 self._node = repo.dirstate.p1()
479 self._rev = repo.unfiltered().changelog.rev(self._node)
479 self._rev = repo.unfiltered().changelog.rev(self._node)
480 return
480 return
481 if len(changeid) == 20:
481 if len(changeid) == 20:
482 try:
482 try:
483 self._node = changeid
483 self._node = changeid
484 self._rev = repo.changelog.rev(changeid)
484 self._rev = repo.changelog.rev(changeid)
485 return
485 return
486 except error.FilteredRepoLookupError:
486 except error.FilteredRepoLookupError:
487 raise
487 raise
488 except LookupError:
488 except LookupError:
489 pass
489 pass
490
490
491 try:
491 try:
492 r = int(changeid)
492 r = int(changeid)
493 if '%d' % r != changeid:
493 if '%d' % r != changeid:
494 raise ValueError
494 raise ValueError
495 l = len(repo.changelog)
495 l = len(repo.changelog)
496 if r < 0:
496 if r < 0:
497 r += l
497 r += l
498 if r < 0 or r >= l and r != wdirrev:
498 if r < 0 or r >= l and r != wdirrev:
499 raise ValueError
499 raise ValueError
500 self._rev = r
500 self._rev = r
501 self._node = repo.changelog.node(r)
501 self._node = repo.changelog.node(r)
502 return
502 return
503 except error.FilteredIndexError:
503 except error.FilteredIndexError:
504 raise
504 raise
505 except (ValueError, OverflowError, IndexError):
505 except (ValueError, OverflowError, IndexError):
506 pass
506 pass
507
507
508 if len(changeid) == 40:
508 if len(changeid) == 40:
509 try:
509 try:
510 self._node = bin(changeid)
510 self._node = bin(changeid)
511 self._rev = repo.changelog.rev(self._node)
511 self._rev = repo.changelog.rev(self._node)
512 return
512 return
513 except error.FilteredLookupError:
513 except error.FilteredLookupError:
514 raise
514 raise
515 except (TypeError, LookupError):
515 except (TypeError, LookupError):
516 pass
516 pass
517
517
518 # lookup bookmarks through the name interface
518 # lookup bookmarks through the name interface
519 try:
519 try:
520 self._node = repo.names.singlenode(repo, changeid)
520 self._node = repo.names.singlenode(repo, changeid)
521 self._rev = repo.changelog.rev(self._node)
521 self._rev = repo.changelog.rev(self._node)
522 return
522 return
523 except KeyError:
523 except KeyError:
524 pass
524 pass
525 except error.FilteredRepoLookupError:
525 except error.FilteredRepoLookupError:
526 raise
526 raise
527 except error.RepoLookupError:
527 except error.RepoLookupError:
528 pass
528 pass
529
529
530 self._node = repo.unfiltered().changelog._partialmatch(changeid)
530 self._node = repo.unfiltered().changelog._partialmatch(changeid)
531 if self._node is not None:
531 if self._node is not None:
532 self._rev = repo.changelog.rev(self._node)
532 self._rev = repo.changelog.rev(self._node)
533 return
533 return
534
534
535 # lookup failed
535 # lookup failed
536 # check if it might have come from damaged dirstate
536 # check if it might have come from damaged dirstate
537 #
537 #
538 # XXX we could avoid the unfiltered if we had a recognizable
538 # XXX we could avoid the unfiltered if we had a recognizable
539 # exception for filtered changeset access
539 # exception for filtered changeset access
540 if (repo.local()
540 if (repo.local()
541 and changeid in repo.unfiltered().dirstate.parents()):
541 and changeid in repo.unfiltered().dirstate.parents()):
542 msg = _("working directory has unknown parent '%s'!")
542 msg = _("working directory has unknown parent '%s'!")
543 raise error.Abort(msg % short(changeid))
543 raise error.Abort(msg % short(changeid))
544 try:
544 try:
545 if len(changeid) == 20 and nonascii(changeid):
545 if len(changeid) == 20 and nonascii(changeid):
546 changeid = hex(changeid)
546 changeid = hex(changeid)
547 except TypeError:
547 except TypeError:
548 pass
548 pass
549 except (error.FilteredIndexError, error.FilteredLookupError,
549 except (error.FilteredIndexError, error.FilteredLookupError,
550 error.FilteredRepoLookupError):
550 error.FilteredRepoLookupError):
551 raise _filterederror(repo, changeid)
551 raise _filterederror(repo, changeid)
552 except IndexError:
552 except IndexError:
553 pass
553 pass
554 raise error.RepoLookupError(
554 raise error.RepoLookupError(
555 _("unknown revision '%s'") % changeid)
555 _("unknown revision '%s'") % changeid)
556
556
557 def __hash__(self):
557 def __hash__(self):
558 try:
558 try:
559 return hash(self._rev)
559 return hash(self._rev)
560 except AttributeError:
560 except AttributeError:
561 return id(self)
561 return id(self)
562
562
563 def __nonzero__(self):
563 def __nonzero__(self):
564 return self._rev != nullrev
564 return self._rev != nullrev
565
565
566 __bool__ = __nonzero__
566 __bool__ = __nonzero__
567
567
568 @propertycache
568 @propertycache
569 def _changeset(self):
569 def _changeset(self):
570 return self._repo.changelog.changelogrevision(self.rev())
570 return self._repo.changelog.changelogrevision(self.rev())
571
571
572 @propertycache
572 @propertycache
573 def _manifest(self):
573 def _manifest(self):
574 return self._manifestctx.read()
574 return self._manifestctx.read()
575
575
576 @property
576 @property
577 def _manifestctx(self):
577 def _manifestctx(self):
578 return self._repo.manifestlog[self._changeset.manifest]
578 return self._repo.manifestlog[self._changeset.manifest]
579
579
580 @propertycache
580 @propertycache
581 def _manifestdelta(self):
581 def _manifestdelta(self):
582 return self._manifestctx.readdelta()
582 return self._manifestctx.readdelta()
583
583
584 @propertycache
584 @propertycache
585 def _parents(self):
585 def _parents(self):
586 repo = self._repo
586 repo = self._repo
587 p1, p2 = repo.changelog.parentrevs(self._rev)
587 p1, p2 = repo.changelog.parentrevs(self._rev)
588 if p2 == nullrev:
588 if p2 == nullrev:
589 return [changectx(repo, p1)]
589 return [changectx(repo, p1)]
590 return [changectx(repo, p1), changectx(repo, p2)]
590 return [changectx(repo, p1), changectx(repo, p2)]
591
591
592 def changeset(self):
592 def changeset(self):
593 c = self._changeset
593 c = self._changeset
594 return (
594 return (
595 c.manifest,
595 c.manifest,
596 c.user,
596 c.user,
597 c.date,
597 c.date,
598 c.files,
598 c.files,
599 c.description,
599 c.description,
600 c.extra,
600 c.extra,
601 )
601 )
602 def manifestnode(self):
602 def manifestnode(self):
603 return self._changeset.manifest
603 return self._changeset.manifest
604
604
605 def user(self):
605 def user(self):
606 return self._changeset.user
606 return self._changeset.user
607 def date(self):
607 def date(self):
608 return self._changeset.date
608 return self._changeset.date
609 def files(self):
609 def files(self):
610 return self._changeset.files
610 return self._changeset.files
611 def description(self):
611 def description(self):
612 return self._changeset.description
612 return self._changeset.description
613 def branch(self):
613 def branch(self):
614 return encoding.tolocal(self._changeset.extra.get("branch"))
614 return encoding.tolocal(self._changeset.extra.get("branch"))
615 def closesbranch(self):
615 def closesbranch(self):
616 return 'close' in self._changeset.extra
616 return 'close' in self._changeset.extra
617 def extra(self):
617 def extra(self):
618 """Return a dict of extra information."""
618 """Return a dict of extra information."""
619 return self._changeset.extra
619 return self._changeset.extra
620 def tags(self):
620 def tags(self):
621 """Return a list of byte tag names"""
621 """Return a list of byte tag names"""
622 return self._repo.nodetags(self._node)
622 return self._repo.nodetags(self._node)
623 def bookmarks(self):
623 def bookmarks(self):
624 """Return a list of byte bookmark names."""
624 """Return a list of byte bookmark names."""
625 return self._repo.nodebookmarks(self._node)
625 return self._repo.nodebookmarks(self._node)
626 def phase(self):
626 def phase(self):
627 return self._repo._phasecache.phase(self._repo, self._rev)
627 return self._repo._phasecache.phase(self._repo, self._rev)
628 def hidden(self):
628 def hidden(self):
629 return self._rev in repoview.filterrevs(self._repo, 'visible')
629 return self._rev in repoview.filterrevs(self._repo, 'visible')
630
630
631 def isinmemory(self):
631 def isinmemory(self):
632 return False
632 return False
633
633
634 def children(self):
634 def children(self):
635 """return list of changectx contexts for each child changeset.
635 """return list of changectx contexts for each child changeset.
636
636
637 This returns only the immediate child changesets. Use descendants() to
637 This returns only the immediate child changesets. Use descendants() to
638 recursively walk children.
638 recursively walk children.
639 """
639 """
640 c = self._repo.changelog.children(self._node)
640 c = self._repo.changelog.children(self._node)
641 return [changectx(self._repo, x) for x in c]
641 return [changectx(self._repo, x) for x in c]
642
642
643 def ancestors(self):
643 def ancestors(self):
644 for a in self._repo.changelog.ancestors([self._rev]):
644 for a in self._repo.changelog.ancestors([self._rev]):
645 yield changectx(self._repo, a)
645 yield changectx(self._repo, a)
646
646
647 def descendants(self):
647 def descendants(self):
648 """Recursively yield all children of the changeset.
648 """Recursively yield all children of the changeset.
649
649
650 For just the immediate children, use children()
650 For just the immediate children, use children()
651 """
651 """
652 for d in self._repo.changelog.descendants([self._rev]):
652 for d in self._repo.changelog.descendants([self._rev]):
653 yield changectx(self._repo, d)
653 yield changectx(self._repo, d)
654
654
655 def filectx(self, path, fileid=None, filelog=None):
655 def filectx(self, path, fileid=None, filelog=None):
656 """get a file context from this changeset"""
656 """get a file context from this changeset"""
657 if fileid is None:
657 if fileid is None:
658 fileid = self.filenode(path)
658 fileid = self.filenode(path)
659 return filectx(self._repo, path, fileid=fileid,
659 return filectx(self._repo, path, fileid=fileid,
660 changectx=self, filelog=filelog)
660 changectx=self, filelog=filelog)
661
661
662 def ancestor(self, c2, warn=False):
662 def ancestor(self, c2, warn=False):
663 """return the "best" ancestor context of self and c2
663 """return the "best" ancestor context of self and c2
664
664
665 If there are multiple candidates, it will show a message and check
665 If there are multiple candidates, it will show a message and check
666 merge.preferancestor configuration before falling back to the
666 merge.preferancestor configuration before falling back to the
667 revlog ancestor."""
667 revlog ancestor."""
668 # deal with workingctxs
668 # deal with workingctxs
669 n2 = c2._node
669 n2 = c2._node
670 if n2 is None:
670 if n2 is None:
671 n2 = c2._parents[0]._node
671 n2 = c2._parents[0]._node
672 cahs = self._repo.changelog.commonancestorsheads(self._node, n2)
672 cahs = self._repo.changelog.commonancestorsheads(self._node, n2)
673 if not cahs:
673 if not cahs:
674 anc = nullid
674 anc = nullid
675 elif len(cahs) == 1:
675 elif len(cahs) == 1:
676 anc = cahs[0]
676 anc = cahs[0]
677 else:
677 else:
678 # experimental config: merge.preferancestor
678 # experimental config: merge.preferancestor
679 for r in self._repo.ui.configlist('merge', 'preferancestor'):
679 for r in self._repo.ui.configlist('merge', 'preferancestor'):
680 try:
680 try:
681 ctx = changectx(self._repo, r)
681 ctx = changectx(self._repo, r)
682 except error.RepoLookupError:
682 except error.RepoLookupError:
683 continue
683 continue
684 anc = ctx.node()
684 anc = ctx.node()
685 if anc in cahs:
685 if anc in cahs:
686 break
686 break
687 else:
687 else:
688 anc = self._repo.changelog.ancestor(self._node, n2)
688 anc = self._repo.changelog.ancestor(self._node, n2)
689 if warn:
689 if warn:
690 self._repo.ui.status(
690 self._repo.ui.status(
691 (_("note: using %s as ancestor of %s and %s\n") %
691 (_("note: using %s as ancestor of %s and %s\n") %
692 (short(anc), short(self._node), short(n2))) +
692 (short(anc), short(self._node), short(n2))) +
693 ''.join(_(" alternatively, use --config "
693 ''.join(_(" alternatively, use --config "
694 "merge.preferancestor=%s\n") %
694 "merge.preferancestor=%s\n") %
695 short(n) for n in sorted(cahs) if n != anc))
695 short(n) for n in sorted(cahs) if n != anc))
696 return changectx(self._repo, anc)
696 return changectx(self._repo, anc)
697
697
698 def descendant(self, other):
698 def descendant(self, other):
699 """True if other is descendant of this changeset"""
699 """True if other is descendant of this changeset"""
700 return self._repo.changelog.descendant(self._rev, other._rev)
700 return self._repo.changelog.descendant(self._rev, other._rev)
701
701
702 def walk(self, match):
702 def walk(self, match):
703 '''Generates matching file names.'''
703 '''Generates matching file names.'''
704
704
705 # Wrap match.bad method to have message with nodeid
705 # Wrap match.bad method to have message with nodeid
706 def bad(fn, msg):
706 def bad(fn, msg):
707 # The manifest doesn't know about subrepos, so don't complain about
707 # The manifest doesn't know about subrepos, so don't complain about
708 # paths into valid subrepos.
708 # paths into valid subrepos.
709 if any(fn == s or fn.startswith(s + '/')
709 if any(fn == s or fn.startswith(s + '/')
710 for s in self.substate):
710 for s in self.substate):
711 return
711 return
712 match.bad(fn, _('no such file in rev %s') % self)
712 match.bad(fn, _('no such file in rev %s') % self)
713
713
714 m = matchmod.badmatch(match, bad)
714 m = matchmod.badmatch(match, bad)
715 return self._manifest.walk(m)
715 return self._manifest.walk(m)
716
716
717 def matches(self, match):
717 def matches(self, match):
718 return self.walk(match)
718 return self.walk(match)
719
719
720 class basefilectx(object):
720 class basefilectx(object):
721 """A filecontext object represents the common logic for its children:
721 """A filecontext object represents the common logic for its children:
722 filectx: read-only access to a filerevision that is already present
722 filectx: read-only access to a filerevision that is already present
723 in the repo,
723 in the repo,
724 workingfilectx: a filecontext that represents files from the working
724 workingfilectx: a filecontext that represents files from the working
725 directory,
725 directory,
726 memfilectx: a filecontext that represents files in-memory,
726 memfilectx: a filecontext that represents files in-memory,
727 overlayfilectx: duplicate another filecontext with some fields overridden.
727 overlayfilectx: duplicate another filecontext with some fields overridden.
728 """
728 """
    @propertycache
    def _filelog(self):
        # Revlog holding every revision of this file; cached per instance.
        return self._repo.file(self._path)

    @propertycache
    def _changeid(self):
        # Changelog revision associated with this file context. Resolution
        # order matters: an explicitly supplied id or changectx wins; a
        # known descendant lets us lazily correct linkrev aliases via
        # _adjustlinkrev(); otherwise fall back to the (possibly aliased)
        # linkrev recorded in the filelog.
        if r'_changeid' in self.__dict__:
            return self._changeid
        elif r'_changectx' in self.__dict__:
            return self._changectx.rev()
        elif r'_descendantrev' in self.__dict__:
            # this file context was created from a revision with a known
            # descendant, we can (lazily) correct for linkrev aliases
            return self._adjustlinkrev(self._descendantrev)
        else:
            return self._filelog.linkrev(self._filerev)

    @propertycache
    def _filenode(self):
        # Filelog node: resolved from an explicit file id when one was
        # given, otherwise looked up through the associated changeset.
        if r'_fileid' in self.__dict__:
            return self._filelog.lookup(self._fileid)
        else:
            return self._changectx.filenode(self._path)

    @propertycache
    def _filerev(self):
        # Filelog revision number corresponding to _filenode.
        return self._filelog.rev(self._filenode)

    @propertycache
    def _repopath(self):
        # Path relative to the repository root (identical to _path here).
        return self._path
760
760
    def __nonzero__(self):
        # A file context is truthy iff the file exists in its changeset:
        # resolving _filenode raises LookupError for a missing file.
        try:
            self._filenode
            return True
        except error.LookupError:
            # file is missing
            return False

    __bool__ = __nonzero__

    def __bytes__(self):
        # "path@changeset", or "path@???" when no changeset association
        # can be resolved.
        try:
            return "%s@%s" % (self.path(), self._changectx)
        except error.LookupError:
            return "%s@???" % self.path()

    __str__ = encoding.strmethod(__bytes__)

    def __repr__(self):
        return "<%s %s>" % (type(self).__name__, str(self))

    def __hash__(self):
        # Hash on (path, filenode) when resolvable; fall back to object
        # identity so partially-initialized contexts stay usable in
        # sets/dicts.
        try:
            return hash((self._path, self._filenode))
        except AttributeError:
            return id(self)

    def __eq__(self, other):
        # Equal iff same concrete type, same path and same file node;
        # mirrors the (path, filenode) key used by __hash__.
        try:
            return (type(self) == type(other) and self._path == other._path
                    and self._filenode == other._filenode)
        except AttributeError:
            return False

    def __ne__(self, other):
        return not (self == other)
797
797
    # Simple accessors, mostly thin delegations to the associated
    # changectx or the underlying filelog.
    def filerev(self):
        return self._filerev
    def filenode(self):
        return self._filenode
    @propertycache
    def _flags(self):
        return self._changectx.flags(self._path)
    def flags(self):
        return self._flags
    def filelog(self):
        return self._filelog
    def rev(self):
        return self._changeid
    def linkrev(self):
        # Raw linkrev from the filelog; may be aliased -- see
        # _adjustlinkrev()/introrev() for the corrected value.
        return self._filelog.linkrev(self._filerev)
    def node(self):
        return self._changectx.node()
    def hex(self):
        return self._changectx.hex()
    def user(self):
        return self._changectx.user()
    def date(self):
        return self._changectx.date()
    def files(self):
        return self._changectx.files()
    def description(self):
        return self._changectx.description()
    def branch(self):
        return self._changectx.branch()
    def extra(self):
        return self._changectx.extra()
    def phase(self):
        return self._changectx.phase()
    def phasestr(self):
        return self._changectx.phasestr()
    def obsolete(self):
        return self._changectx.obsolete()
    def instabilities(self):
        return self._changectx.instabilities()
    def manifest(self):
        return self._changectx.manifest()
    def changectx(self):
        return self._changectx
    def renamed(self):
        return self._copied
    def repo(self):
        return self._repo
    def size(self):
        return len(self.data())

    def path(self):
        return self._path

    def isbinary(self):
        # Treat unreadable data as non-binary (e.g. file cannot be read).
        try:
            return util.binary(self.data())
        except IOError:
            return False
    def isexec(self):
        return 'x' in self.flags()
    def islink(self):
        return 'l' in self.flags()
860
860
    def isabsent(self):
        """whether this filectx represents a file not in self._changectx

        This is mainly for merge code to detect change/delete conflicts. This is
        expected to be True for all subclasses of basectx."""
        # Concrete file contexts always represent a present file; only the
        # absent-file variants override this.
        return False
867
867
    # Subclasses with non-standard comparison semantics set this to True,
    # making cmp() delegate to the other side (see below).
    _customcmp = False
    def cmp(self, fctx):
        """compare with other file context

        returns True if different than fctx.
        """
        if fctx._customcmp:
            return fctx.cmp(self)

        # Only read file data when a cheap size comparison cannot rule out
        # a difference. fctx._filenode is None presumably for contexts not
        # backed by a filelog (e.g. working-directory files) -- TODO confirm.
        if (fctx._filenode is None
            and (self._repo._encodefilterpats
                 # if file data starts with '\1\n', empty metadata block is
                 # prepended, which adds 4 bytes to filelog.size().
                 or self.size() - 4 == fctx.size())
            or self.size() == fctx.size()):
            return self._filelog.cmp(self._filenode, fctx.data())

        return True
886
886
    def _adjustlinkrev(self, srcrev, inclusive=False):
        """return the first ancestor of <srcrev> introducing <fnode>

        If the linkrev of the file revision does not point to an ancestor of
        srcrev, we'll walk down the ancestors until we find one introducing
        this file revision.

        :srcrev: the changeset revision we search ancestors from
        :inclusive: if true, the src revision will also be checked
        """
        repo = self._repo
        # Use the unfiltered changelog: the stored linkrev may point to a
        # filtered (hidden) revision.
        cl = repo.unfiltered().changelog
        mfl = repo.manifestlog
        # fetch the linkrev
        lkr = self.linkrev()
        # hack to reuse ancestor computation when searching for renames
        memberanc = getattr(self, '_ancestrycontext', None)
        iteranc = None
        if srcrev is None:
            # wctx case, used by workingfilectx during mergecopy
            revs = [p.rev() for p in self._repo[None].parents()]
            inclusive = True # we skipped the real (revless) source
        else:
            revs = [srcrev]
        if memberanc is None:
            memberanc = iteranc = cl.ancestors(revs, lkr,
                                               inclusive=inclusive)
        # check if this linkrev is an ancestor of srcrev
        if lkr not in memberanc:
            if iteranc is None:
                iteranc = cl.ancestors(revs, lkr, inclusive=inclusive)
            fnode = self._filenode
            path = self._path
            for a in iteranc:
                ac = cl.read(a) # get changeset data (we avoid object creation)
                if path in ac[3]: # checking the 'files' field.
                    # The file has been touched, check if the content is
                    # similar to the one we search for.
                    if fnode == mfl[ac[0]].readfast().get(path):
                        return a
            # In theory, we should never get out of that loop without a result.
            # But if manifest uses a buggy file revision (not children of the
            # one it replaces) we could. Such a buggy situation will likely
            # result is crash somewhere else at to some point.
        return lkr
932
932
933 def introrev(self):
933 def introrev(self):
934 """return the rev of the changeset which introduced this file revision
934 """return the rev of the changeset which introduced this file revision
935
935
936 This method is different from linkrev because it take into account the
936 This method is different from linkrev because it take into account the
937 changeset the filectx was created from. It ensures the returned
937 changeset the filectx was created from. It ensures the returned
938 revision is one of its ancestors. This prevents bugs from
938 revision is one of its ancestors. This prevents bugs from
939 'linkrev-shadowing' when a file revision is used by multiple
939 'linkrev-shadowing' when a file revision is used by multiple
940 changesets.
940 changesets.
941 """
941 """
942 lkr = self.linkrev()
942 lkr = self.linkrev()
943 attrs = vars(self)
943 attrs = vars(self)
944 noctx = not ('_changeid' in attrs or '_changectx' in attrs)
944 noctx = not ('_changeid' in attrs or '_changectx' in attrs)
945 if noctx or self.rev() == lkr:
945 if noctx or self.rev() == lkr:
946 return self.linkrev()
946 return self.linkrev()
947 return self._adjustlinkrev(self.rev(), inclusive=True)
947 return self._adjustlinkrev(self.rev(), inclusive=True)
948
948
949 def introfilectx(self):
949 def introfilectx(self):
950 """Return filectx having identical contents, but pointing to the
950 """Return filectx having identical contents, but pointing to the
951 changeset revision where this filectx was introduced"""
951 changeset revision where this filectx was introduced"""
952 introrev = self.introrev()
952 introrev = self.introrev()
953 if self.rev() == introrev:
953 if self.rev() == introrev:
954 return self
954 return self
955 return self.filectx(self.filenode(), changeid=introrev)
955 return self.filectx(self.filenode(), changeid=introrev)
956
956
    def _parentfilectx(self, path, fileid, filelog):
        """create parent filectx keeping ancestry info for _adjustlinkrev()"""
        fctx = filectx(self._repo, path, fileid=fileid, filelog=filelog)
        if '_changeid' in vars(self) or '_changectx' in vars(self):
            # If self is associated with a changeset (probably explicitly
            # fed), ensure the created filectx is associated with a
            # changeset that is an ancestor of self.changectx.
            # This lets us later use _adjustlinkrev to get a correct link.
            fctx._descendantrev = self.rev()
            fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
        elif '_descendantrev' in vars(self):
            # Otherwise propagate _descendantrev if we have one associated.
            fctx._descendantrev = self._descendantrev
            fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
        return fctx
972
972
973 def parents(self):
973 def parents(self):
974 _path = self._path
974 _path = self._path
975 fl = self._filelog
975 fl = self._filelog
976 parents = self._filelog.parents(self._filenode)
976 parents = self._filelog.parents(self._filenode)
977 pl = [(_path, node, fl) for node in parents if node != nullid]
977 pl = [(_path, node, fl) for node in parents if node != nullid]
978
978
979 r = fl.renamed(self._filenode)
979 r = fl.renamed(self._filenode)
980 if r:
980 if r:
981 # - In the simple rename case, both parent are nullid, pl is empty.
981 # - In the simple rename case, both parent are nullid, pl is empty.
982 # - In case of merge, only one of the parent is null id and should
982 # - In case of merge, only one of the parent is null id and should
983 # be replaced with the rename information. This parent is -always-
983 # be replaced with the rename information. This parent is -always-
984 # the first one.
984 # the first one.
985 #
985 #
986 # As null id have always been filtered out in the previous list
986 # As null id have always been filtered out in the previous list
987 # comprehension, inserting to 0 will always result in "replacing
987 # comprehension, inserting to 0 will always result in "replacing
988 # first nullid parent with rename information.
988 # first nullid parent with rename information.
989 pl.insert(0, (r[0], r[1], self._repo.file(r[0])))
989 pl.insert(0, (r[0], r[1], self._repo.file(r[0])))
990
990
991 return [self._parentfilectx(path, fnode, l) for path, fnode, l in pl]
991 return [self._parentfilectx(path, fnode, l) for path, fnode, l in pl]
992
992
993 def p1(self):
993 def p1(self):
994 return self.parents()[0]
994 return self.parents()[0]
995
995
996 def p2(self):
996 def p2(self):
997 p = self.parents()
997 p = self.parents()
998 if len(p) == 2:
998 if len(p) == 2:
999 return p[1]
999 return p[1]
1000 return filectx(self._repo, self._path, fileid=-1, filelog=self._filelog)
1000 return filectx(self._repo, self._path, fileid=-1, filelog=self._filelog)
1001
1001
    def annotate(self, follow=False, linenumber=False, skiprevs=None,
                 diffopts=None):
        '''returns a list of tuples of ((ctx, number), line) for each line
        in the file, where ctx is the filectx of the node where
        that line was last changed; if linenumber parameter is true, number is
        the line number at the first appearance in the managed file, otherwise,
        number has a fixed value of False.
        '''

        def lines(text):
            # Line count, including a trailing line without a newline.
            if text.endswith("\n"):
                return text.count("\n")
            return text.count("\n") + int(bool(text))

        if linenumber:
            def decorate(text, rev):
                return ([annotateline(fctx=rev, lineno=i)
                         for i in xrange(1, lines(text) + 1)], text)
        else:
            def decorate(text, rev):
                return ([annotateline(fctx=rev)] * lines(text), text)

        getlog = util.lrucachefunc(lambda x: self._repo.file(x))

        def parents(f):
            # Cut _descendantrev here to mitigate the penalty of lazy linkrev
            # adjustment. Otherwise, p._adjustlinkrev() would walk changelog
            # from the topmost introrev (= srcrev) down to p.linkrev() if it
            # isn't an ancestor of the srcrev.
            f._changeid
            pl = f.parents()

            # Don't return renamed parents if we aren't following.
            if not follow:
                pl = [p for p in pl if p.path() == f.path()]

            # renamed filectx won't have a filelog yet, so set it
            # from the cache to save time
            for p in pl:
                if not '_filelog' in p.__dict__:
                    p._filelog = getlog(p.path())

            return pl

        # use linkrev to find the first changeset where self appeared
        base = self.introfilectx()
        if getattr(base, '_ancestrycontext', None) is None:
            cl = self._repo.changelog
            if base.rev() is None:
                # wctx is not inclusive, but works because _ancestrycontext
                # is used to test filelog revisions
                ac = cl.ancestors([p.rev() for p in base.parents()],
                                  inclusive=True)
            else:
                ac = cl.ancestors([base.rev()], inclusive=True)
            base._ancestrycontext = ac

        # This algorithm would prefer to be recursive, but Python is a
        # bit recursion-hostile. Instead we do an iterative
        # depth-first search.

        # 1st DFS pre-calculates pcache and needed
        # pcache: fctx -> list of parent fctxs
        # needed: fctx -> number of children still waiting on its result
        visit = [base]
        pcache = {}
        needed = {base: 1}
        while visit:
            f = visit.pop()
            if f in pcache:
                continue
            pl = parents(f)
            pcache[f] = pl
            for p in pl:
                needed[p] = needed.get(p, 0) + 1
                if p not in pcache:
                    visit.append(p)

        # 2nd DFS does the actual annotate
        visit[:] = [base]
        hist = {}
        while visit:
            f = visit[-1]
            if f in hist:
                visit.pop()
                continue

            ready = True
            pl = pcache[f]
            for p in pl:
                if p not in hist:
                    ready = False
                    visit.append(p)
            if ready:
                visit.pop()
                curr = decorate(f.data(), f)
                skipchild = False
                if skiprevs is not None:
                    skipchild = f._changeid in skiprevs
                curr = _annotatepair([hist[p] for p in pl], f, curr, skipchild,
                                     diffopts)
                for p in pl:
                    # Release a parent's annotation as soon as its last
                    # child has consumed it, to bound memory usage.
                    if needed[p] == 1:
                        del hist[p]
                        del needed[p]
                    else:
                        needed[p] -= 1

            hist[f] = curr
            del pcache[f]

        return zip(hist[base][0], hist[base][1].splitlines(True))
1112
1112
    def ancestors(self, followfirst=False):
        # Generator of ancestor file contexts. With followfirst=True only
        # the first parent of each visited context is followed.
        visit = {}
        c = self
        if followfirst:
            cut = 1
        else:
            cut = None

        while True:
            for parent in c.parents()[:cut]:
                visit[(parent.linkrev(), parent.filenode())] = parent
            if not visit:
                break
            # Pop the pending candidate with the largest (linkrev, filenode)
            # key, yielding ancestors in roughly newest-first order.
            c = visit.pop(max(visit))
            yield c
1128
1128
1129 def decodeddata(self):
1129 def decodeddata(self):
1130 """Returns `data()` after running repository decoding filters.
1130 """Returns `data()` after running repository decoding filters.
1131
1131
1132 This is often equivalent to how the data would be expressed on disk.
1132 This is often equivalent to how the data would be expressed on disk.
1133 """
1133 """
1134 return self._repo.wwritedata(self.path(), self.data())
1134 return self._repo.wwritedata(self.path(), self.data())
1135
1135
@attr.s(slots=True, frozen=True)
class annotateline(object):
    # File context of the revision that introduced this line.
    fctx = attr.ib()
    # Line number at first appearance in the file, or False when line
    # numbers are not being tracked.
    lineno = attr.ib(default=False)
    # Whether this annotation was the result of a skip-annotate.
    skip = attr.ib(default=False)
1142
1142
def _annotatepair(parents, childfctx, child, skipchild, diffopts):
    r'''
    Given parent and child fctxes and annotate data for parents, for all lines
    in either parent that match the child, annotate the child with the parent's
    data.

    Additionally, if `skipchild` is True, replace all other lines with parent
    annotate data as well such that child is never blamed for any lines.

    See test-annotate.py for unit tests.
    '''
    # Each entry pairs a parent's (annotations, text) with its diff blocks
    # against the child text.
    pblocks = [(parent, mdiff.allblocks(parent[1], child[1], opts=diffopts))
               for parent in parents]

    if skipchild:
        # Need to iterate over the blocks twice -- make it a list
        pblocks = [(p, list(blocks)) for (p, blocks) in pblocks]
    # Mercurial currently prefers p2 over p1 for annotate.
    # TODO: change this?
    for parent, blocks in pblocks:
        for (a1, a2, b1, b2), t in blocks:
            # Changed blocks ('!') or blocks made only of blank lines ('~')
            # belong to the child.
            if t == '=':
                child[0][b1:b2] = parent[0][a1:a2]

    if skipchild:
        # Now try and match up anything that couldn't be matched,
        # Reversing pblocks maintains bias towards p2, matching above
        # behavior.
        pblocks.reverse()

        # The heuristics are:
        # * Work on blocks of changed lines (effectively diff hunks with -U0).
        # This could potentially be smarter but works well enough.
        # * For a non-matching section, do a best-effort fit. Match lines in
        #   diff hunks 1:1, dropping lines as necessary.
        # * Repeat the last line as a last resort.

        # First, replace as much as possible without repeating the last line.
        remaining = [(parent, []) for parent, _blocks in pblocks]
        for idx, (parent, blocks) in enumerate(pblocks):
            for (a1, a2, b1, b2), _t in blocks:
                if a2 - a1 >= b2 - b1:
                    for bk in xrange(b1, b2):
                        if child[0][bk].fctx == childfctx:
                            ak = min(a1 + (bk - b1), a2 - 1)
                            child[0][bk] = attr.evolve(parent[0][ak], skip=True)
                else:
                    remaining[idx][1].append((a1, a2, b1, b2))

        # Then, look at anything left, which might involve repeating the last
        # line.
        for parent, blocks in remaining:
            for a1, a2, b1, b2 in blocks:
                for bk in xrange(b1, b2):
                    if child[0][bk].fctx == childfctx:
                        ak = min(a1 + (bk - b1), a2 - 1)
                        child[0][bk] = attr.evolve(parent[0][ak], skip=True)
    return child
1203
1203
class filectx(basefilectx):
    """Provide convenient access to data of one revision of a file.

    The revision may be located via a changeset (changeid or changectx)
    and/or a file revision (fileid); at least one must be supplied.
    """
    def __init__(self, repo, path, changeid=None, fileid=None,
                 filelog=None, changectx=None):
        """changeid can be a changeset revision, node, or tag.
        fileid can be a file revision or node."""
        self._repo = repo
        self._path = path

        # At least one way of locating the file revision must be given.
        assert (changeid is not None
                or fileid is not None
                or changectx is not None), \
                ("bad args: changeid=%r, fileid=%r, changectx=%r"
                 % (changeid, fileid, changectx))

        # Only cache what the caller already has; everything else is
        # derived lazily through propertycache attributes.
        if filelog is not None:
            self._filelog = filelog
        if changeid is not None:
            self._changeid = changeid
        if changectx is not None:
            self._changectx = changectx
        if fileid is not None:
            self._fileid = fileid

    @propertycache
    def _changectx(self):
        try:
            return changectx(self._repo, self._changeid)
        except error.FilteredRepoLookupError:
            # Linkrev may point to any revision in the repository. When the
            # repository is filtered this may lead to `filectx` trying to
            # build `changectx` for a filtered revision. In such a case we
            # fall back to creating `changectx` on the unfiltered version
            # of the repository.
            #
            # This fallback should not be an issue because `changectx`
            # objects obtained from `filectx` are not used in complex
            # operations that care about filtering.
            #
            # This fallback is a cheap and dirty fix that prevents several
            # crashes. It does not ensure the behavior is correct. However
            # the behavior was not correct before filtering either, and
            # "incorrect behavior" is seen as better than "crash".
            #
            # Linkrevs have several serious troubles with filtering that
            # are complicated to solve. Proper handling of the issue here
            # should be considered when solving the linkrev issues.
            return changectx(self._repo.unfiltered(), self._changeid)

    def filectx(self, fileid, changeid=None):
        '''opens an arbitrary revision of the file without
        opening a new filelog'''
        return filectx(self._repo, self._path, fileid=fileid,
                       filelog=self._filelog, changeid=changeid)

    def rawdata(self):
        # Revision data with revlog flag processing disabled.
        return self._filelog.revision(self._filenode, raw=True)

    def rawflags(self):
        """low-level revlog flags"""
        return self._filelog.flags(self._filerev)

    def data(self):
        try:
            return self._filelog.read(self._filenode)
        except error.CensoredNodeError:
            # Censored content is either silently replaced by an empty
            # string or reported as a hard error, per configuration.
            if self._repo.ui.config("censor", "policy") == "ignore":
                return ""
            raise error.Abort(_("censored node: %s") % short(self._filenode),
                              hint=_("set censor.policy to ignore errors"))

    def size(self):
        return self._filelog.size(self._filerev)

    @propertycache
    def _copied(self):
        """check if file was actually renamed in this changeset revision

        If rename logged in file revision, we report copy for changeset only
        if file revisions linkrev points back to the changeset in question
        or both changeset parents contain different file revisions.
        """
        renamed = self._filelog.renamed(self._filenode)
        if not renamed:
            return renamed

        if self.rev() == self.linkrev():
            return renamed

        fname = self.path()
        fnode = self._filenode
        for p in self._changectx.parents():
            try:
                if fnode == p.filenode(fname):
                    # A parent holds the same file revision: not a copy
                    # as far as this changeset is concerned.
                    return None
            except error.LookupError:
                pass
        return renamed

    def children(self):
        # hard for renames
        kids = self._filelog.children(self._filenode)
        return [filectx(self._repo, self._path, fileid=n,
                        filelog=self._filelog) for n in kids]
1309
1309
class committablectx(basectx):
    """A committablectx object provides common functionality for a context
    that wants the ability to commit, e.g. workingctx or memctx."""
    def __init__(self, repo, text="", user=None, date=None, extra=None,
                 changes=None):
        self._repo = repo
        self._rev = None
        self._node = None
        self._text = text
        # Only cache what the caller supplied; the rest is produced
        # lazily by the propertycache attributes below.
        if date:
            self._date = util.parsedate(date)
        if user:
            self._user = user
        if changes:
            self._status = changes

        self._extra = {}
        if extra:
            self._extra = extra.copy()
        if 'branch' not in self._extra:
            try:
                branch = encoding.fromlocal(self._repo.dirstate.branch())
            except UnicodeDecodeError:
                raise error.Abort(_('branch name not in UTF-8!'))
            self._extra['branch'] = branch
        if self._extra['branch'] == '':
            self._extra['branch'] = 'default'

    def __bytes__(self):
        # Render as "<first parent>+" to mark uncommitted state.
        return bytes(self._parents[0]) + "+"

    __str__ = encoding.strmethod(__bytes__)

    def __nonzero__(self):
        return True

    __bool__ = __nonzero__

    def _buildflagfunc(self):
        # Fallback for computing file flags when the filesystem itself
        # cannot report them.
        copiesget = self._repo.dirstate.copies().get
        parents = self.parents()
        if len(parents) < 2:
            # One parent: inherit flags straight from its manifest.
            man = parents[0].manifest()
            def func(f):
                f = copiesget(f, f)
                return man.flags(f)
        else:
            # merges are tricky: we try to reconstruct the unstored
            # result from the merge (issue1802)
            p1, p2 = parents
            pa = p1.ancestor(p2)
            m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest()

            def func(f):
                f = copiesget(f, f) # may be wrong for merges with copies
                fl1, fl2, fla = m1.flags(f), m2.flags(f), ma.flags(f)
                if fl1 == fl2:
                    return fl1
                if fl1 == fla:
                    return fl2
                if fl2 == fla:
                    return fl1
                return '' # punt for conflicts

        return func

    @propertycache
    def _flagfunc(self):
        return self._repo.dirstate.flagfunc(self._buildflagfunc)

    @propertycache
    def _status(self):
        return self._repo.status()

    @propertycache
    def _user(self):
        return self._repo.ui.username()

    @propertycache
    def _date(self):
        ui = self._repo.ui
        # Honor the devel knob that pins dates for reproducible tests.
        date = ui.configdate('devel', 'default-date')
        if date is None:
            date = util.makedate()
        return date

    def subrev(self, subpath):
        return None

    def manifestnode(self):
        return None
    def user(self):
        return self._user or self._repo.ui.username()
    def date(self):
        return self._date
    def description(self):
        return self._text
    def files(self):
        return sorted(self._status.modified + self._status.added +
                      self._status.removed)

    def modified(self):
        return self._status.modified
    def added(self):
        return self._status.added
    def removed(self):
        return self._status.removed
    def deleted(self):
        return self._status.deleted
    def branch(self):
        return encoding.tolocal(self._extra['branch'])
    def closesbranch(self):
        return 'close' in self._extra
    def extra(self):
        return self._extra

    def isinmemory(self):
        return False

    def tags(self):
        return []

    def bookmarks(self):
        # Uncommitted contexts inherit the bookmarks of all parents.
        marks = []
        for p in self.parents():
            marks.extend(p.bookmarks())
        return marks

    def phase(self):
        # Never less public than any parent; defaults to draft.
        result = phases.draft
        for p in self.parents():
            result = max(result, p.phase())
        return result

    def hidden(self):
        return False

    def children(self):
        return []

    def flags(self, path):
        if r'_manifest' in self.__dict__:
            try:
                return self._manifest.flags(path)
            except KeyError:
                return ''

        try:
            return self._flagfunc(path)
        except OSError:
            return ''

    def ancestor(self, c2):
        """return the "best" ancestor context of self and c2"""
        return self._parents[0].ancestor(c2) # punt on two parents for now

    def walk(self, match):
        '''Generates matching file names.'''
        return sorted(self._repo.dirstate.walk(match,
                                               subrepos=sorted(self.substate),
                                               unknown=True, ignored=False))

    def matches(self, match):
        return sorted(self._repo.dirstate.matches(match))

    def ancestors(self):
        # Yield the direct parents first, then all further ancestors.
        for p in self._parents:
            yield p
        for a in self._repo.changelog.ancestors(
                [p.rev() for p in self._parents]):
            yield changectx(self._repo, a)

    def markcommitted(self, node):
        """Perform post-commit cleanup necessary after committing this ctx

        Specifically, this updates backing stores this working context
        wraps to reflect the fact that the changes reflected by this
        workingctx have been committed. For example, it marks
        modified and added files as normal in the dirstate.
        """
        with self._repo.dirstate.parentchange():
            for f in self.modified() + self.added():
                self._repo.dirstate.normal(f)
            for f in self.removed():
                self._repo.dirstate.drop(f)
            self._repo.dirstate.setparents(node)

        # write changes out explicitly, because nesting wlock at
        # runtime may prevent 'wlock.release()' in 'repo.commit()'
        # from immediately doing so for subsequent changing files
        self._repo.dirstate.write(self._repo.currenttransaction())

    def dirty(self, missing=False, merge=True, branch=True):
        return False
1510
1510
1511 class workingctx(committablectx):
1511 class workingctx(committablectx):
1512 """A workingctx object makes access to data related to
1512 """A workingctx object makes access to data related to
1513 the current working directory convenient.
1513 the current working directory convenient.
1514 date - any valid date string or (unixtime, offset), or None.
1514 date - any valid date string or (unixtime, offset), or None.
1515 user - username string, or None.
1515 user - username string, or None.
1516 extra - a dictionary of extra values, or None.
1516 extra - a dictionary of extra values, or None.
1517 changes - a list of file lists as returned by localrepo.status()
1517 changes - a list of file lists as returned by localrepo.status()
1518 or None to use the repository status.
1518 or None to use the repository status.
1519 """
1519 """
1520 def __init__(self, repo, text="", user=None, date=None, extra=None,
1520 def __init__(self, repo, text="", user=None, date=None, extra=None,
1521 changes=None):
1521 changes=None):
1522 super(workingctx, self).__init__(repo, text, user, date, extra, changes)
1522 super(workingctx, self).__init__(repo, text, user, date, extra, changes)
1523
1523
1524 def __iter__(self):
1524 def __iter__(self):
1525 d = self._repo.dirstate
1525 d = self._repo.dirstate
1526 for f in d:
1526 for f in d:
1527 if d[f] != 'r':
1527 if d[f] != 'r':
1528 yield f
1528 yield f
1529
1529
1530 def __contains__(self, key):
1530 def __contains__(self, key):
1531 return self._repo.dirstate[key] not in "?r"
1531 return self._repo.dirstate[key] not in "?r"
1532
1532
1533 def hex(self):
1533 def hex(self):
1534 return hex(wdirid)
1534 return hex(wdirid)
1535
1535
1536 @propertycache
1536 @propertycache
1537 def _parents(self):
1537 def _parents(self):
1538 p = self._repo.dirstate.parents()
1538 p = self._repo.dirstate.parents()
1539 if p[1] == nullid:
1539 if p[1] == nullid:
1540 p = p[:-1]
1540 p = p[:-1]
1541 return [changectx(self._repo, x) for x in p]
1541 return [changectx(self._repo, x) for x in p]
1542
1542
1543 def filectx(self, path, filelog=None):
1543 def filectx(self, path, filelog=None):
1544 """get a file context from the working directory"""
1544 """get a file context from the working directory"""
1545 return workingfilectx(self._repo, path, workingctx=self,
1545 return workingfilectx(self._repo, path, workingctx=self,
1546 filelog=filelog)
1546 filelog=filelog)
1547
1547
1548 def dirty(self, missing=False, merge=True, branch=True):
1548 def dirty(self, missing=False, merge=True, branch=True):
1549 "check whether a working directory is modified"
1549 "check whether a working directory is modified"
1550 # check subrepos first
1550 # check subrepos first
1551 for s in sorted(self.substate):
1551 for s in sorted(self.substate):
1552 if self.sub(s).dirty(missing=missing):
1552 if self.sub(s).dirty(missing=missing):
1553 return True
1553 return True
1554 # check current working dir
1554 # check current working dir
1555 return ((merge and self.p2()) or
1555 return ((merge and self.p2()) or
1556 (branch and self.branch() != self.p1().branch()) or
1556 (branch and self.branch() != self.p1().branch()) or
1557 self.modified() or self.added() or self.removed() or
1557 self.modified() or self.added() or self.removed() or
1558 (missing and self.deleted()))
1558 (missing and self.deleted()))
1559
1559
1560 def add(self, list, prefix=""):
1560 def add(self, list, prefix=""):
1561 with self._repo.wlock():
1561 with self._repo.wlock():
1562 ui, ds = self._repo.ui, self._repo.dirstate
1562 ui, ds = self._repo.ui, self._repo.dirstate
1563 uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
1563 uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
1564 rejected = []
1564 rejected = []
1565 lstat = self._repo.wvfs.lstat
1565 lstat = self._repo.wvfs.lstat
1566 for f in list:
1566 for f in list:
1567 # ds.pathto() returns an absolute file when this is invoked from
1567 # ds.pathto() returns an absolute file when this is invoked from
1568 # the keyword extension. That gets flagged as non-portable on
1568 # the keyword extension. That gets flagged as non-portable on
1569 # Windows, since it contains the drive letter and colon.
1569 # Windows, since it contains the drive letter and colon.
1570 scmutil.checkportable(ui, os.path.join(prefix, f))
1570 scmutil.checkportable(ui, os.path.join(prefix, f))
1571 try:
1571 try:
1572 st = lstat(f)
1572 st = lstat(f)
1573 except OSError:
1573 except OSError:
1574 ui.warn(_("%s does not exist!\n") % uipath(f))
1574 ui.warn(_("%s does not exist!\n") % uipath(f))
1575 rejected.append(f)
1575 rejected.append(f)
1576 continue
1576 continue
1577 if st.st_size > 10000000:
1577 if st.st_size > 10000000:
1578 ui.warn(_("%s: up to %d MB of RAM may be required "
1578 ui.warn(_("%s: up to %d MB of RAM may be required "
1579 "to manage this file\n"
1579 "to manage this file\n"
1580 "(use 'hg revert %s' to cancel the "
1580 "(use 'hg revert %s' to cancel the "
1581 "pending addition)\n")
1581 "pending addition)\n")
1582 % (f, 3 * st.st_size // 1000000, uipath(f)))
1582 % (f, 3 * st.st_size // 1000000, uipath(f)))
1583 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1583 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1584 ui.warn(_("%s not added: only files and symlinks "
1584 ui.warn(_("%s not added: only files and symlinks "
1585 "supported currently\n") % uipath(f))
1585 "supported currently\n") % uipath(f))
1586 rejected.append(f)
1586 rejected.append(f)
1587 elif ds[f] in 'amn':
1587 elif ds[f] in 'amn':
1588 ui.warn(_("%s already tracked!\n") % uipath(f))
1588 ui.warn(_("%s already tracked!\n") % uipath(f))
1589 elif ds[f] == 'r':
1589 elif ds[f] == 'r':
1590 ds.normallookup(f)
1590 ds.normallookup(f)
1591 else:
1591 else:
1592 ds.add(f)
1592 ds.add(f)
1593 return rejected
1593 return rejected
1594
1594
1595 def forget(self, files, prefix=""):
1595 def forget(self, files, prefix=""):
1596 with self._repo.wlock():
1596 with self._repo.wlock():
1597 ds = self._repo.dirstate
1597 ds = self._repo.dirstate
1598 uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
1598 uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
1599 rejected = []
1599 rejected = []
1600 for f in files:
1600 for f in files:
1601 if f not in self._repo.dirstate:
1601 if f not in self._repo.dirstate:
1602 self._repo.ui.warn(_("%s not tracked!\n") % uipath(f))
1602 self._repo.ui.warn(_("%s not tracked!\n") % uipath(f))
1603 rejected.append(f)
1603 rejected.append(f)
1604 elif self._repo.dirstate[f] != 'a':
1604 elif self._repo.dirstate[f] != 'a':
1605 self._repo.dirstate.remove(f)
1605 self._repo.dirstate.remove(f)
1606 else:
1606 else:
1607 self._repo.dirstate.drop(f)
1607 self._repo.dirstate.drop(f)
1608 return rejected
1608 return rejected
1609
1609
1610 def undelete(self, list):
1610 def undelete(self, list):
1611 pctxs = self.parents()
1611 pctxs = self.parents()
1612 with self._repo.wlock():
1612 with self._repo.wlock():
1613 ds = self._repo.dirstate
1613 ds = self._repo.dirstate
1614 for f in list:
1614 for f in list:
1615 if self._repo.dirstate[f] != 'r':
1615 if self._repo.dirstate[f] != 'r':
1616 self._repo.ui.warn(_("%s not removed!\n") % ds.pathto(f))
1616 self._repo.ui.warn(_("%s not removed!\n") % ds.pathto(f))
1617 else:
1617 else:
1618 fctx = f in pctxs[0] and pctxs[0][f] or pctxs[1][f]
1618 fctx = f in pctxs[0] and pctxs[0][f] or pctxs[1][f]
1619 t = fctx.data()
1619 t = fctx.data()
1620 self._repo.wwrite(f, t, fctx.flags())
1620 self._repo.wwrite(f, t, fctx.flags())
1621 self._repo.dirstate.normal(f)
1621 self._repo.dirstate.normal(f)
1622
1622
1623 def copy(self, source, dest):
1623 def copy(self, source, dest):
1624 try:
1624 try:
1625 st = self._repo.wvfs.lstat(dest)
1625 st = self._repo.wvfs.lstat(dest)
1626 except OSError as err:
1626 except OSError as err:
1627 if err.errno != errno.ENOENT:
1627 if err.errno != errno.ENOENT:
1628 raise
1628 raise
1629 self._repo.ui.warn(_("%s does not exist!\n")
1629 self._repo.ui.warn(_("%s does not exist!\n")
1630 % self._repo.dirstate.pathto(dest))
1630 % self._repo.dirstate.pathto(dest))
1631 return
1631 return
1632 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1632 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1633 self._repo.ui.warn(_("copy failed: %s is not a file or a "
1633 self._repo.ui.warn(_("copy failed: %s is not a file or a "
1634 "symbolic link\n")
1634 "symbolic link\n")
1635 % self._repo.dirstate.pathto(dest))
1635 % self._repo.dirstate.pathto(dest))
1636 else:
1636 else:
1637 with self._repo.wlock():
1637 with self._repo.wlock():
1638 if self._repo.dirstate[dest] in '?':
1638 if self._repo.dirstate[dest] in '?':
1639 self._repo.dirstate.add(dest)
1639 self._repo.dirstate.add(dest)
1640 elif self._repo.dirstate[dest] in 'r':
1640 elif self._repo.dirstate[dest] in 'r':
1641 self._repo.dirstate.normallookup(dest)
1641 self._repo.dirstate.normallookup(dest)
1642 self._repo.dirstate.copy(source, dest)
1642 self._repo.dirstate.copy(source, dest)
1643
1643
1644 def match(self, pats=None, include=None, exclude=None, default='glob',
1644 def match(self, pats=None, include=None, exclude=None, default='glob',
1645 listsubrepos=False, badfn=None):
1645 listsubrepos=False, badfn=None):
1646 r = self._repo
1646 r = self._repo
1647
1647
1648 # Only a case insensitive filesystem needs magic to translate user input
1648 # Only a case insensitive filesystem needs magic to translate user input
1649 # to actual case in the filesystem.
1649 # to actual case in the filesystem.
1650 icasefs = not util.fscasesensitive(r.root)
1650 icasefs = not util.fscasesensitive(r.root)
1651 return matchmod.match(r.root, r.getcwd(), pats, include, exclude,
1651 return matchmod.match(r.root, r.getcwd(), pats, include, exclude,
1652 default, auditor=r.auditor, ctx=self,
1652 default, auditor=r.auditor, ctx=self,
1653 listsubrepos=listsubrepos, badfn=badfn,
1653 listsubrepos=listsubrepos, badfn=badfn,
1654 icasefs=icasefs)
1654 icasefs=icasefs)
1655
1655
1656 def _filtersuspectsymlink(self, files):
1656 def _filtersuspectsymlink(self, files):
1657 if not files or self._repo.dirstate._checklink:
1657 if not files or self._repo.dirstate._checklink:
1658 return files
1658 return files
1659
1659
1660 # Symlink placeholders may get non-symlink-like contents
1660 # Symlink placeholders may get non-symlink-like contents
1661 # via user error or dereferencing by NFS or Samba servers,
1661 # via user error or dereferencing by NFS or Samba servers,
1662 # so we filter out any placeholders that don't look like a
1662 # so we filter out any placeholders that don't look like a
1663 # symlink
1663 # symlink
1664 sane = []
1664 sane = []
1665 for f in files:
1665 for f in files:
1666 if self.flags(f) == 'l':
1666 if self.flags(f) == 'l':
1667 d = self[f].data()
1667 d = self[f].data()
1668 if d == '' or len(d) >= 1024 or '\n' in d or util.binary(d):
1668 if d == '' or len(d) >= 1024 or '\n' in d or util.binary(d):
1669 self._repo.ui.debug('ignoring suspect symlink placeholder'
1669 self._repo.ui.debug('ignoring suspect symlink placeholder'
1670 ' "%s"\n' % f)
1670 ' "%s"\n' % f)
1671 continue
1671 continue
1672 sane.append(f)
1672 sane.append(f)
1673 return sane
1673 return sane
1674
1674
1675 def _checklookup(self, files):
1675 def _checklookup(self, files):
1676 # check for any possibly clean files
1676 # check for any possibly clean files
1677 if not files:
1677 if not files:
1678 return [], [], []
1678 return [], [], []
1679
1679
1680 modified = []
1680 modified = []
1681 deleted = []
1681 deleted = []
1682 fixup = []
1682 fixup = []
1683 pctx = self._parents[0]
1683 pctx = self._parents[0]
1684 # do a full compare of any files that might have changed
1684 # do a full compare of any files that might have changed
1685 for f in sorted(files):
1685 for f in sorted(files):
1686 try:
1686 try:
1687 # This will return True for a file that got replaced by a
1687 # This will return True for a file that got replaced by a
1688 # directory in the interim, but fixing that is pretty hard.
1688 # directory in the interim, but fixing that is pretty hard.
1689 if (f not in pctx or self.flags(f) != pctx.flags(f)
1689 if (f not in pctx or self.flags(f) != pctx.flags(f)
1690 or pctx[f].cmp(self[f])):
1690 or pctx[f].cmp(self[f])):
1691 modified.append(f)
1691 modified.append(f)
1692 else:
1692 else:
1693 fixup.append(f)
1693 fixup.append(f)
1694 except (IOError, OSError):
1694 except (IOError, OSError):
1695 # A file become inaccessible in between? Mark it as deleted,
1695 # A file become inaccessible in between? Mark it as deleted,
1696 # matching dirstate behavior (issue5584).
1696 # matching dirstate behavior (issue5584).
1697 # The dirstate has more complex behavior around whether a
1697 # The dirstate has more complex behavior around whether a
1698 # missing file matches a directory, etc, but we don't need to
1698 # missing file matches a directory, etc, but we don't need to
1699 # bother with that: if f has made it to this point, we're sure
1699 # bother with that: if f has made it to this point, we're sure
1700 # it's in the dirstate.
1700 # it's in the dirstate.
1701 deleted.append(f)
1701 deleted.append(f)
1702
1702
1703 return modified, deleted, fixup
1703 return modified, deleted, fixup
1704
1704
1705 def _poststatusfixup(self, status, fixup):
1705 def _poststatusfixup(self, status, fixup):
1706 """update dirstate for files that are actually clean"""
1706 """update dirstate for files that are actually clean"""
1707 poststatus = self._repo.postdsstatus()
1707 poststatus = self._repo.postdsstatus()
1708 if fixup or poststatus:
1708 if fixup or poststatus:
1709 try:
1709 try:
1710 oldid = self._repo.dirstate.identity()
1710 oldid = self._repo.dirstate.identity()
1711
1711
1712 # updating the dirstate is optional
1712 # updating the dirstate is optional
1713 # so we don't wait on the lock
1713 # so we don't wait on the lock
1714 # wlock can invalidate the dirstate, so cache normal _after_
1714 # wlock can invalidate the dirstate, so cache normal _after_
1715 # taking the lock
1715 # taking the lock
1716 with self._repo.wlock(False):
1716 with self._repo.wlock(False):
1717 if self._repo.dirstate.identity() == oldid:
1717 if self._repo.dirstate.identity() == oldid:
1718 if fixup:
1718 if fixup:
1719 normal = self._repo.dirstate.normal
1719 normal = self._repo.dirstate.normal
1720 for f in fixup:
1720 for f in fixup:
1721 normal(f)
1721 normal(f)
1722 # write changes out explicitly, because nesting
1722 # write changes out explicitly, because nesting
1723 # wlock at runtime may prevent 'wlock.release()'
1723 # wlock at runtime may prevent 'wlock.release()'
1724 # after this block from doing so for subsequent
1724 # after this block from doing so for subsequent
1725 # changing files
1725 # changing files
1726 tr = self._repo.currenttransaction()
1726 tr = self._repo.currenttransaction()
1727 self._repo.dirstate.write(tr)
1727 self._repo.dirstate.write(tr)
1728
1728
1729 if poststatus:
1729 if poststatus:
1730 for ps in poststatus:
1730 for ps in poststatus:
1731 ps(self, status)
1731 ps(self, status)
1732 else:
1732 else:
1733 # in this case, writing changes out breaks
1733 # in this case, writing changes out breaks
1734 # consistency, because .hg/dirstate was
1734 # consistency, because .hg/dirstate was
1735 # already changed simultaneously after last
1735 # already changed simultaneously after last
1736 # caching (see also issue5584 for detail)
1736 # caching (see also issue5584 for detail)
1737 self._repo.ui.debug('skip updating dirstate: '
1737 self._repo.ui.debug('skip updating dirstate: '
1738 'identity mismatch\n')
1738 'identity mismatch\n')
1739 except error.LockError:
1739 except error.LockError:
1740 pass
1740 pass
1741 finally:
1741 finally:
1742 # Even if the wlock couldn't be grabbed, clear out the list.
1742 # Even if the wlock couldn't be grabbed, clear out the list.
1743 self._repo.clearpostdsstatus()
1743 self._repo.clearpostdsstatus()
1744
1744
1745 def _dirstatestatus(self, match, ignored=False, clean=False, unknown=False):
1745 def _dirstatestatus(self, match, ignored=False, clean=False, unknown=False):
1746 '''Gets the status from the dirstate -- internal use only.'''
1746 '''Gets the status from the dirstate -- internal use only.'''
1747 subrepos = []
1747 subrepos = []
1748 if '.hgsub' in self:
1748 if '.hgsub' in self:
1749 subrepos = sorted(self.substate)
1749 subrepos = sorted(self.substate)
1750 cmp, s = self._repo.dirstate.status(match, subrepos, ignored=ignored,
1750 cmp, s = self._repo.dirstate.status(match, subrepos, ignored=ignored,
1751 clean=clean, unknown=unknown)
1751 clean=clean, unknown=unknown)
1752
1752
1753 # check for any possibly clean files
1753 # check for any possibly clean files
1754 fixup = []
1754 fixup = []
1755 if cmp:
1755 if cmp:
1756 modified2, deleted2, fixup = self._checklookup(cmp)
1756 modified2, deleted2, fixup = self._checklookup(cmp)
1757 s.modified.extend(modified2)
1757 s.modified.extend(modified2)
1758 s.deleted.extend(deleted2)
1758 s.deleted.extend(deleted2)
1759
1759
1760 if fixup and clean:
1760 if fixup and clean:
1761 s.clean.extend(fixup)
1761 s.clean.extend(fixup)
1762
1762
1763 self._poststatusfixup(s, fixup)
1763 self._poststatusfixup(s, fixup)
1764
1764
1765 if match.always():
1765 if match.always():
1766 # cache for performance
1766 # cache for performance
1767 if s.unknown or s.ignored or s.clean:
1767 if s.unknown or s.ignored or s.clean:
1768 # "_status" is cached with list*=False in the normal route
1768 # "_status" is cached with list*=False in the normal route
1769 self._status = scmutil.status(s.modified, s.added, s.removed,
1769 self._status = scmutil.status(s.modified, s.added, s.removed,
1770 s.deleted, [], [], [])
1770 s.deleted, [], [], [])
1771 else:
1771 else:
1772 self._status = s
1772 self._status = s
1773
1773
1774 return s
1774 return s
1775
1775
1776 @propertycache
1776 @propertycache
1777 def _manifest(self):
1777 def _manifest(self):
1778 """generate a manifest corresponding to the values in self._status
1778 """generate a manifest corresponding to the values in self._status
1779
1779
1780 This reuse the file nodeid from parent, but we use special node
1780 This reuse the file nodeid from parent, but we use special node
1781 identifiers for added and modified files. This is used by manifests
1781 identifiers for added and modified files. This is used by manifests
1782 merge to see that files are different and by update logic to avoid
1782 merge to see that files are different and by update logic to avoid
1783 deleting newly added files.
1783 deleting newly added files.
1784 """
1784 """
1785 return self._buildstatusmanifest(self._status)
1785 return self._buildstatusmanifest(self._status)
1786
1786
1787 def _buildstatusmanifest(self, status):
1787 def _buildstatusmanifest(self, status):
1788 """Builds a manifest that includes the given status results."""
1788 """Builds a manifest that includes the given status results."""
1789 parents = self.parents()
1789 parents = self.parents()
1790
1790
1791 man = parents[0].manifest().copy()
1791 man = parents[0].manifest().copy()
1792
1792
1793 ff = self._flagfunc
1793 ff = self._flagfunc
1794 for i, l in ((addednodeid, status.added),
1794 for i, l in ((addednodeid, status.added),
1795 (modifiednodeid, status.modified)):
1795 (modifiednodeid, status.modified)):
1796 for f in l:
1796 for f in l:
1797 man[f] = i
1797 man[f] = i
1798 try:
1798 try:
1799 man.setflag(f, ff(f))
1799 man.setflag(f, ff(f))
1800 except OSError:
1800 except OSError:
1801 pass
1801 pass
1802
1802
1803 for f in status.deleted + status.removed:
1803 for f in status.deleted + status.removed:
1804 if f in man:
1804 if f in man:
1805 del man[f]
1805 del man[f]
1806
1806
1807 return man
1807 return man
1808
1808
1809 def _buildstatus(self, other, s, match, listignored, listclean,
1809 def _buildstatus(self, other, s, match, listignored, listclean,
1810 listunknown):
1810 listunknown):
1811 """build a status with respect to another context
1811 """build a status with respect to another context
1812
1812
1813 This includes logic for maintaining the fast path of status when
1813 This includes logic for maintaining the fast path of status when
1814 comparing the working directory against its parent, which is to skip
1814 comparing the working directory against its parent, which is to skip
1815 building a new manifest if self (working directory) is not comparing
1815 building a new manifest if self (working directory) is not comparing
1816 against its parent (repo['.']).
1816 against its parent (repo['.']).
1817 """
1817 """
1818 s = self._dirstatestatus(match, listignored, listclean, listunknown)
1818 s = self._dirstatestatus(match, listignored, listclean, listunknown)
1819 # Filter out symlinks that, in the case of FAT32 and NTFS filesystems,
1819 # Filter out symlinks that, in the case of FAT32 and NTFS filesystems,
1820 # might have accidentally ended up with the entire contents of the file
1820 # might have accidentally ended up with the entire contents of the file
1821 # they are supposed to be linking to.
1821 # they are supposed to be linking to.
1822 s.modified[:] = self._filtersuspectsymlink(s.modified)
1822 s.modified[:] = self._filtersuspectsymlink(s.modified)
1823 if other != self._repo['.']:
1823 if other != self._repo['.']:
1824 s = super(workingctx, self)._buildstatus(other, s, match,
1824 s = super(workingctx, self)._buildstatus(other, s, match,
1825 listignored, listclean,
1825 listignored, listclean,
1826 listunknown)
1826 listunknown)
1827 return s
1827 return s
1828
1828
1829 def _matchstatus(self, other, match):
1829 def _matchstatus(self, other, match):
1830 """override the match method with a filter for directory patterns
1830 """override the match method with a filter for directory patterns
1831
1831
1832 We use inheritance to customize the match.bad method only in cases of
1832 We use inheritance to customize the match.bad method only in cases of
1833 workingctx since it belongs only to the working directory when
1833 workingctx since it belongs only to the working directory when
1834 comparing against the parent changeset.
1834 comparing against the parent changeset.
1835
1835
1836 If we aren't comparing against the working directory's parent, then we
1836 If we aren't comparing against the working directory's parent, then we
1837 just use the default match object sent to us.
1837 just use the default match object sent to us.
1838 """
1838 """
1839 if other != self._repo['.']:
1839 if other != self._repo['.']:
1840 def bad(f, msg):
1840 def bad(f, msg):
1841 # 'f' may be a directory pattern from 'match.files()',
1841 # 'f' may be a directory pattern from 'match.files()',
1842 # so 'f not in ctx1' is not enough
1842 # so 'f not in ctx1' is not enough
1843 if f not in other and not other.hasdir(f):
1843 if f not in other and not other.hasdir(f):
1844 self._repo.ui.warn('%s: %s\n' %
1844 self._repo.ui.warn('%s: %s\n' %
1845 (self._repo.dirstate.pathto(f), msg))
1845 (self._repo.dirstate.pathto(f), msg))
1846 match.bad = bad
1846 match.bad = bad
1847 return match
1847 return match
1848
1848
1849 def markcommitted(self, node):
1849 def markcommitted(self, node):
1850 super(workingctx, self).markcommitted(node)
1850 super(workingctx, self).markcommitted(node)
1851
1851
1852 sparse.aftercommit(self._repo, node)
1852 sparse.aftercommit(self._repo, node)
1853
1853
1854 class committablefilectx(basefilectx):
1854 class committablefilectx(basefilectx):
1855 """A committablefilectx provides common functionality for a file context
1855 """A committablefilectx provides common functionality for a file context
1856 that wants the ability to commit, e.g. workingfilectx or memfilectx."""
1856 that wants the ability to commit, e.g. workingfilectx or memfilectx."""
1857 def __init__(self, repo, path, filelog=None, ctx=None):
1857 def __init__(self, repo, path, filelog=None, ctx=None):
1858 self._repo = repo
1858 self._repo = repo
1859 self._path = path
1859 self._path = path
1860 self._changeid = None
1860 self._changeid = None
1861 self._filerev = self._filenode = None
1861 self._filerev = self._filenode = None
1862
1862
1863 if filelog is not None:
1863 if filelog is not None:
1864 self._filelog = filelog
1864 self._filelog = filelog
1865 if ctx:
1865 if ctx:
1866 self._changectx = ctx
1866 self._changectx = ctx
1867
1867
1868 def __nonzero__(self):
1868 def __nonzero__(self):
1869 return True
1869 return True
1870
1870
1871 __bool__ = __nonzero__
1871 __bool__ = __nonzero__
1872
1872
1873 def linkrev(self):
1873 def linkrev(self):
1874 # linked to self._changectx no matter if file is modified or not
1874 # linked to self._changectx no matter if file is modified or not
1875 return self.rev()
1875 return self.rev()
1876
1876
1877 def parents(self):
1877 def parents(self):
1878 '''return parent filectxs, following copies if necessary'''
1878 '''return parent filectxs, following copies if necessary'''
1879 def filenode(ctx, path):
1879 def filenode(ctx, path):
1880 return ctx._manifest.get(path, nullid)
1880 return ctx._manifest.get(path, nullid)
1881
1881
1882 path = self._path
1882 path = self._path
1883 fl = self._filelog
1883 fl = self._filelog
1884 pcl = self._changectx._parents
1884 pcl = self._changectx._parents
1885 renamed = self.renamed()
1885 renamed = self.renamed()
1886
1886
1887 if renamed:
1887 if renamed:
1888 pl = [renamed + (None,)]
1888 pl = [renamed + (None,)]
1889 else:
1889 else:
1890 pl = [(path, filenode(pcl[0], path), fl)]
1890 pl = [(path, filenode(pcl[0], path), fl)]
1891
1891
1892 for pc in pcl[1:]:
1892 for pc in pcl[1:]:
1893 pl.append((path, filenode(pc, path), fl))
1893 pl.append((path, filenode(pc, path), fl))
1894
1894
1895 return [self._parentfilectx(p, fileid=n, filelog=l)
1895 return [self._parentfilectx(p, fileid=n, filelog=l)
1896 for p, n, l in pl if n != nullid]
1896 for p, n, l in pl if n != nullid]
1897
1897
1898 def children(self):
1898 def children(self):
1899 return []
1899 return []
1900
1900
1901 class workingfilectx(committablefilectx):
1901 class workingfilectx(committablefilectx):
1902 """A workingfilectx object makes access to data related to a particular
1902 """A workingfilectx object makes access to data related to a particular
1903 file in the working directory convenient."""
1903 file in the working directory convenient."""
1904 def __init__(self, repo, path, filelog=None, workingctx=None):
1904 def __init__(self, repo, path, filelog=None, workingctx=None):
1905 super(workingfilectx, self).__init__(repo, path, filelog, workingctx)
1905 super(workingfilectx, self).__init__(repo, path, filelog, workingctx)
1906
1906
1907 @propertycache
1907 @propertycache
1908 def _changectx(self):
1908 def _changectx(self):
1909 return workingctx(self._repo)
1909 return workingctx(self._repo)
1910
1910
1911 def data(self):
1911 def data(self):
1912 return self._repo.wread(self._path)
1912 return self._repo.wread(self._path)
1913 def renamed(self):
1913 def renamed(self):
1914 rp = self._repo.dirstate.copied(self._path)
1914 rp = self._repo.dirstate.copied(self._path)
1915 if not rp:
1915 if not rp:
1916 return None
1916 return None
1917 return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
1917 return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
1918
1918
1919 def size(self):
1919 def size(self):
1920 return self._repo.wvfs.lstat(self._path).st_size
1920 return self._repo.wvfs.lstat(self._path).st_size
1921 def date(self):
1921 def date(self):
1922 t, tz = self._changectx.date()
1922 t, tz = self._changectx.date()
1923 try:
1923 try:
1924 return (self._repo.wvfs.lstat(self._path).st_mtime, tz)
1924 return (self._repo.wvfs.lstat(self._path).st_mtime, tz)
1925 except OSError as err:
1925 except OSError as err:
1926 if err.errno != errno.ENOENT:
1926 if err.errno != errno.ENOENT:
1927 raise
1927 raise
1928 return (t, tz)
1928 return (t, tz)
1929
1929
1930 def exists(self):
1930 def exists(self):
1931 return self._repo.wvfs.exists(self._path)
1931 return self._repo.wvfs.exists(self._path)
1932
1932
1933 def lexists(self):
1933 def lexists(self):
1934 return self._repo.wvfs.lexists(self._path)
1934 return self._repo.wvfs.lexists(self._path)
1935
1935
1936 def audit(self):
1936 def audit(self):
1937 return self._repo.wvfs.audit(self._path)
1937 return self._repo.wvfs.audit(self._path)
1938
1938
1939 def cmp(self, fctx):
1939 def cmp(self, fctx):
1940 """compare with other file context
1940 """compare with other file context
1941
1941
1942 returns True if different than fctx.
1942 returns True if different than fctx.
1943 """
1943 """
1944 # fctx should be a filectx (not a workingfilectx)
1944 # fctx should be a filectx (not a workingfilectx)
1945 # invert comparison to reuse the same code path
1945 # invert comparison to reuse the same code path
1946 return fctx.cmp(self)
1946 return fctx.cmp(self)
1947
1947
1948 def remove(self, ignoremissing=False):
1948 def remove(self, ignoremissing=False):
1949 """wraps unlink for a repo's working directory"""
1949 """wraps unlink for a repo's working directory"""
1950 self._repo.wvfs.unlinkpath(self._path, ignoremissing=ignoremissing)
1950 self._repo.wvfs.unlinkpath(self._path, ignoremissing=ignoremissing)
1951
1951
1952 def write(self, data, flags, backgroundclose=False):
1952 def write(self, data, flags, backgroundclose=False):
1953 """wraps repo.wwrite"""
1953 """wraps repo.wwrite"""
1954 self._repo.wwrite(self._path, data, flags,
1954 self._repo.wwrite(self._path, data, flags,
1955 backgroundclose=backgroundclose)
1955 backgroundclose=backgroundclose)
1956
1956
1957 def markcopied(self, src):
1957 def markcopied(self, src):
1958 """marks this file a copy of `src`"""
1958 """marks this file a copy of `src`"""
1959 if self._repo.dirstate[self._path] in "nma":
1959 if self._repo.dirstate[self._path] in "nma":
1960 self._repo.dirstate.copy(src, self._path)
1960 self._repo.dirstate.copy(src, self._path)
1961
1961
1962 def clearunknown(self):
1962 def clearunknown(self):
1963 """Removes conflicting items in the working directory so that
1963 """Removes conflicting items in the working directory so that
1964 ``write()`` can be called successfully.
1964 ``write()`` can be called successfully.
1965 """
1965 """
1966 wvfs = self._repo.wvfs
1966 wvfs = self._repo.wvfs
1967 f = self._path
1967 f = self._path
1968 wvfs.audit(f)
1968 wvfs.audit(f)
1969 if wvfs.isdir(f) and not wvfs.islink(f):
1969 if wvfs.isdir(f) and not wvfs.islink(f):
1970 wvfs.rmtree(f, forcibly=True)
1970 wvfs.rmtree(f, forcibly=True)
1971 for p in reversed(list(util.finddirs(f))):
1971 for p in reversed(list(util.finddirs(f))):
1972 if wvfs.isfileorlink(p):
1972 if wvfs.isfileorlink(p):
1973 wvfs.unlink(p)
1973 wvfs.unlink(p)
1974 break
1974 break
1975
1975
1976 def setflags(self, l, x):
1976 def setflags(self, l, x):
1977 self._repo.wvfs.setflags(self._path, l, x)
1977 self._repo.wvfs.setflags(self._path, l, x)
1978
1978
1979 class overlayworkingctx(committablectx):
1979 class overlayworkingctx(committablectx):
1980 """Wraps another mutable context with a write-back cache that can be
1980 """Wraps another mutable context with a write-back cache that can be
1981 converted into a commit context.
1981 converted into a commit context.
1982
1982
1983 self._cache[path] maps to a dict with keys: {
1983 self._cache[path] maps to a dict with keys: {
1984 'exists': bool?
1984 'exists': bool?
1985 'date': date?
1985 'date': date?
1986 'data': str?
1986 'data': str?
1987 'flags': str?
1987 'flags': str?
1988 'copied': str? (path or None)
1988 'copied': str? (path or None)
1989 }
1989 }
1990 If `exists` is True, `flags` must be non-None and 'date' is non-None. If it
1990 If `exists` is True, `flags` must be non-None and 'date' is non-None. If it
1991 is `False`, the file was deleted.
1991 is `False`, the file was deleted.
1992 """
1992 """
1993
1993
1994 def __init__(self, repo):
1994 def __init__(self, repo):
1995 super(overlayworkingctx, self).__init__(repo)
1995 super(overlayworkingctx, self).__init__(repo)
1996 self._repo = repo
1996 self._repo = repo
1997 self.clean()
1997 self.clean()
1998
1998
1999 def setbase(self, wrappedctx):
1999 def setbase(self, wrappedctx):
2000 self._wrappedctx = wrappedctx
2000 self._wrappedctx = wrappedctx
2001 self._parents = [wrappedctx]
2001 self._parents = [wrappedctx]
2002 # Drop old manifest cache as it is now out of date.
2002 # Drop old manifest cache as it is now out of date.
2003 # This is necessary when, e.g., rebasing several nodes with one
2003 # This is necessary when, e.g., rebasing several nodes with one
2004 # ``overlayworkingctx`` (e.g. with --collapse).
2004 # ``overlayworkingctx`` (e.g. with --collapse).
2005 util.clearcachedproperty(self, '_manifest')
2005 util.clearcachedproperty(self, '_manifest')
2006
2006
2007 def data(self, path):
2007 def data(self, path):
2008 if self.isdirty(path):
2008 if self.isdirty(path):
2009 if self._cache[path]['exists']:
2009 if self._cache[path]['exists']:
2010 if self._cache[path]['data']:
2010 if self._cache[path]['data']:
2011 return self._cache[path]['data']
2011 return self._cache[path]['data']
2012 else:
2012 else:
2013 # Must fallback here, too, because we only set flags.
2013 # Must fallback here, too, because we only set flags.
2014 return self._wrappedctx[path].data()
2014 return self._wrappedctx[path].data()
2015 else:
2015 else:
2016 raise error.ProgrammingError("No such file or directory: %s" %
2016 raise error.ProgrammingError("No such file or directory: %s" %
2017 path)
2017 path)
2018 else:
2018 else:
2019 return self._wrappedctx[path].data()
2019 return self._wrappedctx[path].data()
2020
2020
2021 @propertycache
2021 @propertycache
2022 def _manifest(self):
2022 def _manifest(self):
2023 parents = self.parents()
2023 parents = self.parents()
2024 man = parents[0].manifest().copy()
2024 man = parents[0].manifest().copy()
2025
2025
2026 flag = self._flagfunc
2026 flag = self._flagfunc
2027 for path in self.added():
2027 for path in self.added():
2028 man[path] = addednodeid
2028 man[path] = addednodeid
2029 man.setflag(path, flag(path))
2029 man.setflag(path, flag(path))
2030 for path in self.modified():
2030 for path in self.modified():
2031 man[path] = modifiednodeid
2031 man[path] = modifiednodeid
2032 man.setflag(path, flag(path))
2032 man.setflag(path, flag(path))
2033 for path in self.removed():
2033 for path in self.removed():
2034 del man[path]
2034 del man[path]
2035 return man
2035 return man
2036
2036
2037 @propertycache
2037 @propertycache
2038 def _flagfunc(self):
2038 def _flagfunc(self):
2039 def f(path):
2039 def f(path):
2040 return self._cache[path]['flags']
2040 return self._cache[path]['flags']
2041 return f
2041 return f
2042
2042
2043 def files(self):
2043 def files(self):
2044 return sorted(self.added() + self.modified() + self.removed())
2044 return sorted(self.added() + self.modified() + self.removed())
2045
2045
2046 def modified(self):
2046 def modified(self):
2047 return [f for f in self._cache.keys() if self._cache[f]['exists'] and
2047 return [f for f in self._cache.keys() if self._cache[f]['exists'] and
2048 self._existsinparent(f)]
2048 self._existsinparent(f)]
2049
2049
2050 def added(self):
2050 def added(self):
2051 return [f for f in self._cache.keys() if self._cache[f]['exists'] and
2051 return [f for f in self._cache.keys() if self._cache[f]['exists'] and
2052 not self._existsinparent(f)]
2052 not self._existsinparent(f)]
2053
2053
2054 def removed(self):
2054 def removed(self):
2055 return [f for f in self._cache.keys() if
2055 return [f for f in self._cache.keys() if
2056 not self._cache[f]['exists'] and self._existsinparent(f)]
2056 not self._cache[f]['exists'] and self._existsinparent(f)]
2057
2057
2058 def isinmemory(self):
2058 def isinmemory(self):
2059 return True
2059 return True
2060
2060
2061 def filedate(self, path):
2061 def filedate(self, path):
2062 if self.isdirty(path):
2062 if self.isdirty(path):
2063 return self._cache[path]['date']
2063 return self._cache[path]['date']
2064 else:
2064 else:
2065 return self._wrappedctx[path].date()
2065 return self._wrappedctx[path].date()
2066
2066
2067 def markcopied(self, path, origin):
2067 def markcopied(self, path, origin):
2068 if self.isdirty(path):
2068 if self.isdirty(path):
2069 self._cache[path]['copied'] = origin
2069 self._cache[path]['copied'] = origin
2070 else:
2070 else:
2071 raise error.ProgrammingError('markcopied() called on clean context')
2071 raise error.ProgrammingError('markcopied() called on clean context')
2072
2072
2073 def copydata(self, path):
2073 def copydata(self, path):
2074 if self.isdirty(path):
2074 if self.isdirty(path):
2075 return self._cache[path]['copied']
2075 return self._cache[path]['copied']
2076 else:
2076 else:
2077 raise error.ProgrammingError('copydata() called on clean context')
2077 raise error.ProgrammingError('copydata() called on clean context')
2078
2078
2079 def flags(self, path):
2079 def flags(self, path):
2080 if self.isdirty(path):
2080 if self.isdirty(path):
2081 if self._cache[path]['exists']:
2081 if self._cache[path]['exists']:
2082 return self._cache[path]['flags']
2082 return self._cache[path]['flags']
2083 else:
2083 else:
2084 raise error.ProgrammingError("No such file or directory: %s" %
2084 raise error.ProgrammingError("No such file or directory: %s" %
2085 self._path)
2085 self._path)
2086 else:
2086 else:
2087 return self._wrappedctx[path].flags()
2087 return self._wrappedctx[path].flags()
2088
2088
2089 def _existsinparent(self, path):
2089 def _existsinparent(self, path):
2090 try:
2090 try:
2091 # ``commitctx` raises a ``ManifestLookupError`` if a path does not
2091 # ``commitctx` raises a ``ManifestLookupError`` if a path does not
2092 # exist, unlike ``workingctx``, which returns a ``workingfilectx``
2092 # exist, unlike ``workingctx``, which returns a ``workingfilectx``
2093 # with an ``exists()`` function.
2093 # with an ``exists()`` function.
2094 self._wrappedctx[path]
2094 self._wrappedctx[path]
2095 return True
2095 return True
2096 except error.ManifestLookupError:
2096 except error.ManifestLookupError:
2097 return False
2097 return False
2098
2098
2099 def _auditconflicts(self, path):
2099 def _auditconflicts(self, path):
2100 """Replicates conflict checks done by wvfs.write().
2100 """Replicates conflict checks done by wvfs.write().
2101
2101
2102 Since we never write to the filesystem and never call `applyupdates` in
2102 Since we never write to the filesystem and never call `applyupdates` in
2103 IMM, we'll never check that a path is actually writable -- e.g., because
2103 IMM, we'll never check that a path is actually writable -- e.g., because
2104 it adds `a/foo`, but `a` is actually a file in the other commit.
2104 it adds `a/foo`, but `a` is actually a file in the other commit.
2105 """
2105 """
2106 def fail(path, component):
2106 def fail(path, component):
2107 # p1() is the base and we're receiving "writes" for p2()'s
2107 # p1() is the base and we're receiving "writes" for p2()'s
2108 # files.
2108 # files.
2109 if 'l' in self.p1()[component].flags():
2109 if 'l' in self.p1()[component].flags():
2110 raise error.Abort("error: %s conflicts with symlink %s "
2110 raise error.Abort("error: %s conflicts with symlink %s "
2111 "in %s." % (path, component,
2111 "in %s." % (path, component,
2112 self.p1().rev()))
2112 self.p1().rev()))
2113 else:
2113 else:
2114 raise error.Abort("error: '%s' conflicts with file '%s' in "
2114 raise error.Abort("error: '%s' conflicts with file '%s' in "
2115 "%s." % (path, component,
2115 "%s." % (path, component,
2116 self.p1().rev()))
2116 self.p1().rev()))
2117
2117
2118 # Test that each new directory to be created to write this path from p2
2118 # Test that each new directory to be created to write this path from p2
2119 # is not a file in p1.
2119 # is not a file in p1.
2120 components = path.split('/')
2120 components = path.split('/')
2121 for i in xrange(len(components)):
2121 for i in xrange(len(components)):
2122 component = "/".join(components[0:i])
2122 component = "/".join(components[0:i])
2123 if component in self.p1():
2123 if component in self.p1():
2124 fail(path, component)
2124 fail(path, component)
2125
2125
2126 # Test the other direction -- that this path from p2 isn't a directory
2126 # Test the other direction -- that this path from p2 isn't a directory
2127 # in p1 (test that p1 doesn't any paths matching `path/*`).
2127 # in p1 (test that p1 doesn't any paths matching `path/*`).
2128 match = matchmod.match('/', '', [path + '/'], default=b'relpath')
2128 match = matchmod.match('/', '', [path + '/'], default=b'relpath')
2129 matches = self.p1().manifest().matches(match)
2129 matches = self.p1().manifest().matches(match)
2130 if len(matches) > 0:
2130 if len(matches) > 0:
2131 if len(matches) == 1 and matches.keys()[0] == path:
2131 if len(matches) == 1 and matches.keys()[0] == path:
2132 return
2132 return
2133 raise error.Abort("error: file '%s' cannot be written because "
2133 raise error.Abort("error: file '%s' cannot be written because "
2134 " '%s/' is a folder in %s (containing %d "
2134 " '%s/' is a folder in %s (containing %d "
2135 "entries: %s)"
2135 "entries: %s)"
2136 % (path, path, self.p1(), len(matches),
2136 % (path, path, self.p1(), len(matches),
2137 ', '.join(matches.keys())))
2137 ', '.join(matches.keys())))
2138
2138
2139 def write(self, path, data, flags=''):
2139 def write(self, path, data, flags=''):
2140 if data is None:
2140 if data is None:
2141 raise error.ProgrammingError("data must be non-None")
2141 raise error.ProgrammingError("data must be non-None")
2142 self._auditconflicts(path)
2142 self._auditconflicts(path)
2143 self._markdirty(path, exists=True, data=data, date=util.makedate(),
2143 self._markdirty(path, exists=True, data=data, date=util.makedate(),
2144 flags=flags)
2144 flags=flags)
2145
2145
2146 def setflags(self, path, l, x):
2146 def setflags(self, path, l, x):
2147 self._markdirty(path, exists=True, date=util.makedate(),
2147 self._markdirty(path, exists=True, date=util.makedate(),
2148 flags=(l and 'l' or '') + (x and 'x' or ''))
2148 flags=(l and 'l' or '') + (x and 'x' or ''))
2149
2149
2150 def remove(self, path):
2150 def remove(self, path):
2151 self._markdirty(path, exists=False)
2151 self._markdirty(path, exists=False)
2152
2152
2153 def exists(self, path):
2153 def exists(self, path):
2154 """exists behaves like `lexists`, but needs to follow symlinks and
2154 """exists behaves like `lexists`, but needs to follow symlinks and
2155 return False if they are broken.
2155 return False if they are broken.
2156 """
2156 """
2157 if self.isdirty(path):
2157 if self.isdirty(path):
2158 # If this path exists and is a symlink, "follow" it by calling
2158 # If this path exists and is a symlink, "follow" it by calling
2159 # exists on the destination path.
2159 # exists on the destination path.
2160 if (self._cache[path]['exists'] and
2160 if (self._cache[path]['exists'] and
2161 'l' in self._cache[path]['flags']):
2161 'l' in self._cache[path]['flags']):
2162 return self.exists(self._cache[path]['data'].strip())
2162 return self.exists(self._cache[path]['data'].strip())
2163 else:
2163 else:
2164 return self._cache[path]['exists']
2164 return self._cache[path]['exists']
2165
2165
2166 return self._existsinparent(path)
2166 return self._existsinparent(path)
2167
2167
2168 def lexists(self, path):
2168 def lexists(self, path):
2169 """lexists returns True if the path exists"""
2169 """lexists returns True if the path exists"""
2170 if self.isdirty(path):
2170 if self.isdirty(path):
2171 return self._cache[path]['exists']
2171 return self._cache[path]['exists']
2172
2172
2173 return self._existsinparent(path)
2173 return self._existsinparent(path)
2174
2174
2175 def size(self, path):
2175 def size(self, path):
2176 if self.isdirty(path):
2176 if self.isdirty(path):
2177 if self._cache[path]['exists']:
2177 if self._cache[path]['exists']:
2178 return len(self._cache[path]['data'])
2178 return len(self._cache[path]['data'])
2179 else:
2179 else:
2180 raise error.ProgrammingError("No such file or directory: %s" %
2180 raise error.ProgrammingError("No such file or directory: %s" %
2181 self._path)
2181 self._path)
2182 return self._wrappedctx[path].size()
2182 return self._wrappedctx[path].size()
2183
2183
2184 def tomemctx(self, text, branch=None, extra=None, date=None, parents=None,
2184 def tomemctx(self, text, branch=None, extra=None, date=None, parents=None,
2185 user=None, editor=None):
2185 user=None, editor=None):
2186 """Converts this ``overlayworkingctx`` into a ``memctx`` ready to be
2186 """Converts this ``overlayworkingctx`` into a ``memctx`` ready to be
2187 committed.
2187 committed.
2188
2188
2189 ``text`` is the commit message.
2189 ``text`` is the commit message.
2190 ``parents`` (optional) are rev numbers.
2190 ``parents`` (optional) are rev numbers.
2191 """
2191 """
2192 # Default parents to the wrapped contexts' if not passed.
2192 # Default parents to the wrapped contexts' if not passed.
2193 if parents is None:
2193 if parents is None:
2194 parents = self._wrappedctx.parents()
2194 parents = self._wrappedctx.parents()
2195 if len(parents) == 1:
2195 if len(parents) == 1:
2196 parents = (parents[0], None)
2196 parents = (parents[0], None)
2197
2197
2198 # ``parents`` is passed as rev numbers; convert to ``commitctxs``.
2198 # ``parents`` is passed as rev numbers; convert to ``commitctxs``.
2199 if parents[1] is None:
2199 if parents[1] is None:
2200 parents = (self._repo[parents[0]], None)
2200 parents = (self._repo[parents[0]], None)
2201 else:
2201 else:
2202 parents = (self._repo[parents[0]], self._repo[parents[1]])
2202 parents = (self._repo[parents[0]], self._repo[parents[1]])
2203
2203
2204 files = self._cache.keys()
2204 files = self._cache.keys()
2205 def getfile(repo, memctx, path):
2205 def getfile(repo, memctx, path):
2206 if self._cache[path]['exists']:
2206 if self._cache[path]['exists']:
2207 return memfilectx(repo, path,
2207 return memfilectx(repo, memctx, path,
2208 self._cache[path]['data'],
2208 self._cache[path]['data'],
2209 'l' in self._cache[path]['flags'],
2209 'l' in self._cache[path]['flags'],
2210 'x' in self._cache[path]['flags'],
2210 'x' in self._cache[path]['flags'],
2211 self._cache[path]['copied'],
2211 self._cache[path]['copied'])
2212 memctx)
2213 else:
2212 else:
2214 # Returning None, but including the path in `files`, is
2213 # Returning None, but including the path in `files`, is
2215 # necessary for memctx to register a deletion.
2214 # necessary for memctx to register a deletion.
2216 return None
2215 return None
2217 return memctx(self._repo, parents, text, files, getfile, date=date,
2216 return memctx(self._repo, parents, text, files, getfile, date=date,
2218 extra=extra, user=user, branch=branch, editor=editor)
2217 extra=extra, user=user, branch=branch, editor=editor)
2219
2218
2220 def isdirty(self, path):
2219 def isdirty(self, path):
2221 return path in self._cache
2220 return path in self._cache
2222
2221
2223 def isempty(self):
2222 def isempty(self):
2224 # We need to discard any keys that are actually clean before the empty
2223 # We need to discard any keys that are actually clean before the empty
2225 # commit check.
2224 # commit check.
2226 self._compact()
2225 self._compact()
2227 return len(self._cache) == 0
2226 return len(self._cache) == 0
2228
2227
2229 def clean(self):
2228 def clean(self):
2230 self._cache = {}
2229 self._cache = {}
2231
2230
2232 def _compact(self):
2231 def _compact(self):
2233 """Removes keys from the cache that are actually clean, by comparing
2232 """Removes keys from the cache that are actually clean, by comparing
2234 them with the underlying context.
2233 them with the underlying context.
2235
2234
2236 This can occur during the merge process, e.g. by passing --tool :local
2235 This can occur during the merge process, e.g. by passing --tool :local
2237 to resolve a conflict.
2236 to resolve a conflict.
2238 """
2237 """
2239 keys = []
2238 keys = []
2240 for path in self._cache.keys():
2239 for path in self._cache.keys():
2241 cache = self._cache[path]
2240 cache = self._cache[path]
2242 try:
2241 try:
2243 underlying = self._wrappedctx[path]
2242 underlying = self._wrappedctx[path]
2244 if (underlying.data() == cache['data'] and
2243 if (underlying.data() == cache['data'] and
2245 underlying.flags() == cache['flags']):
2244 underlying.flags() == cache['flags']):
2246 keys.append(path)
2245 keys.append(path)
2247 except error.ManifestLookupError:
2246 except error.ManifestLookupError:
2248 # Path not in the underlying manifest (created).
2247 # Path not in the underlying manifest (created).
2249 continue
2248 continue
2250
2249
2251 for path in keys:
2250 for path in keys:
2252 del self._cache[path]
2251 del self._cache[path]
2253 return keys
2252 return keys
2254
2253
2255 def _markdirty(self, path, exists, data=None, date=None, flags=''):
2254 def _markdirty(self, path, exists, data=None, date=None, flags=''):
2256 self._cache[path] = {
2255 self._cache[path] = {
2257 'exists': exists,
2256 'exists': exists,
2258 'data': data,
2257 'data': data,
2259 'date': date,
2258 'date': date,
2260 'flags': flags,
2259 'flags': flags,
2261 'copied': None,
2260 'copied': None,
2262 }
2261 }
2263
2262
2264 def filectx(self, path, filelog=None):
2263 def filectx(self, path, filelog=None):
2265 return overlayworkingfilectx(self._repo, path, parent=self,
2264 return overlayworkingfilectx(self._repo, path, parent=self,
2266 filelog=filelog)
2265 filelog=filelog)
2267
2266
2268 class overlayworkingfilectx(committablefilectx):
2267 class overlayworkingfilectx(committablefilectx):
2269 """Wrap a ``workingfilectx`` but intercepts all writes into an in-memory
2268 """Wrap a ``workingfilectx`` but intercepts all writes into an in-memory
2270 cache, which can be flushed through later by calling ``flush()``."""
2269 cache, which can be flushed through later by calling ``flush()``."""
2271
2270
2272 def __init__(self, repo, path, filelog=None, parent=None):
2271 def __init__(self, repo, path, filelog=None, parent=None):
2273 super(overlayworkingfilectx, self).__init__(repo, path, filelog,
2272 super(overlayworkingfilectx, self).__init__(repo, path, filelog,
2274 parent)
2273 parent)
2275 self._repo = repo
2274 self._repo = repo
2276 self._parent = parent
2275 self._parent = parent
2277 self._path = path
2276 self._path = path
2278
2277
2279 def cmp(self, fctx):
2278 def cmp(self, fctx):
2280 return self.data() != fctx.data()
2279 return self.data() != fctx.data()
2281
2280
2282 def changectx(self):
2281 def changectx(self):
2283 return self._parent
2282 return self._parent
2284
2283
2285 def data(self):
2284 def data(self):
2286 return self._parent.data(self._path)
2285 return self._parent.data(self._path)
2287
2286
2288 def date(self):
2287 def date(self):
2289 return self._parent.filedate(self._path)
2288 return self._parent.filedate(self._path)
2290
2289
2291 def exists(self):
2290 def exists(self):
2292 return self.lexists()
2291 return self.lexists()
2293
2292
2294 def lexists(self):
2293 def lexists(self):
2295 return self._parent.exists(self._path)
2294 return self._parent.exists(self._path)
2296
2295
2297 def renamed(self):
2296 def renamed(self):
2298 path = self._parent.copydata(self._path)
2297 path = self._parent.copydata(self._path)
2299 if not path:
2298 if not path:
2300 return None
2299 return None
2301 return path, self._changectx._parents[0]._manifest.get(path, nullid)
2300 return path, self._changectx._parents[0]._manifest.get(path, nullid)
2302
2301
2303 def size(self):
2302 def size(self):
2304 return self._parent.size(self._path)
2303 return self._parent.size(self._path)
2305
2304
2306 def markcopied(self, origin):
2305 def markcopied(self, origin):
2307 self._parent.markcopied(self._path, origin)
2306 self._parent.markcopied(self._path, origin)
2308
2307
2309 def audit(self):
2308 def audit(self):
2310 pass
2309 pass
2311
2310
2312 def flags(self):
2311 def flags(self):
2313 return self._parent.flags(self._path)
2312 return self._parent.flags(self._path)
2314
2313
2315 def setflags(self, islink, isexec):
2314 def setflags(self, islink, isexec):
2316 return self._parent.setflags(self._path, islink, isexec)
2315 return self._parent.setflags(self._path, islink, isexec)
2317
2316
2318 def write(self, data, flags, backgroundclose=False):
2317 def write(self, data, flags, backgroundclose=False):
2319 return self._parent.write(self._path, data, flags)
2318 return self._parent.write(self._path, data, flags)
2320
2319
2321 def remove(self, ignoremissing=False):
2320 def remove(self, ignoremissing=False):
2322 return self._parent.remove(self._path)
2321 return self._parent.remove(self._path)
2323
2322
2324 def clearunknown(self):
2323 def clearunknown(self):
2325 pass
2324 pass
2326
2325
2327 class workingcommitctx(workingctx):
2326 class workingcommitctx(workingctx):
2328 """A workingcommitctx object makes access to data related to
2327 """A workingcommitctx object makes access to data related to
2329 the revision being committed convenient.
2328 the revision being committed convenient.
2330
2329
2331 This hides changes in the working directory, if they aren't
2330 This hides changes in the working directory, if they aren't
2332 committed in this context.
2331 committed in this context.
2333 """
2332 """
2334 def __init__(self, repo, changes,
2333 def __init__(self, repo, changes,
2335 text="", user=None, date=None, extra=None):
2334 text="", user=None, date=None, extra=None):
2336 super(workingctx, self).__init__(repo, text, user, date, extra,
2335 super(workingctx, self).__init__(repo, text, user, date, extra,
2337 changes)
2336 changes)
2338
2337
2339 def _dirstatestatus(self, match, ignored=False, clean=False, unknown=False):
2338 def _dirstatestatus(self, match, ignored=False, clean=False, unknown=False):
2340 """Return matched files only in ``self._status``
2339 """Return matched files only in ``self._status``
2341
2340
2342 Uncommitted files appear "clean" via this context, even if
2341 Uncommitted files appear "clean" via this context, even if
2343 they aren't actually so in the working directory.
2342 they aren't actually so in the working directory.
2344 """
2343 """
2345 if clean:
2344 if clean:
2346 clean = [f for f in self._manifest if f not in self._changedset]
2345 clean = [f for f in self._manifest if f not in self._changedset]
2347 else:
2346 else:
2348 clean = []
2347 clean = []
2349 return scmutil.status([f for f in self._status.modified if match(f)],
2348 return scmutil.status([f for f in self._status.modified if match(f)],
2350 [f for f in self._status.added if match(f)],
2349 [f for f in self._status.added if match(f)],
2351 [f for f in self._status.removed if match(f)],
2350 [f for f in self._status.removed if match(f)],
2352 [], [], [], clean)
2351 [], [], [], clean)
2353
2352
2354 @propertycache
2353 @propertycache
2355 def _changedset(self):
2354 def _changedset(self):
2356 """Return the set of files changed in this context
2355 """Return the set of files changed in this context
2357 """
2356 """
2358 changed = set(self._status.modified)
2357 changed = set(self._status.modified)
2359 changed.update(self._status.added)
2358 changed.update(self._status.added)
2360 changed.update(self._status.removed)
2359 changed.update(self._status.removed)
2361 return changed
2360 return changed
2362
2361
2363 def makecachingfilectxfn(func):
2362 def makecachingfilectxfn(func):
2364 """Create a filectxfn that caches based on the path.
2363 """Create a filectxfn that caches based on the path.
2365
2364
2366 We can't use util.cachefunc because it uses all arguments as the cache
2365 We can't use util.cachefunc because it uses all arguments as the cache
2367 key and this creates a cycle since the arguments include the repo and
2366 key and this creates a cycle since the arguments include the repo and
2368 memctx.
2367 memctx.
2369 """
2368 """
2370 cache = {}
2369 cache = {}
2371
2370
2372 def getfilectx(repo, memctx, path):
2371 def getfilectx(repo, memctx, path):
2373 if path not in cache:
2372 if path not in cache:
2374 cache[path] = func(repo, memctx, path)
2373 cache[path] = func(repo, memctx, path)
2375 return cache[path]
2374 return cache[path]
2376
2375
2377 return getfilectx
2376 return getfilectx
2378
2377
2379 def memfilefromctx(ctx):
2378 def memfilefromctx(ctx):
2380 """Given a context return a memfilectx for ctx[path]
2379 """Given a context return a memfilectx for ctx[path]
2381
2380
2382 This is a convenience method for building a memctx based on another
2381 This is a convenience method for building a memctx based on another
2383 context.
2382 context.
2384 """
2383 """
2385 def getfilectx(repo, memctx, path):
2384 def getfilectx(repo, memctx, path):
2386 fctx = ctx[path]
2385 fctx = ctx[path]
2387 # this is weird but apparently we only keep track of one parent
2386 # this is weird but apparently we only keep track of one parent
2388 # (why not only store that instead of a tuple?)
2387 # (why not only store that instead of a tuple?)
2389 copied = fctx.renamed()
2388 copied = fctx.renamed()
2390 if copied:
2389 if copied:
2391 copied = copied[0]
2390 copied = copied[0]
2392 return memfilectx(repo, path, fctx.data(),
2391 return memfilectx(repo, memctx, path, fctx.data(),
2393 islink=fctx.islink(), isexec=fctx.isexec(),
2392 islink=fctx.islink(), isexec=fctx.isexec(),
2394 copied=copied, memctx=memctx)
2393 copied=copied)
2395
2394
2396 return getfilectx
2395 return getfilectx
2397
2396
2398 def memfilefrompatch(patchstore):
2397 def memfilefrompatch(patchstore):
2399 """Given a patch (e.g. patchstore object) return a memfilectx
2398 """Given a patch (e.g. patchstore object) return a memfilectx
2400
2399
2401 This is a convenience method for building a memctx based on a patchstore.
2400 This is a convenience method for building a memctx based on a patchstore.
2402 """
2401 """
2403 def getfilectx(repo, memctx, path):
2402 def getfilectx(repo, memctx, path):
2404 data, mode, copied = patchstore.getfile(path)
2403 data, mode, copied = patchstore.getfile(path)
2405 if data is None:
2404 if data is None:
2406 return None
2405 return None
2407 islink, isexec = mode
2406 islink, isexec = mode
2408 return memfilectx(repo, path, data, islink=islink,
2407 return memfilectx(repo, memctx, path, data, islink=islink,
2409 isexec=isexec, copied=copied,
2408 isexec=isexec, copied=copied)
2410 memctx=memctx)
2411
2409
2412 return getfilectx
2410 return getfilectx
2413
2411
2414 class memctx(committablectx):
2412 class memctx(committablectx):
2415 """Use memctx to perform in-memory commits via localrepo.commitctx().
2413 """Use memctx to perform in-memory commits via localrepo.commitctx().
2416
2414
2417 Revision information is supplied at initialization time while
2415 Revision information is supplied at initialization time while
2418 related files data and is made available through a callback
2416 related files data and is made available through a callback
2419 mechanism. 'repo' is the current localrepo, 'parents' is a
2417 mechanism. 'repo' is the current localrepo, 'parents' is a
2420 sequence of two parent revisions identifiers (pass None for every
2418 sequence of two parent revisions identifiers (pass None for every
2421 missing parent), 'text' is the commit message and 'files' lists
2419 missing parent), 'text' is the commit message and 'files' lists
2422 names of files touched by the revision (normalized and relative to
2420 names of files touched by the revision (normalized and relative to
2423 repository root).
2421 repository root).
2424
2422
2425 filectxfn(repo, memctx, path) is a callable receiving the
2423 filectxfn(repo, memctx, path) is a callable receiving the
2426 repository, the current memctx object and the normalized path of
2424 repository, the current memctx object and the normalized path of
2427 requested file, relative to repository root. It is fired by the
2425 requested file, relative to repository root. It is fired by the
2428 commit function for every file in 'files', but calls order is
2426 commit function for every file in 'files', but calls order is
2429 undefined. If the file is available in the revision being
2427 undefined. If the file is available in the revision being
2430 committed (updated or added), filectxfn returns a memfilectx
2428 committed (updated or added), filectxfn returns a memfilectx
2431 object. If the file was removed, filectxfn return None for recent
2429 object. If the file was removed, filectxfn return None for recent
2432 Mercurial. Moved files are represented by marking the source file
2430 Mercurial. Moved files are represented by marking the source file
2433 removed and the new file added with copy information (see
2431 removed and the new file added with copy information (see
2434 memfilectx).
2432 memfilectx).
2435
2433
2436 user receives the committer name and defaults to current
2434 user receives the committer name and defaults to current
2437 repository username, date is the commit date in any format
2435 repository username, date is the commit date in any format
2438 supported by util.parsedate() and defaults to current date, extra
2436 supported by util.parsedate() and defaults to current date, extra
2439 is a dictionary of metadata or is left empty.
2437 is a dictionary of metadata or is left empty.
2440 """
2438 """
2441
2439
2442 # Mercurial <= 3.1 expects the filectxfn to raise IOError for missing files.
2440 # Mercurial <= 3.1 expects the filectxfn to raise IOError for missing files.
2443 # Extensions that need to retain compatibility across Mercurial 3.1 can use
2441 # Extensions that need to retain compatibility across Mercurial 3.1 can use
2444 # this field to determine what to do in filectxfn.
2442 # this field to determine what to do in filectxfn.
2445 _returnnoneformissingfiles = True
2443 _returnnoneformissingfiles = True
2446
2444
2447 def __init__(self, repo, parents, text, files, filectxfn, user=None,
2445 def __init__(self, repo, parents, text, files, filectxfn, user=None,
2448 date=None, extra=None, branch=None, editor=False):
2446 date=None, extra=None, branch=None, editor=False):
2449 super(memctx, self).__init__(repo, text, user, date, extra)
2447 super(memctx, self).__init__(repo, text, user, date, extra)
2450 self._rev = None
2448 self._rev = None
2451 self._node = None
2449 self._node = None
2452 parents = [(p or nullid) for p in parents]
2450 parents = [(p or nullid) for p in parents]
2453 p1, p2 = parents
2451 p1, p2 = parents
2454 self._parents = [changectx(self._repo, p) for p in (p1, p2)]
2452 self._parents = [changectx(self._repo, p) for p in (p1, p2)]
2455 files = sorted(set(files))
2453 files = sorted(set(files))
2456 self._files = files
2454 self._files = files
2457 if branch is not None:
2455 if branch is not None:
2458 self._extra['branch'] = encoding.fromlocal(branch)
2456 self._extra['branch'] = encoding.fromlocal(branch)
2459 self.substate = {}
2457 self.substate = {}
2460
2458
2461 if isinstance(filectxfn, patch.filestore):
2459 if isinstance(filectxfn, patch.filestore):
2462 filectxfn = memfilefrompatch(filectxfn)
2460 filectxfn = memfilefrompatch(filectxfn)
2463 elif not callable(filectxfn):
2461 elif not callable(filectxfn):
2464 # if store is not callable, wrap it in a function
2462 # if store is not callable, wrap it in a function
2465 filectxfn = memfilefromctx(filectxfn)
2463 filectxfn = memfilefromctx(filectxfn)
2466
2464
2467 # memoizing increases performance for e.g. vcs convert scenarios.
2465 # memoizing increases performance for e.g. vcs convert scenarios.
2468 self._filectxfn = makecachingfilectxfn(filectxfn)
2466 self._filectxfn = makecachingfilectxfn(filectxfn)
2469
2467
2470 if editor:
2468 if editor:
2471 self._text = editor(self._repo, self, [])
2469 self._text = editor(self._repo, self, [])
2472 self._repo.savecommitmessage(self._text)
2470 self._repo.savecommitmessage(self._text)
2473
2471
2474 def filectx(self, path, filelog=None):
2472 def filectx(self, path, filelog=None):
2475 """get a file context from the working directory
2473 """get a file context from the working directory
2476
2474
2477 Returns None if file doesn't exist and should be removed."""
2475 Returns None if file doesn't exist and should be removed."""
2478 return self._filectxfn(self._repo, self, path)
2476 return self._filectxfn(self._repo, self, path)
2479
2477
2480 def commit(self):
2478 def commit(self):
2481 """commit context to the repo"""
2479 """commit context to the repo"""
2482 return self._repo.commitctx(self)
2480 return self._repo.commitctx(self)
2483
2481
2484 @propertycache
2482 @propertycache
2485 def _manifest(self):
2483 def _manifest(self):
2486 """generate a manifest based on the return values of filectxfn"""
2484 """generate a manifest based on the return values of filectxfn"""
2487
2485
2488 # keep this simple for now; just worry about p1
2486 # keep this simple for now; just worry about p1
2489 pctx = self._parents[0]
2487 pctx = self._parents[0]
2490 man = pctx.manifest().copy()
2488 man = pctx.manifest().copy()
2491
2489
2492 for f in self._status.modified:
2490 for f in self._status.modified:
2493 p1node = nullid
2491 p1node = nullid
2494 p2node = nullid
2492 p2node = nullid
2495 p = pctx[f].parents() # if file isn't in pctx, check p2?
2493 p = pctx[f].parents() # if file isn't in pctx, check p2?
2496 if len(p) > 0:
2494 if len(p) > 0:
2497 p1node = p[0].filenode()
2495 p1node = p[0].filenode()
2498 if len(p) > 1:
2496 if len(p) > 1:
2499 p2node = p[1].filenode()
2497 p2node = p[1].filenode()
2500 man[f] = revlog.hash(self[f].data(), p1node, p2node)
2498 man[f] = revlog.hash(self[f].data(), p1node, p2node)
2501
2499
2502 for f in self._status.added:
2500 for f in self._status.added:
2503 man[f] = revlog.hash(self[f].data(), nullid, nullid)
2501 man[f] = revlog.hash(self[f].data(), nullid, nullid)
2504
2502
2505 for f in self._status.removed:
2503 for f in self._status.removed:
2506 if f in man:
2504 if f in man:
2507 del man[f]
2505 del man[f]
2508
2506
2509 return man
2507 return man
2510
2508
2511 @propertycache
2509 @propertycache
2512 def _status(self):
2510 def _status(self):
2513 """Calculate exact status from ``files`` specified at construction
2511 """Calculate exact status from ``files`` specified at construction
2514 """
2512 """
2515 man1 = self.p1().manifest()
2513 man1 = self.p1().manifest()
2516 p2 = self._parents[1]
2514 p2 = self._parents[1]
2517 # "1 < len(self._parents)" can't be used for checking
2515 # "1 < len(self._parents)" can't be used for checking
2518 # existence of the 2nd parent, because "memctx._parents" is
2516 # existence of the 2nd parent, because "memctx._parents" is
2519 # explicitly initialized by the list, of which length is 2.
2517 # explicitly initialized by the list, of which length is 2.
2520 if p2.node() != nullid:
2518 if p2.node() != nullid:
2521 man2 = p2.manifest()
2519 man2 = p2.manifest()
2522 managing = lambda f: f in man1 or f in man2
2520 managing = lambda f: f in man1 or f in man2
2523 else:
2521 else:
2524 managing = lambda f: f in man1
2522 managing = lambda f: f in man1
2525
2523
2526 modified, added, removed = [], [], []
2524 modified, added, removed = [], [], []
2527 for f in self._files:
2525 for f in self._files:
2528 if not managing(f):
2526 if not managing(f):
2529 added.append(f)
2527 added.append(f)
2530 elif self[f]:
2528 elif self[f]:
2531 modified.append(f)
2529 modified.append(f)
2532 else:
2530 else:
2533 removed.append(f)
2531 removed.append(f)
2534
2532
2535 return scmutil.status(modified, added, removed, [], [], [], [])
2533 return scmutil.status(modified, added, removed, [], [], [], [])
2536
2534
2537 class memfilectx(committablefilectx):
2535 class memfilectx(committablefilectx):
2538 """memfilectx represents an in-memory file to commit.
2536 """memfilectx represents an in-memory file to commit.
2539
2537
2540 See memctx and committablefilectx for more details.
2538 See memctx and committablefilectx for more details.
2541 """
2539 """
2542 def __init__(self, repo, path, data, islink=False,
2540 def __init__(self, repo, changectx, path, data, islink=False,
2543 isexec=False, copied=None, memctx=None):
2541 isexec=False, copied=None):
2544 """
2542 """
2545 path is the normalized file path relative to repository root.
2543 path is the normalized file path relative to repository root.
2546 data is the file content as a string.
2544 data is the file content as a string.
2547 islink is True if the file is a symbolic link.
2545 islink is True if the file is a symbolic link.
2548 isexec is True if the file is executable.
2546 isexec is True if the file is executable.
2549 copied is the source file path if current file was copied in the
2547 copied is the source file path if current file was copied in the
2550 revision being committed, or None."""
2548 revision being committed, or None."""
2551 super(memfilectx, self).__init__(repo, path, None, memctx)
2549 super(memfilectx, self).__init__(repo, path, None, changectx)
2552 self._data = data
2550 self._data = data
2553 self._flags = (islink and 'l' or '') + (isexec and 'x' or '')
2551 self._flags = (islink and 'l' or '') + (isexec and 'x' or '')
2554 self._copied = None
2552 self._copied = None
2555 if copied:
2553 if copied:
2556 self._copied = (copied, nullid)
2554 self._copied = (copied, nullid)
2557
2555
2558 def data(self):
2556 def data(self):
2559 return self._data
2557 return self._data
2560
2558
2561 def remove(self, ignoremissing=False):
2559 def remove(self, ignoremissing=False):
2562 """wraps unlink for a repo's working directory"""
2560 """wraps unlink for a repo's working directory"""
2563 # need to figure out what to do here
2561 # need to figure out what to do here
2564 del self._changectx[self._path]
2562 del self._changectx[self._path]
2565
2563
2566 def write(self, data, flags):
2564 def write(self, data, flags):
2567 """wraps repo.wwrite"""
2565 """wraps repo.wwrite"""
2568 self._data = data
2566 self._data = data
2569
2567
2570 class overlayfilectx(committablefilectx):
2568 class overlayfilectx(committablefilectx):
2571 """Like memfilectx but take an original filectx and optional parameters to
2569 """Like memfilectx but take an original filectx and optional parameters to
2572 override parts of it. This is useful when fctx.data() is expensive (i.e.
2570 override parts of it. This is useful when fctx.data() is expensive (i.e.
2573 flag processor is expensive) and raw data, flags, and filenode could be
2571 flag processor is expensive) and raw data, flags, and filenode could be
2574 reused (ex. rebase or mode-only amend a REVIDX_EXTSTORED file).
2572 reused (ex. rebase or mode-only amend a REVIDX_EXTSTORED file).
2575 """
2573 """
2576
2574
2577 def __init__(self, originalfctx, datafunc=None, path=None, flags=None,
2575 def __init__(self, originalfctx, datafunc=None, path=None, flags=None,
2578 copied=None, ctx=None):
2576 copied=None, ctx=None):
2579 """originalfctx: filecontext to duplicate
2577 """originalfctx: filecontext to duplicate
2580
2578
2581 datafunc: None or a function to override data (file content). It is a
2579 datafunc: None or a function to override data (file content). It is a
2582 function to be lazy. path, flags, copied, ctx: None or overridden value
2580 function to be lazy. path, flags, copied, ctx: None or overridden value
2583
2581
2584 copied could be (path, rev), or False. copied could also be just path,
2582 copied could be (path, rev), or False. copied could also be just path,
2585 and will be converted to (path, nullid). This simplifies some callers.
2583 and will be converted to (path, nullid). This simplifies some callers.
2586 """
2584 """
2587
2585
2588 if path is None:
2586 if path is None:
2589 path = originalfctx.path()
2587 path = originalfctx.path()
2590 if ctx is None:
2588 if ctx is None:
2591 ctx = originalfctx.changectx()
2589 ctx = originalfctx.changectx()
2592 ctxmatch = lambda: True
2590 ctxmatch = lambda: True
2593 else:
2591 else:
2594 ctxmatch = lambda: ctx == originalfctx.changectx()
2592 ctxmatch = lambda: ctx == originalfctx.changectx()
2595
2593
2596 repo = originalfctx.repo()
2594 repo = originalfctx.repo()
2597 flog = originalfctx.filelog()
2595 flog = originalfctx.filelog()
2598 super(overlayfilectx, self).__init__(repo, path, flog, ctx)
2596 super(overlayfilectx, self).__init__(repo, path, flog, ctx)
2599
2597
2600 if copied is None:
2598 if copied is None:
2601 copied = originalfctx.renamed()
2599 copied = originalfctx.renamed()
2602 copiedmatch = lambda: True
2600 copiedmatch = lambda: True
2603 else:
2601 else:
2604 if copied and not isinstance(copied, tuple):
2602 if copied and not isinstance(copied, tuple):
2605 # repo._filecommit will recalculate copyrev so nullid is okay
2603 # repo._filecommit will recalculate copyrev so nullid is okay
2606 copied = (copied, nullid)
2604 copied = (copied, nullid)
2607 copiedmatch = lambda: copied == originalfctx.renamed()
2605 copiedmatch = lambda: copied == originalfctx.renamed()
2608
2606
2609 # When data, copied (could affect data), ctx (could affect filelog
2607 # When data, copied (could affect data), ctx (could affect filelog
2610 # parents) are not overridden, rawdata, rawflags, and filenode may be
2608 # parents) are not overridden, rawdata, rawflags, and filenode may be
2611 # reused (repo._filecommit should double check filelog parents).
2609 # reused (repo._filecommit should double check filelog parents).
2612 #
2610 #
2613 # path, flags are not hashed in filelog (but in manifestlog) so they do
2611 # path, flags are not hashed in filelog (but in manifestlog) so they do
2614 # not affect reusable here.
2612 # not affect reusable here.
2615 #
2613 #
2616 # If ctx or copied is overridden to a same value with originalfctx,
2614 # If ctx or copied is overridden to a same value with originalfctx,
2617 # still consider it's reusable. originalfctx.renamed() may be a bit
2615 # still consider it's reusable. originalfctx.renamed() may be a bit
2618 # expensive so it's not called unless necessary. Assuming datafunc is
2616 # expensive so it's not called unless necessary. Assuming datafunc is
2619 # always expensive, do not call it for this "reusable" test.
2617 # always expensive, do not call it for this "reusable" test.
2620 reusable = datafunc is None and ctxmatch() and copiedmatch()
2618 reusable = datafunc is None and ctxmatch() and copiedmatch()
2621
2619
2622 if datafunc is None:
2620 if datafunc is None:
2623 datafunc = originalfctx.data
2621 datafunc = originalfctx.data
2624 if flags is None:
2622 if flags is None:
2625 flags = originalfctx.flags()
2623 flags = originalfctx.flags()
2626
2624
2627 self._datafunc = datafunc
2625 self._datafunc = datafunc
2628 self._flags = flags
2626 self._flags = flags
2629 self._copied = copied
2627 self._copied = copied
2630
2628
2631 if reusable:
2629 if reusable:
2632 # copy extra fields from originalfctx
2630 # copy extra fields from originalfctx
2633 attrs = ['rawdata', 'rawflags', '_filenode', '_filerev']
2631 attrs = ['rawdata', 'rawflags', '_filenode', '_filerev']
2634 for attr_ in attrs:
2632 for attr_ in attrs:
2635 if util.safehasattr(originalfctx, attr_):
2633 if util.safehasattr(originalfctx, attr_):
2636 setattr(self, attr_, getattr(originalfctx, attr_))
2634 setattr(self, attr_, getattr(originalfctx, attr_))
2637
2635
2638 def data(self):
2636 def data(self):
2639 return self._datafunc()
2637 return self._datafunc()
2640
2638
2641 class metadataonlyctx(committablectx):
2639 class metadataonlyctx(committablectx):
2642 """Like memctx but it's reusing the manifest of different commit.
2640 """Like memctx but it's reusing the manifest of different commit.
2643 Intended to be used by lightweight operations that are creating
2641 Intended to be used by lightweight operations that are creating
2644 metadata-only changes.
2642 metadata-only changes.
2645
2643
2646 Revision information is supplied at initialization time. 'repo' is the
2644 Revision information is supplied at initialization time. 'repo' is the
2647 current localrepo, 'ctx' is original revision which manifest we're reuisng
2645 current localrepo, 'ctx' is original revision which manifest we're reuisng
2648 'parents' is a sequence of two parent revisions identifiers (pass None for
2646 'parents' is a sequence of two parent revisions identifiers (pass None for
2649 every missing parent), 'text' is the commit.
2647 every missing parent), 'text' is the commit.
2650
2648
2651 user receives the committer name and defaults to current repository
2649 user receives the committer name and defaults to current repository
2652 username, date is the commit date in any format supported by
2650 username, date is the commit date in any format supported by
2653 util.parsedate() and defaults to current date, extra is a dictionary of
2651 util.parsedate() and defaults to current date, extra is a dictionary of
2654 metadata or is left empty.
2652 metadata or is left empty.
2655 """
2653 """
2656 def __new__(cls, repo, originalctx, *args, **kwargs):
2654 def __new__(cls, repo, originalctx, *args, **kwargs):
2657 return super(metadataonlyctx, cls).__new__(cls, repo)
2655 return super(metadataonlyctx, cls).__new__(cls, repo)
2658
2656
2659 def __init__(self, repo, originalctx, parents=None, text=None, user=None,
2657 def __init__(self, repo, originalctx, parents=None, text=None, user=None,
2660 date=None, extra=None, editor=False):
2658 date=None, extra=None, editor=False):
2661 if text is None:
2659 if text is None:
2662 text = originalctx.description()
2660 text = originalctx.description()
2663 super(metadataonlyctx, self).__init__(repo, text, user, date, extra)
2661 super(metadataonlyctx, self).__init__(repo, text, user, date, extra)
2664 self._rev = None
2662 self._rev = None
2665 self._node = None
2663 self._node = None
2666 self._originalctx = originalctx
2664 self._originalctx = originalctx
2667 self._manifestnode = originalctx.manifestnode()
2665 self._manifestnode = originalctx.manifestnode()
2668 if parents is None:
2666 if parents is None:
2669 parents = originalctx.parents()
2667 parents = originalctx.parents()
2670 else:
2668 else:
2671 parents = [repo[p] for p in parents if p is not None]
2669 parents = [repo[p] for p in parents if p is not None]
2672 parents = parents[:]
2670 parents = parents[:]
2673 while len(parents) < 2:
2671 while len(parents) < 2:
2674 parents.append(repo[nullid])
2672 parents.append(repo[nullid])
2675 p1, p2 = self._parents = parents
2673 p1, p2 = self._parents = parents
2676
2674
2677 # sanity check to ensure that the reused manifest parents are
2675 # sanity check to ensure that the reused manifest parents are
2678 # manifests of our commit parents
2676 # manifests of our commit parents
2679 mp1, mp2 = self.manifestctx().parents
2677 mp1, mp2 = self.manifestctx().parents
2680 if p1 != nullid and p1.manifestnode() != mp1:
2678 if p1 != nullid and p1.manifestnode() != mp1:
2681 raise RuntimeError('can\'t reuse the manifest: '
2679 raise RuntimeError('can\'t reuse the manifest: '
2682 'its p1 doesn\'t match the new ctx p1')
2680 'its p1 doesn\'t match the new ctx p1')
2683 if p2 != nullid and p2.manifestnode() != mp2:
2681 if p2 != nullid and p2.manifestnode() != mp2:
2684 raise RuntimeError('can\'t reuse the manifest: '
2682 raise RuntimeError('can\'t reuse the manifest: '
2685 'its p2 doesn\'t match the new ctx p2')
2683 'its p2 doesn\'t match the new ctx p2')
2686
2684
2687 self._files = originalctx.files()
2685 self._files = originalctx.files()
2688 self.substate = {}
2686 self.substate = {}
2689
2687
2690 if editor:
2688 if editor:
2691 self._text = editor(self._repo, self, [])
2689 self._text = editor(self._repo, self, [])
2692 self._repo.savecommitmessage(self._text)
2690 self._repo.savecommitmessage(self._text)
2693
2691
2694 def manifestnode(self):
2692 def manifestnode(self):
2695 return self._manifestnode
2693 return self._manifestnode
2696
2694
2697 @property
2695 @property
2698 def _manifestctx(self):
2696 def _manifestctx(self):
2699 return self._repo.manifestlog[self._manifestnode]
2697 return self._repo.manifestlog[self._manifestnode]
2700
2698
2701 def filectx(self, path, filelog=None):
2699 def filectx(self, path, filelog=None):
2702 return self._originalctx.filectx(path, filelog=filelog)
2700 return self._originalctx.filectx(path, filelog=filelog)
2703
2701
2704 def commit(self):
2702 def commit(self):
2705 """commit context to the repo"""
2703 """commit context to the repo"""
2706 return self._repo.commitctx(self)
2704 return self._repo.commitctx(self)
2707
2705
2708 @property
2706 @property
2709 def _manifest(self):
2707 def _manifest(self):
2710 return self._originalctx.manifest()
2708 return self._originalctx.manifest()
2711
2709
2712 @propertycache
2710 @propertycache
2713 def _status(self):
2711 def _status(self):
2714 """Calculate exact status from ``files`` specified in the ``origctx``
2712 """Calculate exact status from ``files`` specified in the ``origctx``
2715 and parents manifests.
2713 and parents manifests.
2716 """
2714 """
2717 man1 = self.p1().manifest()
2715 man1 = self.p1().manifest()
2718 p2 = self._parents[1]
2716 p2 = self._parents[1]
2719 # "1 < len(self._parents)" can't be used for checking
2717 # "1 < len(self._parents)" can't be used for checking
2720 # existence of the 2nd parent, because "metadataonlyctx._parents" is
2718 # existence of the 2nd parent, because "metadataonlyctx._parents" is
2721 # explicitly initialized by the list, of which length is 2.
2719 # explicitly initialized by the list, of which length is 2.
2722 if p2.node() != nullid:
2720 if p2.node() != nullid:
2723 man2 = p2.manifest()
2721 man2 = p2.manifest()
2724 managing = lambda f: f in man1 or f in man2
2722 managing = lambda f: f in man1 or f in man2
2725 else:
2723 else:
2726 managing = lambda f: f in man1
2724 managing = lambda f: f in man1
2727
2725
2728 modified, added, removed = [], [], []
2726 modified, added, removed = [], [], []
2729 for f in self._files:
2727 for f in self._files:
2730 if not managing(f):
2728 if not managing(f):
2731 added.append(f)
2729 added.append(f)
2732 elif f in self:
2730 elif f in self:
2733 modified.append(f)
2731 modified.append(f)
2734 else:
2732 else:
2735 removed.append(f)
2733 removed.append(f)
2736
2734
2737 return scmutil.status(modified, added, removed, [], [], [], [])
2735 return scmutil.status(modified, added, removed, [], [], [], [])
2738
2736
2739 class arbitraryfilectx(object):
2737 class arbitraryfilectx(object):
2740 """Allows you to use filectx-like functions on a file in an arbitrary
2738 """Allows you to use filectx-like functions on a file in an arbitrary
2741 location on disk, possibly not in the working directory.
2739 location on disk, possibly not in the working directory.
2742 """
2740 """
2743 def __init__(self, path, repo=None):
2741 def __init__(self, path, repo=None):
2744 # Repo is optional because contrib/simplemerge uses this class.
2742 # Repo is optional because contrib/simplemerge uses this class.
2745 self._repo = repo
2743 self._repo = repo
2746 self._path = path
2744 self._path = path
2747
2745
2748 def cmp(self, fctx):
2746 def cmp(self, fctx):
2749 # filecmp follows symlinks whereas `cmp` should not, so skip the fast
2747 # filecmp follows symlinks whereas `cmp` should not, so skip the fast
2750 # path if either side is a symlink.
2748 # path if either side is a symlink.
2751 symlinks = ('l' in self.flags() or 'l' in fctx.flags())
2749 symlinks = ('l' in self.flags() or 'l' in fctx.flags())
2752 if not symlinks and isinstance(fctx, workingfilectx) and self._repo:
2750 if not symlinks and isinstance(fctx, workingfilectx) and self._repo:
2753 # Add a fast-path for merge if both sides are disk-backed.
2751 # Add a fast-path for merge if both sides are disk-backed.
2754 # Note that filecmp uses the opposite return values (True if same)
2752 # Note that filecmp uses the opposite return values (True if same)
2755 # from our cmp functions (True if different).
2753 # from our cmp functions (True if different).
2756 return not filecmp.cmp(self.path(), self._repo.wjoin(fctx.path()))
2754 return not filecmp.cmp(self.path(), self._repo.wjoin(fctx.path()))
2757 return self.data() != fctx.data()
2755 return self.data() != fctx.data()
2758
2756
2759 def path(self):
2757 def path(self):
2760 return self._path
2758 return self._path
2761
2759
2762 def flags(self):
2760 def flags(self):
2763 return ''
2761 return ''
2764
2762
2765 def data(self):
2763 def data(self):
2766 return util.readfile(self._path)
2764 return util.readfile(self._path)
2767
2765
2768 def decodeddata(self):
2766 def decodeddata(self):
2769 with open(self._path, "rb") as f:
2767 with open(self._path, "rb") as f:
2770 return f.read()
2768 return f.read()
2771
2769
2772 def remove(self):
2770 def remove(self):
2773 util.unlink(self._path)
2771 util.unlink(self._path)
2774
2772
2775 def write(self, data, flags):
2773 def write(self, data, flags):
2776 assert not flags
2774 assert not flags
2777 with open(self._path, "w") as f:
2775 with open(self._path, "w") as f:
2778 f.write(data)
2776 f.write(data)
@@ -1,2457 +1,2458 b''
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import codecs
10 import codecs
11 import collections
11 import collections
12 import difflib
12 import difflib
13 import errno
13 import errno
14 import operator
14 import operator
15 import os
15 import os
16 import random
16 import random
17 import socket
17 import socket
18 import ssl
18 import ssl
19 import string
19 import string
20 import sys
20 import sys
21 import tempfile
21 import tempfile
22 import time
22 import time
23
23
24 from .i18n import _
24 from .i18n import _
25 from .node import (
25 from .node import (
26 bin,
26 bin,
27 hex,
27 hex,
28 nullhex,
28 nullhex,
29 nullid,
29 nullid,
30 nullrev,
30 nullrev,
31 short,
31 short,
32 )
32 )
33 from . import (
33 from . import (
34 bundle2,
34 bundle2,
35 changegroup,
35 changegroup,
36 cmdutil,
36 cmdutil,
37 color,
37 color,
38 context,
38 context,
39 dagparser,
39 dagparser,
40 dagutil,
40 dagutil,
41 encoding,
41 encoding,
42 error,
42 error,
43 exchange,
43 exchange,
44 extensions,
44 extensions,
45 filemerge,
45 filemerge,
46 fileset,
46 fileset,
47 formatter,
47 formatter,
48 hg,
48 hg,
49 localrepo,
49 localrepo,
50 lock as lockmod,
50 lock as lockmod,
51 merge as mergemod,
51 merge as mergemod,
52 obsolete,
52 obsolete,
53 obsutil,
53 obsutil,
54 phases,
54 phases,
55 policy,
55 policy,
56 pvec,
56 pvec,
57 pycompat,
57 pycompat,
58 registrar,
58 registrar,
59 repair,
59 repair,
60 revlog,
60 revlog,
61 revset,
61 revset,
62 revsetlang,
62 revsetlang,
63 scmutil,
63 scmutil,
64 setdiscovery,
64 setdiscovery,
65 simplemerge,
65 simplemerge,
66 smartset,
66 smartset,
67 sslutil,
67 sslutil,
68 streamclone,
68 streamclone,
69 templater,
69 templater,
70 treediscovery,
70 treediscovery,
71 upgrade,
71 upgrade,
72 util,
72 util,
73 vfs as vfsmod,
73 vfs as vfsmod,
74 )
74 )
75
75
76 release = lockmod.release
76 release = lockmod.release
77
77
78 command = registrar.command()
78 command = registrar.command()
79
79
80 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
80 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
81 def debugancestor(ui, repo, *args):
81 def debugancestor(ui, repo, *args):
82 """find the ancestor revision of two revisions in a given index"""
82 """find the ancestor revision of two revisions in a given index"""
83 if len(args) == 3:
83 if len(args) == 3:
84 index, rev1, rev2 = args
84 index, rev1, rev2 = args
85 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
85 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
86 lookup = r.lookup
86 lookup = r.lookup
87 elif len(args) == 2:
87 elif len(args) == 2:
88 if not repo:
88 if not repo:
89 raise error.Abort(_('there is no Mercurial repository here '
89 raise error.Abort(_('there is no Mercurial repository here '
90 '(.hg not found)'))
90 '(.hg not found)'))
91 rev1, rev2 = args
91 rev1, rev2 = args
92 r = repo.changelog
92 r = repo.changelog
93 lookup = repo.lookup
93 lookup = repo.lookup
94 else:
94 else:
95 raise error.Abort(_('either two or three arguments required'))
95 raise error.Abort(_('either two or three arguments required'))
96 a = r.ancestor(lookup(rev1), lookup(rev2))
96 a = r.ancestor(lookup(rev1), lookup(rev2))
97 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
97 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
98
98
99 @command('debugapplystreamclonebundle', [], 'FILE')
99 @command('debugapplystreamclonebundle', [], 'FILE')
100 def debugapplystreamclonebundle(ui, repo, fname):
100 def debugapplystreamclonebundle(ui, repo, fname):
101 """apply a stream clone bundle file"""
101 """apply a stream clone bundle file"""
102 f = hg.openpath(ui, fname)
102 f = hg.openpath(ui, fname)
103 gen = exchange.readbundle(ui, f, fname)
103 gen = exchange.readbundle(ui, f, fname)
104 gen.apply(repo)
104 gen.apply(repo)
105
105
106 @command('debugbuilddag',
106 @command('debugbuilddag',
107 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
107 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
108 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
108 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
109 ('n', 'new-file', None, _('add new file at each rev'))],
109 ('n', 'new-file', None, _('add new file at each rev'))],
110 _('[OPTION]... [TEXT]'))
110 _('[OPTION]... [TEXT]'))
111 def debugbuilddag(ui, repo, text=None,
111 def debugbuilddag(ui, repo, text=None,
112 mergeable_file=False,
112 mergeable_file=False,
113 overwritten_file=False,
113 overwritten_file=False,
114 new_file=False):
114 new_file=False):
115 """builds a repo with a given DAG from scratch in the current empty repo
115 """builds a repo with a given DAG from scratch in the current empty repo
116
116
117 The description of the DAG is read from stdin if not given on the
117 The description of the DAG is read from stdin if not given on the
118 command line.
118 command line.
119
119
120 Elements:
120 Elements:
121
121
122 - "+n" is a linear run of n nodes based on the current default parent
122 - "+n" is a linear run of n nodes based on the current default parent
123 - "." is a single node based on the current default parent
123 - "." is a single node based on the current default parent
124 - "$" resets the default parent to null (implied at the start);
124 - "$" resets the default parent to null (implied at the start);
125 otherwise the default parent is always the last node created
125 otherwise the default parent is always the last node created
126 - "<p" sets the default parent to the backref p
126 - "<p" sets the default parent to the backref p
127 - "*p" is a fork at parent p, which is a backref
127 - "*p" is a fork at parent p, which is a backref
128 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
128 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
129 - "/p2" is a merge of the preceding node and p2
129 - "/p2" is a merge of the preceding node and p2
130 - ":tag" defines a local tag for the preceding node
130 - ":tag" defines a local tag for the preceding node
131 - "@branch" sets the named branch for subsequent nodes
131 - "@branch" sets the named branch for subsequent nodes
132 - "#...\\n" is a comment up to the end of the line
132 - "#...\\n" is a comment up to the end of the line
133
133
134 Whitespace between the above elements is ignored.
134 Whitespace between the above elements is ignored.
135
135
136 A backref is either
136 A backref is either
137
137
138 - a number n, which references the node curr-n, where curr is the current
138 - a number n, which references the node curr-n, where curr is the current
139 node, or
139 node, or
140 - the name of a local tag you placed earlier using ":tag", or
140 - the name of a local tag you placed earlier using ":tag", or
141 - empty to denote the default parent.
141 - empty to denote the default parent.
142
142
143 All string valued-elements are either strictly alphanumeric, or must
143 All string valued-elements are either strictly alphanumeric, or must
144 be enclosed in double quotes ("..."), with "\\" as escape character.
144 be enclosed in double quotes ("..."), with "\\" as escape character.
145 """
145 """
146
146
147 if text is None:
147 if text is None:
148 ui.status(_("reading DAG from stdin\n"))
148 ui.status(_("reading DAG from stdin\n"))
149 text = ui.fin.read()
149 text = ui.fin.read()
150
150
151 cl = repo.changelog
151 cl = repo.changelog
152 if len(cl) > 0:
152 if len(cl) > 0:
153 raise error.Abort(_('repository is not empty'))
153 raise error.Abort(_('repository is not empty'))
154
154
155 # determine number of revs in DAG
155 # determine number of revs in DAG
156 total = 0
156 total = 0
157 for type, data in dagparser.parsedag(text):
157 for type, data in dagparser.parsedag(text):
158 if type == 'n':
158 if type == 'n':
159 total += 1
159 total += 1
160
160
161 if mergeable_file:
161 if mergeable_file:
162 linesperrev = 2
162 linesperrev = 2
163 # make a file with k lines per rev
163 # make a file with k lines per rev
164 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
164 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
165 initialmergedlines.append("")
165 initialmergedlines.append("")
166
166
167 tags = []
167 tags = []
168
168
169 wlock = lock = tr = None
169 wlock = lock = tr = None
170 try:
170 try:
171 wlock = repo.wlock()
171 wlock = repo.wlock()
172 lock = repo.lock()
172 lock = repo.lock()
173 tr = repo.transaction("builddag")
173 tr = repo.transaction("builddag")
174
174
175 at = -1
175 at = -1
176 atbranch = 'default'
176 atbranch = 'default'
177 nodeids = []
177 nodeids = []
178 id = 0
178 id = 0
179 ui.progress(_('building'), id, unit=_('revisions'), total=total)
179 ui.progress(_('building'), id, unit=_('revisions'), total=total)
180 for type, data in dagparser.parsedag(text):
180 for type, data in dagparser.parsedag(text):
181 if type == 'n':
181 if type == 'n':
182 ui.note(('node %s\n' % str(data)))
182 ui.note(('node %s\n' % str(data)))
183 id, ps = data
183 id, ps = data
184
184
185 files = []
185 files = []
186 filecontent = {}
186 filecontent = {}
187
187
188 p2 = None
188 p2 = None
189 if mergeable_file:
189 if mergeable_file:
190 fn = "mf"
190 fn = "mf"
191 p1 = repo[ps[0]]
191 p1 = repo[ps[0]]
192 if len(ps) > 1:
192 if len(ps) > 1:
193 p2 = repo[ps[1]]
193 p2 = repo[ps[1]]
194 pa = p1.ancestor(p2)
194 pa = p1.ancestor(p2)
195 base, local, other = [x[fn].data() for x in (pa, p1,
195 base, local, other = [x[fn].data() for x in (pa, p1,
196 p2)]
196 p2)]
197 m3 = simplemerge.Merge3Text(base, local, other)
197 m3 = simplemerge.Merge3Text(base, local, other)
198 ml = [l.strip() for l in m3.merge_lines()]
198 ml = [l.strip() for l in m3.merge_lines()]
199 ml.append("")
199 ml.append("")
200 elif at > 0:
200 elif at > 0:
201 ml = p1[fn].data().split("\n")
201 ml = p1[fn].data().split("\n")
202 else:
202 else:
203 ml = initialmergedlines
203 ml = initialmergedlines
204 ml[id * linesperrev] += " r%i" % id
204 ml[id * linesperrev] += " r%i" % id
205 mergedtext = "\n".join(ml)
205 mergedtext = "\n".join(ml)
206 files.append(fn)
206 files.append(fn)
207 filecontent[fn] = mergedtext
207 filecontent[fn] = mergedtext
208
208
209 if overwritten_file:
209 if overwritten_file:
210 fn = "of"
210 fn = "of"
211 files.append(fn)
211 files.append(fn)
212 filecontent[fn] = "r%i\n" % id
212 filecontent[fn] = "r%i\n" % id
213
213
214 if new_file:
214 if new_file:
215 fn = "nf%i" % id
215 fn = "nf%i" % id
216 files.append(fn)
216 files.append(fn)
217 filecontent[fn] = "r%i\n" % id
217 filecontent[fn] = "r%i\n" % id
218 if len(ps) > 1:
218 if len(ps) > 1:
219 if not p2:
219 if not p2:
220 p2 = repo[ps[1]]
220 p2 = repo[ps[1]]
221 for fn in p2:
221 for fn in p2:
222 if fn.startswith("nf"):
222 if fn.startswith("nf"):
223 files.append(fn)
223 files.append(fn)
224 filecontent[fn] = p2[fn].data()
224 filecontent[fn] = p2[fn].data()
225
225
226 def fctxfn(repo, cx, path):
226 def fctxfn(repo, cx, path):
227 if path in filecontent:
227 if path in filecontent:
228 return context.memfilectx(repo, path, filecontent[path])
228 return context.memfilectx(repo, cx, path,
229 filecontent[path])
229 return None
230 return None
230
231
231 if len(ps) == 0 or ps[0] < 0:
232 if len(ps) == 0 or ps[0] < 0:
232 pars = [None, None]
233 pars = [None, None]
233 elif len(ps) == 1:
234 elif len(ps) == 1:
234 pars = [nodeids[ps[0]], None]
235 pars = [nodeids[ps[0]], None]
235 else:
236 else:
236 pars = [nodeids[p] for p in ps]
237 pars = [nodeids[p] for p in ps]
237 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
238 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
238 date=(id, 0),
239 date=(id, 0),
239 user="debugbuilddag",
240 user="debugbuilddag",
240 extra={'branch': atbranch})
241 extra={'branch': atbranch})
241 nodeid = repo.commitctx(cx)
242 nodeid = repo.commitctx(cx)
242 nodeids.append(nodeid)
243 nodeids.append(nodeid)
243 at = id
244 at = id
244 elif type == 'l':
245 elif type == 'l':
245 id, name = data
246 id, name = data
246 ui.note(('tag %s\n' % name))
247 ui.note(('tag %s\n' % name))
247 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
248 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
248 elif type == 'a':
249 elif type == 'a':
249 ui.note(('branch %s\n' % data))
250 ui.note(('branch %s\n' % data))
250 atbranch = data
251 atbranch = data
251 ui.progress(_('building'), id, unit=_('revisions'), total=total)
252 ui.progress(_('building'), id, unit=_('revisions'), total=total)
252 tr.close()
253 tr.close()
253
254
254 if tags:
255 if tags:
255 repo.vfs.write("localtags", "".join(tags))
256 repo.vfs.write("localtags", "".join(tags))
256 finally:
257 finally:
257 ui.progress(_('building'), None)
258 ui.progress(_('building'), None)
258 release(tr, lock, wlock)
259 release(tr, lock, wlock)
259
260
260 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
261 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
261 indent_string = ' ' * indent
262 indent_string = ' ' * indent
262 if all:
263 if all:
263 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
264 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
264 % indent_string)
265 % indent_string)
265
266
266 def showchunks(named):
267 def showchunks(named):
267 ui.write("\n%s%s\n" % (indent_string, named))
268 ui.write("\n%s%s\n" % (indent_string, named))
268 for deltadata in gen.deltaiter():
269 for deltadata in gen.deltaiter():
269 node, p1, p2, cs, deltabase, delta, flags = deltadata
270 node, p1, p2, cs, deltabase, delta, flags = deltadata
270 ui.write("%s%s %s %s %s %s %s\n" %
271 ui.write("%s%s %s %s %s %s %s\n" %
271 (indent_string, hex(node), hex(p1), hex(p2),
272 (indent_string, hex(node), hex(p1), hex(p2),
272 hex(cs), hex(deltabase), len(delta)))
273 hex(cs), hex(deltabase), len(delta)))
273
274
274 chunkdata = gen.changelogheader()
275 chunkdata = gen.changelogheader()
275 showchunks("changelog")
276 showchunks("changelog")
276 chunkdata = gen.manifestheader()
277 chunkdata = gen.manifestheader()
277 showchunks("manifest")
278 showchunks("manifest")
278 for chunkdata in iter(gen.filelogheader, {}):
279 for chunkdata in iter(gen.filelogheader, {}):
279 fname = chunkdata['filename']
280 fname = chunkdata['filename']
280 showchunks(fname)
281 showchunks(fname)
281 else:
282 else:
282 if isinstance(gen, bundle2.unbundle20):
283 if isinstance(gen, bundle2.unbundle20):
283 raise error.Abort(_('use debugbundle2 for this file'))
284 raise error.Abort(_('use debugbundle2 for this file'))
284 chunkdata = gen.changelogheader()
285 chunkdata = gen.changelogheader()
285 for deltadata in gen.deltaiter():
286 for deltadata in gen.deltaiter():
286 node, p1, p2, cs, deltabase, delta, flags = deltadata
287 node, p1, p2, cs, deltabase, delta, flags = deltadata
287 ui.write("%s%s\n" % (indent_string, hex(node)))
288 ui.write("%s%s\n" % (indent_string, hex(node)))
288
289
289 def _debugobsmarkers(ui, part, indent=0, **opts):
290 def _debugobsmarkers(ui, part, indent=0, **opts):
290 """display version and markers contained in 'data'"""
291 """display version and markers contained in 'data'"""
291 opts = pycompat.byteskwargs(opts)
292 opts = pycompat.byteskwargs(opts)
292 data = part.read()
293 data = part.read()
293 indent_string = ' ' * indent
294 indent_string = ' ' * indent
294 try:
295 try:
295 version, markers = obsolete._readmarkers(data)
296 version, markers = obsolete._readmarkers(data)
296 except error.UnknownVersion as exc:
297 except error.UnknownVersion as exc:
297 msg = "%sunsupported version: %s (%d bytes)\n"
298 msg = "%sunsupported version: %s (%d bytes)\n"
298 msg %= indent_string, exc.version, len(data)
299 msg %= indent_string, exc.version, len(data)
299 ui.write(msg)
300 ui.write(msg)
300 else:
301 else:
301 msg = "%sversion: %d (%d bytes)\n"
302 msg = "%sversion: %d (%d bytes)\n"
302 msg %= indent_string, version, len(data)
303 msg %= indent_string, version, len(data)
303 ui.write(msg)
304 ui.write(msg)
304 fm = ui.formatter('debugobsolete', opts)
305 fm = ui.formatter('debugobsolete', opts)
305 for rawmarker in sorted(markers):
306 for rawmarker in sorted(markers):
306 m = obsutil.marker(None, rawmarker)
307 m = obsutil.marker(None, rawmarker)
307 fm.startitem()
308 fm.startitem()
308 fm.plain(indent_string)
309 fm.plain(indent_string)
309 cmdutil.showmarker(fm, m)
310 cmdutil.showmarker(fm, m)
310 fm.end()
311 fm.end()
311
312
312 def _debugphaseheads(ui, data, indent=0):
313 def _debugphaseheads(ui, data, indent=0):
313 """display version and markers contained in 'data'"""
314 """display version and markers contained in 'data'"""
314 indent_string = ' ' * indent
315 indent_string = ' ' * indent
315 headsbyphase = phases.binarydecode(data)
316 headsbyphase = phases.binarydecode(data)
316 for phase in phases.allphases:
317 for phase in phases.allphases:
317 for head in headsbyphase[phase]:
318 for head in headsbyphase[phase]:
318 ui.write(indent_string)
319 ui.write(indent_string)
319 ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
320 ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
320
321
321 def _quasirepr(thing):
322 def _quasirepr(thing):
322 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
323 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
323 return '{%s}' % (
324 return '{%s}' % (
324 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
325 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
325 return pycompat.bytestr(repr(thing))
326 return pycompat.bytestr(repr(thing))
326
327
327 def _debugbundle2(ui, gen, all=None, **opts):
328 def _debugbundle2(ui, gen, all=None, **opts):
328 """lists the contents of a bundle2"""
329 """lists the contents of a bundle2"""
329 if not isinstance(gen, bundle2.unbundle20):
330 if not isinstance(gen, bundle2.unbundle20):
330 raise error.Abort(_('not a bundle2 file'))
331 raise error.Abort(_('not a bundle2 file'))
331 ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
332 ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
332 parttypes = opts.get(r'part_type', [])
333 parttypes = opts.get(r'part_type', [])
333 for part in gen.iterparts():
334 for part in gen.iterparts():
334 if parttypes and part.type not in parttypes:
335 if parttypes and part.type not in parttypes:
335 continue
336 continue
336 ui.write('%s -- %s\n' % (part.type, _quasirepr(part.params)))
337 ui.write('%s -- %s\n' % (part.type, _quasirepr(part.params)))
337 if part.type == 'changegroup':
338 if part.type == 'changegroup':
338 version = part.params.get('version', '01')
339 version = part.params.get('version', '01')
339 cg = changegroup.getunbundler(version, part, 'UN')
340 cg = changegroup.getunbundler(version, part, 'UN')
340 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
341 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
341 if part.type == 'obsmarkers':
342 if part.type == 'obsmarkers':
342 _debugobsmarkers(ui, part, indent=4, **opts)
343 _debugobsmarkers(ui, part, indent=4, **opts)
343 if part.type == 'phase-heads':
344 if part.type == 'phase-heads':
344 _debugphaseheads(ui, part, indent=4)
345 _debugphaseheads(ui, part, indent=4)
345
346
346 @command('debugbundle',
347 @command('debugbundle',
347 [('a', 'all', None, _('show all details')),
348 [('a', 'all', None, _('show all details')),
348 ('', 'part-type', [], _('show only the named part type')),
349 ('', 'part-type', [], _('show only the named part type')),
349 ('', 'spec', None, _('print the bundlespec of the bundle'))],
350 ('', 'spec', None, _('print the bundlespec of the bundle'))],
350 _('FILE'),
351 _('FILE'),
351 norepo=True)
352 norepo=True)
352 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
353 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
353 """lists the contents of a bundle"""
354 """lists the contents of a bundle"""
354 with hg.openpath(ui, bundlepath) as f:
355 with hg.openpath(ui, bundlepath) as f:
355 if spec:
356 if spec:
356 spec = exchange.getbundlespec(ui, f)
357 spec = exchange.getbundlespec(ui, f)
357 ui.write('%s\n' % spec)
358 ui.write('%s\n' % spec)
358 return
359 return
359
360
360 gen = exchange.readbundle(ui, f, bundlepath)
361 gen = exchange.readbundle(ui, f, bundlepath)
361 if isinstance(gen, bundle2.unbundle20):
362 if isinstance(gen, bundle2.unbundle20):
362 return _debugbundle2(ui, gen, all=all, **opts)
363 return _debugbundle2(ui, gen, all=all, **opts)
363 _debugchangegroup(ui, gen, all=all, **opts)
364 _debugchangegroup(ui, gen, all=all, **opts)
364
365
365 @command('debugcapabilities',
366 @command('debugcapabilities',
366 [], _('PATH'),
367 [], _('PATH'),
367 norepo=True)
368 norepo=True)
368 def debugcapabilities(ui, path, **opts):
369 def debugcapabilities(ui, path, **opts):
369 """lists the capabilities of a remote peer"""
370 """lists the capabilities of a remote peer"""
370 peer = hg.peer(ui, opts, path)
371 peer = hg.peer(ui, opts, path)
371 caps = peer.capabilities()
372 caps = peer.capabilities()
372 ui.write(('Main capabilities:\n'))
373 ui.write(('Main capabilities:\n'))
373 for c in sorted(caps):
374 for c in sorted(caps):
374 ui.write((' %s\n') % c)
375 ui.write((' %s\n') % c)
375 b2caps = bundle2.bundle2caps(peer)
376 b2caps = bundle2.bundle2caps(peer)
376 if b2caps:
377 if b2caps:
377 ui.write(('Bundle2 capabilities:\n'))
378 ui.write(('Bundle2 capabilities:\n'))
378 for key, values in sorted(b2caps.iteritems()):
379 for key, values in sorted(b2caps.iteritems()):
379 ui.write((' %s\n') % key)
380 ui.write((' %s\n') % key)
380 for v in values:
381 for v in values:
381 ui.write((' %s\n') % v)
382 ui.write((' %s\n') % v)
382
383
383 @command('debugcheckstate', [], '')
384 @command('debugcheckstate', [], '')
384 def debugcheckstate(ui, repo):
385 def debugcheckstate(ui, repo):
385 """validate the correctness of the current dirstate"""
386 """validate the correctness of the current dirstate"""
386 parent1, parent2 = repo.dirstate.parents()
387 parent1, parent2 = repo.dirstate.parents()
387 m1 = repo[parent1].manifest()
388 m1 = repo[parent1].manifest()
388 m2 = repo[parent2].manifest()
389 m2 = repo[parent2].manifest()
389 errors = 0
390 errors = 0
390 for f in repo.dirstate:
391 for f in repo.dirstate:
391 state = repo.dirstate[f]
392 state = repo.dirstate[f]
392 if state in "nr" and f not in m1:
393 if state in "nr" and f not in m1:
393 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
394 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
394 errors += 1
395 errors += 1
395 if state in "a" and f in m1:
396 if state in "a" and f in m1:
396 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
397 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
397 errors += 1
398 errors += 1
398 if state in "m" and f not in m1 and f not in m2:
399 if state in "m" and f not in m1 and f not in m2:
399 ui.warn(_("%s in state %s, but not in either manifest\n") %
400 ui.warn(_("%s in state %s, but not in either manifest\n") %
400 (f, state))
401 (f, state))
401 errors += 1
402 errors += 1
402 for f in m1:
403 for f in m1:
403 state = repo.dirstate[f]
404 state = repo.dirstate[f]
404 if state not in "nrm":
405 if state not in "nrm":
405 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
406 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
406 errors += 1
407 errors += 1
407 if errors:
408 if errors:
408 error = _(".hg/dirstate inconsistent with current parent's manifest")
409 error = _(".hg/dirstate inconsistent with current parent's manifest")
409 raise error.Abort(error)
410 raise error.Abort(error)
410
411
411 @command('debugcolor',
412 @command('debugcolor',
412 [('', 'style', None, _('show all configured styles'))],
413 [('', 'style', None, _('show all configured styles'))],
413 'hg debugcolor')
414 'hg debugcolor')
414 def debugcolor(ui, repo, **opts):
415 def debugcolor(ui, repo, **opts):
415 """show available color, effects or style"""
416 """show available color, effects or style"""
416 ui.write(('color mode: %s\n') % ui._colormode)
417 ui.write(('color mode: %s\n') % ui._colormode)
417 if opts.get(r'style'):
418 if opts.get(r'style'):
418 return _debugdisplaystyle(ui)
419 return _debugdisplaystyle(ui)
419 else:
420 else:
420 return _debugdisplaycolor(ui)
421 return _debugdisplaycolor(ui)
421
422
422 def _debugdisplaycolor(ui):
423 def _debugdisplaycolor(ui):
423 ui = ui.copy()
424 ui = ui.copy()
424 ui._styles.clear()
425 ui._styles.clear()
425 for effect in color._activeeffects(ui).keys():
426 for effect in color._activeeffects(ui).keys():
426 ui._styles[effect] = effect
427 ui._styles[effect] = effect
427 if ui._terminfoparams:
428 if ui._terminfoparams:
428 for k, v in ui.configitems('color'):
429 for k, v in ui.configitems('color'):
429 if k.startswith('color.'):
430 if k.startswith('color.'):
430 ui._styles[k] = k[6:]
431 ui._styles[k] = k[6:]
431 elif k.startswith('terminfo.'):
432 elif k.startswith('terminfo.'):
432 ui._styles[k] = k[9:]
433 ui._styles[k] = k[9:]
433 ui.write(_('available colors:\n'))
434 ui.write(_('available colors:\n'))
434 # sort label with a '_' after the other to group '_background' entry.
435 # sort label with a '_' after the other to group '_background' entry.
435 items = sorted(ui._styles.items(),
436 items = sorted(ui._styles.items(),
436 key=lambda i: ('_' in i[0], i[0], i[1]))
437 key=lambda i: ('_' in i[0], i[0], i[1]))
437 for colorname, label in items:
438 for colorname, label in items:
438 ui.write(('%s\n') % colorname, label=label)
439 ui.write(('%s\n') % colorname, label=label)
439
440
440 def _debugdisplaystyle(ui):
441 def _debugdisplaystyle(ui):
441 ui.write(_('available style:\n'))
442 ui.write(_('available style:\n'))
442 width = max(len(s) for s in ui._styles)
443 width = max(len(s) for s in ui._styles)
443 for label, effects in sorted(ui._styles.items()):
444 for label, effects in sorted(ui._styles.items()):
444 ui.write('%s' % label, label=label)
445 ui.write('%s' % label, label=label)
445 if effects:
446 if effects:
446 # 50
447 # 50
447 ui.write(': ')
448 ui.write(': ')
448 ui.write(' ' * (max(0, width - len(label))))
449 ui.write(' ' * (max(0, width - len(label))))
449 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
450 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
450 ui.write('\n')
451 ui.write('\n')
451
452
452 @command('debugcreatestreamclonebundle', [], 'FILE')
453 @command('debugcreatestreamclonebundle', [], 'FILE')
453 def debugcreatestreamclonebundle(ui, repo, fname):
454 def debugcreatestreamclonebundle(ui, repo, fname):
454 """create a stream clone bundle file
455 """create a stream clone bundle file
455
456
456 Stream bundles are special bundles that are essentially archives of
457 Stream bundles are special bundles that are essentially archives of
457 revlog files. They are commonly used for cloning very quickly.
458 revlog files. They are commonly used for cloning very quickly.
458 """
459 """
459 # TODO we may want to turn this into an abort when this functionality
460 # TODO we may want to turn this into an abort when this functionality
460 # is moved into `hg bundle`.
461 # is moved into `hg bundle`.
461 if phases.hassecret(repo):
462 if phases.hassecret(repo):
462 ui.warn(_('(warning: stream clone bundle will contain secret '
463 ui.warn(_('(warning: stream clone bundle will contain secret '
463 'revisions)\n'))
464 'revisions)\n'))
464
465
465 requirements, gen = streamclone.generatebundlev1(repo)
466 requirements, gen = streamclone.generatebundlev1(repo)
466 changegroup.writechunks(ui, gen, fname)
467 changegroup.writechunks(ui, gen, fname)
467
468
468 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
469 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
469
470
470 @command('debugdag',
471 @command('debugdag',
471 [('t', 'tags', None, _('use tags as labels')),
472 [('t', 'tags', None, _('use tags as labels')),
472 ('b', 'branches', None, _('annotate with branch names')),
473 ('b', 'branches', None, _('annotate with branch names')),
473 ('', 'dots', None, _('use dots for runs')),
474 ('', 'dots', None, _('use dots for runs')),
474 ('s', 'spaces', None, _('separate elements by spaces'))],
475 ('s', 'spaces', None, _('separate elements by spaces'))],
475 _('[OPTION]... [FILE [REV]...]'),
476 _('[OPTION]... [FILE [REV]...]'),
476 optionalrepo=True)
477 optionalrepo=True)
477 def debugdag(ui, repo, file_=None, *revs, **opts):
478 def debugdag(ui, repo, file_=None, *revs, **opts):
478 """format the changelog or an index DAG as a concise textual description
479 """format the changelog or an index DAG as a concise textual description
479
480
480 If you pass a revlog index, the revlog's DAG is emitted. If you list
481 If you pass a revlog index, the revlog's DAG is emitted. If you list
481 revision numbers, they get labeled in the output as rN.
482 revision numbers, they get labeled in the output as rN.
482
483
483 Otherwise, the changelog DAG of the current repo is emitted.
484 Otherwise, the changelog DAG of the current repo is emitted.
484 """
485 """
485 spaces = opts.get(r'spaces')
486 spaces = opts.get(r'spaces')
486 dots = opts.get(r'dots')
487 dots = opts.get(r'dots')
487 if file_:
488 if file_:
488 rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
489 rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
489 file_)
490 file_)
490 revs = set((int(r) for r in revs))
491 revs = set((int(r) for r in revs))
491 def events():
492 def events():
492 for r in rlog:
493 for r in rlog:
493 yield 'n', (r, list(p for p in rlog.parentrevs(r)
494 yield 'n', (r, list(p for p in rlog.parentrevs(r)
494 if p != -1))
495 if p != -1))
495 if r in revs:
496 if r in revs:
496 yield 'l', (r, "r%i" % r)
497 yield 'l', (r, "r%i" % r)
497 elif repo:
498 elif repo:
498 cl = repo.changelog
499 cl = repo.changelog
499 tags = opts.get(r'tags')
500 tags = opts.get(r'tags')
500 branches = opts.get(r'branches')
501 branches = opts.get(r'branches')
501 if tags:
502 if tags:
502 labels = {}
503 labels = {}
503 for l, n in repo.tags().items():
504 for l, n in repo.tags().items():
504 labels.setdefault(cl.rev(n), []).append(l)
505 labels.setdefault(cl.rev(n), []).append(l)
505 def events():
506 def events():
506 b = "default"
507 b = "default"
507 for r in cl:
508 for r in cl:
508 if branches:
509 if branches:
509 newb = cl.read(cl.node(r))[5]['branch']
510 newb = cl.read(cl.node(r))[5]['branch']
510 if newb != b:
511 if newb != b:
511 yield 'a', newb
512 yield 'a', newb
512 b = newb
513 b = newb
513 yield 'n', (r, list(p for p in cl.parentrevs(r)
514 yield 'n', (r, list(p for p in cl.parentrevs(r)
514 if p != -1))
515 if p != -1))
515 if tags:
516 if tags:
516 ls = labels.get(r)
517 ls = labels.get(r)
517 if ls:
518 if ls:
518 for l in ls:
519 for l in ls:
519 yield 'l', (r, l)
520 yield 'l', (r, l)
520 else:
521 else:
521 raise error.Abort(_('need repo for changelog dag'))
522 raise error.Abort(_('need repo for changelog dag'))
522
523
523 for line in dagparser.dagtextlines(events(),
524 for line in dagparser.dagtextlines(events(),
524 addspaces=spaces,
525 addspaces=spaces,
525 wraplabels=True,
526 wraplabels=True,
526 wrapannotations=True,
527 wrapannotations=True,
527 wrapnonlinear=dots,
528 wrapnonlinear=dots,
528 usedots=dots,
529 usedots=dots,
529 maxlinewidth=70):
530 maxlinewidth=70):
530 ui.write(line)
531 ui.write(line)
531 ui.write("\n")
532 ui.write("\n")
532
533
533 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
534 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
534 def debugdata(ui, repo, file_, rev=None, **opts):
535 def debugdata(ui, repo, file_, rev=None, **opts):
535 """dump the contents of a data file revision"""
536 """dump the contents of a data file revision"""
536 opts = pycompat.byteskwargs(opts)
537 opts = pycompat.byteskwargs(opts)
537 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
538 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
538 if rev is not None:
539 if rev is not None:
539 raise error.CommandError('debugdata', _('invalid arguments'))
540 raise error.CommandError('debugdata', _('invalid arguments'))
540 file_, rev = None, file_
541 file_, rev = None, file_
541 elif rev is None:
542 elif rev is None:
542 raise error.CommandError('debugdata', _('invalid arguments'))
543 raise error.CommandError('debugdata', _('invalid arguments'))
543 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
544 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
544 try:
545 try:
545 ui.write(r.revision(r.lookup(rev), raw=True))
546 ui.write(r.revision(r.lookup(rev), raw=True))
546 except KeyError:
547 except KeyError:
547 raise error.Abort(_('invalid revision identifier %s') % rev)
548 raise error.Abort(_('invalid revision identifier %s') % rev)
548
549
549 @command('debugdate',
550 @command('debugdate',
550 [('e', 'extended', None, _('try extended date formats'))],
551 [('e', 'extended', None, _('try extended date formats'))],
551 _('[-e] DATE [RANGE]'),
552 _('[-e] DATE [RANGE]'),
552 norepo=True, optionalrepo=True)
553 norepo=True, optionalrepo=True)
553 def debugdate(ui, date, range=None, **opts):
554 def debugdate(ui, date, range=None, **opts):
554 """parse and display a date"""
555 """parse and display a date"""
555 if opts[r"extended"]:
556 if opts[r"extended"]:
556 d = util.parsedate(date, util.extendeddateformats)
557 d = util.parsedate(date, util.extendeddateformats)
557 else:
558 else:
558 d = util.parsedate(date)
559 d = util.parsedate(date)
559 ui.write(("internal: %s %s\n") % d)
560 ui.write(("internal: %s %s\n") % d)
560 ui.write(("standard: %s\n") % util.datestr(d))
561 ui.write(("standard: %s\n") % util.datestr(d))
561 if range:
562 if range:
562 m = util.matchdate(range)
563 m = util.matchdate(range)
563 ui.write(("match: %s\n") % m(d[0]))
564 ui.write(("match: %s\n") % m(d[0]))
564
565
565 @command('debugdeltachain',
566 @command('debugdeltachain',
566 cmdutil.debugrevlogopts + cmdutil.formatteropts,
567 cmdutil.debugrevlogopts + cmdutil.formatteropts,
567 _('-c|-m|FILE'),
568 _('-c|-m|FILE'),
568 optionalrepo=True)
569 optionalrepo=True)
569 def debugdeltachain(ui, repo, file_=None, **opts):
570 def debugdeltachain(ui, repo, file_=None, **opts):
570 """dump information about delta chains in a revlog
571 """dump information about delta chains in a revlog
571
572
572 Output can be templatized. Available template keywords are:
573 Output can be templatized. Available template keywords are:
573
574
574 :``rev``: revision number
575 :``rev``: revision number
575 :``chainid``: delta chain identifier (numbered by unique base)
576 :``chainid``: delta chain identifier (numbered by unique base)
576 :``chainlen``: delta chain length to this revision
577 :``chainlen``: delta chain length to this revision
577 :``prevrev``: previous revision in delta chain
578 :``prevrev``: previous revision in delta chain
578 :``deltatype``: role of delta / how it was computed
579 :``deltatype``: role of delta / how it was computed
579 :``compsize``: compressed size of revision
580 :``compsize``: compressed size of revision
580 :``uncompsize``: uncompressed size of revision
581 :``uncompsize``: uncompressed size of revision
581 :``chainsize``: total size of compressed revisions in chain
582 :``chainsize``: total size of compressed revisions in chain
582 :``chainratio``: total chain size divided by uncompressed revision size
583 :``chainratio``: total chain size divided by uncompressed revision size
583 (new delta chains typically start at ratio 2.00)
584 (new delta chains typically start at ratio 2.00)
584 :``lindist``: linear distance from base revision in delta chain to end
585 :``lindist``: linear distance from base revision in delta chain to end
585 of this revision
586 of this revision
586 :``extradist``: total size of revisions not part of this delta chain from
587 :``extradist``: total size of revisions not part of this delta chain from
587 base of delta chain to end of this revision; a measurement
588 base of delta chain to end of this revision; a measurement
588 of how much extra data we need to read/seek across to read
589 of how much extra data we need to read/seek across to read
589 the delta chain for this revision
590 the delta chain for this revision
590 :``extraratio``: extradist divided by chainsize; another representation of
591 :``extraratio``: extradist divided by chainsize; another representation of
591 how much unrelated data is needed to load this delta chain
592 how much unrelated data is needed to load this delta chain
592
593
593 If the repository is configured to use the sparse read, additional keywords
594 If the repository is configured to use the sparse read, additional keywords
594 are available:
595 are available:
595
596
596 :``readsize``: total size of data read from the disk for a revision
597 :``readsize``: total size of data read from the disk for a revision
597 (sum of the sizes of all the blocks)
598 (sum of the sizes of all the blocks)
598 :``largestblock``: size of the largest block of data read from the disk
599 :``largestblock``: size of the largest block of data read from the disk
599 :``readdensity``: density of useful bytes in the data read from the disk
600 :``readdensity``: density of useful bytes in the data read from the disk
600
601
601 The sparse read can be enabled with experimental.sparse-read = True
602 The sparse read can be enabled with experimental.sparse-read = True
602 """
603 """
603 opts = pycompat.byteskwargs(opts)
604 opts = pycompat.byteskwargs(opts)
604 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
605 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
605 index = r.index
606 index = r.index
606 generaldelta = r.version & revlog.FLAG_GENERALDELTA
607 generaldelta = r.version & revlog.FLAG_GENERALDELTA
607 withsparseread = getattr(r, '_withsparseread', False)
608 withsparseread = getattr(r, '_withsparseread', False)
608
609
609 def revinfo(rev):
610 def revinfo(rev):
610 e = index[rev]
611 e = index[rev]
611 compsize = e[1]
612 compsize = e[1]
612 uncompsize = e[2]
613 uncompsize = e[2]
613 chainsize = 0
614 chainsize = 0
614
615
615 if generaldelta:
616 if generaldelta:
616 if e[3] == e[5]:
617 if e[3] == e[5]:
617 deltatype = 'p1'
618 deltatype = 'p1'
618 elif e[3] == e[6]:
619 elif e[3] == e[6]:
619 deltatype = 'p2'
620 deltatype = 'p2'
620 elif e[3] == rev - 1:
621 elif e[3] == rev - 1:
621 deltatype = 'prev'
622 deltatype = 'prev'
622 elif e[3] == rev:
623 elif e[3] == rev:
623 deltatype = 'base'
624 deltatype = 'base'
624 else:
625 else:
625 deltatype = 'other'
626 deltatype = 'other'
626 else:
627 else:
627 if e[3] == rev:
628 if e[3] == rev:
628 deltatype = 'base'
629 deltatype = 'base'
629 else:
630 else:
630 deltatype = 'prev'
631 deltatype = 'prev'
631
632
632 chain = r._deltachain(rev)[0]
633 chain = r._deltachain(rev)[0]
633 for iterrev in chain:
634 for iterrev in chain:
634 e = index[iterrev]
635 e = index[iterrev]
635 chainsize += e[1]
636 chainsize += e[1]
636
637
637 return compsize, uncompsize, deltatype, chain, chainsize
638 return compsize, uncompsize, deltatype, chain, chainsize
638
639
639 fm = ui.formatter('debugdeltachain', opts)
640 fm = ui.formatter('debugdeltachain', opts)
640
641
641 fm.plain(' rev chain# chainlen prev delta '
642 fm.plain(' rev chain# chainlen prev delta '
642 'size rawsize chainsize ratio lindist extradist '
643 'size rawsize chainsize ratio lindist extradist '
643 'extraratio')
644 'extraratio')
644 if withsparseread:
645 if withsparseread:
645 fm.plain(' readsize largestblk rddensity')
646 fm.plain(' readsize largestblk rddensity')
646 fm.plain('\n')
647 fm.plain('\n')
647
648
648 chainbases = {}
649 chainbases = {}
649 for rev in r:
650 for rev in r:
650 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
651 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
651 chainbase = chain[0]
652 chainbase = chain[0]
652 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
653 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
653 start = r.start
654 start = r.start
654 length = r.length
655 length = r.length
655 basestart = start(chainbase)
656 basestart = start(chainbase)
656 revstart = start(rev)
657 revstart = start(rev)
657 lineardist = revstart + comp - basestart
658 lineardist = revstart + comp - basestart
658 extradist = lineardist - chainsize
659 extradist = lineardist - chainsize
659 try:
660 try:
660 prevrev = chain[-2]
661 prevrev = chain[-2]
661 except IndexError:
662 except IndexError:
662 prevrev = -1
663 prevrev = -1
663
664
664 chainratio = float(chainsize) / float(uncomp)
665 chainratio = float(chainsize) / float(uncomp)
665 extraratio = float(extradist) / float(chainsize)
666 extraratio = float(extradist) / float(chainsize)
666
667
667 fm.startitem()
668 fm.startitem()
668 fm.write('rev chainid chainlen prevrev deltatype compsize '
669 fm.write('rev chainid chainlen prevrev deltatype compsize '
669 'uncompsize chainsize chainratio lindist extradist '
670 'uncompsize chainsize chainratio lindist extradist '
670 'extraratio',
671 'extraratio',
671 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
672 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
672 rev, chainid, len(chain), prevrev, deltatype, comp,
673 rev, chainid, len(chain), prevrev, deltatype, comp,
673 uncomp, chainsize, chainratio, lineardist, extradist,
674 uncomp, chainsize, chainratio, lineardist, extradist,
674 extraratio,
675 extraratio,
675 rev=rev, chainid=chainid, chainlen=len(chain),
676 rev=rev, chainid=chainid, chainlen=len(chain),
676 prevrev=prevrev, deltatype=deltatype, compsize=comp,
677 prevrev=prevrev, deltatype=deltatype, compsize=comp,
677 uncompsize=uncomp, chainsize=chainsize,
678 uncompsize=uncomp, chainsize=chainsize,
678 chainratio=chainratio, lindist=lineardist,
679 chainratio=chainratio, lindist=lineardist,
679 extradist=extradist, extraratio=extraratio)
680 extradist=extradist, extraratio=extraratio)
680 if withsparseread:
681 if withsparseread:
681 readsize = 0
682 readsize = 0
682 largestblock = 0
683 largestblock = 0
683 for revschunk in revlog._slicechunk(r, chain):
684 for revschunk in revlog._slicechunk(r, chain):
684 blkend = start(revschunk[-1]) + length(revschunk[-1])
685 blkend = start(revschunk[-1]) + length(revschunk[-1])
685 blksize = blkend - start(revschunk[0])
686 blksize = blkend - start(revschunk[0])
686
687
687 readsize += blksize
688 readsize += blksize
688 if largestblock < blksize:
689 if largestblock < blksize:
689 largestblock = blksize
690 largestblock = blksize
690
691
691 readdensity = float(chainsize) / float(readsize)
692 readdensity = float(chainsize) / float(readsize)
692
693
693 fm.write('readsize largestblock readdensity',
694 fm.write('readsize largestblock readdensity',
694 ' %10d %10d %9.5f',
695 ' %10d %10d %9.5f',
695 readsize, largestblock, readdensity,
696 readsize, largestblock, readdensity,
696 readsize=readsize, largestblock=largestblock,
697 readsize=readsize, largestblock=largestblock,
697 readdensity=readdensity)
698 readdensity=readdensity)
698
699
699 fm.plain('\n')
700 fm.plain('\n')
700
701
701 fm.end()
702 fm.end()
702
703
703 @command('debugdirstate|debugstate',
704 @command('debugdirstate|debugstate',
704 [('', 'nodates', None, _('do not display the saved mtime')),
705 [('', 'nodates', None, _('do not display the saved mtime')),
705 ('', 'datesort', None, _('sort by saved mtime'))],
706 ('', 'datesort', None, _('sort by saved mtime'))],
706 _('[OPTION]...'))
707 _('[OPTION]...'))
707 def debugstate(ui, repo, **opts):
708 def debugstate(ui, repo, **opts):
708 """show the contents of the current dirstate"""
709 """show the contents of the current dirstate"""
709
710
710 nodates = opts.get(r'nodates')
711 nodates = opts.get(r'nodates')
711 datesort = opts.get(r'datesort')
712 datesort = opts.get(r'datesort')
712
713
713 timestr = ""
714 timestr = ""
714 if datesort:
715 if datesort:
715 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
716 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
716 else:
717 else:
717 keyfunc = None # sort by filename
718 keyfunc = None # sort by filename
718 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
719 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
719 if ent[3] == -1:
720 if ent[3] == -1:
720 timestr = 'unset '
721 timestr = 'unset '
721 elif nodates:
722 elif nodates:
722 timestr = 'set '
723 timestr = 'set '
723 else:
724 else:
724 timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
725 timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
725 time.localtime(ent[3]))
726 time.localtime(ent[3]))
726 timestr = encoding.strtolocal(timestr)
727 timestr = encoding.strtolocal(timestr)
727 if ent[1] & 0o20000:
728 if ent[1] & 0o20000:
728 mode = 'lnk'
729 mode = 'lnk'
729 else:
730 else:
730 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
731 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
731 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
732 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
732 for f in repo.dirstate.copies():
733 for f in repo.dirstate.copies():
733 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
734 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
734
735
735 @command('debugdiscovery',
736 @command('debugdiscovery',
736 [('', 'old', None, _('use old-style discovery')),
737 [('', 'old', None, _('use old-style discovery')),
737 ('', 'nonheads', None,
738 ('', 'nonheads', None,
738 _('use old-style discovery with non-heads included')),
739 _('use old-style discovery with non-heads included')),
739 ('', 'rev', [], 'restrict discovery to this set of revs'),
740 ('', 'rev', [], 'restrict discovery to this set of revs'),
740 ] + cmdutil.remoteopts,
741 ] + cmdutil.remoteopts,
741 _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
742 _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
742 def debugdiscovery(ui, repo, remoteurl="default", **opts):
743 def debugdiscovery(ui, repo, remoteurl="default", **opts):
743 """runs the changeset discovery protocol in isolation"""
744 """runs the changeset discovery protocol in isolation"""
744 opts = pycompat.byteskwargs(opts)
745 opts = pycompat.byteskwargs(opts)
745 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
746 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
746 opts.get('branch'))
747 opts.get('branch'))
747 remote = hg.peer(repo, opts, remoteurl)
748 remote = hg.peer(repo, opts, remoteurl)
748 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
749 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
749
750
750 # make sure tests are repeatable
751 # make sure tests are repeatable
751 random.seed(12323)
752 random.seed(12323)
752
753
753 def doit(pushedrevs, remoteheads, remote=remote):
754 def doit(pushedrevs, remoteheads, remote=remote):
754 if opts.get('old'):
755 if opts.get('old'):
755 if not util.safehasattr(remote, 'branches'):
756 if not util.safehasattr(remote, 'branches'):
756 # enable in-client legacy support
757 # enable in-client legacy support
757 remote = localrepo.locallegacypeer(remote.local())
758 remote = localrepo.locallegacypeer(remote.local())
758 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
759 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
759 force=True)
760 force=True)
760 common = set(common)
761 common = set(common)
761 if not opts.get('nonheads'):
762 if not opts.get('nonheads'):
762 ui.write(("unpruned common: %s\n") %
763 ui.write(("unpruned common: %s\n") %
763 " ".join(sorted(short(n) for n in common)))
764 " ".join(sorted(short(n) for n in common)))
764 dag = dagutil.revlogdag(repo.changelog)
765 dag = dagutil.revlogdag(repo.changelog)
765 all = dag.ancestorset(dag.internalizeall(common))
766 all = dag.ancestorset(dag.internalizeall(common))
766 common = dag.externalizeall(dag.headsetofconnecteds(all))
767 common = dag.externalizeall(dag.headsetofconnecteds(all))
767 else:
768 else:
768 nodes = None
769 nodes = None
769 if pushedrevs:
770 if pushedrevs:
770 revs = scmutil.revrange(repo, pushedrevs)
771 revs = scmutil.revrange(repo, pushedrevs)
771 nodes = [repo[r].node() for r in revs]
772 nodes = [repo[r].node() for r in revs]
772 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
773 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
773 ancestorsof=nodes)
774 ancestorsof=nodes)
774 common = set(common)
775 common = set(common)
775 rheads = set(hds)
776 rheads = set(hds)
776 lheads = set(repo.heads())
777 lheads = set(repo.heads())
777 ui.write(("common heads: %s\n") %
778 ui.write(("common heads: %s\n") %
778 " ".join(sorted(short(n) for n in common)))
779 " ".join(sorted(short(n) for n in common)))
779 if lheads <= common:
780 if lheads <= common:
780 ui.write(("local is subset\n"))
781 ui.write(("local is subset\n"))
781 elif rheads <= common:
782 elif rheads <= common:
782 ui.write(("remote is subset\n"))
783 ui.write(("remote is subset\n"))
783
784
784 serverlogs = opts.get('serverlog')
785 serverlogs = opts.get('serverlog')
785 if serverlogs:
786 if serverlogs:
786 for filename in serverlogs:
787 for filename in serverlogs:
787 with open(filename, 'r') as logfile:
788 with open(filename, 'r') as logfile:
788 line = logfile.readline()
789 line = logfile.readline()
789 while line:
790 while line:
790 parts = line.strip().split(';')
791 parts = line.strip().split(';')
791 op = parts[1]
792 op = parts[1]
792 if op == 'cg':
793 if op == 'cg':
793 pass
794 pass
794 elif op == 'cgss':
795 elif op == 'cgss':
795 doit(parts[2].split(' '), parts[3].split(' '))
796 doit(parts[2].split(' '), parts[3].split(' '))
796 elif op == 'unb':
797 elif op == 'unb':
797 doit(parts[3].split(' '), parts[2].split(' '))
798 doit(parts[3].split(' '), parts[2].split(' '))
798 line = logfile.readline()
799 line = logfile.readline()
799 else:
800 else:
800 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
801 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
801 opts.get('remote_head'))
802 opts.get('remote_head'))
802 localrevs = opts.get('rev')
803 localrevs = opts.get('rev')
803 doit(localrevs, remoterevs)
804 doit(localrevs, remoterevs)
804
805
805 @command('debugextensions', cmdutil.formatteropts, [], norepo=True)
806 @command('debugextensions', cmdutil.formatteropts, [], norepo=True)
806 def debugextensions(ui, **opts):
807 def debugextensions(ui, **opts):
807 '''show information about active extensions'''
808 '''show information about active extensions'''
808 opts = pycompat.byteskwargs(opts)
809 opts = pycompat.byteskwargs(opts)
809 exts = extensions.extensions(ui)
810 exts = extensions.extensions(ui)
810 hgver = util.version()
811 hgver = util.version()
811 fm = ui.formatter('debugextensions', opts)
812 fm = ui.formatter('debugextensions', opts)
812 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
813 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
813 isinternal = extensions.ismoduleinternal(extmod)
814 isinternal = extensions.ismoduleinternal(extmod)
814 extsource = pycompat.fsencode(extmod.__file__)
815 extsource = pycompat.fsencode(extmod.__file__)
815 if isinternal:
816 if isinternal:
816 exttestedwith = [] # never expose magic string to users
817 exttestedwith = [] # never expose magic string to users
817 else:
818 else:
818 exttestedwith = getattr(extmod, 'testedwith', '').split()
819 exttestedwith = getattr(extmod, 'testedwith', '').split()
819 extbuglink = getattr(extmod, 'buglink', None)
820 extbuglink = getattr(extmod, 'buglink', None)
820
821
821 fm.startitem()
822 fm.startitem()
822
823
823 if ui.quiet or ui.verbose:
824 if ui.quiet or ui.verbose:
824 fm.write('name', '%s\n', extname)
825 fm.write('name', '%s\n', extname)
825 else:
826 else:
826 fm.write('name', '%s', extname)
827 fm.write('name', '%s', extname)
827 if isinternal or hgver in exttestedwith:
828 if isinternal or hgver in exttestedwith:
828 fm.plain('\n')
829 fm.plain('\n')
829 elif not exttestedwith:
830 elif not exttestedwith:
830 fm.plain(_(' (untested!)\n'))
831 fm.plain(_(' (untested!)\n'))
831 else:
832 else:
832 lasttestedversion = exttestedwith[-1]
833 lasttestedversion = exttestedwith[-1]
833 fm.plain(' (%s!)\n' % lasttestedversion)
834 fm.plain(' (%s!)\n' % lasttestedversion)
834
835
835 fm.condwrite(ui.verbose and extsource, 'source',
836 fm.condwrite(ui.verbose and extsource, 'source',
836 _(' location: %s\n'), extsource or "")
837 _(' location: %s\n'), extsource or "")
837
838
838 if ui.verbose:
839 if ui.verbose:
839 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
840 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
840 fm.data(bundled=isinternal)
841 fm.data(bundled=isinternal)
841
842
842 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
843 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
843 _(' tested with: %s\n'),
844 _(' tested with: %s\n'),
844 fm.formatlist(exttestedwith, name='ver'))
845 fm.formatlist(exttestedwith, name='ver'))
845
846
846 fm.condwrite(ui.verbose and extbuglink, 'buglink',
847 fm.condwrite(ui.verbose and extbuglink, 'buglink',
847 _(' bug reporting: %s\n'), extbuglink or "")
848 _(' bug reporting: %s\n'), extbuglink or "")
848
849
849 fm.end()
850 fm.end()
850
851
851 @command('debugfileset',
852 @command('debugfileset',
852 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
853 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
853 _('[-r REV] FILESPEC'))
854 _('[-r REV] FILESPEC'))
854 def debugfileset(ui, repo, expr, **opts):
855 def debugfileset(ui, repo, expr, **opts):
855 '''parse and apply a fileset specification'''
856 '''parse and apply a fileset specification'''
856 ctx = scmutil.revsingle(repo, opts.get(r'rev'), None)
857 ctx = scmutil.revsingle(repo, opts.get(r'rev'), None)
857 if ui.verbose:
858 if ui.verbose:
858 tree = fileset.parse(expr)
859 tree = fileset.parse(expr)
859 ui.note(fileset.prettyformat(tree), "\n")
860 ui.note(fileset.prettyformat(tree), "\n")
860
861
861 for f in ctx.getfileset(expr):
862 for f in ctx.getfileset(expr):
862 ui.write("%s\n" % f)
863 ui.write("%s\n" % f)
863
864
@command('debugformat',
    [] + cmdutil.formatteropts,
    _(''))
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    # Column width: widest variant name, but never narrower than the header.
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len('format-variant'), maxvariantlength)

    def makeformatname(name):
        # Left-align the name and pad the column to a uniform width.
        return '%s:' + (' ' * (maxvariantlength - len(name)))

    fm = ui.formatter('debugformat', opts)
    if fm.isplain():
        def formatvalue(value):
            # Strings pass through; booleans become human-readable yes/no.
            if util.safehasattr(value, 'startswith'):
                return value
            return 'yes' if value else 'no'
    else:
        # Structured output (json/template) keeps raw values.
        formatvalue = pycompat.identity

    # Header row.
    fm.plain('format-variant')
    fm.plain(' ' * (maxvariantlength - len('format-variant')))
    fm.plain(' repo')
    if ui.verbose:
        fm.plain(' config default')
    fm.plain('\n')

    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # Pick labels so mismatches between repo/config/default can be
        # color-highlighted by the UI.
        if repovalue != configvalue:
            namelabel = 'formatvariant.name.mismatchconfig'
            repolabel = 'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = 'formatvariant.name.mismatchdefault'
            repolabel = 'formatvariant.repo.mismatchdefault'
        else:
            namelabel = 'formatvariant.name.uptodate'
            repolabel = 'formatvariant.repo.uptodate'

        fm.write('name', makeformatname(fv.name), fv.name,
                 label=namelabel)
        fm.write('repo', ' %3s', formatvalue(repovalue),
                 label=repolabel)
        if fv.default != configvalue:
            configlabel = 'formatvariant.config.special'
        else:
            configlabel = 'formatvariant.config.default'
        # config/default columns only appear in --verbose mode.
        fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
                     label=configlabel)
        fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
                     label='formatvariant.default')
        fm.plain('\n')
    fm.end()
925
926
@command('debugfsinfo', [], _('[PATH]'), norepo=True)
def debugfsinfo(ui, path="."):
    """show information detected about current filesystem"""
    # Report the basic capabilities Mercurial probes for on this filesystem.
    ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
    ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
    ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
    ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
    casesensitive = '(unknown)'
    try:
        # Probe case sensitivity with a throwaway file; the temp file is
        # removed automatically by the context manager.
        with tempfile.NamedTemporaryFile(prefix='.debugfsinfo',
                                         dir=path) as tmpf:
            casesensitive = util.fscasesensitive(tmpf.name) and 'yes' or 'no'
    except OSError:
        # e.g. unwritable directory -- leave the answer as '(unknown)'.
        pass
    ui.write(('case-sensitive: %s\n') % casesensitive)
940
941
@command('debuggetbundle',
    [('H', 'head', [], _('id of head node'), _('ID')),
    ('C', 'common', [], _('id of common node'), _('ID')),
    ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
    _('REPO FILE [-H|-C ID]...'),
    norepo=True)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable('getbundle'):
        raise error.Abort("getbundle() not supported by target repository")

    # Build the keyword arguments for the wire-protocol getbundle call.
    args = {}
    if common:
        args[r'common'] = [bin(s) for s in common]
    if head:
        args[r'heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args[r'bundlecaps'] = None
    bundle = repo.getbundle('debug', **args)

    # Map the user-facing compression name to the on-disk bundle type.
    bundletype = opts.get('type', 'bzip2').lower()
    btypes = {'none': 'HG10UN',
              'bzip2': 'HG10BZ',
              'gzip': 'HG10GZ',
              'bundle2': 'HG20'}
    bundletype = btypes.get(bundletype)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_('unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
975
976
@command('debugignore', [], '[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write("%s\n" % repr(ignore))
        return

    matcher = scmutil.match(repo[None], pats=files)
    for f in matcher.files():
        nf = util.normpath(f)
        ignored = None
        ignoredata = None
        if nf != '.':
            if ignore(nf):
                # The file itself matches an ignore rule.
                ignored = nf
                ignoredata = repo.dirstate._ignorefileandline(nf)
            else:
                # Otherwise check whether any parent directory is ignored.
                for p in util.finddirs(nf):
                    if ignore(p):
                        ignored = p
                        ignoredata = repo.dirstate._ignorefileandline(p)
                        break
        if ignored:
            if ignored == nf:
                ui.write(_("%s is ignored\n") % matcher.uipath(f))
            else:
                ui.write(_("%s is ignored because of "
                           "containing folder %s\n")
                         % (matcher.uipath(f), ignored))
            # Report which ignore file and line produced the match.
            ignorefile, lineno, line = ignoredata
            ui.write(_("(ignore rule in %s, line %d: '%s')\n")
                     % (ignorefile, lineno, line))
        else:
            ui.write(_("%s is not ignored\n") % matcher.uipath(f))
1017
1018
@command('debugindex', cmdutil.debugrevlogopts +
    [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
    _('[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True)
def debugindex(ui, repo, file_=None, **opts):
    """dump the contents of an index file"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
    format = opts.get('format', 0)
    if format not in (0, 1):
        raise error.Abort(_("unknown format %d") % format)

    # General delta revlogs store the delta parent; older revlogs store a
    # chain base -- the column header reflects which one we print.
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    if generaldelta:
        basehdr = ' delta'
    else:
        basehdr = ' base'

    # --debug prints full 40-char hashes, otherwise the short form.
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        idlen = len(shortfn(r.node(i)))
        break

    # Column headers for the selected format.
    if format == 0:
        ui.write((" rev offset length " + basehdr + " linkrev"
                 " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
    elif format == 1:
        ui.write((" rev flag offset length"
                 " size " + basehdr + " link p1 p2"
                 " %s\n") % "nodeid".rjust(idlen))

    for i in r:
        node = r.node(i)
        if generaldelta:
            base = r.deltaparent(i)
        else:
            base = r.chainbase(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # Corrupt entries still get a row; fall back to null parents.
                pp = [nullid, nullid]
            ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                i, r.start(i), r.length(i), base, r.linkrev(i),
                shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
        elif format == 1:
            pr = r.parentrevs(i)
            ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
                i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
                base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
1074
1075
@command('debugindexdot', cmdutil.debugrevlogopts,
    _('-c|-m|FILE'), optionalrepo=True)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    rlog = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
    ui.write(("digraph G {\n"))
    for rev in rlog:
        node = rlog.node(rev)
        pp = rlog.parents(node)
        # One edge per parent; the second parent is only present for merges.
        ui.write("\t%d -> %d\n" % (rlog.rev(pp[0]), rev))
        if pp[1] != nullid:
            ui.write("\t%d -> %d\n" % (rlog.rev(pp[1]), rev))
    ui.write("}\n")
1089
1090
@command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
def debuginstall(ui, **opts):
    '''test Mercurial installation

    Returns 0 on success.
    '''
    opts = pycompat.byteskwargs(opts)

    def writetemp(contents):
        # Write *contents* to a fresh temp file and return its path.
        (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
        f = os.fdopen(fd, pycompat.sysstr("wb"))
        f.write(contents)
        f.close()
        return name

    problems = 0

    fm = ui.formatter('debuginstall', opts)
    fm.startitem()

    # encoding: verify the configured encoding is known to Python.
    fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = util.forcebytestr(inst)
        problems += 1
    fm.condwrite(err, 'encodingerror', _(" %s\n"
                 " (check that your locale is properly set)\n"), err)

    # Python interpreter details.
    fm.write('pythonexe', _("checking Python executable (%s)\n"),
             pycompat.sysexecutable)
    fm.write('pythonver', _("checking Python version (%s)\n"),
             ("%d.%d.%d" % sys.version_info[:3]))
    fm.write('pythonlib', _("checking Python lib (%s)...\n"),
             os.path.dirname(pycompat.fsencode(os.__file__)))

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add('sni')

    fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
             fm.formatlist(sorted(security), name='protocol',
                           fmt='%s', sep=','))

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if 'tls1.2' not in security:
        fm.plain(_(' TLS 1.2 not supported by Python install; '
                   'network connections lack modern security\n'))
    if 'sni' not in security:
        fm.plain(_(' SNI not supported by Python install; may have '
                   'connectivity issues with some servers\n'))

    # TODO print CA cert info

    # hg version: split the "version+extra" string into its two halves.
    hgver = util.version()
    fm.write('hgver', _("checking Mercurial version (%s)\n"),
             hgver.split('+')[0])
    fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
             '+'.join(hgver.split('+')[1:]))

    # compiled modules: check the C extensions actually import.
    fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
             policy.policy)
    fm.write('hgmodules', _("checking installed modules (%s)...\n"),
             os.path.dirname(pycompat.fsencode(__file__)))

    if policy.policy in ('c', 'allow'):
        err = None
        try:
            from .cext import (
                base85,
                bdiff,
                mpatch,
                osutil,
            )
            dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
        except Exception as inst:
            err = util.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, 'extensionserror', " %s\n", err)

    # compression engines: registered, locally available, and wire-capable.
    compengines = util.compengines._engines.values()
    fm.write('compengines', _('checking registered compression engines (%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines),
                           name='compengine', fmt='%s', sep=', '))
    fm.write('compenginesavail', _('checking available compression engines '
                                   '(%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines
                                  if e.available()),
                           name='compengine', fmt='%s', sep=', '))
    wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
    fm.write('compenginesserver', _('checking available compression engines '
                                    'for wire protocol (%s)\n'),
             fm.formatlist([e.name() for e in wirecompengines
                            if e.wireprotosupport()],
                           name='compengine', fmt='%s', sep=', '))

    # templates: locate the default map file and make sure it parses.
    p = templater.templatepaths()
    fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
    fm.condwrite(not p, '', _(" no template directories found\n"))
    if p:
        m = templater.templatepath("map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = util.forcebytestr(inst)
                p = None
            fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
        else:
            p = None
        fm.condwrite(p, 'defaulttemplate',
                     _("checking default template (%s)\n"), m)
        fm.condwrite(not m, 'defaulttemplatenotfound',
                     _(" template '%s' not found\n"), "default")
    if not p:
        problems += 1
    fm.condwrite(not p, '',
                 _(" (templates seem to have been installed incorrectly)\n"))

    # editor: confirm the configured commit editor exists on PATH.
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    fm.write('editor', _("checking commit editor... (%s)\n"), editor)
    cmdpath = util.findexe(pycompat.shlexsplit(editor)[0])
    fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
                 _(" No commit editor set and can't find %s in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editor == 'vi' and editor)
    fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
                 _(" Can't find editor '%s' in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editor)
    if not cmdpath and editor != 'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = util.forcebytestr(e)
        problems += 1

    fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
    fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
        " (specify a username in your configuration file)\n"), err)

    fm.condwrite(not problems, '',
                 _("no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(problems, 'problems',
                 _("%d problems detected,"
                   " please check your install!\n"), problems)
    fm.end()

    return problems
1257
1258
@command('debugknown', [], _('REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable('known'):
        raise error.Abort("known() not supported by target repository")
    # Ask the peer about every id at once, then print one digit per id.
    flags = repo.known([bin(s) for s in ids])
    ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1271
1272
@command('debuglabelcomplete', [], _('LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # Kept only as an alias; all the work happens in debugnamecomplete.
    debugnamecomplete(ui, repo, *args)
1276
1277
1277 @command('debuglocks',
1278 @command('debuglocks',
1278 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1279 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1279 ('W', 'force-wlock', None,
1280 ('W', 'force-wlock', None,
1280 _('free the working state lock (DANGEROUS)')),
1281 _('free the working state lock (DANGEROUS)')),
1281 ('s', 'set-lock', None, _('set the store lock until stopped')),
1282 ('s', 'set-lock', None, _('set the store lock until stopped')),
1282 ('S', 'set-wlock', None,
1283 ('S', 'set-wlock', None,
1283 _('set the working state lock until stopped'))],
1284 _('set the working state lock until stopped'))],
1284 _('[OPTION]...'))
1285 _('[OPTION]...'))
1285 def debuglocks(ui, repo, **opts):
1286 def debuglocks(ui, repo, **opts):
1286 """show or modify state of locks
1287 """show or modify state of locks
1287
1288
1288 By default, this command will show which locks are held. This
1289 By default, this command will show which locks are held. This
1289 includes the user and process holding the lock, the amount of time
1290 includes the user and process holding the lock, the amount of time
1290 the lock has been held, and the machine name where the process is
1291 the lock has been held, and the machine name where the process is
1291 running if it's not local.
1292 running if it's not local.
1292
1293
1293 Locks protect the integrity of Mercurial's data, so should be
1294 Locks protect the integrity of Mercurial's data, so should be
1294 treated with care. System crashes or other interruptions may cause
1295 treated with care. System crashes or other interruptions may cause
1295 locks to not be properly released, though Mercurial will usually
1296 locks to not be properly released, though Mercurial will usually
1296 detect and remove such stale locks automatically.
1297 detect and remove such stale locks automatically.
1297
1298
1298 However, detecting stale locks may not always be possible (for
1299 However, detecting stale locks may not always be possible (for
1299 instance, on a shared filesystem). Removing locks may also be
1300 instance, on a shared filesystem). Removing locks may also be
1300 blocked by filesystem permissions.
1301 blocked by filesystem permissions.
1301
1302
1302 Setting a lock will prevent other commands from changing the data.
1303 Setting a lock will prevent other commands from changing the data.
1303 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1304 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1304 The set locks are removed when the command exits.
1305 The set locks are removed when the command exits.
1305
1306
1306 Returns 0 if no locks are held.
1307 Returns 0 if no locks are held.
1307
1308
1308 """
1309 """
1309
1310
1310 if opts.get(r'force_lock'):
1311 if opts.get(r'force_lock'):
1311 repo.svfs.unlink('lock')
1312 repo.svfs.unlink('lock')
1312 if opts.get(r'force_wlock'):
1313 if opts.get(r'force_wlock'):
1313 repo.vfs.unlink('wlock')
1314 repo.vfs.unlink('wlock')
1314 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1315 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1315 return 0
1316 return 0
1316
1317
1317 locks = []
1318 locks = []
1318 try:
1319 try:
1319 if opts.get(r'set_wlock'):
1320 if opts.get(r'set_wlock'):
1320 try:
1321 try:
1321 locks.append(repo.wlock(False))
1322 locks.append(repo.wlock(False))
1322 except error.LockHeld:
1323 except error.LockHeld:
1323 raise error.Abort(_('wlock is already held'))
1324 raise error.Abort(_('wlock is already held'))
1324 if opts.get(r'set_lock'):
1325 if opts.get(r'set_lock'):
1325 try:
1326 try:
1326 locks.append(repo.lock(False))
1327 locks.append(repo.lock(False))
1327 except error.LockHeld:
1328 except error.LockHeld:
1328 raise error.Abort(_('lock is already held'))
1329 raise error.Abort(_('lock is already held'))
1329 if len(locks):
1330 if len(locks):
1330 ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
1331 ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
1331 return 0
1332 return 0
1332 finally:
1333 finally:
1333 release(*locks)
1334 release(*locks)
1334
1335
1335 now = time.time()
1336 now = time.time()
1336 held = 0
1337 held = 0
1337
1338
1338 def report(vfs, name, method):
1339 def report(vfs, name, method):
1339 # this causes stale locks to get reaped for more accurate reporting
1340 # this causes stale locks to get reaped for more accurate reporting
1340 try:
1341 try:
1341 l = method(False)
1342 l = method(False)
1342 except error.LockHeld:
1343 except error.LockHeld:
1343 l = None
1344 l = None
1344
1345
1345 if l:
1346 if l:
1346 l.release()
1347 l.release()
1347 else:
1348 else:
1348 try:
1349 try:
1349 stat = vfs.lstat(name)
1350 stat = vfs.lstat(name)
1350 age = now - stat.st_mtime
1351 age = now - stat.st_mtime
1351 user = util.username(stat.st_uid)
1352 user = util.username(stat.st_uid)
1352 locker = vfs.readlock(name)
1353 locker = vfs.readlock(name)
1353 if ":" in locker:
1354 if ":" in locker:
1354 host, pid = locker.split(':')
1355 host, pid = locker.split(':')
1355 if host == socket.gethostname():
1356 if host == socket.gethostname():
1356 locker = 'user %s, process %s' % (user, pid)
1357 locker = 'user %s, process %s' % (user, pid)
1357 else:
1358 else:
1358 locker = 'user %s, process %s, host %s' \
1359 locker = 'user %s, process %s, host %s' \
1359 % (user, pid, host)
1360 % (user, pid, host)
1360 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1361 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1361 return 1
1362 return 1
1362 except OSError as e:
1363 except OSError as e:
1363 if e.errno != errno.ENOENT:
1364 if e.errno != errno.ENOENT:
1364 raise
1365 raise
1365
1366
1366 ui.write(("%-6s free\n") % (name + ":"))
1367 ui.write(("%-6s free\n") % (name + ":"))
1367 return 0
1368 return 0
1368
1369
1369 held += report(repo.svfs, "lock", repo.lock)
1370 held += report(repo.svfs, "lock", repo.lock)
1370 held += report(repo.vfs, "wlock", repo.wlock)
1371 held += report(repo.vfs, "wlock", repo.wlock)
1371
1372
1372 return held
1373 return held
1373
1374
@command('debugmergestate', [], '')
def debugmergestate(ui, repo, *args):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""
    # Render a node hash, collapsing the all-zero hash to the word 'null'.
    def _hashornull(h):
        return 'null' if h == nullhex else h

    def printrecords(version):
        """Pretty-print the records of one merge-state format version."""
        ui.write(('* version %s records\n') % version)
        records = v1records if version == 1 else v2records

        for rtype, record in records:
            # pretty print some record types
            if rtype == 'L':
                ui.write(('local: %s\n') % record)
            elif rtype == 'O':
                ui.write(('other: %s\n') % record)
            elif rtype == 'm':
                driver, mdstate = record.split('\0', 1)
                ui.write(('merge driver: %s (state "%s")\n')
                         % (driver, mdstate))
            elif rtype in 'FDC':
                fields = record.split('\0')
                f, state, hashval, lfile, afile, anode, ofile = fields[0:7]
                if version == 1:
                    # v1 never stored the "other" node
                    onode = 'not stored in v1 format'
                    flags = fields[7]
                else:
                    onode, flags = fields[7:9]
                ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
                         % (f, rtype, state, _hashornull(hashval)))
                ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
                ui.write((' ancestor path: %s (node %s)\n')
                         % (afile, _hashornull(anode)))
                ui.write((' other path: %s (node %s)\n')
                         % (ofile, _hashornull(onode)))
            elif rtype == 'f':
                filename, rawextras = record.split('\0', 1)
                extras = rawextras.split('\0')
                # extras are stored as alternating key/value fields
                extrastrings = []
                for j in range(0, len(extras), 2):
                    extrastrings.append('%s = %s' % (extras[j], extras[j + 1]))

                ui.write(('file extras: %s (%s)\n')
                         % (filename, ', '.join(extrastrings)))
            elif rtype == 'l':
                labels = [l for l in record.split('\0', 2) if len(l) > 0]
                ui.write(('labels:\n'))
                ui.write((' local: %s\n' % labels[0]))
                ui.write((' other: %s\n' % labels[1]))
                if len(labels) > 2:
                    ui.write((' base: %s\n' % labels[2]))
            else:
                ui.write(('unrecognized entry: %s\t%s\n')
                         % (rtype, record.replace('\0', '\t')))

    # Avoid mergestate.read() since it may raise an exception for unsupported
    # merge state records. We shouldn't be doing this, but this is OK since this
    # command is pretty low-level.
    ms = mergemod.mergestate(repo)

    # sort so that reasonable information is on top
    v1records = ms._readrecordsv1()
    v2records = ms._readrecordsv2()
    order = 'LOml'
    def key(r):
        idx = order.find(r[0])
        # known record types first (in 'order'), then the rest by payload
        return (1, r[1]) if idx == -1 else (0, idx)
    v1records.sort(key=key)
    v2records.sort(key=key)

    if not v1records and not v2records:
        ui.write(('no merge state found\n'))
    elif not v2records:
        ui.note(('no version 2 merge state\n'))
        printrecords(1)
    elif ms._v1v2match(v1records, v2records):
        ui.note(('v1 and v2 states match: using v2\n'))
        printrecords(2)
    else:
        ui.note(('v1 and v2 states mismatch: using v1\n'))
        printrecords(1)
        if ui.verbose:
            printrecords(2)
1472
1473
@command('debugnamecomplete', [], _('NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    names = set()
    # since we previously only listed open branches, we will handle that
    # specially (after this for loop)
    for kind, ns in repo.names.iteritems():
        if kind != 'branches':
            names.update(ns.listnames(repo))
    names.update(tag for (tag, heads, tip, closed)
                 in repo.branchmap().iterbranches() if not closed)

    prefixes = args or ['']
    completions = set(n for prefix in prefixes
                      for n in names if n.startswith(prefix))
    ui.write('\n'.join(sorted(completions)))
    ui.write('\n')
1492
1493
@command('debugobsolete',
        [('', 'flags', 0, _('markers flag')),
         ('', 'record-parents', False,
          _('record parent information for the precursor')),
         ('r', 'rev', [], _('display markers relevant to REV')),
         ('', 'exclusive', False, _('restrict display to markers only '
                                    'relevant to REV')),
         ('', 'index', False, _('display index of the marker')),
         ('', 'delete', [], _('delete markers specified by indices')),
        ] + cmdutil.commitopts2 + cmdutil.formatteropts,
         _('[OBSOLETED [REPLACEMENT ...]]'))
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        """Parse a full hex nodeid, aborting on anything else."""
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.Abort('changeset references must be full hexadecimal '
                              'node identifiers')

    if opts.get('delete'):
        # --delete: remove markers by index instead of creating/listing
        indices = []
        for v in opts.get('delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.Abort(_('invalid index value: %r') % v,
                                  hint=_('use integers for indices'))

        if repo.currenttransaction():
            raise error.Abort(_('cannot delete obsmarkers in the middle '
                                'of transaction.'))

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_('deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # creation mode: record a new marker precursor -> successors
        if opts['rev']:
            raise error.Abort('cannot select revision when creating marker')
        metadata = {}
        metadata['user'] = opts['user'] or ui.username()
        succs = tuple(parsenodeid(succ) for succ in successors)
        # 'with repo.lock()' matches the idiom used by the --delete branch
        # above (and elsewhere in this file) instead of manual try/finally.
        with repo.lock():
            tr = repo.transaction('debugobsolete')
            try:
                date = opts.get('date')
                if date:
                    date = util.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts['record_parents']:
                    if prec not in repo.unfiltered():
                        # fixed grammar of the error message
                        # (was "cannot used --record-parents ...")
                        raise error.Abort('cannot use --record-parents on '
                                          'unknown changesets')
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(tr, prec, succs, opts['flags'],
                                     parents=parents, date=date,
                                     metadata=metadata, ui=ui)
                tr.close()
            except ValueError as exc:
                raise error.Abort(_('bad obsmarker input: %s') % exc)
            finally:
                tr.release()
    else:
        # display mode
        if opts['rev']:
            revs = scmutil.revrange(repo, opts['rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(obsutil.getmarkers(repo, nodes=nodes,
                                               exclusive=opts['exclusive']))
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get('rev') and opts.get('index'):
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter('debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get('index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
1608
1609
@command('debugpathcomplete',
         [('f', 'full', None, _('complete an entire path')),
          ('n', 'normal', None, _('show only normal files')),
          ('a', 'added', None, _('show only added files')),
          ('r', 'removed', None, _('show only removed files'))],
         _('FILESPEC...'))
def debugpathcomplete(ui, repo, *specs, **opts):
    '''complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used.'''

    def complete(path, acceptable):
        """Return (files, dirs) from the dirstate matching path prefix."""
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        # anything outside the repository can never complete
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += '/'
        spec = spec[len(rootdir):]
        # dirstate paths always use '/'; translate if the OS differs
        fixpaths = pycompat.ossep != '/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, '/')
        speclen = len(spec)
        fullpaths = opts[r'full']
        files, dirs = set(), set()
        # hoist the bound methods: the dirstate may be large
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.iteritems():
            if not (f.startswith(spec) and st[0] in acceptable):
                continue
            if fixpaths:
                f = f.replace('/', pycompat.ossep)
            if fullpaths:
                addfile(f)
                continue
            sep = f.find(pycompat.ossep, speclen)
            if sep >= 0:
                adddir(f[:sep])
            else:
                addfile(f)
        return files, dirs

    # translate the -n/-a/-r flags into dirstate state characters
    acceptable = ''.join(
        states for optname, states in ((r'normal', 'nm'),
                                       (r'added', 'a'),
                                       (r'removed', 'r'))
        if opts[optname])
    cwd = repo.getcwd()
    if not specs:
        specs = ['.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or 'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write('\n')
1673
1674
@command('debugpickmergetool',
        [('r', 'rev', '', _('check for files in this revision'), _('REV')),
         ('', 'changedelete', None, _('emulate merging change and delete')),
        ] + cmdutil.walkopts + cmdutil.mergetoolopts,
        _('[PATTERN]...'),
        inferrepo=True)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    if opts['tool']:
        overrides[('ui', 'forcemerge')] = opts['tool']
        ui.note(('with --tool %r\n') % (opts['tool']))

    with ui.configoverride(overrides, 'debugmergepatterns'):
        # surface the inputs that can short-circuit tool selection
        hgmerge = encoding.environ.get("HGMERGE")
        if hgmerge is not None:
            ui.note(('with HGMERGE=%r\n') % (hgmerge))
        uimerge = ui.config("ui", "merge")
        if uimerge:
            ui.note(('with ui.merge=%r\n') % (uimerge))

        ctx = scmutil.revsingle(repo, opts.get('rev'))
        matcher = scmutil.match(ctx, pats, opts)
        changedelete = opts['changedelete']
        for path in ctx.walk(matcher):
            fctx = ctx[path]
            try:
                # suppress _picktool's chatter unless --debug was given
                if not ui.debugflag:
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(repo, ui, path,
                                                     fctx.isbinary(),
                                                     'l' in fctx.flags(),
                                                     changedelete)
            finally:
                if not ui.debugflag:
                    ui.popbuffer()
            ui.write(('%s = %s\n') % (path, tool))
1752
1753
@command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''

    target = hg.peer(ui, {}, repopath)
    if not keyinfo:
        # listing mode: dump every key/value pair in the namespace
        for k, v in sorted(target.listkeys(namespace).iteritems()):
            ui.write("%s\t%s\n" % (util.escapestr(k),
                                   util.escapestr(v)))
    else:
        # update mode: compare-and-swap old -> new
        key, old, new = keyinfo
        r = target.pushkey(namespace, key, old, new)
        ui.status(str(r) + '\n')
        return not r
1773
1774
@command('debugpvec', [], _('A B'))
def debugpvec(ui, repo, a, b=None):
    """compare the parent vectors (pvecs) of two revisions

    Prints both pvecs, their depths, and the delta/hamming
    distance/relation between them.
    """
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = "="
    elif pa > pb:
        rel = ">"
    elif pa < pb:
        rel = "<"
    elif pa | pb:
        rel = "|"
    else:
        # Defensive default: previously 'rel' was left unbound when none
        # of the relations above held, making the write below a NameError.
        rel = "?"
    ui.write(_("a: %s\n") % pa)
    ui.write(_("b: %s\n") % pb)
    ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
             (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
              pa.distance(pb), rel))
1794
1795
@command('debugrebuilddirstate|debugrebuildstate',
    [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
     ('', 'minimal', None, _('only rebuild files that are inconsistent with '
                             'the working copy parent')),
    ],
    _('[-r REV]'))
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get(r'minimal'):
            inmanifest = set(ctx.manifest().keys())
            indirstate = set(dirstate)
            # in the manifest but unknown to the dirstate
            manifestonly = inmanifest - indirstate
            # in the dirstate but missing from the manifest, minus additions
            dsonly = indirstate - inmanifest
            dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1832 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1832
1833
1833 @command('debugrebuildfncache', [], '')
1834 @command('debugrebuildfncache', [], '')
1834 def debugrebuildfncache(ui, repo):
1835 def debugrebuildfncache(ui, repo):
1835 """rebuild the fncache file"""
1836 """rebuild the fncache file"""
1836 repair.rebuildfncache(ui, repo)
1837 repair.rebuildfncache(ui, repo)
1837
1838
1838 @command('debugrename',
1839 @command('debugrename',
1839 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1840 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1840 _('[-r REV] FILE'))
1841 _('[-r REV] FILE'))
1841 def debugrename(ui, repo, file1, *pats, **opts):
1842 def debugrename(ui, repo, file1, *pats, **opts):
1842 """dump rename information"""
1843 """dump rename information"""
1843
1844
1844 opts = pycompat.byteskwargs(opts)
1845 opts = pycompat.byteskwargs(opts)
1845 ctx = scmutil.revsingle(repo, opts.get('rev'))
1846 ctx = scmutil.revsingle(repo, opts.get('rev'))
1846 m = scmutil.match(ctx, (file1,) + pats, opts)
1847 m = scmutil.match(ctx, (file1,) + pats, opts)
1847 for abs in ctx.walk(m):
1848 for abs in ctx.walk(m):
1848 fctx = ctx[abs]
1849 fctx = ctx[abs]
1849 o = fctx.filelog().renamed(fctx.filenode())
1850 o = fctx.filelog().renamed(fctx.filenode())
1850 rel = m.rel(abs)
1851 rel = m.rel(abs)
1851 if o:
1852 if o:
1852 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1853 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1853 else:
1854 else:
1854 ui.write(_("%s not renamed\n") % rel)
1855 ui.write(_("%s not renamed\n") % rel)
1855
1856
1856 @command('debugrevlog', cmdutil.debugrevlogopts +
1857 @command('debugrevlog', cmdutil.debugrevlogopts +
1857 [('d', 'dump', False, _('dump index data'))],
1858 [('d', 'dump', False, _('dump index data'))],
1858 _('-c|-m|FILE'),
1859 _('-c|-m|FILE'),
1859 optionalrepo=True)
1860 optionalrepo=True)
1860 def debugrevlog(ui, repo, file_=None, **opts):
1861 def debugrevlog(ui, repo, file_=None, **opts):
1861 """show data and statistics about a revlog"""
1862 """show data and statistics about a revlog"""
1862 opts = pycompat.byteskwargs(opts)
1863 opts = pycompat.byteskwargs(opts)
1863 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1864 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1864
1865
1865 if opts.get("dump"):
1866 if opts.get("dump"):
1866 numrevs = len(r)
1867 numrevs = len(r)
1867 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1868 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1868 " rawsize totalsize compression heads chainlen\n"))
1869 " rawsize totalsize compression heads chainlen\n"))
1869 ts = 0
1870 ts = 0
1870 heads = set()
1871 heads = set()
1871
1872
1872 for rev in xrange(numrevs):
1873 for rev in xrange(numrevs):
1873 dbase = r.deltaparent(rev)
1874 dbase = r.deltaparent(rev)
1874 if dbase == -1:
1875 if dbase == -1:
1875 dbase = rev
1876 dbase = rev
1876 cbase = r.chainbase(rev)
1877 cbase = r.chainbase(rev)
1877 clen = r.chainlen(rev)
1878 clen = r.chainlen(rev)
1878 p1, p2 = r.parentrevs(rev)
1879 p1, p2 = r.parentrevs(rev)
1879 rs = r.rawsize(rev)
1880 rs = r.rawsize(rev)
1880 ts = ts + rs
1881 ts = ts + rs
1881 heads -= set(r.parentrevs(rev))
1882 heads -= set(r.parentrevs(rev))
1882 heads.add(rev)
1883 heads.add(rev)
1883 try:
1884 try:
1884 compression = ts / r.end(rev)
1885 compression = ts / r.end(rev)
1885 except ZeroDivisionError:
1886 except ZeroDivisionError:
1886 compression = 0
1887 compression = 0
1887 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1888 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1888 "%11d %5d %8d\n" %
1889 "%11d %5d %8d\n" %
1889 (rev, p1, p2, r.start(rev), r.end(rev),
1890 (rev, p1, p2, r.start(rev), r.end(rev),
1890 r.start(dbase), r.start(cbase),
1891 r.start(dbase), r.start(cbase),
1891 r.start(p1), r.start(p2),
1892 r.start(p1), r.start(p2),
1892 rs, ts, compression, len(heads), clen))
1893 rs, ts, compression, len(heads), clen))
1893 return 0
1894 return 0
1894
1895
1895 v = r.version
1896 v = r.version
1896 format = v & 0xFFFF
1897 format = v & 0xFFFF
1897 flags = []
1898 flags = []
1898 gdelta = False
1899 gdelta = False
1899 if v & revlog.FLAG_INLINE_DATA:
1900 if v & revlog.FLAG_INLINE_DATA:
1900 flags.append('inline')
1901 flags.append('inline')
1901 if v & revlog.FLAG_GENERALDELTA:
1902 if v & revlog.FLAG_GENERALDELTA:
1902 gdelta = True
1903 gdelta = True
1903 flags.append('generaldelta')
1904 flags.append('generaldelta')
1904 if not flags:
1905 if not flags:
1905 flags = ['(none)']
1906 flags = ['(none)']
1906
1907
1907 nummerges = 0
1908 nummerges = 0
1908 numfull = 0
1909 numfull = 0
1909 numprev = 0
1910 numprev = 0
1910 nump1 = 0
1911 nump1 = 0
1911 nump2 = 0
1912 nump2 = 0
1912 numother = 0
1913 numother = 0
1913 nump1prev = 0
1914 nump1prev = 0
1914 nump2prev = 0
1915 nump2prev = 0
1915 chainlengths = []
1916 chainlengths = []
1916 chainbases = []
1917 chainbases = []
1917 chainspans = []
1918 chainspans = []
1918
1919
1919 datasize = [None, 0, 0]
1920 datasize = [None, 0, 0]
1920 fullsize = [None, 0, 0]
1921 fullsize = [None, 0, 0]
1921 deltasize = [None, 0, 0]
1922 deltasize = [None, 0, 0]
1922 chunktypecounts = {}
1923 chunktypecounts = {}
1923 chunktypesizes = {}
1924 chunktypesizes = {}
1924
1925
1925 def addsize(size, l):
1926 def addsize(size, l):
1926 if l[0] is None or size < l[0]:
1927 if l[0] is None or size < l[0]:
1927 l[0] = size
1928 l[0] = size
1928 if size > l[1]:
1929 if size > l[1]:
1929 l[1] = size
1930 l[1] = size
1930 l[2] += size
1931 l[2] += size
1931
1932
1932 numrevs = len(r)
1933 numrevs = len(r)
1933 for rev in xrange(numrevs):
1934 for rev in xrange(numrevs):
1934 p1, p2 = r.parentrevs(rev)
1935 p1, p2 = r.parentrevs(rev)
1935 delta = r.deltaparent(rev)
1936 delta = r.deltaparent(rev)
1936 if format > 0:
1937 if format > 0:
1937 addsize(r.rawsize(rev), datasize)
1938 addsize(r.rawsize(rev), datasize)
1938 if p2 != nullrev:
1939 if p2 != nullrev:
1939 nummerges += 1
1940 nummerges += 1
1940 size = r.length(rev)
1941 size = r.length(rev)
1941 if delta == nullrev:
1942 if delta == nullrev:
1942 chainlengths.append(0)
1943 chainlengths.append(0)
1943 chainbases.append(r.start(rev))
1944 chainbases.append(r.start(rev))
1944 chainspans.append(size)
1945 chainspans.append(size)
1945 numfull += 1
1946 numfull += 1
1946 addsize(size, fullsize)
1947 addsize(size, fullsize)
1947 else:
1948 else:
1948 chainlengths.append(chainlengths[delta] + 1)
1949 chainlengths.append(chainlengths[delta] + 1)
1949 baseaddr = chainbases[delta]
1950 baseaddr = chainbases[delta]
1950 revaddr = r.start(rev)
1951 revaddr = r.start(rev)
1951 chainbases.append(baseaddr)
1952 chainbases.append(baseaddr)
1952 chainspans.append((revaddr - baseaddr) + size)
1953 chainspans.append((revaddr - baseaddr) + size)
1953 addsize(size, deltasize)
1954 addsize(size, deltasize)
1954 if delta == rev - 1:
1955 if delta == rev - 1:
1955 numprev += 1
1956 numprev += 1
1956 if delta == p1:
1957 if delta == p1:
1957 nump1prev += 1
1958 nump1prev += 1
1958 elif delta == p2:
1959 elif delta == p2:
1959 nump2prev += 1
1960 nump2prev += 1
1960 elif delta == p1:
1961 elif delta == p1:
1961 nump1 += 1
1962 nump1 += 1
1962 elif delta == p2:
1963 elif delta == p2:
1963 nump2 += 1
1964 nump2 += 1
1964 elif delta != nullrev:
1965 elif delta != nullrev:
1965 numother += 1
1966 numother += 1
1966
1967
1967 # Obtain data on the raw chunks in the revlog.
1968 # Obtain data on the raw chunks in the revlog.
1968 segment = r._getsegmentforrevs(rev, rev)[1]
1969 segment = r._getsegmentforrevs(rev, rev)[1]
1969 if segment:
1970 if segment:
1970 chunktype = bytes(segment[0:1])
1971 chunktype = bytes(segment[0:1])
1971 else:
1972 else:
1972 chunktype = 'empty'
1973 chunktype = 'empty'
1973
1974
1974 if chunktype not in chunktypecounts:
1975 if chunktype not in chunktypecounts:
1975 chunktypecounts[chunktype] = 0
1976 chunktypecounts[chunktype] = 0
1976 chunktypesizes[chunktype] = 0
1977 chunktypesizes[chunktype] = 0
1977
1978
1978 chunktypecounts[chunktype] += 1
1979 chunktypecounts[chunktype] += 1
1979 chunktypesizes[chunktype] += size
1980 chunktypesizes[chunktype] += size
1980
1981
1981 # Adjust size min value for empty cases
1982 # Adjust size min value for empty cases
1982 for size in (datasize, fullsize, deltasize):
1983 for size in (datasize, fullsize, deltasize):
1983 if size[0] is None:
1984 if size[0] is None:
1984 size[0] = 0
1985 size[0] = 0
1985
1986
1986 numdeltas = numrevs - numfull
1987 numdeltas = numrevs - numfull
1987 numoprev = numprev - nump1prev - nump2prev
1988 numoprev = numprev - nump1prev - nump2prev
1988 totalrawsize = datasize[2]
1989 totalrawsize = datasize[2]
1989 datasize[2] /= numrevs
1990 datasize[2] /= numrevs
1990 fulltotal = fullsize[2]
1991 fulltotal = fullsize[2]
1991 fullsize[2] /= numfull
1992 fullsize[2] /= numfull
1992 deltatotal = deltasize[2]
1993 deltatotal = deltasize[2]
1993 if numrevs - numfull > 0:
1994 if numrevs - numfull > 0:
1994 deltasize[2] /= numrevs - numfull
1995 deltasize[2] /= numrevs - numfull
1995 totalsize = fulltotal + deltatotal
1996 totalsize = fulltotal + deltatotal
1996 avgchainlen = sum(chainlengths) / numrevs
1997 avgchainlen = sum(chainlengths) / numrevs
1997 maxchainlen = max(chainlengths)
1998 maxchainlen = max(chainlengths)
1998 maxchainspan = max(chainspans)
1999 maxchainspan = max(chainspans)
1999 compratio = 1
2000 compratio = 1
2000 if totalsize:
2001 if totalsize:
2001 compratio = totalrawsize / totalsize
2002 compratio = totalrawsize / totalsize
2002
2003
2003 basedfmtstr = '%%%dd\n'
2004 basedfmtstr = '%%%dd\n'
2004 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2005 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2005
2006
2006 def dfmtstr(max):
2007 def dfmtstr(max):
2007 return basedfmtstr % len(str(max))
2008 return basedfmtstr % len(str(max))
2008 def pcfmtstr(max, padding=0):
2009 def pcfmtstr(max, padding=0):
2009 return basepcfmtstr % (len(str(max)), ' ' * padding)
2010 return basepcfmtstr % (len(str(max)), ' ' * padding)
2010
2011
2011 def pcfmt(value, total):
2012 def pcfmt(value, total):
2012 if total:
2013 if total:
2013 return (value, 100 * float(value) / total)
2014 return (value, 100 * float(value) / total)
2014 else:
2015 else:
2015 return value, 100.0
2016 return value, 100.0
2016
2017
2017 ui.write(('format : %d\n') % format)
2018 ui.write(('format : %d\n') % format)
2018 ui.write(('flags : %s\n') % ', '.join(flags))
2019 ui.write(('flags : %s\n') % ', '.join(flags))
2019
2020
2020 ui.write('\n')
2021 ui.write('\n')
2021 fmt = pcfmtstr(totalsize)
2022 fmt = pcfmtstr(totalsize)
2022 fmt2 = dfmtstr(totalsize)
2023 fmt2 = dfmtstr(totalsize)
2023 ui.write(('revisions : ') + fmt2 % numrevs)
2024 ui.write(('revisions : ') + fmt2 % numrevs)
2024 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2025 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2025 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2026 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2026 ui.write(('revisions : ') + fmt2 % numrevs)
2027 ui.write(('revisions : ') + fmt2 % numrevs)
2027 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
2028 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
2028 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2029 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2029 ui.write(('revision size : ') + fmt2 % totalsize)
2030 ui.write(('revision size : ') + fmt2 % totalsize)
2030 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
2031 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
2031 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2032 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2032
2033
2033 def fmtchunktype(chunktype):
2034 def fmtchunktype(chunktype):
2034 if chunktype == 'empty':
2035 if chunktype == 'empty':
2035 return ' %s : ' % chunktype
2036 return ' %s : ' % chunktype
2036 elif chunktype in pycompat.bytestr(string.ascii_letters):
2037 elif chunktype in pycompat.bytestr(string.ascii_letters):
2037 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2038 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2038 else:
2039 else:
2039 return ' 0x%s : ' % hex(chunktype)
2040 return ' 0x%s : ' % hex(chunktype)
2040
2041
2041 ui.write('\n')
2042 ui.write('\n')
2042 ui.write(('chunks : ') + fmt2 % numrevs)
2043 ui.write(('chunks : ') + fmt2 % numrevs)
2043 for chunktype in sorted(chunktypecounts):
2044 for chunktype in sorted(chunktypecounts):
2044 ui.write(fmtchunktype(chunktype))
2045 ui.write(fmtchunktype(chunktype))
2045 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2046 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2046 ui.write(('chunks size : ') + fmt2 % totalsize)
2047 ui.write(('chunks size : ') + fmt2 % totalsize)
2047 for chunktype in sorted(chunktypecounts):
2048 for chunktype in sorted(chunktypecounts):
2048 ui.write(fmtchunktype(chunktype))
2049 ui.write(fmtchunktype(chunktype))
2049 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2050 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2050
2051
2051 ui.write('\n')
2052 ui.write('\n')
2052 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2053 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2053 ui.write(('avg chain length : ') + fmt % avgchainlen)
2054 ui.write(('avg chain length : ') + fmt % avgchainlen)
2054 ui.write(('max chain length : ') + fmt % maxchainlen)
2055 ui.write(('max chain length : ') + fmt % maxchainlen)
2055 ui.write(('max chain reach : ') + fmt % maxchainspan)
2056 ui.write(('max chain reach : ') + fmt % maxchainspan)
2056 ui.write(('compression ratio : ') + fmt % compratio)
2057 ui.write(('compression ratio : ') + fmt % compratio)
2057
2058
2058 if format > 0:
2059 if format > 0:
2059 ui.write('\n')
2060 ui.write('\n')
2060 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2061 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2061 % tuple(datasize))
2062 % tuple(datasize))
2062 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2063 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2063 % tuple(fullsize))
2064 % tuple(fullsize))
2064 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2065 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2065 % tuple(deltasize))
2066 % tuple(deltasize))
2066
2067
2067 if numdeltas > 0:
2068 if numdeltas > 0:
2068 ui.write('\n')
2069 ui.write('\n')
2069 fmt = pcfmtstr(numdeltas)
2070 fmt = pcfmtstr(numdeltas)
2070 fmt2 = pcfmtstr(numdeltas, 4)
2071 fmt2 = pcfmtstr(numdeltas, 4)
2071 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2072 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2072 if numprev > 0:
2073 if numprev > 0:
2073 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2074 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2074 numprev))
2075 numprev))
2075 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2076 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2076 numprev))
2077 numprev))
2077 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2078 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2078 numprev))
2079 numprev))
2079 if gdelta:
2080 if gdelta:
2080 ui.write(('deltas against p1 : ')
2081 ui.write(('deltas against p1 : ')
2081 + fmt % pcfmt(nump1, numdeltas))
2082 + fmt % pcfmt(nump1, numdeltas))
2082 ui.write(('deltas against p2 : ')
2083 ui.write(('deltas against p2 : ')
2083 + fmt % pcfmt(nump2, numdeltas))
2084 + fmt % pcfmt(nump2, numdeltas))
2084 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2085 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2085 numdeltas))
2086 numdeltas))
2086
2087
2087 @command('debugrevspec',
2088 @command('debugrevspec',
2088 [('', 'optimize', None,
2089 [('', 'optimize', None,
2089 _('print parsed tree after optimizing (DEPRECATED)')),
2090 _('print parsed tree after optimizing (DEPRECATED)')),
2090 ('', 'show-revs', True, _('print list of result revisions (default)')),
2091 ('', 'show-revs', True, _('print list of result revisions (default)')),
2091 ('s', 'show-set', None, _('print internal representation of result set')),
2092 ('s', 'show-set', None, _('print internal representation of result set')),
2092 ('p', 'show-stage', [],
2093 ('p', 'show-stage', [],
2093 _('print parsed tree at the given stage'), _('NAME')),
2094 _('print parsed tree at the given stage'), _('NAME')),
2094 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2095 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2095 ('', 'verify-optimized', False, _('verify optimized result')),
2096 ('', 'verify-optimized', False, _('verify optimized result')),
2096 ],
2097 ],
2097 ('REVSPEC'))
2098 ('REVSPEC'))
2098 def debugrevspec(ui, repo, expr, **opts):
2099 def debugrevspec(ui, repo, expr, **opts):
2099 """parse and apply a revision specification
2100 """parse and apply a revision specification
2100
2101
2101 Use -p/--show-stage option to print the parsed tree at the given stages.
2102 Use -p/--show-stage option to print the parsed tree at the given stages.
2102 Use -p all to print tree at every stage.
2103 Use -p all to print tree at every stage.
2103
2104
2104 Use --no-show-revs option with -s or -p to print only the set
2105 Use --no-show-revs option with -s or -p to print only the set
2105 representation or the parsed tree respectively.
2106 representation or the parsed tree respectively.
2106
2107
2107 Use --verify-optimized to compare the optimized result with the unoptimized
2108 Use --verify-optimized to compare the optimized result with the unoptimized
2108 one. Returns 1 if the optimized result differs.
2109 one. Returns 1 if the optimized result differs.
2109 """
2110 """
2110 opts = pycompat.byteskwargs(opts)
2111 opts = pycompat.byteskwargs(opts)
2111 aliases = ui.configitems('revsetalias')
2112 aliases = ui.configitems('revsetalias')
2112 stages = [
2113 stages = [
2113 ('parsed', lambda tree: tree),
2114 ('parsed', lambda tree: tree),
2114 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2115 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2115 ui.warn)),
2116 ui.warn)),
2116 ('concatenated', revsetlang.foldconcat),
2117 ('concatenated', revsetlang.foldconcat),
2117 ('analyzed', revsetlang.analyze),
2118 ('analyzed', revsetlang.analyze),
2118 ('optimized', revsetlang.optimize),
2119 ('optimized', revsetlang.optimize),
2119 ]
2120 ]
2120 if opts['no_optimized']:
2121 if opts['no_optimized']:
2121 stages = stages[:-1]
2122 stages = stages[:-1]
2122 if opts['verify_optimized'] and opts['no_optimized']:
2123 if opts['verify_optimized'] and opts['no_optimized']:
2123 raise error.Abort(_('cannot use --verify-optimized with '
2124 raise error.Abort(_('cannot use --verify-optimized with '
2124 '--no-optimized'))
2125 '--no-optimized'))
2125 stagenames = set(n for n, f in stages)
2126 stagenames = set(n for n, f in stages)
2126
2127
2127 showalways = set()
2128 showalways = set()
2128 showchanged = set()
2129 showchanged = set()
2129 if ui.verbose and not opts['show_stage']:
2130 if ui.verbose and not opts['show_stage']:
2130 # show parsed tree by --verbose (deprecated)
2131 # show parsed tree by --verbose (deprecated)
2131 showalways.add('parsed')
2132 showalways.add('parsed')
2132 showchanged.update(['expanded', 'concatenated'])
2133 showchanged.update(['expanded', 'concatenated'])
2133 if opts['optimize']:
2134 if opts['optimize']:
2134 showalways.add('optimized')
2135 showalways.add('optimized')
2135 if opts['show_stage'] and opts['optimize']:
2136 if opts['show_stage'] and opts['optimize']:
2136 raise error.Abort(_('cannot use --optimize with --show-stage'))
2137 raise error.Abort(_('cannot use --optimize with --show-stage'))
2137 if opts['show_stage'] == ['all']:
2138 if opts['show_stage'] == ['all']:
2138 showalways.update(stagenames)
2139 showalways.update(stagenames)
2139 else:
2140 else:
2140 for n in opts['show_stage']:
2141 for n in opts['show_stage']:
2141 if n not in stagenames:
2142 if n not in stagenames:
2142 raise error.Abort(_('invalid stage name: %s') % n)
2143 raise error.Abort(_('invalid stage name: %s') % n)
2143 showalways.update(opts['show_stage'])
2144 showalways.update(opts['show_stage'])
2144
2145
2145 treebystage = {}
2146 treebystage = {}
2146 printedtree = None
2147 printedtree = None
2147 tree = revsetlang.parse(expr, lookup=repo.__contains__)
2148 tree = revsetlang.parse(expr, lookup=repo.__contains__)
2148 for n, f in stages:
2149 for n, f in stages:
2149 treebystage[n] = tree = f(tree)
2150 treebystage[n] = tree = f(tree)
2150 if n in showalways or (n in showchanged and tree != printedtree):
2151 if n in showalways or (n in showchanged and tree != printedtree):
2151 if opts['show_stage'] or n != 'parsed':
2152 if opts['show_stage'] or n != 'parsed':
2152 ui.write(("* %s:\n") % n)
2153 ui.write(("* %s:\n") % n)
2153 ui.write(revsetlang.prettyformat(tree), "\n")
2154 ui.write(revsetlang.prettyformat(tree), "\n")
2154 printedtree = tree
2155 printedtree = tree
2155
2156
2156 if opts['verify_optimized']:
2157 if opts['verify_optimized']:
2157 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2158 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2158 brevs = revset.makematcher(treebystage['optimized'])(repo)
2159 brevs = revset.makematcher(treebystage['optimized'])(repo)
2159 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2160 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2160 ui.write(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
2161 ui.write(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
2161 ui.write(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
2162 ui.write(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
2162 arevs = list(arevs)
2163 arevs = list(arevs)
2163 brevs = list(brevs)
2164 brevs = list(brevs)
2164 if arevs == brevs:
2165 if arevs == brevs:
2165 return 0
2166 return 0
2166 ui.write(('--- analyzed\n'), label='diff.file_a')
2167 ui.write(('--- analyzed\n'), label='diff.file_a')
2167 ui.write(('+++ optimized\n'), label='diff.file_b')
2168 ui.write(('+++ optimized\n'), label='diff.file_b')
2168 sm = difflib.SequenceMatcher(None, arevs, brevs)
2169 sm = difflib.SequenceMatcher(None, arevs, brevs)
2169 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2170 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2170 if tag in ('delete', 'replace'):
2171 if tag in ('delete', 'replace'):
2171 for c in arevs[alo:ahi]:
2172 for c in arevs[alo:ahi]:
2172 ui.write('-%s\n' % c, label='diff.deleted')
2173 ui.write('-%s\n' % c, label='diff.deleted')
2173 if tag in ('insert', 'replace'):
2174 if tag in ('insert', 'replace'):
2174 for c in brevs[blo:bhi]:
2175 for c in brevs[blo:bhi]:
2175 ui.write('+%s\n' % c, label='diff.inserted')
2176 ui.write('+%s\n' % c, label='diff.inserted')
2176 if tag == 'equal':
2177 if tag == 'equal':
2177 for c in arevs[alo:ahi]:
2178 for c in arevs[alo:ahi]:
2178 ui.write(' %s\n' % c)
2179 ui.write(' %s\n' % c)
2179 return 1
2180 return 1
2180
2181
2181 func = revset.makematcher(tree)
2182 func = revset.makematcher(tree)
2182 revs = func(repo)
2183 revs = func(repo)
2183 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2184 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2184 ui.write(("* set:\n"), smartset.prettyformat(revs), "\n")
2185 ui.write(("* set:\n"), smartset.prettyformat(revs), "\n")
2185 if not opts['show_revs']:
2186 if not opts['show_revs']:
2186 return
2187 return
2187 for c in revs:
2188 for c in revs:
2188 ui.write("%s\n" % c)
2189 ui.write("%s\n" % c)
2189
2190
2190 @command('debugsetparents', [], _('REV1 [REV2]'))
2191 @command('debugsetparents', [], _('REV1 [REV2]'))
2191 def debugsetparents(ui, repo, rev1, rev2=None):
2192 def debugsetparents(ui, repo, rev1, rev2=None):
2192 """manually set the parents of the current working directory
2193 """manually set the parents of the current working directory
2193
2194
2194 This is useful for writing repository conversion tools, but should
2195 This is useful for writing repository conversion tools, but should
2195 be used with care. For example, neither the working directory nor the
2196 be used with care. For example, neither the working directory nor the
2196 dirstate is updated, so file status may be incorrect after running this
2197 dirstate is updated, so file status may be incorrect after running this
2197 command.
2198 command.
2198
2199
2199 Returns 0 on success.
2200 Returns 0 on success.
2200 """
2201 """
2201
2202
2202 r1 = scmutil.revsingle(repo, rev1).node()
2203 r1 = scmutil.revsingle(repo, rev1).node()
2203 r2 = scmutil.revsingle(repo, rev2, 'null').node()
2204 r2 = scmutil.revsingle(repo, rev2, 'null').node()
2204
2205
2205 with repo.wlock():
2206 with repo.wlock():
2206 repo.setparents(r1, r2)
2207 repo.setparents(r1, r2)
2207
2208
2208 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2209 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2209 def debugssl(ui, repo, source=None, **opts):
2210 def debugssl(ui, repo, source=None, **opts):
2210 '''test a secure connection to a server
2211 '''test a secure connection to a server
2211
2212
2212 This builds the certificate chain for the server on Windows, installing the
2213 This builds the certificate chain for the server on Windows, installing the
2213 missing intermediates and trusted root via Windows Update if necessary. It
2214 missing intermediates and trusted root via Windows Update if necessary. It
2214 does nothing on other platforms.
2215 does nothing on other platforms.
2215
2216
2216 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2217 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2217 that server is used. See :hg:`help urls` for more information.
2218 that server is used. See :hg:`help urls` for more information.
2218
2219
2219 If the update succeeds, retry the original operation. Otherwise, the cause
2220 If the update succeeds, retry the original operation. Otherwise, the cause
2220 of the SSL error is likely another issue.
2221 of the SSL error is likely another issue.
2221 '''
2222 '''
2222 if not pycompat.iswindows:
2223 if not pycompat.iswindows:
2223 raise error.Abort(_('certificate chain building is only possible on '
2224 raise error.Abort(_('certificate chain building is only possible on '
2224 'Windows'))
2225 'Windows'))
2225
2226
2226 if not source:
2227 if not source:
2227 if not repo:
2228 if not repo:
2228 raise error.Abort(_("there is no Mercurial repository here, and no "
2229 raise error.Abort(_("there is no Mercurial repository here, and no "
2229 "server specified"))
2230 "server specified"))
2230 source = "default"
2231 source = "default"
2231
2232
2232 source, branches = hg.parseurl(ui.expandpath(source))
2233 source, branches = hg.parseurl(ui.expandpath(source))
2233 url = util.url(source)
2234 url = util.url(source)
2234 addr = None
2235 addr = None
2235
2236
2236 if url.scheme == 'https':
2237 if url.scheme == 'https':
2237 addr = (url.host, url.port or 443)
2238 addr = (url.host, url.port or 443)
2238 elif url.scheme == 'ssh':
2239 elif url.scheme == 'ssh':
2239 addr = (url.host, url.port or 22)
2240 addr = (url.host, url.port or 22)
2240 else:
2241 else:
2241 raise error.Abort(_("only https and ssh connections are supported"))
2242 raise error.Abort(_("only https and ssh connections are supported"))
2242
2243
2243 from . import win32
2244 from . import win32
2244
2245
2245 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2246 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2246 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2247 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2247
2248
2248 try:
2249 try:
2249 s.connect(addr)
2250 s.connect(addr)
2250 cert = s.getpeercert(True)
2251 cert = s.getpeercert(True)
2251
2252
2252 ui.status(_('checking the certificate chain for %s\n') % url.host)
2253 ui.status(_('checking the certificate chain for %s\n') % url.host)
2253
2254
2254 complete = win32.checkcertificatechain(cert, build=False)
2255 complete = win32.checkcertificatechain(cert, build=False)
2255
2256
2256 if not complete:
2257 if not complete:
2257 ui.status(_('certificate chain is incomplete, updating... '))
2258 ui.status(_('certificate chain is incomplete, updating... '))
2258
2259
2259 if not win32.checkcertificatechain(cert):
2260 if not win32.checkcertificatechain(cert):
2260 ui.status(_('failed.\n'))
2261 ui.status(_('failed.\n'))
2261 else:
2262 else:
2262 ui.status(_('done.\n'))
2263 ui.status(_('done.\n'))
2263 else:
2264 else:
2264 ui.status(_('full certificate chain is available\n'))
2265 ui.status(_('full certificate chain is available\n'))
2265 finally:
2266 finally:
2266 s.close()
2267 s.close()
2267
2268
2268 @command('debugsub',
2269 @command('debugsub',
2269 [('r', 'rev', '',
2270 [('r', 'rev', '',
2270 _('revision to check'), _('REV'))],
2271 _('revision to check'), _('REV'))],
2271 _('[-r REV] [REV]'))
2272 _('[-r REV] [REV]'))
2272 def debugsub(ui, repo, rev=None):
2273 def debugsub(ui, repo, rev=None):
2273 ctx = scmutil.revsingle(repo, rev, None)
2274 ctx = scmutil.revsingle(repo, rev, None)
2274 for k, v in sorted(ctx.substate.items()):
2275 for k, v in sorted(ctx.substate.items()):
2275 ui.write(('path %s\n') % k)
2276 ui.write(('path %s\n') % k)
2276 ui.write((' source %s\n') % v[0])
2277 ui.write((' source %s\n') % v[0])
2277 ui.write((' revision %s\n') % v[1])
2278 ui.write((' revision %s\n') % v[1])
2278
2279
2279 @command('debugsuccessorssets',
2280 @command('debugsuccessorssets',
2280 [('', 'closest', False, _('return closest successors sets only'))],
2281 [('', 'closest', False, _('return closest successors sets only'))],
2281 _('[REV]'))
2282 _('[REV]'))
2282 def debugsuccessorssets(ui, repo, *revs, **opts):
2283 def debugsuccessorssets(ui, repo, *revs, **opts):
2283 """show set of successors for revision
2284 """show set of successors for revision
2284
2285
2285 A successors set of changeset A is a consistent group of revisions that
2286 A successors set of changeset A is a consistent group of revisions that
2286 succeed A. It contains non-obsolete changesets only unless closests
2287 succeed A. It contains non-obsolete changesets only unless closests
2287 successors set is set.
2288 successors set is set.
2288
2289
2289 In most cases a changeset A has a single successors set containing a single
2290 In most cases a changeset A has a single successors set containing a single
2290 successor (changeset A replaced by A').
2291 successor (changeset A replaced by A').
2291
2292
2292 A changeset that is made obsolete with no successors are called "pruned".
2293 A changeset that is made obsolete with no successors are called "pruned".
2293 Such changesets have no successors sets at all.
2294 Such changesets have no successors sets at all.
2294
2295
2295 A changeset that has been "split" will have a successors set containing
2296 A changeset that has been "split" will have a successors set containing
2296 more than one successor.
2297 more than one successor.
2297
2298
2298 A changeset that has been rewritten in multiple different ways is called
2299 A changeset that has been rewritten in multiple different ways is called
2299 "divergent". Such changesets have multiple successor sets (each of which
2300 "divergent". Such changesets have multiple successor sets (each of which
2300 may also be split, i.e. have multiple successors).
2301 may also be split, i.e. have multiple successors).
2301
2302
2302 Results are displayed as follows::
2303 Results are displayed as follows::
2303
2304
2304 <rev1>
2305 <rev1>
2305 <successors-1A>
2306 <successors-1A>
2306 <rev2>
2307 <rev2>
2307 <successors-2A>
2308 <successors-2A>
2308 <successors-2B1> <successors-2B2> <successors-2B3>
2309 <successors-2B1> <successors-2B2> <successors-2B3>
2309
2310
2310 Here rev2 has two possible (i.e. divergent) successors sets. The first
2311 Here rev2 has two possible (i.e. divergent) successors sets. The first
2311 holds one element, whereas the second holds three (i.e. the changeset has
2312 holds one element, whereas the second holds three (i.e. the changeset has
2312 been split).
2313 been split).
2313 """
2314 """
2314 # passed to successorssets caching computation from one call to another
2315 # passed to successorssets caching computation from one call to another
2315 cache = {}
2316 cache = {}
2316 ctx2str = str
2317 ctx2str = str
2317 node2str = short
2318 node2str = short
2318 if ui.debug():
2319 if ui.debug():
2319 def ctx2str(ctx):
2320 def ctx2str(ctx):
2320 return ctx.hex()
2321 return ctx.hex()
2321 node2str = hex
2322 node2str = hex
2322 for rev in scmutil.revrange(repo, revs):
2323 for rev in scmutil.revrange(repo, revs):
2323 ctx = repo[rev]
2324 ctx = repo[rev]
2324 ui.write('%s\n'% ctx2str(ctx))
2325 ui.write('%s\n'% ctx2str(ctx))
2325 for succsset in obsutil.successorssets(repo, ctx.node(),
2326 for succsset in obsutil.successorssets(repo, ctx.node(),
2326 closest=opts['closest'],
2327 closest=opts['closest'],
2327 cache=cache):
2328 cache=cache):
2328 if succsset:
2329 if succsset:
2329 ui.write(' ')
2330 ui.write(' ')
2330 ui.write(node2str(succsset[0]))
2331 ui.write(node2str(succsset[0]))
2331 for node in succsset[1:]:
2332 for node in succsset[1:]:
2332 ui.write(' ')
2333 ui.write(' ')
2333 ui.write(node2str(node))
2334 ui.write(node2str(node))
2334 ui.write('\n')
2335 ui.write('\n')
2335
2336
2336 @command('debugtemplate',
2337 @command('debugtemplate',
2337 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2338 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2338 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2339 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2339 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2340 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2340 optionalrepo=True)
2341 optionalrepo=True)
2341 def debugtemplate(ui, repo, tmpl, **opts):
2342 def debugtemplate(ui, repo, tmpl, **opts):
2342 """parse and apply a template
2343 """parse and apply a template
2343
2344
2344 If -r/--rev is given, the template is processed as a log template and
2345 If -r/--rev is given, the template is processed as a log template and
2345 applied to the given changesets. Otherwise, it is processed as a generic
2346 applied to the given changesets. Otherwise, it is processed as a generic
2346 template.
2347 template.
2347
2348
2348 Use --verbose to print the parsed tree.
2349 Use --verbose to print the parsed tree.
2349 """
2350 """
2350 revs = None
2351 revs = None
2351 if opts[r'rev']:
2352 if opts[r'rev']:
2352 if repo is None:
2353 if repo is None:
2353 raise error.RepoError(_('there is no Mercurial repository here '
2354 raise error.RepoError(_('there is no Mercurial repository here '
2354 '(.hg not found)'))
2355 '(.hg not found)'))
2355 revs = scmutil.revrange(repo, opts[r'rev'])
2356 revs = scmutil.revrange(repo, opts[r'rev'])
2356
2357
2357 props = {}
2358 props = {}
2358 for d in opts[r'define']:
2359 for d in opts[r'define']:
2359 try:
2360 try:
2360 k, v = (e.strip() for e in d.split('=', 1))
2361 k, v = (e.strip() for e in d.split('=', 1))
2361 if not k or k == 'ui':
2362 if not k or k == 'ui':
2362 raise ValueError
2363 raise ValueError
2363 props[k] = v
2364 props[k] = v
2364 except ValueError:
2365 except ValueError:
2365 raise error.Abort(_('malformed keyword definition: %s') % d)
2366 raise error.Abort(_('malformed keyword definition: %s') % d)
2366
2367
2367 if ui.verbose:
2368 if ui.verbose:
2368 aliases = ui.configitems('templatealias')
2369 aliases = ui.configitems('templatealias')
2369 tree = templater.parse(tmpl)
2370 tree = templater.parse(tmpl)
2370 ui.note(templater.prettyformat(tree), '\n')
2371 ui.note(templater.prettyformat(tree), '\n')
2371 newtree = templater.expandaliases(tree, aliases)
2372 newtree = templater.expandaliases(tree, aliases)
2372 if newtree != tree:
2373 if newtree != tree:
2373 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2374 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2374
2375
2375 if revs is None:
2376 if revs is None:
2376 t = formatter.maketemplater(ui, tmpl)
2377 t = formatter.maketemplater(ui, tmpl)
2377 props['ui'] = ui
2378 props['ui'] = ui
2378 ui.write(t.render(props))
2379 ui.write(t.render(props))
2379 else:
2380 else:
2380 displayer = cmdutil.makelogtemplater(ui, repo, tmpl)
2381 displayer = cmdutil.makelogtemplater(ui, repo, tmpl)
2381 for r in revs:
2382 for r in revs:
2382 displayer.show(repo[r], **pycompat.strkwargs(props))
2383 displayer.show(repo[r], **pycompat.strkwargs(props))
2383 displayer.close()
2384 displayer.close()
2384
2385
2385 @command('debugupdatecaches', [])
2386 @command('debugupdatecaches', [])
2386 def debugupdatecaches(ui, repo, *pats, **opts):
2387 def debugupdatecaches(ui, repo, *pats, **opts):
2387 """warm all known caches in the repository"""
2388 """warm all known caches in the repository"""
2388 with repo.wlock(), repo.lock():
2389 with repo.wlock(), repo.lock():
2389 repo.updatecaches()
2390 repo.updatecaches()
2390
2391
2391 @command('debugupgraderepo', [
2392 @command('debugupgraderepo', [
2392 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2393 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2393 ('', 'run', False, _('performs an upgrade')),
2394 ('', 'run', False, _('performs an upgrade')),
2394 ])
2395 ])
2395 def debugupgraderepo(ui, repo, run=False, optimize=None):
2396 def debugupgraderepo(ui, repo, run=False, optimize=None):
2396 """upgrade a repository to use different features
2397 """upgrade a repository to use different features
2397
2398
2398 If no arguments are specified, the repository is evaluated for upgrade
2399 If no arguments are specified, the repository is evaluated for upgrade
2399 and a list of problems and potential optimizations is printed.
2400 and a list of problems and potential optimizations is printed.
2400
2401
2401 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2402 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2402 can be influenced via additional arguments. More details will be provided
2403 can be influenced via additional arguments. More details will be provided
2403 by the command output when run without ``--run``.
2404 by the command output when run without ``--run``.
2404
2405
2405 During the upgrade, the repository will be locked and no writes will be
2406 During the upgrade, the repository will be locked and no writes will be
2406 allowed.
2407 allowed.
2407
2408
2408 At the end of the upgrade, the repository may not be readable while new
2409 At the end of the upgrade, the repository may not be readable while new
2409 repository data is swapped in. This window will be as long as it takes to
2410 repository data is swapped in. This window will be as long as it takes to
2410 rename some directories inside the ``.hg`` directory. On most machines, this
2411 rename some directories inside the ``.hg`` directory. On most machines, this
2411 should complete almost instantaneously and the chances of a consumer being
2412 should complete almost instantaneously and the chances of a consumer being
2412 unable to access the repository should be low.
2413 unable to access the repository should be low.
2413 """
2414 """
2414 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2415 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2415
2416
2416 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2417 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2417 inferrepo=True)
2418 inferrepo=True)
2418 def debugwalk(ui, repo, *pats, **opts):
2419 def debugwalk(ui, repo, *pats, **opts):
2419 """show how files match on given patterns"""
2420 """show how files match on given patterns"""
2420 opts = pycompat.byteskwargs(opts)
2421 opts = pycompat.byteskwargs(opts)
2421 m = scmutil.match(repo[None], pats, opts)
2422 m = scmutil.match(repo[None], pats, opts)
2422 ui.write(('matcher: %r\n' % m))
2423 ui.write(('matcher: %r\n' % m))
2423 items = list(repo[None].walk(m))
2424 items = list(repo[None].walk(m))
2424 if not items:
2425 if not items:
2425 return
2426 return
2426 f = lambda fn: fn
2427 f = lambda fn: fn
2427 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2428 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2428 f = lambda fn: util.normpath(fn)
2429 f = lambda fn: util.normpath(fn)
2429 fmt = 'f %%-%ds %%-%ds %%s' % (
2430 fmt = 'f %%-%ds %%-%ds %%s' % (
2430 max([len(abs) for abs in items]),
2431 max([len(abs) for abs in items]),
2431 max([len(m.rel(abs)) for abs in items]))
2432 max([len(m.rel(abs)) for abs in items]))
2432 for abs in items:
2433 for abs in items:
2433 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2434 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2434 ui.write("%s\n" % line.rstrip())
2435 ui.write("%s\n" % line.rstrip())
2435
2436
2436 @command('debugwireargs',
2437 @command('debugwireargs',
2437 [('', 'three', '', 'three'),
2438 [('', 'three', '', 'three'),
2438 ('', 'four', '', 'four'),
2439 ('', 'four', '', 'four'),
2439 ('', 'five', '', 'five'),
2440 ('', 'five', '', 'five'),
2440 ] + cmdutil.remoteopts,
2441 ] + cmdutil.remoteopts,
2441 _('REPO [OPTIONS]... [ONE [TWO]]'),
2442 _('REPO [OPTIONS]... [ONE [TWO]]'),
2442 norepo=True)
2443 norepo=True)
2443 def debugwireargs(ui, repopath, *vals, **opts):
2444 def debugwireargs(ui, repopath, *vals, **opts):
2444 opts = pycompat.byteskwargs(opts)
2445 opts = pycompat.byteskwargs(opts)
2445 repo = hg.peer(ui, opts, repopath)
2446 repo = hg.peer(ui, opts, repopath)
2446 for opt in cmdutil.remoteopts:
2447 for opt in cmdutil.remoteopts:
2447 del opts[opt[1]]
2448 del opts[opt[1]]
2448 args = {}
2449 args = {}
2449 for k, v in opts.iteritems():
2450 for k, v in opts.iteritems():
2450 if v:
2451 if v:
2451 args[k] = v
2452 args[k] = v
2452 # run twice to check that we don't mess up the stream for the next command
2453 # run twice to check that we don't mess up the stream for the next command
2453 res1 = repo.debugwireargs(*vals, **args)
2454 res1 = repo.debugwireargs(*vals, **args)
2454 res2 = repo.debugwireargs(*vals, **args)
2455 res2 = repo.debugwireargs(*vals, **args)
2455 ui.write("%s\n" % res1)
2456 ui.write("%s\n" % res1)
2456 if res1 != res2:
2457 if res1 != res2:
2457 ui.warn("%s\n" % res2)
2458 ui.warn("%s\n" % res2)
@@ -1,831 +1,834 b''
1 commit date test
1 commit date test
2
2
3 $ hg init test
3 $ hg init test
4 $ cd test
4 $ cd test
5 $ echo foo > foo
5 $ echo foo > foo
6 $ hg add foo
6 $ hg add foo
7 $ cat > $TESTTMP/checkeditform.sh <<EOF
7 $ cat > $TESTTMP/checkeditform.sh <<EOF
8 > env | grep HGEDITFORM
8 > env | grep HGEDITFORM
9 > true
9 > true
10 > EOF
10 > EOF
11 $ HGEDITOR="sh $TESTTMP/checkeditform.sh" hg commit -m ""
11 $ HGEDITOR="sh $TESTTMP/checkeditform.sh" hg commit -m ""
12 HGEDITFORM=commit.normal.normal
12 HGEDITFORM=commit.normal.normal
13 abort: empty commit message
13 abort: empty commit message
14 [255]
14 [255]
15 $ hg commit -d '0 0' -m commit-1
15 $ hg commit -d '0 0' -m commit-1
16 $ echo foo >> foo
16 $ echo foo >> foo
17 $ hg commit -d '1 4444444' -m commit-3
17 $ hg commit -d '1 4444444' -m commit-3
18 hg: parse error: impossible time zone offset: 4444444
18 hg: parse error: impossible time zone offset: 4444444
19 [255]
19 [255]
20 $ hg commit -d '1 15.1' -m commit-4
20 $ hg commit -d '1 15.1' -m commit-4
21 hg: parse error: invalid date: '1\t15.1'
21 hg: parse error: invalid date: '1\t15.1'
22 [255]
22 [255]
23 $ hg commit -d 'foo bar' -m commit-5
23 $ hg commit -d 'foo bar' -m commit-5
24 hg: parse error: invalid date: 'foo bar'
24 hg: parse error: invalid date: 'foo bar'
25 [255]
25 [255]
26 $ hg commit -d ' 1 4444' -m commit-6
26 $ hg commit -d ' 1 4444' -m commit-6
27 $ hg commit -d '111111111111 0' -m commit-7
27 $ hg commit -d '111111111111 0' -m commit-7
28 hg: parse error: date exceeds 32 bits: 111111111111
28 hg: parse error: date exceeds 32 bits: 111111111111
29 [255]
29 [255]
30 $ hg commit -d '-111111111111 0' -m commit-7
30 $ hg commit -d '-111111111111 0' -m commit-7
31 hg: parse error: date exceeds 32 bits: -111111111111
31 hg: parse error: date exceeds 32 bits: -111111111111
32 [255]
32 [255]
33 $ echo foo >> foo
33 $ echo foo >> foo
34 $ hg commit -d '1901-12-13 20:45:52 +0000' -m commit-7-2
34 $ hg commit -d '1901-12-13 20:45:52 +0000' -m commit-7-2
35 $ echo foo >> foo
35 $ echo foo >> foo
36 $ hg commit -d '-2147483648 0' -m commit-7-3
36 $ hg commit -d '-2147483648 0' -m commit-7-3
37 $ hg log -T '{rev} {date|isodatesec}\n' -l2
37 $ hg log -T '{rev} {date|isodatesec}\n' -l2
38 3 1901-12-13 20:45:52 +0000
38 3 1901-12-13 20:45:52 +0000
39 2 1901-12-13 20:45:52 +0000
39 2 1901-12-13 20:45:52 +0000
40 $ hg commit -d '1901-12-13 20:45:51 +0000' -m commit-7
40 $ hg commit -d '1901-12-13 20:45:51 +0000' -m commit-7
41 hg: parse error: date exceeds 32 bits: -2147483649
41 hg: parse error: date exceeds 32 bits: -2147483649
42 [255]
42 [255]
43 $ hg commit -d '-2147483649 0' -m commit-7
43 $ hg commit -d '-2147483649 0' -m commit-7
44 hg: parse error: date exceeds 32 bits: -2147483649
44 hg: parse error: date exceeds 32 bits: -2147483649
45 [255]
45 [255]
46
46
47 commit added file that has been deleted
47 commit added file that has been deleted
48
48
49 $ echo bar > bar
49 $ echo bar > bar
50 $ hg add bar
50 $ hg add bar
51 $ rm bar
51 $ rm bar
52 $ hg commit -m commit-8
52 $ hg commit -m commit-8
53 nothing changed (1 missing files, see 'hg status')
53 nothing changed (1 missing files, see 'hg status')
54 [1]
54 [1]
55 $ hg commit -m commit-8-2 bar
55 $ hg commit -m commit-8-2 bar
56 abort: bar: file not found!
56 abort: bar: file not found!
57 [255]
57 [255]
58
58
59 $ hg -q revert -a --no-backup
59 $ hg -q revert -a --no-backup
60
60
61 $ mkdir dir
61 $ mkdir dir
62 $ echo boo > dir/file
62 $ echo boo > dir/file
63 $ hg add
63 $ hg add
64 adding dir/file
64 adding dir/file
65 $ hg -v commit -m commit-9 dir
65 $ hg -v commit -m commit-9 dir
66 committing files:
66 committing files:
67 dir/file
67 dir/file
68 committing manifest
68 committing manifest
69 committing changelog
69 committing changelog
70 committed changeset 4:1957363f1ced
70 committed changeset 4:1957363f1ced
71
71
72 $ echo > dir.file
72 $ echo > dir.file
73 $ hg add
73 $ hg add
74 adding dir.file
74 adding dir.file
75 $ hg commit -m commit-10 dir dir.file
75 $ hg commit -m commit-10 dir dir.file
76 abort: dir: no match under directory!
76 abort: dir: no match under directory!
77 [255]
77 [255]
78
78
79 $ echo >> dir/file
79 $ echo >> dir/file
80 $ mkdir bleh
80 $ mkdir bleh
81 $ mkdir dir2
81 $ mkdir dir2
82 $ cd bleh
82 $ cd bleh
83 $ hg commit -m commit-11 .
83 $ hg commit -m commit-11 .
84 abort: bleh: no match under directory!
84 abort: bleh: no match under directory!
85 [255]
85 [255]
86 $ hg commit -m commit-12 ../dir ../dir2
86 $ hg commit -m commit-12 ../dir ../dir2
87 abort: dir2: no match under directory!
87 abort: dir2: no match under directory!
88 [255]
88 [255]
89 $ hg -v commit -m commit-13 ../dir
89 $ hg -v commit -m commit-13 ../dir
90 committing files:
90 committing files:
91 dir/file
91 dir/file
92 committing manifest
92 committing manifest
93 committing changelog
93 committing changelog
94 committed changeset 5:a31d8f87544a
94 committed changeset 5:a31d8f87544a
95 $ cd ..
95 $ cd ..
96
96
97 $ hg commit -m commit-14 does-not-exist
97 $ hg commit -m commit-14 does-not-exist
98 abort: does-not-exist: * (glob)
98 abort: does-not-exist: * (glob)
99 [255]
99 [255]
100
100
101 #if symlink
101 #if symlink
102 $ ln -s foo baz
102 $ ln -s foo baz
103 $ hg commit -m commit-15 baz
103 $ hg commit -m commit-15 baz
104 abort: baz: file not tracked!
104 abort: baz: file not tracked!
105 [255]
105 [255]
106 $ rm baz
106 $ rm baz
107 #endif
107 #endif
108
108
109 $ touch quux
109 $ touch quux
110 $ hg commit -m commit-16 quux
110 $ hg commit -m commit-16 quux
111 abort: quux: file not tracked!
111 abort: quux: file not tracked!
112 [255]
112 [255]
113 $ echo >> dir/file
113 $ echo >> dir/file
114 $ hg -v commit -m commit-17 dir/file
114 $ hg -v commit -m commit-17 dir/file
115 committing files:
115 committing files:
116 dir/file
116 dir/file
117 committing manifest
117 committing manifest
118 committing changelog
118 committing changelog
119 committed changeset 6:32d054c9d085
119 committed changeset 6:32d054c9d085
120
120
121 An empty date was interpreted as epoch origin
121 An empty date was interpreted as epoch origin
122
122
123 $ echo foo >> foo
123 $ echo foo >> foo
124 $ hg commit -d '' -m commit-no-date --config devel.default-date=
124 $ hg commit -d '' -m commit-no-date --config devel.default-date=
125 $ hg tip --template '{date|isodate}\n' | grep '1970'
125 $ hg tip --template '{date|isodate}\n' | grep '1970'
126 [1]
126 [1]
127
127
128 Using the advanced --extra flag
128 Using the advanced --extra flag
129
129
130 $ echo "[extensions]" >> $HGRCPATH
130 $ echo "[extensions]" >> $HGRCPATH
131 $ echo "commitextras=" >> $HGRCPATH
131 $ echo "commitextras=" >> $HGRCPATH
132 $ hg status
132 $ hg status
133 ? quux
133 ? quux
134 $ hg add quux
134 $ hg add quux
135 $ hg commit -m "adding internal used extras" --extra amend_source=hash
135 $ hg commit -m "adding internal used extras" --extra amend_source=hash
136 abort: key 'amend_source' is used internally, can't be set manually
136 abort: key 'amend_source' is used internally, can't be set manually
137 [255]
137 [255]
138 $ hg commit -m "special chars in extra" --extra id@phab=214
138 $ hg commit -m "special chars in extra" --extra id@phab=214
139 abort: keys can only contain ascii letters, digits, '_' and '-'
139 abort: keys can only contain ascii letters, digits, '_' and '-'
140 [255]
140 [255]
141 $ hg commit -m "empty key" --extra =value
141 $ hg commit -m "empty key" --extra =value
142 abort: unable to parse '=value', keys can't be empty
142 abort: unable to parse '=value', keys can't be empty
143 [255]
143 [255]
144 $ hg commit -m "adding extras" --extra sourcehash=foo --extra oldhash=bar
144 $ hg commit -m "adding extras" --extra sourcehash=foo --extra oldhash=bar
145 $ hg log -r . -T '{extras % "{extra}\n"}'
145 $ hg log -r . -T '{extras % "{extra}\n"}'
146 branch=default
146 branch=default
147 oldhash=bar
147 oldhash=bar
148 sourcehash=foo
148 sourcehash=foo
149
149
150 Failed commit with --addremove should not update dirstate
150 Failed commit with --addremove should not update dirstate
151
151
152 $ echo foo > newfile
152 $ echo foo > newfile
153 $ hg status
153 $ hg status
154 ? newfile
154 ? newfile
155 $ HGEDITOR=false hg ci --addremove
155 $ HGEDITOR=false hg ci --addremove
156 adding newfile
156 adding newfile
157 abort: edit failed: false exited with status 1
157 abort: edit failed: false exited with status 1
158 [255]
158 [255]
159 $ hg status
159 $ hg status
160 ? newfile
160 ? newfile
161
161
162 Make sure we do not obscure unknown requires file entries (issue2649)
162 Make sure we do not obscure unknown requires file entries (issue2649)
163
163
164 $ echo foo >> foo
164 $ echo foo >> foo
165 $ echo fake >> .hg/requires
165 $ echo fake >> .hg/requires
166 $ hg commit -m bla
166 $ hg commit -m bla
167 abort: repository requires features unknown to this Mercurial: fake!
167 abort: repository requires features unknown to this Mercurial: fake!
168 (see https://mercurial-scm.org/wiki/MissingRequirement for more information)
168 (see https://mercurial-scm.org/wiki/MissingRequirement for more information)
169 [255]
169 [255]
170
170
171 $ cd ..
171 $ cd ..
172
172
173
173
174 partial subdir commit test
174 partial subdir commit test
175
175
176 $ hg init test2
176 $ hg init test2
177 $ cd test2
177 $ cd test2
178 $ mkdir foo
178 $ mkdir foo
179 $ echo foo > foo/foo
179 $ echo foo > foo/foo
180 $ mkdir bar
180 $ mkdir bar
181 $ echo bar > bar/bar
181 $ echo bar > bar/bar
182 $ hg add
182 $ hg add
183 adding bar/bar
183 adding bar/bar
184 adding foo/foo
184 adding foo/foo
185 $ HGEDITOR=cat hg ci -e -m commit-subdir-1 foo
185 $ HGEDITOR=cat hg ci -e -m commit-subdir-1 foo
186 commit-subdir-1
186 commit-subdir-1
187
187
188
188
189 HG: Enter commit message. Lines beginning with 'HG:' are removed.
189 HG: Enter commit message. Lines beginning with 'HG:' are removed.
190 HG: Leave message empty to abort commit.
190 HG: Leave message empty to abort commit.
191 HG: --
191 HG: --
192 HG: user: test
192 HG: user: test
193 HG: branch 'default'
193 HG: branch 'default'
194 HG: added foo/foo
194 HG: added foo/foo
195
195
196
196
197 $ hg ci -m commit-subdir-2 bar
197 $ hg ci -m commit-subdir-2 bar
198
198
199 subdir log 1
199 subdir log 1
200
200
201 $ hg log -v foo
201 $ hg log -v foo
202 changeset: 0:f97e73a25882
202 changeset: 0:f97e73a25882
203 user: test
203 user: test
204 date: Thu Jan 01 00:00:00 1970 +0000
204 date: Thu Jan 01 00:00:00 1970 +0000
205 files: foo/foo
205 files: foo/foo
206 description:
206 description:
207 commit-subdir-1
207 commit-subdir-1
208
208
209
209
210
210
211 subdir log 2
211 subdir log 2
212
212
213 $ hg log -v bar
213 $ hg log -v bar
214 changeset: 1:aa809156d50d
214 changeset: 1:aa809156d50d
215 tag: tip
215 tag: tip
216 user: test
216 user: test
217 date: Thu Jan 01 00:00:00 1970 +0000
217 date: Thu Jan 01 00:00:00 1970 +0000
218 files: bar/bar
218 files: bar/bar
219 description:
219 description:
220 commit-subdir-2
220 commit-subdir-2
221
221
222
222
223
223
224 full log
224 full log
225
225
226 $ hg log -v
226 $ hg log -v
227 changeset: 1:aa809156d50d
227 changeset: 1:aa809156d50d
228 tag: tip
228 tag: tip
229 user: test
229 user: test
230 date: Thu Jan 01 00:00:00 1970 +0000
230 date: Thu Jan 01 00:00:00 1970 +0000
231 files: bar/bar
231 files: bar/bar
232 description:
232 description:
233 commit-subdir-2
233 commit-subdir-2
234
234
235
235
236 changeset: 0:f97e73a25882
236 changeset: 0:f97e73a25882
237 user: test
237 user: test
238 date: Thu Jan 01 00:00:00 1970 +0000
238 date: Thu Jan 01 00:00:00 1970 +0000
239 files: foo/foo
239 files: foo/foo
240 description:
240 description:
241 commit-subdir-1
241 commit-subdir-1
242
242
243
243
244 $ cd ..
244 $ cd ..
245
245
246
246
247 dot and subdir commit test
247 dot and subdir commit test
248
248
249 $ hg init test3
249 $ hg init test3
250 $ echo commit-foo-subdir > commit-log-test
250 $ echo commit-foo-subdir > commit-log-test
251 $ cd test3
251 $ cd test3
252 $ mkdir foo
252 $ mkdir foo
253 $ echo foo content > foo/plain-file
253 $ echo foo content > foo/plain-file
254 $ hg add foo/plain-file
254 $ hg add foo/plain-file
255 $ HGEDITOR=cat hg ci --edit -l ../commit-log-test foo
255 $ HGEDITOR=cat hg ci --edit -l ../commit-log-test foo
256 commit-foo-subdir
256 commit-foo-subdir
257
257
258
258
259 HG: Enter commit message. Lines beginning with 'HG:' are removed.
259 HG: Enter commit message. Lines beginning with 'HG:' are removed.
260 HG: Leave message empty to abort commit.
260 HG: Leave message empty to abort commit.
261 HG: --
261 HG: --
262 HG: user: test
262 HG: user: test
263 HG: branch 'default'
263 HG: branch 'default'
264 HG: added foo/plain-file
264 HG: added foo/plain-file
265
265
266
266
267 $ echo modified foo content > foo/plain-file
267 $ echo modified foo content > foo/plain-file
268 $ hg ci -m commit-foo-dot .
268 $ hg ci -m commit-foo-dot .
269
269
270 full log
270 full log
271
271
272 $ hg log -v
272 $ hg log -v
273 changeset: 1:95b38e3a5b2e
273 changeset: 1:95b38e3a5b2e
274 tag: tip
274 tag: tip
275 user: test
275 user: test
276 date: Thu Jan 01 00:00:00 1970 +0000
276 date: Thu Jan 01 00:00:00 1970 +0000
277 files: foo/plain-file
277 files: foo/plain-file
278 description:
278 description:
279 commit-foo-dot
279 commit-foo-dot
280
280
281
281
282 changeset: 0:65d4e9386227
282 changeset: 0:65d4e9386227
283 user: test
283 user: test
284 date: Thu Jan 01 00:00:00 1970 +0000
284 date: Thu Jan 01 00:00:00 1970 +0000
285 files: foo/plain-file
285 files: foo/plain-file
286 description:
286 description:
287 commit-foo-subdir
287 commit-foo-subdir
288
288
289
289
290
290
291 subdir log
291 subdir log
292
292
293 $ cd foo
293 $ cd foo
294 $ hg log .
294 $ hg log .
295 changeset: 1:95b38e3a5b2e
295 changeset: 1:95b38e3a5b2e
296 tag: tip
296 tag: tip
297 user: test
297 user: test
298 date: Thu Jan 01 00:00:00 1970 +0000
298 date: Thu Jan 01 00:00:00 1970 +0000
299 summary: commit-foo-dot
299 summary: commit-foo-dot
300
300
301 changeset: 0:65d4e9386227
301 changeset: 0:65d4e9386227
302 user: test
302 user: test
303 date: Thu Jan 01 00:00:00 1970 +0000
303 date: Thu Jan 01 00:00:00 1970 +0000
304 summary: commit-foo-subdir
304 summary: commit-foo-subdir
305
305
306 $ cd ..
306 $ cd ..
307 $ cd ..
307 $ cd ..
308
308
309 Issue1049: Hg permits partial commit of merge without warning
309 Issue1049: Hg permits partial commit of merge without warning
310
310
311 $ hg init issue1049
311 $ hg init issue1049
312 $ cd issue1049
312 $ cd issue1049
313 $ echo a > a
313 $ echo a > a
314 $ hg ci -Ama
314 $ hg ci -Ama
315 adding a
315 adding a
316 $ echo a >> a
316 $ echo a >> a
317 $ hg ci -mb
317 $ hg ci -mb
318 $ hg up 0
318 $ hg up 0
319 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
319 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
320 $ echo b >> a
320 $ echo b >> a
321 $ hg ci -mc
321 $ hg ci -mc
322 created new head
322 created new head
323 $ HGMERGE=true hg merge
323 $ HGMERGE=true hg merge
324 merging a
324 merging a
325 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
325 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
326 (branch merge, don't forget to commit)
326 (branch merge, don't forget to commit)
327
327
328 should fail because we are specifying a file name
328 should fail because we are specifying a file name
329
329
330 $ hg ci -mmerge a
330 $ hg ci -mmerge a
331 abort: cannot partially commit a merge (do not specify files or patterns)
331 abort: cannot partially commit a merge (do not specify files or patterns)
332 [255]
332 [255]
333
333
334 should fail because we are specifying a pattern
334 should fail because we are specifying a pattern
335
335
336 $ hg ci -mmerge -I a
336 $ hg ci -mmerge -I a
337 abort: cannot partially commit a merge (do not specify files or patterns)
337 abort: cannot partially commit a merge (do not specify files or patterns)
338 [255]
338 [255]
339
339
340 should succeed
340 should succeed
341
341
342 $ HGEDITOR="sh $TESTTMP/checkeditform.sh" hg ci -mmerge --edit
342 $ HGEDITOR="sh $TESTTMP/checkeditform.sh" hg ci -mmerge --edit
343 HGEDITFORM=commit.normal.merge
343 HGEDITFORM=commit.normal.merge
344 $ cd ..
344 $ cd ..
345
345
346
346
347 test commit message content
347 test commit message content
348
348
349 $ hg init commitmsg
349 $ hg init commitmsg
350 $ cd commitmsg
350 $ cd commitmsg
351 $ echo changed > changed
351 $ echo changed > changed
352 $ echo removed > removed
352 $ echo removed > removed
353 $ hg book activebookmark
353 $ hg book activebookmark
354 $ hg ci -qAm init
354 $ hg ci -qAm init
355
355
356 $ hg rm removed
356 $ hg rm removed
357 $ echo changed >> changed
357 $ echo changed >> changed
358 $ echo added > added
358 $ echo added > added
359 $ hg add added
359 $ hg add added
360 $ HGEDITOR=cat hg ci -A
360 $ HGEDITOR=cat hg ci -A
361
361
362
362
363 HG: Enter commit message. Lines beginning with 'HG:' are removed.
363 HG: Enter commit message. Lines beginning with 'HG:' are removed.
364 HG: Leave message empty to abort commit.
364 HG: Leave message empty to abort commit.
365 HG: --
365 HG: --
366 HG: user: test
366 HG: user: test
367 HG: branch 'default'
367 HG: branch 'default'
368 HG: bookmark 'activebookmark'
368 HG: bookmark 'activebookmark'
369 HG: added added
369 HG: added added
370 HG: changed changed
370 HG: changed changed
371 HG: removed removed
371 HG: removed removed
372 abort: empty commit message
372 abort: empty commit message
373 [255]
373 [255]
374
374
375 test saving last-message.txt
375 test saving last-message.txt
376
376
377 $ hg init sub
377 $ hg init sub
378 $ echo a > sub/a
378 $ echo a > sub/a
379 $ hg -R sub add sub/a
379 $ hg -R sub add sub/a
380 $ cat > sub/.hg/hgrc <<EOF
380 $ cat > sub/.hg/hgrc <<EOF
381 > [hooks]
381 > [hooks]
382 > precommit.test-saving-last-message = false
382 > precommit.test-saving-last-message = false
383 > EOF
383 > EOF
384
384
385 $ echo 'sub = sub' > .hgsub
385 $ echo 'sub = sub' > .hgsub
386 $ hg add .hgsub
386 $ hg add .hgsub
387
387
388 $ cat > $TESTTMP/editor.sh <<EOF
388 $ cat > $TESTTMP/editor.sh <<EOF
389 > echo "==== before editing:"
389 > echo "==== before editing:"
390 > cat \$1
390 > cat \$1
391 > echo "===="
391 > echo "===="
392 > echo "test saving last-message.txt" >> \$1
392 > echo "test saving last-message.txt" >> \$1
393 > EOF
393 > EOF
394
394
395 $ rm -f .hg/last-message.txt
395 $ rm -f .hg/last-message.txt
396 $ HGEDITOR="sh $TESTTMP/editor.sh" hg commit -S -q
396 $ HGEDITOR="sh $TESTTMP/editor.sh" hg commit -S -q
397 ==== before editing:
397 ==== before editing:
398
398
399
399
400 HG: Enter commit message. Lines beginning with 'HG:' are removed.
400 HG: Enter commit message. Lines beginning with 'HG:' are removed.
401 HG: Leave message empty to abort commit.
401 HG: Leave message empty to abort commit.
402 HG: --
402 HG: --
403 HG: user: test
403 HG: user: test
404 HG: branch 'default'
404 HG: branch 'default'
405 HG: bookmark 'activebookmark'
405 HG: bookmark 'activebookmark'
406 HG: subrepo sub
406 HG: subrepo sub
407 HG: added .hgsub
407 HG: added .hgsub
408 HG: added added
408 HG: added added
409 HG: changed .hgsubstate
409 HG: changed .hgsubstate
410 HG: changed changed
410 HG: changed changed
411 HG: removed removed
411 HG: removed removed
412 ====
412 ====
413 abort: precommit.test-saving-last-message hook exited with status 1 (in subrepository "sub")
413 abort: precommit.test-saving-last-message hook exited with status 1 (in subrepository "sub")
414 [255]
414 [255]
415 $ cat .hg/last-message.txt
415 $ cat .hg/last-message.txt
416
416
417
417
418 test saving last-message.txt
418 test saving last-message.txt
419
419
420 test that '[committemplate] changeset' definition and commit log
420 test that '[committemplate] changeset' definition and commit log
421 specific template keywords work well
421 specific template keywords work well
422
422
423 $ cat >> .hg/hgrc <<EOF
423 $ cat >> .hg/hgrc <<EOF
424 > [committemplate]
424 > [committemplate]
425 > changeset.commit.normal = 'HG: this is "commit.normal" template
425 > changeset.commit.normal = 'HG: this is "commit.normal" template
426 > HG: {extramsg}
426 > HG: {extramsg}
427 > {if(activebookmark,
427 > {if(activebookmark,
428 > "HG: bookmark '{activebookmark}' is activated\n",
428 > "HG: bookmark '{activebookmark}' is activated\n",
429 > "HG: no bookmark is activated\n")}{subrepos %
429 > "HG: no bookmark is activated\n")}{subrepos %
430 > "HG: subrepo '{subrepo}' is changed\n"}'
430 > "HG: subrepo '{subrepo}' is changed\n"}'
431 >
431 >
432 > changeset.commit = HG: this is "commit" template
432 > changeset.commit = HG: this is "commit" template
433 > HG: {extramsg}
433 > HG: {extramsg}
434 > {if(activebookmark,
434 > {if(activebookmark,
435 > "HG: bookmark '{activebookmark}' is activated\n",
435 > "HG: bookmark '{activebookmark}' is activated\n",
436 > "HG: no bookmark is activated\n")}{subrepos %
436 > "HG: no bookmark is activated\n")}{subrepos %
437 > "HG: subrepo '{subrepo}' is changed\n"}
437 > "HG: subrepo '{subrepo}' is changed\n"}
438 >
438 >
439 > changeset = HG: this is customized commit template
439 > changeset = HG: this is customized commit template
440 > HG: {extramsg}
440 > HG: {extramsg}
441 > {if(activebookmark,
441 > {if(activebookmark,
442 > "HG: bookmark '{activebookmark}' is activated\n",
442 > "HG: bookmark '{activebookmark}' is activated\n",
443 > "HG: no bookmark is activated\n")}{subrepos %
443 > "HG: no bookmark is activated\n")}{subrepos %
444 > "HG: subrepo '{subrepo}' is changed\n"}
444 > "HG: subrepo '{subrepo}' is changed\n"}
445 > EOF
445 > EOF
446
446
447 $ hg init sub2
447 $ hg init sub2
448 $ echo a > sub2/a
448 $ echo a > sub2/a
449 $ hg -R sub2 add sub2/a
449 $ hg -R sub2 add sub2/a
450 $ echo 'sub2 = sub2' >> .hgsub
450 $ echo 'sub2 = sub2' >> .hgsub
451
451
452 $ HGEDITOR=cat hg commit -S -q
452 $ HGEDITOR=cat hg commit -S -q
453 HG: this is "commit.normal" template
453 HG: this is "commit.normal" template
454 HG: Leave message empty to abort commit.
454 HG: Leave message empty to abort commit.
455 HG: bookmark 'activebookmark' is activated
455 HG: bookmark 'activebookmark' is activated
456 HG: subrepo 'sub' is changed
456 HG: subrepo 'sub' is changed
457 HG: subrepo 'sub2' is changed
457 HG: subrepo 'sub2' is changed
458 abort: empty commit message
458 abort: empty commit message
459 [255]
459 [255]
460
460
461 $ cat >> .hg/hgrc <<EOF
461 $ cat >> .hg/hgrc <<EOF
462 > [committemplate]
462 > [committemplate]
463 > changeset.commit.normal =
463 > changeset.commit.normal =
464 > # now, "changeset.commit" should be chosen for "hg commit"
464 > # now, "changeset.commit" should be chosen for "hg commit"
465 > EOF
465 > EOF
466
466
467 $ hg bookmark --inactive activebookmark
467 $ hg bookmark --inactive activebookmark
468 $ hg forget .hgsub
468 $ hg forget .hgsub
469 $ HGEDITOR=cat hg commit -q
469 $ HGEDITOR=cat hg commit -q
470 HG: this is "commit" template
470 HG: this is "commit" template
471 HG: Leave message empty to abort commit.
471 HG: Leave message empty to abort commit.
472 HG: no bookmark is activated
472 HG: no bookmark is activated
473 abort: empty commit message
473 abort: empty commit message
474 [255]
474 [255]
475
475
476 $ cat >> .hg/hgrc <<EOF
476 $ cat >> .hg/hgrc <<EOF
477 > [committemplate]
477 > [committemplate]
478 > changeset.commit =
478 > changeset.commit =
479 > # now, "changeset" should be chosen for "hg commit"
479 > # now, "changeset" should be chosen for "hg commit"
480 > EOF
480 > EOF
481
481
482 $ HGEDITOR=cat hg commit -q
482 $ HGEDITOR=cat hg commit -q
483 HG: this is customized commit template
483 HG: this is customized commit template
484 HG: Leave message empty to abort commit.
484 HG: Leave message empty to abort commit.
485 HG: no bookmark is activated
485 HG: no bookmark is activated
486 abort: empty commit message
486 abort: empty commit message
487 [255]
487 [255]
488
488
489 $ cat >> .hg/hgrc <<EOF
489 $ cat >> .hg/hgrc <<EOF
490 > [committemplate]
490 > [committemplate]
491 > changeset = {desc}
491 > changeset = {desc}
492 > HG: mods={file_mods}
492 > HG: mods={file_mods}
493 > HG: adds={file_adds}
493 > HG: adds={file_adds}
494 > HG: dels={file_dels}
494 > HG: dels={file_dels}
495 > HG: files={files}
495 > HG: files={files}
496 > HG:
496 > HG:
497 > {splitlines(diff()) % 'HG: {line}\n'
497 > {splitlines(diff()) % 'HG: {line}\n'
498 > }HG:
498 > }HG:
499 > HG: mods={file_mods}
499 > HG: mods={file_mods}
500 > HG: adds={file_adds}
500 > HG: adds={file_adds}
501 > HG: dels={file_dels}
501 > HG: dels={file_dels}
502 > HG: files={files}\n
502 > HG: files={files}\n
503 > EOF
503 > EOF
504 $ hg status -amr
504 $ hg status -amr
505 M changed
505 M changed
506 A added
506 A added
507 R removed
507 R removed
508 $ HGEDITOR=cat hg commit -q -e -m "foo bar" changed
508 $ HGEDITOR=cat hg commit -q -e -m "foo bar" changed
509 foo bar
509 foo bar
510 HG: mods=changed
510 HG: mods=changed
511 HG: adds=
511 HG: adds=
512 HG: dels=
512 HG: dels=
513 HG: files=changed
513 HG: files=changed
514 HG:
514 HG:
515 HG: --- a/changed Thu Jan 01 00:00:00 1970 +0000
515 HG: --- a/changed Thu Jan 01 00:00:00 1970 +0000
516 HG: +++ b/changed Thu Jan 01 00:00:00 1970 +0000
516 HG: +++ b/changed Thu Jan 01 00:00:00 1970 +0000
517 HG: @@ -1,1 +1,2 @@
517 HG: @@ -1,1 +1,2 @@
518 HG: changed
518 HG: changed
519 HG: +changed
519 HG: +changed
520 HG:
520 HG:
521 HG: mods=changed
521 HG: mods=changed
522 HG: adds=
522 HG: adds=
523 HG: dels=
523 HG: dels=
524 HG: files=changed
524 HG: files=changed
525 $ hg status -amr
525 $ hg status -amr
526 A added
526 A added
527 R removed
527 R removed
528 $ hg parents --template "M {file_mods}\nA {file_adds}\nR {file_dels}\n"
528 $ hg parents --template "M {file_mods}\nA {file_adds}\nR {file_dels}\n"
529 M changed
529 M changed
530 A
530 A
531 R
531 R
532 $ hg rollback -q
532 $ hg rollback -q
533
533
534 $ cat >> .hg/hgrc <<EOF
534 $ cat >> .hg/hgrc <<EOF
535 > [committemplate]
535 > [committemplate]
536 > changeset = {desc}
536 > changeset = {desc}
537 > HG: mods={file_mods}
537 > HG: mods={file_mods}
538 > HG: adds={file_adds}
538 > HG: adds={file_adds}
539 > HG: dels={file_dels}
539 > HG: dels={file_dels}
540 > HG: files={files}
540 > HG: files={files}
541 > HG:
541 > HG:
542 > {splitlines(diff("changed")) % 'HG: {line}\n'
542 > {splitlines(diff("changed")) % 'HG: {line}\n'
543 > }HG:
543 > }HG:
544 > HG: mods={file_mods}
544 > HG: mods={file_mods}
545 > HG: adds={file_adds}
545 > HG: adds={file_adds}
546 > HG: dels={file_dels}
546 > HG: dels={file_dels}
547 > HG: files={files}
547 > HG: files={files}
548 > HG:
548 > HG:
549 > {splitlines(diff("added")) % 'HG: {line}\n'
549 > {splitlines(diff("added")) % 'HG: {line}\n'
550 > }HG:
550 > }HG:
551 > HG: mods={file_mods}
551 > HG: mods={file_mods}
552 > HG: adds={file_adds}
552 > HG: adds={file_adds}
553 > HG: dels={file_dels}
553 > HG: dels={file_dels}
554 > HG: files={files}
554 > HG: files={files}
555 > HG:
555 > HG:
556 > {splitlines(diff("removed")) % 'HG: {line}\n'
556 > {splitlines(diff("removed")) % 'HG: {line}\n'
557 > }HG:
557 > }HG:
558 > HG: mods={file_mods}
558 > HG: mods={file_mods}
559 > HG: adds={file_adds}
559 > HG: adds={file_adds}
560 > HG: dels={file_dels}
560 > HG: dels={file_dels}
561 > HG: files={files}\n
561 > HG: files={files}\n
562 > EOF
562 > EOF
563 $ HGEDITOR=cat hg commit -q -e -m "foo bar" added removed
563 $ HGEDITOR=cat hg commit -q -e -m "foo bar" added removed
564 foo bar
564 foo bar
565 HG: mods=
565 HG: mods=
566 HG: adds=added
566 HG: adds=added
567 HG: dels=removed
567 HG: dels=removed
568 HG: files=added removed
568 HG: files=added removed
569 HG:
569 HG:
570 HG:
570 HG:
571 HG: mods=
571 HG: mods=
572 HG: adds=added
572 HG: adds=added
573 HG: dels=removed
573 HG: dels=removed
574 HG: files=added removed
574 HG: files=added removed
575 HG:
575 HG:
576 HG: --- /dev/null Thu Jan 01 00:00:00 1970 +0000
576 HG: --- /dev/null Thu Jan 01 00:00:00 1970 +0000
577 HG: +++ b/added Thu Jan 01 00:00:00 1970 +0000
577 HG: +++ b/added Thu Jan 01 00:00:00 1970 +0000
578 HG: @@ -0,0 +1,1 @@
578 HG: @@ -0,0 +1,1 @@
579 HG: +added
579 HG: +added
580 HG:
580 HG:
581 HG: mods=
581 HG: mods=
582 HG: adds=added
582 HG: adds=added
583 HG: dels=removed
583 HG: dels=removed
584 HG: files=added removed
584 HG: files=added removed
585 HG:
585 HG:
586 HG: --- a/removed Thu Jan 01 00:00:00 1970 +0000
586 HG: --- a/removed Thu Jan 01 00:00:00 1970 +0000
587 HG: +++ /dev/null Thu Jan 01 00:00:00 1970 +0000
587 HG: +++ /dev/null Thu Jan 01 00:00:00 1970 +0000
588 HG: @@ -1,1 +0,0 @@
588 HG: @@ -1,1 +0,0 @@
589 HG: -removed
589 HG: -removed
590 HG:
590 HG:
591 HG: mods=
591 HG: mods=
592 HG: adds=added
592 HG: adds=added
593 HG: dels=removed
593 HG: dels=removed
594 HG: files=added removed
594 HG: files=added removed
595 $ hg status -amr
595 $ hg status -amr
596 M changed
596 M changed
597 $ hg parents --template "M {file_mods}\nA {file_adds}\nR {file_dels}\n"
597 $ hg parents --template "M {file_mods}\nA {file_adds}\nR {file_dels}\n"
598 M
598 M
599 A added
599 A added
600 R removed
600 R removed
601 $ hg rollback -q
601 $ hg rollback -q
602
602
603 $ cat >> .hg/hgrc <<EOF
603 $ cat >> .hg/hgrc <<EOF
604 > # disable customizing for subsequent tests
604 > # disable customizing for subsequent tests
605 > [committemplate]
605 > [committemplate]
606 > changeset =
606 > changeset =
607 > EOF
607 > EOF
608
608
609 $ cd ..
609 $ cd ..
610
610
611
611
612 commit copy
612 commit copy
613
613
614 $ hg init dir2
614 $ hg init dir2
615 $ cd dir2
615 $ cd dir2
616 $ echo bleh > bar
616 $ echo bleh > bar
617 $ hg add bar
617 $ hg add bar
618 $ hg ci -m 'add bar'
618 $ hg ci -m 'add bar'
619
619
620 $ hg cp bar foo
620 $ hg cp bar foo
621 $ echo >> bar
621 $ echo >> bar
622 $ hg ci -m 'cp bar foo; change bar'
622 $ hg ci -m 'cp bar foo; change bar'
623
623
624 $ hg debugrename foo
624 $ hg debugrename foo
625 foo renamed from bar:26d3ca0dfd18e44d796b564e38dd173c9668d3a9
625 foo renamed from bar:26d3ca0dfd18e44d796b564e38dd173c9668d3a9
626 $ hg debugindex bar
626 $ hg debugindex bar
627 rev offset length ..... linkrev nodeid p1 p2 (re)
627 rev offset length ..... linkrev nodeid p1 p2 (re)
628 0 0 6 ..... 0 26d3ca0dfd18 000000000000 000000000000 (re)
628 0 0 6 ..... 0 26d3ca0dfd18 000000000000 000000000000 (re)
629 1 6 7 ..... 1 d267bddd54f7 26d3ca0dfd18 000000000000 (re)
629 1 6 7 ..... 1 d267bddd54f7 26d3ca0dfd18 000000000000 (re)
630
630
631 Test making empty commits
631 Test making empty commits
632 $ hg commit --config ui.allowemptycommit=True -m "empty commit"
632 $ hg commit --config ui.allowemptycommit=True -m "empty commit"
633 $ hg log -r . -v --stat
633 $ hg log -r . -v --stat
634 changeset: 2:d809f3644287
634 changeset: 2:d809f3644287
635 tag: tip
635 tag: tip
636 user: test
636 user: test
637 date: Thu Jan 01 00:00:00 1970 +0000
637 date: Thu Jan 01 00:00:00 1970 +0000
638 description:
638 description:
639 empty commit
639 empty commit
640
640
641
641
642
642
643 verify pathauditor blocks evil filepaths
643 verify pathauditor blocks evil filepaths
644 $ cat > evil-commit.py <<EOF
644 $ cat > evil-commit.py <<EOF
645 > from __future__ import absolute_import
645 > from __future__ import absolute_import
646 > from mercurial import context, hg, node, ui as uimod
646 > from mercurial import context, hg, node, ui as uimod
647 > notrc = u".h\u200cg".encode('utf-8') + '/hgrc'
647 > notrc = u".h\u200cg".encode('utf-8') + '/hgrc'
648 > u = uimod.ui.load()
648 > u = uimod.ui.load()
649 > r = hg.repository(u, '.')
649 > r = hg.repository(u, '.')
650 > def filectxfn(repo, memctx, path):
650 > def filectxfn(repo, memctx, path):
651 > return context.memfilectx(repo, path, '[hooks]\nupdate = echo owned')
651 > return context.memfilectx(repo, memctx, path,
652 > '[hooks]\nupdate = echo owned')
652 > c = context.memctx(r, [r['tip'].node(), node.nullid],
653 > c = context.memctx(r, [r['tip'].node(), node.nullid],
653 > 'evil', [notrc], filectxfn, 0)
654 > 'evil', [notrc], filectxfn, 0)
654 > r.commitctx(c)
655 > r.commitctx(c)
655 > EOF
656 > EOF
656 $ $PYTHON evil-commit.py
657 $ $PYTHON evil-commit.py
657 #if windows
658 #if windows
658 $ hg co --clean tip
659 $ hg co --clean tip
659 abort: path contains illegal component: .h\xe2\x80\x8cg\\hgrc (esc)
660 abort: path contains illegal component: .h\xe2\x80\x8cg\\hgrc (esc)
660 [255]
661 [255]
661 #else
662 #else
662 $ hg co --clean tip
663 $ hg co --clean tip
663 abort: path contains illegal component: .h\xe2\x80\x8cg/hgrc (esc)
664 abort: path contains illegal component: .h\xe2\x80\x8cg/hgrc (esc)
664 [255]
665 [255]
665 #endif
666 #endif
666
667
667 $ hg rollback -f
668 $ hg rollback -f
668 repository tip rolled back to revision 2 (undo commit)
669 repository tip rolled back to revision 2 (undo commit)
669 $ cat > evil-commit.py <<EOF
670 $ cat > evil-commit.py <<EOF
670 > from __future__ import absolute_import
671 > from __future__ import absolute_import
671 > from mercurial import context, hg, node, ui as uimod
672 > from mercurial import context, hg, node, ui as uimod
672 > notrc = "HG~1/hgrc"
673 > notrc = "HG~1/hgrc"
673 > u = uimod.ui.load()
674 > u = uimod.ui.load()
674 > r = hg.repository(u, '.')
675 > r = hg.repository(u, '.')
675 > def filectxfn(repo, memctx, path):
676 > def filectxfn(repo, memctx, path):
676 > return context.memfilectx(repo, path, '[hooks]\nupdate = echo owned')
677 > return context.memfilectx(repo, memctx, path,
678 > '[hooks]\nupdate = echo owned')
677 > c = context.memctx(r, [r['tip'].node(), node.nullid],
679 > c = context.memctx(r, [r['tip'].node(), node.nullid],
678 > 'evil', [notrc], filectxfn, 0)
680 > 'evil', [notrc], filectxfn, 0)
679 > r.commitctx(c)
681 > r.commitctx(c)
680 > EOF
682 > EOF
681 $ $PYTHON evil-commit.py
683 $ $PYTHON evil-commit.py
682 $ hg co --clean tip
684 $ hg co --clean tip
683 abort: path contains illegal component: HG~1/hgrc
685 abort: path contains illegal component: HG~1/hgrc
684 [255]
686 [255]
685
687
686 $ hg rollback -f
688 $ hg rollback -f
687 repository tip rolled back to revision 2 (undo commit)
689 repository tip rolled back to revision 2 (undo commit)
688 $ cat > evil-commit.py <<EOF
690 $ cat > evil-commit.py <<EOF
689 > from __future__ import absolute_import
691 > from __future__ import absolute_import
690 > from mercurial import context, hg, node, ui as uimod
692 > from mercurial import context, hg, node, ui as uimod
691 > notrc = "HG8B6C~2/hgrc"
693 > notrc = "HG8B6C~2/hgrc"
692 > u = uimod.ui.load()
694 > u = uimod.ui.load()
693 > r = hg.repository(u, '.')
695 > r = hg.repository(u, '.')
694 > def filectxfn(repo, memctx, path):
696 > def filectxfn(repo, memctx, path):
695 > return context.memfilectx(repo, path, '[hooks]\nupdate = echo owned')
697 > return context.memfilectx(repo, memctx, path,
698 > '[hooks]\nupdate = echo owned')
696 > c = context.memctx(r, [r['tip'].node(), node.nullid],
699 > c = context.memctx(r, [r['tip'].node(), node.nullid],
697 > 'evil', [notrc], filectxfn, 0)
700 > 'evil', [notrc], filectxfn, 0)
698 > r.commitctx(c)
701 > r.commitctx(c)
699 > EOF
702 > EOF
700 $ $PYTHON evil-commit.py
703 $ $PYTHON evil-commit.py
701 $ hg co --clean tip
704 $ hg co --clean tip
702 abort: path contains illegal component: HG8B6C~2/hgrc
705 abort: path contains illegal component: HG8B6C~2/hgrc
703 [255]
706 [255]
704
707
705 # test that an unmodified commit template message aborts
708 # test that an unmodified commit template message aborts
706
709
707 $ hg init unmodified_commit_template
710 $ hg init unmodified_commit_template
708 $ cd unmodified_commit_template
711 $ cd unmodified_commit_template
709 $ echo foo > foo
712 $ echo foo > foo
710 $ hg add foo
713 $ hg add foo
711 $ hg commit -m "foo"
714 $ hg commit -m "foo"
712 $ cat >> .hg/hgrc <<EOF
715 $ cat >> .hg/hgrc <<EOF
713 > [committemplate]
716 > [committemplate]
714 > changeset.commit = HI THIS IS NOT STRIPPED
717 > changeset.commit = HI THIS IS NOT STRIPPED
715 > HG: this is customized commit template
718 > HG: this is customized commit template
716 > HG: {extramsg}
719 > HG: {extramsg}
717 > {if(activebookmark,
720 > {if(activebookmark,
718 > "HG: bookmark '{activebookmark}' is activated\n",
721 > "HG: bookmark '{activebookmark}' is activated\n",
719 > "HG: no bookmark is activated\n")}{subrepos %
722 > "HG: no bookmark is activated\n")}{subrepos %
720 > "HG: subrepo '{subrepo}' is changed\n"}
723 > "HG: subrepo '{subrepo}' is changed\n"}
721 > EOF
724 > EOF
722 $ cat > $TESTTMP/notouching.sh <<EOF
725 $ cat > $TESTTMP/notouching.sh <<EOF
723 > true
726 > true
724 > EOF
727 > EOF
725 $ echo foo2 > foo2
728 $ echo foo2 > foo2
726 $ hg add foo2
729 $ hg add foo2
727 $ HGEDITOR="sh $TESTTMP/notouching.sh" hg commit
730 $ HGEDITOR="sh $TESTTMP/notouching.sh" hg commit
728 abort: commit message unchanged
731 abort: commit message unchanged
729 [255]
732 [255]
730
733
731 test that text below the --- >8 --- special string is ignored
734 test that text below the --- >8 --- special string is ignored
732
735
733 $ cat <<'EOF' > $TESTTMP/lowercaseline.sh
736 $ cat <<'EOF' > $TESTTMP/lowercaseline.sh
734 > cat $1 | sed s/LINE/line/ | tee $1.new
737 > cat $1 | sed s/LINE/line/ | tee $1.new
735 > mv $1.new $1
738 > mv $1.new $1
736 > EOF
739 > EOF
737
740
738 $ hg init ignore_below_special_string
741 $ hg init ignore_below_special_string
739 $ cd ignore_below_special_string
742 $ cd ignore_below_special_string
740 $ echo foo > foo
743 $ echo foo > foo
741 $ hg add foo
744 $ hg add foo
742 $ hg commit -m "foo"
745 $ hg commit -m "foo"
743 $ cat >> .hg/hgrc <<EOF
746 $ cat >> .hg/hgrc <<EOF
744 > [committemplate]
747 > [committemplate]
745 > changeset.commit = first LINE
748 > changeset.commit = first LINE
746 > HG: this is customized commit template
749 > HG: this is customized commit template
747 > HG: {extramsg}
750 > HG: {extramsg}
748 > HG: ------------------------ >8 ------------------------
751 > HG: ------------------------ >8 ------------------------
749 > {diff()}
752 > {diff()}
750 > EOF
753 > EOF
751 $ echo foo2 > foo2
754 $ echo foo2 > foo2
752 $ hg add foo2
755 $ hg add foo2
753 $ HGEDITOR="sh $TESTTMP/notouching.sh" hg ci
756 $ HGEDITOR="sh $TESTTMP/notouching.sh" hg ci
754 abort: commit message unchanged
757 abort: commit message unchanged
755 [255]
758 [255]
756 $ HGEDITOR="sh $TESTTMP/lowercaseline.sh" hg ci
759 $ HGEDITOR="sh $TESTTMP/lowercaseline.sh" hg ci
757 first line
760 first line
758 HG: this is customized commit template
761 HG: this is customized commit template
759 HG: Leave message empty to abort commit.
762 HG: Leave message empty to abort commit.
760 HG: ------------------------ >8 ------------------------
763 HG: ------------------------ >8 ------------------------
761 diff -r e63c23eaa88a foo2
764 diff -r e63c23eaa88a foo2
762 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
765 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
763 +++ b/foo2 Thu Jan 01 00:00:00 1970 +0000
766 +++ b/foo2 Thu Jan 01 00:00:00 1970 +0000
764 @@ -0,0 +1,1 @@
767 @@ -0,0 +1,1 @@
765 +foo2
768 +foo2
766 $ hg log -T '{desc}\n' -r .
769 $ hg log -T '{desc}\n' -r .
767 first line
770 first line
768
771
769 test that the special string --- >8 --- isn't used when not at the beginning of
772 test that the special string --- >8 --- isn't used when not at the beginning of
770 a line
773 a line
771
774
772 $ cat >> .hg/hgrc <<EOF
775 $ cat >> .hg/hgrc <<EOF
773 > [committemplate]
776 > [committemplate]
774 > changeset.commit = first LINE2
777 > changeset.commit = first LINE2
775 > another line HG: ------------------------ >8 ------------------------
778 > another line HG: ------------------------ >8 ------------------------
776 > HG: this is customized commit template
779 > HG: this is customized commit template
777 > HG: {extramsg}
780 > HG: {extramsg}
778 > HG: ------------------------ >8 ------------------------
781 > HG: ------------------------ >8 ------------------------
779 > {diff()}
782 > {diff()}
780 > EOF
783 > EOF
781 $ echo foo >> foo
784 $ echo foo >> foo
782 $ HGEDITOR="sh $TESTTMP/lowercaseline.sh" hg ci
785 $ HGEDITOR="sh $TESTTMP/lowercaseline.sh" hg ci
783 first line2
786 first line2
784 another line HG: ------------------------ >8 ------------------------
787 another line HG: ------------------------ >8 ------------------------
785 HG: this is customized commit template
788 HG: this is customized commit template
786 HG: Leave message empty to abort commit.
789 HG: Leave message empty to abort commit.
787 HG: ------------------------ >8 ------------------------
790 HG: ------------------------ >8 ------------------------
788 diff -r 3661b22b0702 foo
791 diff -r 3661b22b0702 foo
789 --- a/foo Thu Jan 01 00:00:00 1970 +0000
792 --- a/foo Thu Jan 01 00:00:00 1970 +0000
790 +++ b/foo Thu Jan 01 00:00:00 1970 +0000
793 +++ b/foo Thu Jan 01 00:00:00 1970 +0000
791 @@ -1,1 +1,2 @@
794 @@ -1,1 +1,2 @@
792 foo
795 foo
793 +foo
796 +foo
794 $ hg log -T '{desc}\n' -r .
797 $ hg log -T '{desc}\n' -r .
795 first line2
798 first line2
796 another line HG: ------------------------ >8 ------------------------
799 another line HG: ------------------------ >8 ------------------------
797
800
798 also test that this special string isn't accepted when there is some extra text
801 also test that this special string isn't accepted when there is some extra text
799 at the end
802 at the end
800
803
801 $ cat >> .hg/hgrc <<EOF
804 $ cat >> .hg/hgrc <<EOF
802 > [committemplate]
805 > [committemplate]
803 > changeset.commit = first LINE3
806 > changeset.commit = first LINE3
804 > HG: ------------------------ >8 ------------------------foobar
807 > HG: ------------------------ >8 ------------------------foobar
805 > second line
808 > second line
806 > HG: this is customized commit template
809 > HG: this is customized commit template
807 > HG: {extramsg}
810 > HG: {extramsg}
808 > HG: ------------------------ >8 ------------------------
811 > HG: ------------------------ >8 ------------------------
809 > {diff()}
812 > {diff()}
810 > EOF
813 > EOF
811 $ echo foo >> foo
814 $ echo foo >> foo
812 $ HGEDITOR="sh $TESTTMP/lowercaseline.sh" hg ci
815 $ HGEDITOR="sh $TESTTMP/lowercaseline.sh" hg ci
813 first line3
816 first line3
814 HG: ------------------------ >8 ------------------------foobar
817 HG: ------------------------ >8 ------------------------foobar
815 second line
818 second line
816 HG: this is customized commit template
819 HG: this is customized commit template
817 HG: Leave message empty to abort commit.
820 HG: Leave message empty to abort commit.
818 HG: ------------------------ >8 ------------------------
821 HG: ------------------------ >8 ------------------------
819 diff -r ce648f5f066f foo
822 diff -r ce648f5f066f foo
820 --- a/foo Thu Jan 01 00:00:00 1970 +0000
823 --- a/foo Thu Jan 01 00:00:00 1970 +0000
821 +++ b/foo Thu Jan 01 00:00:00 1970 +0000
824 +++ b/foo Thu Jan 01 00:00:00 1970 +0000
822 @@ -1,2 +1,3 @@
825 @@ -1,2 +1,3 @@
823 foo
826 foo
824 foo
827 foo
825 +foo
828 +foo
826 $ hg log -T '{desc}\n' -r .
829 $ hg log -T '{desc}\n' -r .
827 first line3
830 first line3
828 second line
831 second line
829
832
830 $ cd ..
833 $ cd ..
831
834
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
General Comments 0
You need to be logged in to leave comments. Login now