cmdutil: factor out helper to create changeset_templater with literal template...
Yuya Nishihara
r32837:50586a0a default
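The change itself: both extensions below previously built a changeset_templater directly from a literal template string; this commit factors that construction out into a cmdutil.makelogtemplater() helper and switches the call sites over to it. The cmdutil.py hunk is not included on this page, so the following is only a rough sketch of what the helper amounts to, reconstructed from the removed call sites (changeset_templater(ui, repo, False, None, tmpl, None, False)); the real signature and defaults may differ:

    # Sketch only -- inferred from the call sites removed below, not from the
    # actual cmdutil.py hunk (which this page does not show).
    def makelogtemplater(ui, repo, tmpl, buffered=False):
        """Create a changeset_templater for a literal template string."""
        # The old call sites passed, positionally: matchfn=False, diffopts=None,
        # the literal template, mapfile=None (or ''), buffered=False.
        return changeset_templater(ui, repo, False, None, tmpl, None, buffered)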
hgext/churn.py
@@ -1,211 +1,208 @@
1 # churn.py - create a graph of revisions count grouped by template
1 # churn.py - create a graph of revisions count grouped by template
2 #
2 #
3 # Copyright 2006 Josef "Jeff" Sipek <jeffpc@josefsipek.net>
3 # Copyright 2006 Josef "Jeff" Sipek <jeffpc@josefsipek.net>
4 # Copyright 2008 Alexander Solovyov <piranha@piranha.org.ua>
4 # Copyright 2008 Alexander Solovyov <piranha@piranha.org.ua>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 '''command to display statistics about repository history'''
9 '''command to display statistics about repository history'''
10
10
11 from __future__ import absolute_import
11 from __future__ import absolute_import
12
12
13 import datetime
13 import datetime
14 import os
14 import os
15 import time
15 import time
16
16
17 from mercurial.i18n import _
17 from mercurial.i18n import _
18 from mercurial import (
18 from mercurial import (
19 cmdutil,
19 cmdutil,
20 encoding,
20 encoding,
21 patch,
21 patch,
22 registrar,
22 registrar,
23 scmutil,
23 scmutil,
24 util,
24 util,
25 )
25 )
26
26
27 cmdtable = {}
27 cmdtable = {}
28 command = registrar.command(cmdtable)
28 command = registrar.command(cmdtable)
29 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
29 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
30 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
30 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
31 # be specifying the version(s) of Mercurial they are tested with, or
31 # be specifying the version(s) of Mercurial they are tested with, or
32 # leave the attribute unspecified.
32 # leave the attribute unspecified.
33 testedwith = 'ships-with-hg-core'
33 testedwith = 'ships-with-hg-core'
34
34
35 def maketemplater(ui, repo, tmpl):
36 return cmdutil.changeset_templater(ui, repo, False, None, tmpl, None, False)
37
38 def changedlines(ui, repo, ctx1, ctx2, fns):
35 def changedlines(ui, repo, ctx1, ctx2, fns):
39 added, removed = 0, 0
36 added, removed = 0, 0
40 fmatch = scmutil.matchfiles(repo, fns)
37 fmatch = scmutil.matchfiles(repo, fns)
41 diff = ''.join(patch.diff(repo, ctx1.node(), ctx2.node(), fmatch))
38 diff = ''.join(patch.diff(repo, ctx1.node(), ctx2.node(), fmatch))
42 for l in diff.split('\n'):
39 for l in diff.split('\n'):
43 if l.startswith("+") and not l.startswith("+++ "):
40 if l.startswith("+") and not l.startswith("+++ "):
44 added += 1
41 added += 1
45 elif l.startswith("-") and not l.startswith("--- "):
42 elif l.startswith("-") and not l.startswith("--- "):
46 removed += 1
43 removed += 1
47 return (added, removed)
44 return (added, removed)
48
45
49 def countrate(ui, repo, amap, *pats, **opts):
46 def countrate(ui, repo, amap, *pats, **opts):
50 """Calculate stats"""
47 """Calculate stats"""
51 if opts.get('dateformat'):
48 if opts.get('dateformat'):
52 def getkey(ctx):
49 def getkey(ctx):
53 t, tz = ctx.date()
50 t, tz = ctx.date()
54 date = datetime.datetime(*time.gmtime(float(t) - tz)[:6])
51 date = datetime.datetime(*time.gmtime(float(t) - tz)[:6])
55 return date.strftime(opts['dateformat'])
52 return date.strftime(opts['dateformat'])
56 else:
53 else:
57 tmpl = opts.get('oldtemplate') or opts.get('template')
54 tmpl = opts.get('oldtemplate') or opts.get('template')
58 tmpl = maketemplater(ui, repo, tmpl)
55 tmpl = cmdutil.makelogtemplater(ui, repo, tmpl)
59 def getkey(ctx):
56 def getkey(ctx):
60 ui.pushbuffer()
57 ui.pushbuffer()
61 tmpl.show(ctx)
58 tmpl.show(ctx)
62 return ui.popbuffer()
59 return ui.popbuffer()
63
60
64 state = {'count': 0}
61 state = {'count': 0}
65 rate = {}
62 rate = {}
66 df = False
63 df = False
67 if opts.get('date'):
64 if opts.get('date'):
68 df = util.matchdate(opts['date'])
65 df = util.matchdate(opts['date'])
69
66
70 m = scmutil.match(repo[None], pats, opts)
67 m = scmutil.match(repo[None], pats, opts)
71 def prep(ctx, fns):
68 def prep(ctx, fns):
72 rev = ctx.rev()
69 rev = ctx.rev()
73 if df and not df(ctx.date()[0]): # doesn't match date format
70 if df and not df(ctx.date()[0]): # doesn't match date format
74 return
71 return
75
72
76 key = getkey(ctx).strip()
73 key = getkey(ctx).strip()
77 key = amap.get(key, key) # alias remap
74 key = amap.get(key, key) # alias remap
78 if opts.get('changesets'):
75 if opts.get('changesets'):
79 rate[key] = (rate.get(key, (0,))[0] + 1, 0)
76 rate[key] = (rate.get(key, (0,))[0] + 1, 0)
80 else:
77 else:
81 parents = ctx.parents()
78 parents = ctx.parents()
82 if len(parents) > 1:
79 if len(parents) > 1:
83 ui.note(_('revision %d is a merge, ignoring...\n') % (rev,))
80 ui.note(_('revision %d is a merge, ignoring...\n') % (rev,))
84 return
81 return
85
82
86 ctx1 = parents[0]
83 ctx1 = parents[0]
87 lines = changedlines(ui, repo, ctx1, ctx, fns)
84 lines = changedlines(ui, repo, ctx1, ctx, fns)
88 rate[key] = [r + l for r, l in zip(rate.get(key, (0, 0)), lines)]
85 rate[key] = [r + l for r, l in zip(rate.get(key, (0, 0)), lines)]
89
86
90 state['count'] += 1
87 state['count'] += 1
91 ui.progress(_('analyzing'), state['count'], total=len(repo),
88 ui.progress(_('analyzing'), state['count'], total=len(repo),
92 unit=_('revisions'))
89 unit=_('revisions'))
93
90
94 for ctx in cmdutil.walkchangerevs(repo, m, opts, prep):
91 for ctx in cmdutil.walkchangerevs(repo, m, opts, prep):
95 continue
92 continue
96
93
97 ui.progress(_('analyzing'), None)
94 ui.progress(_('analyzing'), None)
98
95
99 return rate
96 return rate
100
97
101
98
102 @command('churn',
99 @command('churn',
103 [('r', 'rev', [],
100 [('r', 'rev', [],
104 _('count rate for the specified revision or revset'), _('REV')),
101 _('count rate for the specified revision or revset'), _('REV')),
105 ('d', 'date', '',
102 ('d', 'date', '',
106 _('count rate for revisions matching date spec'), _('DATE')),
103 _('count rate for revisions matching date spec'), _('DATE')),
107 ('t', 'oldtemplate', '',
104 ('t', 'oldtemplate', '',
108 _('template to group changesets (DEPRECATED)'), _('TEMPLATE')),
105 _('template to group changesets (DEPRECATED)'), _('TEMPLATE')),
109 ('T', 'template', '{author|email}',
106 ('T', 'template', '{author|email}',
110 _('template to group changesets'), _('TEMPLATE')),
107 _('template to group changesets'), _('TEMPLATE')),
111 ('f', 'dateformat', '',
108 ('f', 'dateformat', '',
112 _('strftime-compatible format for grouping by date'), _('FORMAT')),
109 _('strftime-compatible format for grouping by date'), _('FORMAT')),
113 ('c', 'changesets', False, _('count rate by number of changesets')),
110 ('c', 'changesets', False, _('count rate by number of changesets')),
114 ('s', 'sort', False, _('sort by key (default: sort by count)')),
111 ('s', 'sort', False, _('sort by key (default: sort by count)')),
115 ('', 'diffstat', False, _('display added/removed lines separately')),
112 ('', 'diffstat', False, _('display added/removed lines separately')),
116 ('', 'aliases', '', _('file with email aliases'), _('FILE')),
113 ('', 'aliases', '', _('file with email aliases'), _('FILE')),
117 ] + cmdutil.walkopts,
114 ] + cmdutil.walkopts,
118 _("hg churn [-d DATE] [-r REV] [--aliases FILE] [FILE]"),
115 _("hg churn [-d DATE] [-r REV] [--aliases FILE] [FILE]"),
119 inferrepo=True)
116 inferrepo=True)
120 def churn(ui, repo, *pats, **opts):
117 def churn(ui, repo, *pats, **opts):
121 '''histogram of changes to the repository
118 '''histogram of changes to the repository
122
119
123 This command will display a histogram representing the number
120 This command will display a histogram representing the number
124 of changed lines or revisions, grouped according to the given
121 of changed lines or revisions, grouped according to the given
125 template. The default template will group changes by author.
122 template. The default template will group changes by author.
126 The --dateformat option may be used to group the results by
123 The --dateformat option may be used to group the results by
127 date instead.
124 date instead.
128
125
129 Statistics are based on the number of changed lines, or
126 Statistics are based on the number of changed lines, or
130 alternatively the number of matching revisions if the
127 alternatively the number of matching revisions if the
131 --changesets option is specified.
128 --changesets option is specified.
132
129
133 Examples::
130 Examples::
134
131
135 # display count of changed lines for every committer
132 # display count of changed lines for every committer
136 hg churn -T "{author|email}"
133 hg churn -T "{author|email}"
137
134
138 # display daily activity graph
135 # display daily activity graph
139 hg churn -f "%H" -s -c
136 hg churn -f "%H" -s -c
140
137
141 # display activity of developers by month
138 # display activity of developers by month
142 hg churn -f "%Y-%m" -s -c
139 hg churn -f "%Y-%m" -s -c
143
140
144 # display count of lines changed in every year
141 # display count of lines changed in every year
145 hg churn -f "%Y" -s
142 hg churn -f "%Y" -s
146
143
147 It is possible to map alternate email addresses to a main address
144 It is possible to map alternate email addresses to a main address
148 by providing a file using the following format::
145 by providing a file using the following format::
149
146
150 <alias email> = <actual email>
147 <alias email> = <actual email>
151
148
152 Such a file may be specified with the --aliases option, otherwise
149 Such a file may be specified with the --aliases option, otherwise
153 a .hgchurn file will be looked for in the working directory root.
150 a .hgchurn file will be looked for in the working directory root.
154 Aliases will be split from the rightmost "=".
151 Aliases will be split from the rightmost "=".
155 '''
152 '''
156 def pad(s, l):
153 def pad(s, l):
157 return s + " " * (l - encoding.colwidth(s))
154 return s + " " * (l - encoding.colwidth(s))
158
155
159 amap = {}
156 amap = {}
160 aliases = opts.get('aliases')
157 aliases = opts.get('aliases')
161 if not aliases and os.path.exists(repo.wjoin('.hgchurn')):
158 if not aliases and os.path.exists(repo.wjoin('.hgchurn')):
162 aliases = repo.wjoin('.hgchurn')
159 aliases = repo.wjoin('.hgchurn')
163 if aliases:
160 if aliases:
164 for l in open(aliases, "r"):
161 for l in open(aliases, "r"):
165 try:
162 try:
166 alias, actual = l.rsplit('=' in l and '=' or None, 1)
163 alias, actual = l.rsplit('=' in l and '=' or None, 1)
167 amap[alias.strip()] = actual.strip()
164 amap[alias.strip()] = actual.strip()
168 except ValueError:
165 except ValueError:
169 l = l.strip()
166 l = l.strip()
170 if l:
167 if l:
171 ui.warn(_("skipping malformed alias: %s\n") % l)
168 ui.warn(_("skipping malformed alias: %s\n") % l)
172 continue
169 continue
173
170
174 rate = countrate(ui, repo, amap, *pats, **opts).items()
171 rate = countrate(ui, repo, amap, *pats, **opts).items()
175 if not rate:
172 if not rate:
176 return
173 return
177
174
178 if opts.get('sort'):
175 if opts.get('sort'):
179 rate.sort()
176 rate.sort()
180 else:
177 else:
181 rate.sort(key=lambda x: (-sum(x[1]), x))
178 rate.sort(key=lambda x: (-sum(x[1]), x))
182
179
183 # Be careful not to have a zero maxcount (issue833)
180 # Be careful not to have a zero maxcount (issue833)
184 maxcount = float(max(sum(v) for k, v in rate)) or 1.0
181 maxcount = float(max(sum(v) for k, v in rate)) or 1.0
185 maxname = max(len(k) for k, v in rate)
182 maxname = max(len(k) for k, v in rate)
186
183
187 ttywidth = ui.termwidth()
184 ttywidth = ui.termwidth()
188 ui.debug("assuming %i character terminal\n" % ttywidth)
185 ui.debug("assuming %i character terminal\n" % ttywidth)
189 width = ttywidth - maxname - 2 - 2 - 2
186 width = ttywidth - maxname - 2 - 2 - 2
190
187
191 if opts.get('diffstat'):
188 if opts.get('diffstat'):
192 width -= 15
189 width -= 15
193 def format(name, diffstat):
190 def format(name, diffstat):
194 added, removed = diffstat
191 added, removed = diffstat
195 return "%s %15s %s%s\n" % (pad(name, maxname),
192 return "%s %15s %s%s\n" % (pad(name, maxname),
196 '+%d/-%d' % (added, removed),
193 '+%d/-%d' % (added, removed),
197 ui.label('+' * charnum(added),
194 ui.label('+' * charnum(added),
198 'diffstat.inserted'),
195 'diffstat.inserted'),
199 ui.label('-' * charnum(removed),
196 ui.label('-' * charnum(removed),
200 'diffstat.deleted'))
197 'diffstat.deleted'))
201 else:
198 else:
202 width -= 6
199 width -= 6
203 def format(name, count):
200 def format(name, count):
204 return "%s %6d %s\n" % (pad(name, maxname), sum(count),
201 return "%s %6d %s\n" % (pad(name, maxname), sum(count),
205 '*' * charnum(sum(count)))
202 '*' * charnum(sum(count)))
206
203
207 def charnum(count):
204 def charnum(count):
208 return int(round(count * width / maxcount))
205 return int(round(count * width / maxcount))
209
206
210 for name, count in rate:
207 for name, count in rate:
211 ui.write(format(name, count))
208 ui.write(format(name, count))
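The churn.py hunk above amounts to dropping the local maketemplater() wrapper in favour of the new cmdutil helper; the keyword.py hunk below makes the same substitution inside kwtemplater.substitute(). A minimal sketch of the call-site pattern after this change, using the buffer dance churn's getkey() already relies on (the 'tip' revision is illustrative only):

    t = cmdutil.makelogtemplater(ui, repo, '{author|email}')
    ui.pushbuffer()
    t.show(repo['tip'])     # render the literal template for one changectx
    key = ui.popbuffer()    # e.g. the author's email, used as a grouping key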
hgext/keyword.py
@@ -1,759 +1,759 @@
1 # keyword.py - $Keyword$ expansion for Mercurial
1 # keyword.py - $Keyword$ expansion for Mercurial
2 #
2 #
3 # Copyright 2007-2015 Christian Ebert <blacktrash@gmx.net>
3 # Copyright 2007-2015 Christian Ebert <blacktrash@gmx.net>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 #
7 #
8 # $Id$
8 # $Id$
9 #
9 #
10 # Keyword expansion hack against the grain of a Distributed SCM
10 # Keyword expansion hack against the grain of a Distributed SCM
11 #
11 #
12 # There are many good reasons why this is not needed in a distributed
12 # There are many good reasons why this is not needed in a distributed
13 # SCM, still it may be useful in very small projects based on single
13 # SCM, still it may be useful in very small projects based on single
14 # files (like LaTeX packages), that are mostly addressed to an
14 # files (like LaTeX packages), that are mostly addressed to an
15 # audience not running a version control system.
15 # audience not running a version control system.
16 #
16 #
17 # For in-depth discussion refer to
17 # For in-depth discussion refer to
18 # <https://mercurial-scm.org/wiki/KeywordPlan>.
18 # <https://mercurial-scm.org/wiki/KeywordPlan>.
19 #
19 #
20 # Keyword expansion is based on Mercurial's changeset template mappings.
20 # Keyword expansion is based on Mercurial's changeset template mappings.
21 #
21 #
22 # Binary files are not touched.
22 # Binary files are not touched.
23 #
23 #
24 # Files to act upon/ignore are specified in the [keyword] section.
24 # Files to act upon/ignore are specified in the [keyword] section.
25 # Customized keyword template mappings in the [keywordmaps] section.
25 # Customized keyword template mappings in the [keywordmaps] section.
26 #
26 #
27 # Run 'hg help keyword' and 'hg kwdemo' to get info on configuration.
27 # Run 'hg help keyword' and 'hg kwdemo' to get info on configuration.
28
28
29 '''expand keywords in tracked files
29 '''expand keywords in tracked files
30
30
31 This extension expands RCS/CVS-like or self-customized $Keywords$ in
31 This extension expands RCS/CVS-like or self-customized $Keywords$ in
32 tracked text files selected by your configuration.
32 tracked text files selected by your configuration.
33
33
34 Keywords are only expanded in local repositories and not stored in the
34 Keywords are only expanded in local repositories and not stored in the
35 change history. The mechanism can be regarded as a convenience for the
35 change history. The mechanism can be regarded as a convenience for the
36 current user or for archive distribution.
36 current user or for archive distribution.
37
37
38 Keywords expand to the changeset data pertaining to the latest change
38 Keywords expand to the changeset data pertaining to the latest change
39 relative to the working directory parent of each file.
39 relative to the working directory parent of each file.
40
40
41 Configuration is done in the [keyword], [keywordset] and [keywordmaps]
41 Configuration is done in the [keyword], [keywordset] and [keywordmaps]
42 sections of hgrc files.
42 sections of hgrc files.
43
43
44 Example::
44 Example::
45
45
46 [keyword]
46 [keyword]
47 # expand keywords in every python file except those matching "x*"
47 # expand keywords in every python file except those matching "x*"
48 **.py =
48 **.py =
49 x* = ignore
49 x* = ignore
50
50
51 [keywordset]
51 [keywordset]
52 # prefer svn- over cvs-like default keywordmaps
52 # prefer svn- over cvs-like default keywordmaps
53 svn = True
53 svn = True
54
54
55 .. note::
55 .. note::
56
56
57 The more specific you are in your filename patterns the less you
57 The more specific you are in your filename patterns the less you
58 lose speed in huge repositories.
58 lose speed in huge repositories.
59
59
60 For [keywordmaps] template mapping and expansion demonstration and
60 For [keywordmaps] template mapping and expansion demonstration and
61 control run :hg:`kwdemo`. See :hg:`help templates` for a list of
61 control run :hg:`kwdemo`. See :hg:`help templates` for a list of
62 available templates and filters.
62 available templates and filters.
63
63
64 Three additional date template filters are provided:
64 Three additional date template filters are provided:
65
65
66 :``utcdate``: "2006/09/18 15:13:13"
66 :``utcdate``: "2006/09/18 15:13:13"
67 :``svnutcdate``: "2006-09-18 15:13:13Z"
67 :``svnutcdate``: "2006-09-18 15:13:13Z"
68 :``svnisodate``: "2006-09-18 08:13:13 -700 (Mon, 18 Sep 2006)"
68 :``svnisodate``: "2006-09-18 08:13:13 -700 (Mon, 18 Sep 2006)"
69
69
70 The default template mappings (view with :hg:`kwdemo -d`) can be
70 The default template mappings (view with :hg:`kwdemo -d`) can be
71 replaced with customized keywords and templates. Again, run
71 replaced with customized keywords and templates. Again, run
72 :hg:`kwdemo` to control the results of your configuration changes.
72 :hg:`kwdemo` to control the results of your configuration changes.
73
73
74 Before changing/disabling active keywords, you must run :hg:`kwshrink`
74 Before changing/disabling active keywords, you must run :hg:`kwshrink`
75 to avoid storing expanded keywords in the change history.
75 to avoid storing expanded keywords in the change history.
76
76
77 To force expansion after enabling it, or a configuration change, run
77 To force expansion after enabling it, or a configuration change, run
78 :hg:`kwexpand`.
78 :hg:`kwexpand`.
79
79
80 Expansions spanning more than one line and incremental expansions,
80 Expansions spanning more than one line and incremental expansions,
81 like CVS' $Log$, are not supported. A keyword template map "Log =
81 like CVS' $Log$, are not supported. A keyword template map "Log =
82 {desc}" expands to the first line of the changeset description.
82 {desc}" expands to the first line of the changeset description.
83 '''
83 '''
84
84
85
85
86 from __future__ import absolute_import
86 from __future__ import absolute_import
87
87
88 import os
88 import os
89 import re
89 import re
90 import tempfile
90 import tempfile
91
91
92 from mercurial.i18n import _
92 from mercurial.i18n import _
93 from mercurial.hgweb import webcommands
93 from mercurial.hgweb import webcommands
94
94
95 from mercurial import (
95 from mercurial import (
96 cmdutil,
96 cmdutil,
97 context,
97 context,
98 dispatch,
98 dispatch,
99 error,
99 error,
100 extensions,
100 extensions,
101 filelog,
101 filelog,
102 localrepo,
102 localrepo,
103 match,
103 match,
104 patch,
104 patch,
105 pathutil,
105 pathutil,
106 registrar,
106 registrar,
107 scmutil,
107 scmutil,
108 templatefilters,
108 templatefilters,
109 util,
109 util,
110 )
110 )
111
111
112 cmdtable = {}
112 cmdtable = {}
113 command = registrar.command(cmdtable)
113 command = registrar.command(cmdtable)
114 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
114 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
115 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
115 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
116 # be specifying the version(s) of Mercurial they are tested with, or
116 # be specifying the version(s) of Mercurial they are tested with, or
117 # leave the attribute unspecified.
117 # leave the attribute unspecified.
118 testedwith = 'ships-with-hg-core'
118 testedwith = 'ships-with-hg-core'
119
119
120 # hg commands that do not act on keywords
120 # hg commands that do not act on keywords
121 nokwcommands = ('add addremove annotate bundle export grep incoming init log'
121 nokwcommands = ('add addremove annotate bundle export grep incoming init log'
122 ' outgoing push tip verify convert email glog')
122 ' outgoing push tip verify convert email glog')
123
123
124 # hg commands that trigger expansion only when writing to working dir,
124 # hg commands that trigger expansion only when writing to working dir,
125 # not when reading filelog, and unexpand when reading from working dir
125 # not when reading filelog, and unexpand when reading from working dir
126 restricted = ('merge kwexpand kwshrink record qrecord resolve transplant'
126 restricted = ('merge kwexpand kwshrink record qrecord resolve transplant'
127 ' unshelve rebase graft backout histedit fetch')
127 ' unshelve rebase graft backout histedit fetch')
128
128
129 # names of extensions using dorecord
129 # names of extensions using dorecord
130 recordextensions = 'record'
130 recordextensions = 'record'
131
131
132 colortable = {
132 colortable = {
133 'kwfiles.enabled': 'green bold',
133 'kwfiles.enabled': 'green bold',
134 'kwfiles.deleted': 'cyan bold underline',
134 'kwfiles.deleted': 'cyan bold underline',
135 'kwfiles.enabledunknown': 'green',
135 'kwfiles.enabledunknown': 'green',
136 'kwfiles.ignored': 'bold',
136 'kwfiles.ignored': 'bold',
137 'kwfiles.ignoredunknown': 'none'
137 'kwfiles.ignoredunknown': 'none'
138 }
138 }
139
139
140 templatefilter = registrar.templatefilter()
140 templatefilter = registrar.templatefilter()
141
141
142 # date like in cvs' $Date
142 # date like in cvs' $Date
143 @templatefilter('utcdate')
143 @templatefilter('utcdate')
144 def utcdate(text):
144 def utcdate(text):
145 '''Date. Returns a UTC-date in this format: "2009/08/18 11:00:13".
145 '''Date. Returns a UTC-date in this format: "2009/08/18 11:00:13".
146 '''
146 '''
147 return util.datestr((util.parsedate(text)[0], 0), '%Y/%m/%d %H:%M:%S')
147 return util.datestr((util.parsedate(text)[0], 0), '%Y/%m/%d %H:%M:%S')
148 # date like in svn's $Date
148 # date like in svn's $Date
149 @templatefilter('svnisodate')
149 @templatefilter('svnisodate')
150 def svnisodate(text):
150 def svnisodate(text):
151 '''Date. Returns a date in this format: "2009-08-18 13:00:13
151 '''Date. Returns a date in this format: "2009-08-18 13:00:13
152 +0200 (Tue, 18 Aug 2009)".
152 +0200 (Tue, 18 Aug 2009)".
153 '''
153 '''
154 return util.datestr(text, '%Y-%m-%d %H:%M:%S %1%2 (%a, %d %b %Y)')
154 return util.datestr(text, '%Y-%m-%d %H:%M:%S %1%2 (%a, %d %b %Y)')
155 # date like in svn's $Id
155 # date like in svn's $Id
156 @templatefilter('svnutcdate')
156 @templatefilter('svnutcdate')
157 def svnutcdate(text):
157 def svnutcdate(text):
158 '''Date. Returns a UTC-date in this format: "2009-08-18
158 '''Date. Returns a UTC-date in this format: "2009-08-18
159 11:00:13Z".
159 11:00:13Z".
160 '''
160 '''
161 return util.datestr((util.parsedate(text)[0], 0), '%Y-%m-%d %H:%M:%SZ')
161 return util.datestr((util.parsedate(text)[0], 0), '%Y-%m-%d %H:%M:%SZ')
162
162
163 # make keyword tools accessible
163 # make keyword tools accessible
164 kwtools = {'templater': None, 'hgcmd': ''}
164 kwtools = {'templater': None, 'hgcmd': ''}
165
165
166 def _defaultkwmaps(ui):
166 def _defaultkwmaps(ui):
167 '''Returns default keywordmaps according to keywordset configuration.'''
167 '''Returns default keywordmaps according to keywordset configuration.'''
168 templates = {
168 templates = {
169 'Revision': '{node|short}',
169 'Revision': '{node|short}',
170 'Author': '{author|user}',
170 'Author': '{author|user}',
171 }
171 }
172 kwsets = ({
172 kwsets = ({
173 'Date': '{date|utcdate}',
173 'Date': '{date|utcdate}',
174 'RCSfile': '{file|basename},v',
174 'RCSfile': '{file|basename},v',
175 'RCSFile': '{file|basename},v', # kept for backwards compatibility
175 'RCSFile': '{file|basename},v', # kept for backwards compatibility
176 # with hg-keyword
176 # with hg-keyword
177 'Source': '{root}/{file},v',
177 'Source': '{root}/{file},v',
178 'Id': '{file|basename},v {node|short} {date|utcdate} {author|user}',
178 'Id': '{file|basename},v {node|short} {date|utcdate} {author|user}',
179 'Header': '{root}/{file},v {node|short} {date|utcdate} {author|user}',
179 'Header': '{root}/{file},v {node|short} {date|utcdate} {author|user}',
180 }, {
180 }, {
181 'Date': '{date|svnisodate}',
181 'Date': '{date|svnisodate}',
182 'Id': '{file|basename},v {node|short} {date|svnutcdate} {author|user}',
182 'Id': '{file|basename},v {node|short} {date|svnutcdate} {author|user}',
183 'LastChangedRevision': '{node|short}',
183 'LastChangedRevision': '{node|short}',
184 'LastChangedBy': '{author|user}',
184 'LastChangedBy': '{author|user}',
185 'LastChangedDate': '{date|svnisodate}',
185 'LastChangedDate': '{date|svnisodate}',
186 })
186 })
187 templates.update(kwsets[ui.configbool('keywordset', 'svn')])
187 templates.update(kwsets[ui.configbool('keywordset', 'svn')])
188 return templates
188 return templates
189
189
190 def _shrinktext(text, subfunc):
190 def _shrinktext(text, subfunc):
191 '''Helper for keyword expansion removal in text.
191 '''Helper for keyword expansion removal in text.
192 Depending on subfunc also returns number of substitutions.'''
192 Depending on subfunc also returns number of substitutions.'''
193 return subfunc(r'$\1$', text)
193 return subfunc(r'$\1$', text)
194
194
195 def _preselect(wstatus, changed):
195 def _preselect(wstatus, changed):
196 '''Retrieves modified and added files from a working directory state
196 '''Retrieves modified and added files from a working directory state
197 and returns the subset of each contained in given changed files
197 and returns the subset of each contained in given changed files
198 retrieved from a change context.'''
198 retrieved from a change context.'''
199 modified = [f for f in wstatus.modified if f in changed]
199 modified = [f for f in wstatus.modified if f in changed]
200 added = [f for f in wstatus.added if f in changed]
200 added = [f for f in wstatus.added if f in changed]
201 return modified, added
201 return modified, added
202
202
203
203
204 class kwtemplater(object):
204 class kwtemplater(object):
205 '''
205 '''
206 Sets up keyword templates, corresponding keyword regex, and
206 Sets up keyword templates, corresponding keyword regex, and
207 provides keyword substitution functions.
207 provides keyword substitution functions.
208 '''
208 '''
209
209
210 def __init__(self, ui, repo, inc, exc):
210 def __init__(self, ui, repo, inc, exc):
211 self.ui = ui
211 self.ui = ui
212 self.repo = repo
212 self.repo = repo
213 self.match = match.match(repo.root, '', [], inc, exc)
213 self.match = match.match(repo.root, '', [], inc, exc)
214 self.restrict = kwtools['hgcmd'] in restricted.split()
214 self.restrict = kwtools['hgcmd'] in restricted.split()
215 self.postcommit = False
215 self.postcommit = False
216
216
217 kwmaps = self.ui.configitems('keywordmaps')
217 kwmaps = self.ui.configitems('keywordmaps')
218 if kwmaps: # override default templates
218 if kwmaps: # override default templates
219 self.templates = dict(kwmaps)
219 self.templates = dict(kwmaps)
220 else:
220 else:
221 self.templates = _defaultkwmaps(self.ui)
221 self.templates = _defaultkwmaps(self.ui)
222
222
223 @util.propertycache
223 @util.propertycache
224 def escape(self):
224 def escape(self):
225 '''Returns bar-separated and escaped keywords.'''
225 '''Returns bar-separated and escaped keywords.'''
226 return '|'.join(map(re.escape, self.templates.keys()))
226 return '|'.join(map(re.escape, self.templates.keys()))
227
227
228 @util.propertycache
228 @util.propertycache
229 def rekw(self):
229 def rekw(self):
230 '''Returns regex for unexpanded keywords.'''
230 '''Returns regex for unexpanded keywords.'''
231 return re.compile(r'\$(%s)\$' % self.escape)
231 return re.compile(r'\$(%s)\$' % self.escape)
232
232
233 @util.propertycache
233 @util.propertycache
234 def rekwexp(self):
234 def rekwexp(self):
235 '''Returns regex for expanded keywords.'''
235 '''Returns regex for expanded keywords.'''
236 return re.compile(r'\$(%s): [^$\n\r]*? \$' % self.escape)
236 return re.compile(r'\$(%s): [^$\n\r]*? \$' % self.escape)
237
237
238 def substitute(self, data, path, ctx, subfunc):
238 def substitute(self, data, path, ctx, subfunc):
239 '''Replaces keywords in data with expanded template.'''
239 '''Replaces keywords in data with expanded template.'''
240 def kwsub(mobj):
240 def kwsub(mobj):
241 kw = mobj.group(1)
241 kw = mobj.group(1)
242 ct = cmdutil.changeset_templater(self.ui, self.repo, False, None,
242 ct = cmdutil.makelogtemplater(self.ui, self.repo,
243 self.templates[kw], '', False)
243 self.templates[kw])
244 self.ui.pushbuffer()
244 self.ui.pushbuffer()
245 ct.show(ctx, root=self.repo.root, file=path)
245 ct.show(ctx, root=self.repo.root, file=path)
246 ekw = templatefilters.firstline(self.ui.popbuffer())
246 ekw = templatefilters.firstline(self.ui.popbuffer())
247 return '$%s: %s $' % (kw, ekw)
247 return '$%s: %s $' % (kw, ekw)
248 return subfunc(kwsub, data)
248 return subfunc(kwsub, data)
249
249
250 def linkctx(self, path, fileid):
250 def linkctx(self, path, fileid):
251 '''Similar to filelog.linkrev, but returns a changectx.'''
251 '''Similar to filelog.linkrev, but returns a changectx.'''
252 return self.repo.filectx(path, fileid=fileid).changectx()
252 return self.repo.filectx(path, fileid=fileid).changectx()
253
253
254 def expand(self, path, node, data):
254 def expand(self, path, node, data):
255 '''Returns data with keywords expanded.'''
255 '''Returns data with keywords expanded.'''
256 if not self.restrict and self.match(path) and not util.binary(data):
256 if not self.restrict and self.match(path) and not util.binary(data):
257 ctx = self.linkctx(path, node)
257 ctx = self.linkctx(path, node)
258 return self.substitute(data, path, ctx, self.rekw.sub)
258 return self.substitute(data, path, ctx, self.rekw.sub)
259 return data
259 return data
260
260
261 def iskwfile(self, cand, ctx):
261 def iskwfile(self, cand, ctx):
262 '''Returns subset of candidates which are configured for keyword
262 '''Returns subset of candidates which are configured for keyword
263 expansion but are not symbolic links.'''
263 expansion but are not symbolic links.'''
264 return [f for f in cand if self.match(f) and 'l' not in ctx.flags(f)]
264 return [f for f in cand if self.match(f) and 'l' not in ctx.flags(f)]
265
265
266 def overwrite(self, ctx, candidates, lookup, expand, rekw=False):
266 def overwrite(self, ctx, candidates, lookup, expand, rekw=False):
267 '''Overwrites selected files expanding/shrinking keywords.'''
267 '''Overwrites selected files expanding/shrinking keywords.'''
268 if self.restrict or lookup or self.postcommit: # exclude kw_copy
268 if self.restrict or lookup or self.postcommit: # exclude kw_copy
269 candidates = self.iskwfile(candidates, ctx)
269 candidates = self.iskwfile(candidates, ctx)
270 if not candidates:
270 if not candidates:
271 return
271 return
272 kwcmd = self.restrict and lookup # kwexpand/kwshrink
272 kwcmd = self.restrict and lookup # kwexpand/kwshrink
273 if self.restrict or expand and lookup:
273 if self.restrict or expand and lookup:
274 mf = ctx.manifest()
274 mf = ctx.manifest()
275 if self.restrict or rekw:
275 if self.restrict or rekw:
276 re_kw = self.rekw
276 re_kw = self.rekw
277 else:
277 else:
278 re_kw = self.rekwexp
278 re_kw = self.rekwexp
279 if expand:
279 if expand:
280 msg = _('overwriting %s expanding keywords\n')
280 msg = _('overwriting %s expanding keywords\n')
281 else:
281 else:
282 msg = _('overwriting %s shrinking keywords\n')
282 msg = _('overwriting %s shrinking keywords\n')
283 for f in candidates:
283 for f in candidates:
284 if self.restrict:
284 if self.restrict:
285 data = self.repo.file(f).read(mf[f])
285 data = self.repo.file(f).read(mf[f])
286 else:
286 else:
287 data = self.repo.wread(f)
287 data = self.repo.wread(f)
288 if util.binary(data):
288 if util.binary(data):
289 continue
289 continue
290 if expand:
290 if expand:
291 parents = ctx.parents()
291 parents = ctx.parents()
292 if lookup:
292 if lookup:
293 ctx = self.linkctx(f, mf[f])
293 ctx = self.linkctx(f, mf[f])
294 elif self.restrict and len(parents) > 1:
294 elif self.restrict and len(parents) > 1:
295 # merge commit
295 # merge commit
296 # in case of conflict f is in modified state during
296 # in case of conflict f is in modified state during
297 # merge, even if f does not differ from f in parent
297 # merge, even if f does not differ from f in parent
298 for p in parents:
298 for p in parents:
299 if f in p and not p[f].cmp(ctx[f]):
299 if f in p and not p[f].cmp(ctx[f]):
300 ctx = p[f].changectx()
300 ctx = p[f].changectx()
301 break
301 break
302 data, found = self.substitute(data, f, ctx, re_kw.subn)
302 data, found = self.substitute(data, f, ctx, re_kw.subn)
303 elif self.restrict:
303 elif self.restrict:
304 found = re_kw.search(data)
304 found = re_kw.search(data)
305 else:
305 else:
306 data, found = _shrinktext(data, re_kw.subn)
306 data, found = _shrinktext(data, re_kw.subn)
307 if found:
307 if found:
308 self.ui.note(msg % f)
308 self.ui.note(msg % f)
309 fp = self.repo.wvfs(f, "wb", atomictemp=True)
309 fp = self.repo.wvfs(f, "wb", atomictemp=True)
310 fp.write(data)
310 fp.write(data)
311 fp.close()
311 fp.close()
312 if kwcmd:
312 if kwcmd:
313 self.repo.dirstate.normal(f)
313 self.repo.dirstate.normal(f)
314 elif self.postcommit:
314 elif self.postcommit:
315 self.repo.dirstate.normallookup(f)
315 self.repo.dirstate.normallookup(f)
316
316
317 def shrink(self, fname, text):
317 def shrink(self, fname, text):
318 '''Returns text with all keyword substitutions removed.'''
318 '''Returns text with all keyword substitutions removed.'''
319 if self.match(fname) and not util.binary(text):
319 if self.match(fname) and not util.binary(text):
320 return _shrinktext(text, self.rekwexp.sub)
320 return _shrinktext(text, self.rekwexp.sub)
321 return text
321 return text
322
322
323 def shrinklines(self, fname, lines):
323 def shrinklines(self, fname, lines):
324 '''Returns lines with keyword substitutions removed.'''
324 '''Returns lines with keyword substitutions removed.'''
325 if self.match(fname):
325 if self.match(fname):
326 text = ''.join(lines)
326 text = ''.join(lines)
327 if not util.binary(text):
327 if not util.binary(text):
328 return _shrinktext(text, self.rekwexp.sub).splitlines(True)
328 return _shrinktext(text, self.rekwexp.sub).splitlines(True)
329 return lines
329 return lines
330
330
331 def wread(self, fname, data):
331 def wread(self, fname, data):
332 '''If in restricted mode returns data read from wdir with
332 '''If in restricted mode returns data read from wdir with
333 keyword substitutions removed.'''
333 keyword substitutions removed.'''
334 if self.restrict:
334 if self.restrict:
335 return self.shrink(fname, data)
335 return self.shrink(fname, data)
336 return data
336 return data
337
337
338 class kwfilelog(filelog.filelog):
338 class kwfilelog(filelog.filelog):
339 '''
339 '''
340 Subclass of filelog to hook into its read, add, cmp methods.
340 Subclass of filelog to hook into its read, add, cmp methods.
341 Keywords are "stored" unexpanded, and processed on reading.
341 Keywords are "stored" unexpanded, and processed on reading.
342 '''
342 '''
343 def __init__(self, opener, kwt, path):
343 def __init__(self, opener, kwt, path):
344 super(kwfilelog, self).__init__(opener, path)
344 super(kwfilelog, self).__init__(opener, path)
345 self.kwt = kwt
345 self.kwt = kwt
346 self.path = path
346 self.path = path
347
347
348 def read(self, node):
348 def read(self, node):
349 '''Expands keywords when reading filelog.'''
349 '''Expands keywords when reading filelog.'''
350 data = super(kwfilelog, self).read(node)
350 data = super(kwfilelog, self).read(node)
351 if self.renamed(node):
351 if self.renamed(node):
352 return data
352 return data
353 return self.kwt.expand(self.path, node, data)
353 return self.kwt.expand(self.path, node, data)
354
354
355 def add(self, text, meta, tr, link, p1=None, p2=None):
355 def add(self, text, meta, tr, link, p1=None, p2=None):
356 '''Removes keyword substitutions when adding to filelog.'''
356 '''Removes keyword substitutions when adding to filelog.'''
357 text = self.kwt.shrink(self.path, text)
357 text = self.kwt.shrink(self.path, text)
358 return super(kwfilelog, self).add(text, meta, tr, link, p1, p2)
358 return super(kwfilelog, self).add(text, meta, tr, link, p1, p2)
359
359
360 def cmp(self, node, text):
360 def cmp(self, node, text):
361 '''Removes keyword substitutions for comparison.'''
361 '''Removes keyword substitutions for comparison.'''
362 text = self.kwt.shrink(self.path, text)
362 text = self.kwt.shrink(self.path, text)
363 return super(kwfilelog, self).cmp(node, text)
363 return super(kwfilelog, self).cmp(node, text)
364
364
365 def _status(ui, repo, wctx, kwt, *pats, **opts):
365 def _status(ui, repo, wctx, kwt, *pats, **opts):
366 '''Bails out if [keyword] configuration is not active.
366 '''Bails out if [keyword] configuration is not active.
367 Returns status of working directory.'''
367 Returns status of working directory.'''
368 if kwt:
368 if kwt:
369 return repo.status(match=scmutil.match(wctx, pats, opts), clean=True,
369 return repo.status(match=scmutil.match(wctx, pats, opts), clean=True,
370 unknown=opts.get('unknown') or opts.get('all'))
370 unknown=opts.get('unknown') or opts.get('all'))
371 if ui.configitems('keyword'):
371 if ui.configitems('keyword'):
372 raise error.Abort(_('[keyword] patterns cannot match'))
372 raise error.Abort(_('[keyword] patterns cannot match'))
373 raise error.Abort(_('no [keyword] patterns configured'))
373 raise error.Abort(_('no [keyword] patterns configured'))
374
374
375 def _kwfwrite(ui, repo, expand, *pats, **opts):
375 def _kwfwrite(ui, repo, expand, *pats, **opts):
376 '''Selects files and passes them to kwtemplater.overwrite.'''
376 '''Selects files and passes them to kwtemplater.overwrite.'''
377 wctx = repo[None]
377 wctx = repo[None]
378 if len(wctx.parents()) > 1:
378 if len(wctx.parents()) > 1:
379 raise error.Abort(_('outstanding uncommitted merge'))
379 raise error.Abort(_('outstanding uncommitted merge'))
380 kwt = kwtools['templater']
380 kwt = kwtools['templater']
381 with repo.wlock():
381 with repo.wlock():
382 status = _status(ui, repo, wctx, kwt, *pats, **opts)
382 status = _status(ui, repo, wctx, kwt, *pats, **opts)
383 if status.modified or status.added or status.removed or status.deleted:
383 if status.modified or status.added or status.removed or status.deleted:
384 raise error.Abort(_('outstanding uncommitted changes'))
384 raise error.Abort(_('outstanding uncommitted changes'))
385 kwt.overwrite(wctx, status.clean, True, expand)
385 kwt.overwrite(wctx, status.clean, True, expand)
386
386
387 @command('kwdemo',
387 @command('kwdemo',
388 [('d', 'default', None, _('show default keyword template maps')),
388 [('d', 'default', None, _('show default keyword template maps')),
389 ('f', 'rcfile', '',
389 ('f', 'rcfile', '',
390 _('read maps from rcfile'), _('FILE'))],
390 _('read maps from rcfile'), _('FILE'))],
391 _('hg kwdemo [-d] [-f RCFILE] [TEMPLATEMAP]...'),
391 _('hg kwdemo [-d] [-f RCFILE] [TEMPLATEMAP]...'),
392 optionalrepo=True)
392 optionalrepo=True)
393 def demo(ui, repo, *args, **opts):
393 def demo(ui, repo, *args, **opts):
394 '''print [keywordmaps] configuration and an expansion example
394 '''print [keywordmaps] configuration and an expansion example
395
395
396 Show current, custom, or default keyword template maps and their
396 Show current, custom, or default keyword template maps and their
397 expansions.
397 expansions.
398
398
399 Extend the current configuration by specifying maps as arguments
399 Extend the current configuration by specifying maps as arguments
400 and using -f/--rcfile to source an external hgrc file.
400 and using -f/--rcfile to source an external hgrc file.
401
401
402 Use -d/--default to disable current configuration.
402 Use -d/--default to disable current configuration.
403
403
404 See :hg:`help templates` for information on templates and filters.
404 See :hg:`help templates` for information on templates and filters.
405 '''
405 '''
406 def demoitems(section, items):
406 def demoitems(section, items):
407 ui.write('[%s]\n' % section)
407 ui.write('[%s]\n' % section)
408 for k, v in sorted(items):
408 for k, v in sorted(items):
409 ui.write('%s = %s\n' % (k, v))
409 ui.write('%s = %s\n' % (k, v))
410
410
411 fn = 'demo.txt'
411 fn = 'demo.txt'
412 tmpdir = tempfile.mkdtemp('', 'kwdemo.')
412 tmpdir = tempfile.mkdtemp('', 'kwdemo.')
413 ui.note(_('creating temporary repository at %s\n') % tmpdir)
413 ui.note(_('creating temporary repository at %s\n') % tmpdir)
414 if repo is None:
414 if repo is None:
415 baseui = ui
415 baseui = ui
416 else:
416 else:
417 baseui = repo.baseui
417 baseui = repo.baseui
418 repo = localrepo.localrepository(baseui, tmpdir, True)
418 repo = localrepo.localrepository(baseui, tmpdir, True)
419 ui.setconfig('keyword', fn, '', 'keyword')
419 ui.setconfig('keyword', fn, '', 'keyword')
420 svn = ui.configbool('keywordset', 'svn')
420 svn = ui.configbool('keywordset', 'svn')
421 # explicitly set keywordset for demo output
421 # explicitly set keywordset for demo output
422 ui.setconfig('keywordset', 'svn', svn, 'keyword')
422 ui.setconfig('keywordset', 'svn', svn, 'keyword')
423
423
424 uikwmaps = ui.configitems('keywordmaps')
424 uikwmaps = ui.configitems('keywordmaps')
425 if args or opts.get('rcfile'):
425 if args or opts.get('rcfile'):
426 ui.status(_('\n\tconfiguration using custom keyword template maps\n'))
426 ui.status(_('\n\tconfiguration using custom keyword template maps\n'))
427 if uikwmaps:
427 if uikwmaps:
428 ui.status(_('\textending current template maps\n'))
428 ui.status(_('\textending current template maps\n'))
429 if opts.get('default') or not uikwmaps:
429 if opts.get('default') or not uikwmaps:
430 if svn:
430 if svn:
431 ui.status(_('\toverriding default svn keywordset\n'))
431 ui.status(_('\toverriding default svn keywordset\n'))
432 else:
432 else:
433 ui.status(_('\toverriding default cvs keywordset\n'))
433 ui.status(_('\toverriding default cvs keywordset\n'))
434 if opts.get('rcfile'):
434 if opts.get('rcfile'):
435 ui.readconfig(opts.get('rcfile'))
435 ui.readconfig(opts.get('rcfile'))
436 if args:
436 if args:
437 # simulate hgrc parsing
437 # simulate hgrc parsing
438 rcmaps = '[keywordmaps]\n%s\n' % '\n'.join(args)
438 rcmaps = '[keywordmaps]\n%s\n' % '\n'.join(args)
439 repo.vfs.write('hgrc', rcmaps)
439 repo.vfs.write('hgrc', rcmaps)
440 ui.readconfig(repo.vfs.join('hgrc'))
440 ui.readconfig(repo.vfs.join('hgrc'))
441 kwmaps = dict(ui.configitems('keywordmaps'))
441 kwmaps = dict(ui.configitems('keywordmaps'))
442 elif opts.get('default'):
442 elif opts.get('default'):
443 if svn:
443 if svn:
444 ui.status(_('\n\tconfiguration using default svn keywordset\n'))
444 ui.status(_('\n\tconfiguration using default svn keywordset\n'))
445 else:
445 else:
446 ui.status(_('\n\tconfiguration using default cvs keywordset\n'))
446 ui.status(_('\n\tconfiguration using default cvs keywordset\n'))
447 kwmaps = _defaultkwmaps(ui)
447 kwmaps = _defaultkwmaps(ui)
448 if uikwmaps:
448 if uikwmaps:
449 ui.status(_('\tdisabling current template maps\n'))
449 ui.status(_('\tdisabling current template maps\n'))
450 for k, v in kwmaps.iteritems():
450 for k, v in kwmaps.iteritems():
451 ui.setconfig('keywordmaps', k, v, 'keyword')
451 ui.setconfig('keywordmaps', k, v, 'keyword')
452 else:
452 else:
453 ui.status(_('\n\tconfiguration using current keyword template maps\n'))
453 ui.status(_('\n\tconfiguration using current keyword template maps\n'))
454 if uikwmaps:
454 if uikwmaps:
455 kwmaps = dict(uikwmaps)
455 kwmaps = dict(uikwmaps)
456 else:
456 else:
457 kwmaps = _defaultkwmaps(ui)
457 kwmaps = _defaultkwmaps(ui)
458
458
459 uisetup(ui)
459 uisetup(ui)
460 reposetup(ui, repo)
460 reposetup(ui, repo)
461 ui.write(('[extensions]\nkeyword =\n'))
461 ui.write(('[extensions]\nkeyword =\n'))
462 demoitems('keyword', ui.configitems('keyword'))
462 demoitems('keyword', ui.configitems('keyword'))
463 demoitems('keywordset', ui.configitems('keywordset'))
463 demoitems('keywordset', ui.configitems('keywordset'))
464 demoitems('keywordmaps', kwmaps.iteritems())
464 demoitems('keywordmaps', kwmaps.iteritems())
465 keywords = '$' + '$\n$'.join(sorted(kwmaps.keys())) + '$\n'
465 keywords = '$' + '$\n$'.join(sorted(kwmaps.keys())) + '$\n'
466 repo.wvfs.write(fn, keywords)
466 repo.wvfs.write(fn, keywords)
467 repo[None].add([fn])
467 repo[None].add([fn])
468 ui.note(_('\nkeywords written to %s:\n') % fn)
468 ui.note(_('\nkeywords written to %s:\n') % fn)
469 ui.note(keywords)
469 ui.note(keywords)
470 with repo.wlock():
470 with repo.wlock():
471 repo.dirstate.setbranch('demobranch')
471 repo.dirstate.setbranch('demobranch')
472 for name, cmd in ui.configitems('hooks'):
472 for name, cmd in ui.configitems('hooks'):
473 if name.split('.', 1)[0].find('commit') > -1:
473 if name.split('.', 1)[0].find('commit') > -1:
474 repo.ui.setconfig('hooks', name, '', 'keyword')
474 repo.ui.setconfig('hooks', name, '', 'keyword')
475 msg = _('hg keyword configuration and expansion example')
475 msg = _('hg keyword configuration and expansion example')
476 ui.note(("hg ci -m '%s'\n" % msg))
476 ui.note(("hg ci -m '%s'\n" % msg))
477 repo.commit(text=msg)
477 repo.commit(text=msg)
478 ui.status(_('\n\tkeywords expanded\n'))
478 ui.status(_('\n\tkeywords expanded\n'))
479 ui.write(repo.wread(fn))
479 ui.write(repo.wread(fn))
480 repo.wvfs.rmtree(repo.root)
480 repo.wvfs.rmtree(repo.root)
481
481
482 @command('kwexpand',
482 @command('kwexpand',
483 cmdutil.walkopts,
483 cmdutil.walkopts,
484 _('hg kwexpand [OPTION]... [FILE]...'),
484 _('hg kwexpand [OPTION]... [FILE]...'),
485 inferrepo=True)
485 inferrepo=True)
486 def expand(ui, repo, *pats, **opts):
486 def expand(ui, repo, *pats, **opts):
487 '''expand keywords in the working directory
487 '''expand keywords in the working directory
488
488
489 Run after (re)enabling keyword expansion.
489 Run after (re)enabling keyword expansion.
490
490
491 kwexpand refuses to run if given files contain local changes.
491 kwexpand refuses to run if given files contain local changes.
492 '''
492 '''
493 # 3rd argument sets expansion to True
493 # 3rd argument sets expansion to True
494 _kwfwrite(ui, repo, True, *pats, **opts)
494 _kwfwrite(ui, repo, True, *pats, **opts)
495
495
496 @command('kwfiles',
496 @command('kwfiles',
497 [('A', 'all', None, _('show keyword status flags of all files')),
497 [('A', 'all', None, _('show keyword status flags of all files')),
498 ('i', 'ignore', None, _('show files excluded from expansion')),
498 ('i', 'ignore', None, _('show files excluded from expansion')),
499 ('u', 'unknown', None, _('only show unknown (not tracked) files')),
499 ('u', 'unknown', None, _('only show unknown (not tracked) files')),
500 ] + cmdutil.walkopts,
500 ] + cmdutil.walkopts,
501 _('hg kwfiles [OPTION]... [FILE]...'),
501 _('hg kwfiles [OPTION]... [FILE]...'),
502 inferrepo=True)
502 inferrepo=True)
503 def files(ui, repo, *pats, **opts):
503 def files(ui, repo, *pats, **opts):
504 '''show files configured for keyword expansion
504 '''show files configured for keyword expansion
505
505
506 List which files in the working directory are matched by the
506 List which files in the working directory are matched by the
507 [keyword] configuration patterns.
507 [keyword] configuration patterns.
508
508
509 Useful to prevent inadvertent keyword expansion and to speed up
509 Useful to prevent inadvertent keyword expansion and to speed up
510 execution by including only files that are actual candidates for
510 execution by including only files that are actual candidates for
511 expansion.
511 expansion.
512
512
513 See :hg:`help keyword` on how to construct patterns both for
513 See :hg:`help keyword` on how to construct patterns both for
514 inclusion and exclusion of files.
514 inclusion and exclusion of files.
515
515
516 With -A/--all and -v/--verbose the codes used to show the status
516 With -A/--all and -v/--verbose the codes used to show the status
517 of files are::
517 of files are::
518
518
519 K = keyword expansion candidate
519 K = keyword expansion candidate
520 k = keyword expansion candidate (not tracked)
520 k = keyword expansion candidate (not tracked)
521 I = ignored
521 I = ignored
522 i = ignored (not tracked)
522 i = ignored (not tracked)
523 '''
523 '''
524 kwt = kwtools['templater']
524 kwt = kwtools['templater']
525 wctx = repo[None]
525 wctx = repo[None]
526 status = _status(ui, repo, wctx, kwt, *pats, **opts)
526 status = _status(ui, repo, wctx, kwt, *pats, **opts)
527 if pats:
527 if pats:
528 cwd = repo.getcwd()
528 cwd = repo.getcwd()
529 else:
529 else:
530 cwd = ''
530 cwd = ''
531 files = []
531 files = []
532 if not opts.get('unknown') or opts.get('all'):
532 if not opts.get('unknown') or opts.get('all'):
533 files = sorted(status.modified + status.added + status.clean)
533 files = sorted(status.modified + status.added + status.clean)
534 kwfiles = kwt.iskwfile(files, wctx)
534 kwfiles = kwt.iskwfile(files, wctx)
535 kwdeleted = kwt.iskwfile(status.deleted, wctx)
535 kwdeleted = kwt.iskwfile(status.deleted, wctx)
536 kwunknown = kwt.iskwfile(status.unknown, wctx)
536 kwunknown = kwt.iskwfile(status.unknown, wctx)
537 if not opts.get('ignore') or opts.get('all'):
537 if not opts.get('ignore') or opts.get('all'):
538 showfiles = kwfiles, kwdeleted, kwunknown
538 showfiles = kwfiles, kwdeleted, kwunknown
539 else:
539 else:
540 showfiles = [], [], []
540 showfiles = [], [], []
541 if opts.get('all') or opts.get('ignore'):
541 if opts.get('all') or opts.get('ignore'):
542 showfiles += ([f for f in files if f not in kwfiles],
542 showfiles += ([f for f in files if f not in kwfiles],
543 [f for f in status.unknown if f not in kwunknown])
543 [f for f in status.unknown if f not in kwunknown])
544 kwlabels = 'enabled deleted enabledunknown ignored ignoredunknown'.split()
544 kwlabels = 'enabled deleted enabledunknown ignored ignoredunknown'.split()
545 kwstates = zip(kwlabels, 'K!kIi', showfiles)
545 kwstates = zip(kwlabels, 'K!kIi', showfiles)
546 fm = ui.formatter('kwfiles', opts)
546 fm = ui.formatter('kwfiles', opts)
547 fmt = '%.0s%s\n'
547 fmt = '%.0s%s\n'
548 if opts.get('all') or ui.verbose:
548 if opts.get('all') or ui.verbose:
549 fmt = '%s %s\n'
549 fmt = '%s %s\n'
550 for kwstate, char, filenames in kwstates:
550 for kwstate, char, filenames in kwstates:
551 label = 'kwfiles.' + kwstate
551 label = 'kwfiles.' + kwstate
552 for f in filenames:
552 for f in filenames:
553 fm.startitem()
553 fm.startitem()
554 fm.write('kwstatus path', fmt, char,
554 fm.write('kwstatus path', fmt, char,
555 repo.pathto(f, cwd), label=label)
555 repo.pathto(f, cwd), label=label)
556 fm.end()
556 fm.end()
557
557
558 @command('kwshrink',
558 @command('kwshrink',
559 cmdutil.walkopts,
559 cmdutil.walkopts,
560 _('hg kwshrink [OPTION]... [FILE]...'),
560 _('hg kwshrink [OPTION]... [FILE]...'),
561 inferrepo=True)
561 inferrepo=True)
562 def shrink(ui, repo, *pats, **opts):
562 def shrink(ui, repo, *pats, **opts):
563 '''revert expanded keywords in the working directory
563 '''revert expanded keywords in the working directory
564
564
565 Must be run before changing/disabling active keywords.
565 Must be run before changing/disabling active keywords.
566
566
567 kwshrink refuses to run if given files contain local changes.
567 kwshrink refuses to run if given files contain local changes.
568 '''
568 '''
569 # 3rd argument sets expansion to False
569 # 3rd argument sets expansion to False
570 _kwfwrite(ui, repo, False, *pats, **opts)
570 _kwfwrite(ui, repo, False, *pats, **opts)
571
571
572
572
573 def uisetup(ui):
573 def uisetup(ui):
574 ''' Monkeypatches dispatch._parse to retrieve user command.'''
574 ''' Monkeypatches dispatch._parse to retrieve user command.'''
575
575
576 def kwdispatch_parse(orig, ui, args):
576 def kwdispatch_parse(orig, ui, args):
577 '''Monkeypatch dispatch._parse to obtain running hg command.'''
577 '''Monkeypatch dispatch._parse to obtain running hg command.'''
578 cmd, func, args, options, cmdoptions = orig(ui, args)
578 cmd, func, args, options, cmdoptions = orig(ui, args)
579 kwtools['hgcmd'] = cmd
579 kwtools['hgcmd'] = cmd
580 return cmd, func, args, options, cmdoptions
580 return cmd, func, args, options, cmdoptions
581
581
582 extensions.wrapfunction(dispatch, '_parse', kwdispatch_parse)
582 extensions.wrapfunction(dispatch, '_parse', kwdispatch_parse)
583
583
584 def reposetup(ui, repo):
584 def reposetup(ui, repo):
585 '''Sets up repo as kwrepo for keyword substitution.
585 '''Sets up repo as kwrepo for keyword substitution.
586 Overrides file method to return kwfilelog instead of filelog
586 Overrides file method to return kwfilelog instead of filelog
587 if file matches user configuration.
587 if file matches user configuration.
588 Wraps commit to overwrite configured files with updated
588 Wraps commit to overwrite configured files with updated
589 keyword substitutions.
589 keyword substitutions.
590 Monkeypatches patch and webcommands.'''
590 Monkeypatches patch and webcommands.'''
591
591
592 try:
592 try:
593 if (not repo.local() or kwtools['hgcmd'] in nokwcommands.split()
593 if (not repo.local() or kwtools['hgcmd'] in nokwcommands.split()
594 or '.hg' in util.splitpath(repo.root)
594 or '.hg' in util.splitpath(repo.root)
595 or repo._url.startswith('bundle:')):
595 or repo._url.startswith('bundle:')):
596 return
596 return
597 except AttributeError:
597 except AttributeError:
598 pass
598 pass
599
599
600 inc, exc = [], ['.hg*']
600 inc, exc = [], ['.hg*']
601 for pat, opt in ui.configitems('keyword'):
601 for pat, opt in ui.configitems('keyword'):
602 if opt != 'ignore':
602 if opt != 'ignore':
603 inc.append(pat)
603 inc.append(pat)
604 else:
604 else:
605 exc.append(pat)
605 exc.append(pat)
606 if not inc:
606 if not inc:
607 return
607 return
608
608
609 kwtools['templater'] = kwt = kwtemplater(ui, repo, inc, exc)
609 kwtools['templater'] = kwt = kwtemplater(ui, repo, inc, exc)
610
610
611 class kwrepo(repo.__class__):
611 class kwrepo(repo.__class__):
612 def file(self, f):
612 def file(self, f):
613 if f[0] == '/':
613 if f[0] == '/':
614 f = f[1:]
614 f = f[1:]
615 return kwfilelog(self.svfs, kwt, f)
615 return kwfilelog(self.svfs, kwt, f)
616
616
617 def wread(self, filename):
617 def wread(self, filename):
618 data = super(kwrepo, self).wread(filename)
618 data = super(kwrepo, self).wread(filename)
619 return kwt.wread(filename, data)
619 return kwt.wread(filename, data)
620
620
621 def commit(self, *args, **opts):
621 def commit(self, *args, **opts):
622 # use custom commitctx for user commands
622 # use custom commitctx for user commands
623 # other extensions can still wrap repo.commitctx directly
623 # other extensions can still wrap repo.commitctx directly
624 self.commitctx = self.kwcommitctx
624 self.commitctx = self.kwcommitctx
625 try:
625 try:
626 return super(kwrepo, self).commit(*args, **opts)
626 return super(kwrepo, self).commit(*args, **opts)
627 finally:
627 finally:
628 del self.commitctx
628 del self.commitctx
629
629
630 def kwcommitctx(self, ctx, error=False):
630 def kwcommitctx(self, ctx, error=False):
631 n = super(kwrepo, self).commitctx(ctx, error)
631 n = super(kwrepo, self).commitctx(ctx, error)
632 # no lock needed, only called from repo.commit() which already locks
632 # no lock needed, only called from repo.commit() which already locks
633 if not kwt.postcommit:
633 if not kwt.postcommit:
634 restrict = kwt.restrict
634 restrict = kwt.restrict
635 kwt.restrict = True
635 kwt.restrict = True
636 kwt.overwrite(self[n], sorted(ctx.added() + ctx.modified()),
636 kwt.overwrite(self[n], sorted(ctx.added() + ctx.modified()),
637 False, True)
637 False, True)
638 kwt.restrict = restrict
638 kwt.restrict = restrict
639 return n
639 return n
640
640
641 def rollback(self, dryrun=False, force=False):
641 def rollback(self, dryrun=False, force=False):
642 wlock = self.wlock()
642 wlock = self.wlock()
643 origrestrict = kwt.restrict
643 origrestrict = kwt.restrict
644 try:
644 try:
645 if not dryrun:
645 if not dryrun:
646 changed = self['.'].files()
646 changed = self['.'].files()
647 ret = super(kwrepo, self).rollback(dryrun, force)
647 ret = super(kwrepo, self).rollback(dryrun, force)
648 if not dryrun:
648 if not dryrun:
649 ctx = self['.']
649 ctx = self['.']
650 modified, added = _preselect(ctx.status(), changed)
650 modified, added = _preselect(ctx.status(), changed)
651 kwt.restrict = False
651 kwt.restrict = False
652 kwt.overwrite(ctx, modified, True, True)
652 kwt.overwrite(ctx, modified, True, True)
653 kwt.overwrite(ctx, added, True, False)
653 kwt.overwrite(ctx, added, True, False)
654 return ret
654 return ret
655 finally:
655 finally:
656 kwt.restrict = origrestrict
656 kwt.restrict = origrestrict
657 wlock.release()
657 wlock.release()
658
658
659 # monkeypatches
659 # monkeypatches
660 def kwpatchfile_init(orig, self, ui, gp, backend, store, eolmode=None):
660 def kwpatchfile_init(orig, self, ui, gp, backend, store, eolmode=None):
661 '''Monkeypatch/wrap patch.patchfile.__init__ to avoid
661 '''Monkeypatch/wrap patch.patchfile.__init__ to avoid
662 rejects or conflicts due to expanded keywords in working dir.'''
662 rejects or conflicts due to expanded keywords in working dir.'''
663 orig(self, ui, gp, backend, store, eolmode)
663 orig(self, ui, gp, backend, store, eolmode)
664 # shrink keywords read from working dir
664 # shrink keywords read from working dir
665 self.lines = kwt.shrinklines(self.fname, self.lines)
665 self.lines = kwt.shrinklines(self.fname, self.lines)
666
666
667 def kwdiff(orig, *args, **kwargs):
667 def kwdiff(orig, *args, **kwargs):
668 '''Monkeypatch patch.diff to avoid expansion.'''
668 '''Monkeypatch patch.diff to avoid expansion.'''
669 kwt.restrict = True
669 kwt.restrict = True
670 return orig(*args, **kwargs)
670 return orig(*args, **kwargs)
671
671
672 def kwweb_skip(orig, web, req, tmpl):
672 def kwweb_skip(orig, web, req, tmpl):
673 '''Wraps webcommands.x turning off keyword expansion.'''
673 '''Wraps webcommands.x turning off keyword expansion.'''
674 kwt.match = util.never
674 kwt.match = util.never
675 return orig(web, req, tmpl)
675 return orig(web, req, tmpl)
676
676
677 def kw_amend(orig, ui, repo, commitfunc, old, extra, pats, opts):
677 def kw_amend(orig, ui, repo, commitfunc, old, extra, pats, opts):
678 '''Wraps cmdutil.amend expanding keywords after amend.'''
678 '''Wraps cmdutil.amend expanding keywords after amend.'''
679 with repo.wlock():
679 with repo.wlock():
680 kwt.postcommit = True
680 kwt.postcommit = True
681 newid = orig(ui, repo, commitfunc, old, extra, pats, opts)
681 newid = orig(ui, repo, commitfunc, old, extra, pats, opts)
682 if newid != old.node():
682 if newid != old.node():
683 ctx = repo[newid]
683 ctx = repo[newid]
684 kwt.restrict = True
684 kwt.restrict = True
685 kwt.overwrite(ctx, ctx.files(), False, True)
685 kwt.overwrite(ctx, ctx.files(), False, True)
686 kwt.restrict = False
686 kwt.restrict = False
687 return newid
687 return newid
688
688
689 def kw_copy(orig, ui, repo, pats, opts, rename=False):
689 def kw_copy(orig, ui, repo, pats, opts, rename=False):
690 '''Wraps cmdutil.copy so that copy/rename destinations do not
690 '''Wraps cmdutil.copy so that copy/rename destinations do not
691 contain expanded keywords.
691 contain expanded keywords.
692 Note that the source of a regular file destination may also be a
692 Note that the source of a regular file destination may also be a
693 symlink:
693 symlink:
694 hg cp sym x -> x is symlink
694 hg cp sym x -> x is symlink
695 cp sym x; hg cp -A sym x -> x is file (maybe expanded keywords)
695 cp sym x; hg cp -A sym x -> x is file (maybe expanded keywords)
696 For the latter we have to follow the symlink to find out whether its
696 For the latter we have to follow the symlink to find out whether its
697 target is configured for expansion and we therefore must unexpand the
697 target is configured for expansion and we therefore must unexpand the
698 keywords in the destination.'''
698 keywords in the destination.'''
699 with repo.wlock():
699 with repo.wlock():
700 orig(ui, repo, pats, opts, rename)
700 orig(ui, repo, pats, opts, rename)
701 if opts.get('dry_run'):
701 if opts.get('dry_run'):
702 return
702 return
703 wctx = repo[None]
703 wctx = repo[None]
704 cwd = repo.getcwd()
704 cwd = repo.getcwd()
705
705
706 def haskwsource(dest):
706 def haskwsource(dest):
707 '''Returns true if dest is a regular file and configured for
707 '''Returns true if dest is a regular file and configured for
708 expansion or a symlink which points to a file configured for
708 expansion or a symlink which points to a file configured for
709 expansion. '''
709 expansion. '''
710 source = repo.dirstate.copied(dest)
710 source = repo.dirstate.copied(dest)
711 if 'l' in wctx.flags(source):
711 if 'l' in wctx.flags(source):
712 source = pathutil.canonpath(repo.root, cwd,
712 source = pathutil.canonpath(repo.root, cwd,
713 os.path.realpath(source))
713 os.path.realpath(source))
714 return kwt.match(source)
714 return kwt.match(source)
715
715
716 candidates = [f for f in repo.dirstate.copies() if
716 candidates = [f for f in repo.dirstate.copies() if
717 'l' not in wctx.flags(f) and haskwsource(f)]
717 'l' not in wctx.flags(f) and haskwsource(f)]
718 kwt.overwrite(wctx, candidates, False, False)
718 kwt.overwrite(wctx, candidates, False, False)
719
719
720 def kw_dorecord(orig, ui, repo, commitfunc, *pats, **opts):
720 def kw_dorecord(orig, ui, repo, commitfunc, *pats, **opts):
721 '''Wraps record.dorecord expanding keywords after recording.'''
721 '''Wraps record.dorecord expanding keywords after recording.'''
722 with repo.wlock():
722 with repo.wlock():
723 # record returns 0 even when nothing has changed
723 # record returns 0 even when nothing has changed
724 # therefore compare nodes before and after
724 # therefore compare nodes before and after
725 kwt.postcommit = True
725 kwt.postcommit = True
726 ctx = repo['.']
726 ctx = repo['.']
727 wstatus = ctx.status()
727 wstatus = ctx.status()
728 ret = orig(ui, repo, commitfunc, *pats, **opts)
728 ret = orig(ui, repo, commitfunc, *pats, **opts)
729 recctx = repo['.']
729 recctx = repo['.']
730 if ctx != recctx:
730 if ctx != recctx:
731 modified, added = _preselect(wstatus, recctx.files())
731 modified, added = _preselect(wstatus, recctx.files())
732 kwt.restrict = False
732 kwt.restrict = False
733 kwt.overwrite(recctx, modified, False, True)
733 kwt.overwrite(recctx, modified, False, True)
734 kwt.overwrite(recctx, added, False, True, True)
734 kwt.overwrite(recctx, added, False, True, True)
735 kwt.restrict = True
735 kwt.restrict = True
736 return ret
736 return ret
737
737
738 def kwfilectx_cmp(orig, self, fctx):
738 def kwfilectx_cmp(orig, self, fctx):
739 if fctx._customcmp:
739 if fctx._customcmp:
740 return fctx.cmp(self)
740 return fctx.cmp(self)
741 # keyword affects data size, comparing wdir and filelog size does
741 # keyword affects data size, comparing wdir and filelog size does
742 # not make sense
742 # not make sense
743 if (fctx._filenode is None and
743 if (fctx._filenode is None and
744 (self._repo._encodefilterpats or
744 (self._repo._encodefilterpats or
745 kwt.match(fctx.path()) and 'l' not in fctx.flags() or
745 kwt.match(fctx.path()) and 'l' not in fctx.flags() or
746 self.size() - 4 == fctx.size()) or
746 self.size() - 4 == fctx.size()) or
747 self.size() == fctx.size()):
747 self.size() == fctx.size()):
748 return self._filelog.cmp(self._filenode, fctx.data())
748 return self._filelog.cmp(self._filenode, fctx.data())
749 return True
749 return True
750
750
751 extensions.wrapfunction(context.filectx, 'cmp', kwfilectx_cmp)
751 extensions.wrapfunction(context.filectx, 'cmp', kwfilectx_cmp)
752 extensions.wrapfunction(patch.patchfile, '__init__', kwpatchfile_init)
752 extensions.wrapfunction(patch.patchfile, '__init__', kwpatchfile_init)
753 extensions.wrapfunction(patch, 'diff', kwdiff)
753 extensions.wrapfunction(patch, 'diff', kwdiff)
754 extensions.wrapfunction(cmdutil, 'amend', kw_amend)
754 extensions.wrapfunction(cmdutil, 'amend', kw_amend)
755 extensions.wrapfunction(cmdutil, 'copy', kw_copy)
755 extensions.wrapfunction(cmdutil, 'copy', kw_copy)
756 extensions.wrapfunction(cmdutil, 'dorecord', kw_dorecord)
756 extensions.wrapfunction(cmdutil, 'dorecord', kw_dorecord)
757 for c in 'annotate changeset rev filediff diff'.split():
757 for c in 'annotate changeset rev filediff diff'.split():
758 extensions.wrapfunction(webcommands, c, kwweb_skip)
758 extensions.wrapfunction(webcommands, c, kwweb_skip)
759 repo.__class__ = kwrepo
759 repo.__class__ = kwrepo
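
All of the wrapping above follows one convention: extensions.wrapfunction(owner, 'name', wrapper) installs wrapper, and wrapper receives the previous implementation as its first argument (orig) and is responsible for delegating to it. A minimal sketch of the same pattern as a hypothetical standalone extension (the module name and the call counter are invented for illustration; only extensions.wrapfunction, patch.diff and the extsetup hook are existing Mercurial APIs):

# toywrap.py - hypothetical extension illustrating the wrapfunction pattern
from mercurial import extensions, patch

def countingdiff(orig, *args, **kwargs):
    '''Wrap patch.diff, counting invocations before delegating.'''
    countingdiff.calls += 1
    return orig(*args, **kwargs)
countingdiff.calls = 0

def extsetup(ui):
    # install the wrapper once, at extension setup time
    extensions.wrapfunction(patch, 'diff', countingdiff)
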
@@ -1,222 +1,221 b''
1 # show.py - Extension implementing `hg show`
1 # show.py - Extension implementing `hg show`
2 #
2 #
3 # Copyright 2017 Gregory Szorc <gregory.szorc@gmail.com>
3 # Copyright 2017 Gregory Szorc <gregory.szorc@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 """unified command to show various repository information (EXPERIMENTAL)
8 """unified command to show various repository information (EXPERIMENTAL)
9
9
10 This extension provides the :hg:`show` command, which provides a central
10 This extension provides the :hg:`show` command, which provides a central
11 command for displaying commonly-accessed repository data and views of that
11 command for displaying commonly-accessed repository data and views of that
12 data.
12 data.
13 """
13 """
14
14
15 from __future__ import absolute_import
15 from __future__ import absolute_import
16
16
17 from mercurial.i18n import _
17 from mercurial.i18n import _
18 from mercurial.node import nullrev
18 from mercurial.node import nullrev
19 from mercurial import (
19 from mercurial import (
20 cmdutil,
20 cmdutil,
21 error,
21 error,
22 formatter,
22 formatter,
23 graphmod,
23 graphmod,
24 pycompat,
24 pycompat,
25 registrar,
25 registrar,
26 revset,
26 revset,
27 revsetlang,
27 revsetlang,
28 )
28 )
29
29
30 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
30 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
31 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
31 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
32 # be specifying the version(s) of Mercurial they are tested with, or
32 # be specifying the version(s) of Mercurial they are tested with, or
33 # leave the attribute unspecified.
33 # leave the attribute unspecified.
34 testedwith = 'ships-with-hg-core'
34 testedwith = 'ships-with-hg-core'
35
35
36 cmdtable = {}
36 cmdtable = {}
37 command = registrar.command(cmdtable)
37 command = registrar.command(cmdtable)
38 revsetpredicate = registrar.revsetpredicate()
38 revsetpredicate = registrar.revsetpredicate()
39
39
40 class showcmdfunc(registrar._funcregistrarbase):
40 class showcmdfunc(registrar._funcregistrarbase):
41 """Register a function to be invoked for an `hg show <thing>`."""
41 """Register a function to be invoked for an `hg show <thing>`."""
42
42
43 # Used by _formatdoc().
43 # Used by _formatdoc().
44 _docformat = '%s -- %s'
44 _docformat = '%s -- %s'
45
45
46 def _extrasetup(self, name, func, fmtopic=None):
46 def _extrasetup(self, name, func, fmtopic=None):
47 """Called with decorator arguments to register a show view.
47 """Called with decorator arguments to register a show view.
48
48
49 ``name`` is the sub-command name.
49 ``name`` is the sub-command name.
50
50
51 ``func`` is the function being decorated.
51 ``func`` is the function being decorated.
52
52
53 ``fmtopic`` is the topic in the style that will be rendered for
53 ``fmtopic`` is the topic in the style that will be rendered for
54 this view.
54 this view.
55 """
55 """
56 func._fmtopic = fmtopic
56 func._fmtopic = fmtopic
57
57
58 showview = showcmdfunc()
58 showview = showcmdfunc()
59
59
60 @command('show', [
60 @command('show', [
61 # TODO: Switch this template flag to use cmdutil.formatteropts if
61 # TODO: Switch this template flag to use cmdutil.formatteropts if
62 # 'hg show' becomes stable before --template/-T is stable. For now,
62 # 'hg show' becomes stable before --template/-T is stable. For now,
63 # we are putting it here without the '(EXPERIMENTAL)' flag because it
63 # we are putting it here without the '(EXPERIMENTAL)' flag because it
64 # is an important part of the 'hg show' user experience and the entire
64 # is an important part of the 'hg show' user experience and the entire
65 # 'hg show' experience is experimental.
65 # 'hg show' experience is experimental.
66 ('T', 'template', '', ('display with template'), _('TEMPLATE')),
66 ('T', 'template', '', ('display with template'), _('TEMPLATE')),
67 ], _('VIEW'))
67 ], _('VIEW'))
68 def show(ui, repo, view=None, template=None):
68 def show(ui, repo, view=None, template=None):
69 """show various repository information
69 """show various repository information
70
70
71 A requested view of repository data is displayed.
71 A requested view of repository data is displayed.
72
72
73 If no view is requested, the list of available views is shown and the
73 If no view is requested, the list of available views is shown and the
74 command aborts.
74 command aborts.
75
75
76 .. note::
76 .. note::
77
77
78 There are no backwards compatibility guarantees for the output of this
78 There are no backwards compatibility guarantees for the output of this
79 command. Output may change in any future Mercurial release.
79 command. Output may change in any future Mercurial release.
80
80
81 Consumers wanting stable command output should specify a template via
81 Consumers wanting stable command output should specify a template via
82 ``-T/--template``.
82 ``-T/--template``.
83
83
84 List of available views:
84 List of available views:
85 """
85 """
86 if ui.plain() and not template:
86 if ui.plain() and not template:
87 hint = _('invoke with -T/--template to control output format')
87 hint = _('invoke with -T/--template to control output format')
88 raise error.Abort(_('must specify a template in plain mode'), hint=hint)
88 raise error.Abort(_('must specify a template in plain mode'), hint=hint)
89
89
90 views = showview._table
90 views = showview._table
91
91
92 if not view:
92 if not view:
93 ui.pager('show')
93 ui.pager('show')
94 # TODO consider using formatter here so available views can be
94 # TODO consider using formatter here so available views can be
95 # rendered to custom format.
95 # rendered to custom format.
96 ui.write(_('available views:\n'))
96 ui.write(_('available views:\n'))
97 ui.write('\n')
97 ui.write('\n')
98
98
99 for name, func in sorted(views.items()):
99 for name, func in sorted(views.items()):
100 ui.write(('%s\n') % func.__doc__)
100 ui.write(('%s\n') % func.__doc__)
101
101
102 ui.write('\n')
102 ui.write('\n')
103 raise error.Abort(_('no view requested'),
103 raise error.Abort(_('no view requested'),
104 hint=_('use "hg show VIEW" to choose a view'))
104 hint=_('use "hg show VIEW" to choose a view'))
105
105
106 # TODO use same logic as dispatch to perform prefix matching.
106 # TODO use same logic as dispatch to perform prefix matching.
107 if view not in views:
107 if view not in views:
108 raise error.Abort(_('unknown view: %s') % view,
108 raise error.Abort(_('unknown view: %s') % view,
109 hint=_('run "hg show" to see available views'))
109 hint=_('run "hg show" to see available views'))
110
110
111 template = template or 'show'
111 template = template or 'show'
112 fmtopic = 'show%s' % views[view]._fmtopic
112 fmtopic = 'show%s' % views[view]._fmtopic
113
113
114 ui.pager('show')
114 ui.pager('show')
115 with ui.formatter(fmtopic, {'template': template}) as fm:
115 with ui.formatter(fmtopic, {'template': template}) as fm:
116 return views[view](ui, repo, fm)
116 return views[view](ui, repo, fm)
117
117
118 @showview('bookmarks', fmtopic='bookmarks')
118 @showview('bookmarks', fmtopic='bookmarks')
119 def showbookmarks(ui, repo, fm):
119 def showbookmarks(ui, repo, fm):
120 """bookmarks and their associated changeset"""
120 """bookmarks and their associated changeset"""
121 marks = repo._bookmarks
121 marks = repo._bookmarks
122 if not len(marks):
122 if not len(marks):
123 # This is a bit hacky. Ideally, templates would have a way to
123 # This is a bit hacky. Ideally, templates would have a way to
124 # specify an empty output, but we shouldn't corrupt JSON while
124 # specify an empty output, but we shouldn't corrupt JSON while
125 # waiting for this functionality.
125 # waiting for this functionality.
126 if not isinstance(fm, formatter.jsonformatter):
126 if not isinstance(fm, formatter.jsonformatter):
127 ui.write(_('(no bookmarks set)\n'))
127 ui.write(_('(no bookmarks set)\n'))
128 return
128 return
129
129
130 active = repo._activebookmark
130 active = repo._activebookmark
131 longestname = max(len(b) for b in marks)
131 longestname = max(len(b) for b in marks)
132 # TODO consider exposing longest shortest(node).
132 # TODO consider exposing longest shortest(node).
133
133
134 for bm, node in sorted(marks.items()):
134 for bm, node in sorted(marks.items()):
135 fm.startitem()
135 fm.startitem()
136 fm.context(ctx=repo[node])
136 fm.context(ctx=repo[node])
137 fm.write('bookmark', '%s', bm)
137 fm.write('bookmark', '%s', bm)
138 fm.write('node', fm.hexfunc(node), fm.hexfunc(node))
138 fm.write('node', fm.hexfunc(node), fm.hexfunc(node))
139 fm.data(active=bm == active,
139 fm.data(active=bm == active,
140 longestbookmarklen=longestname)
140 longestbookmarklen=longestname)
141
141
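
Registering an additional view follows the same shape as showbookmarks() above: decorate a function with @showview, emit rows through the formatter, and provide a template for the chosen fmtopic. A sketch of a hypothetical 'heads' view (the view name, its fmtopic and the revset are invented for illustration, and a matching 'showheads' entry would still be needed in the map file):

@showview('heads', fmtopic='heads')
def showheads(ui, repo, fm):
    """open branch heads"""
    for rev in repo.revs('head() and not closed()'):
        ctx = repo[rev]
        fm.startitem()
        fm.context(ctx=ctx)            # expose the changeset to templates
        fm.write('node', '%s', fm.hexfunc(ctx.node()))
        fm.data(branch=ctx.branch())
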
142 @revsetpredicate('_underway([commitage[, headage]])')
142 @revsetpredicate('_underway([commitage[, headage]])')
143 def underwayrevset(repo, subset, x):
143 def underwayrevset(repo, subset, x):
144 args = revset.getargsdict(x, 'underway', 'commitage headage')
144 args = revset.getargsdict(x, 'underway', 'commitage headage')
145 if 'commitage' not in args:
145 if 'commitage' not in args:
146 args['commitage'] = None
146 args['commitage'] = None
147 if 'headage' not in args:
147 if 'headage' not in args:
148 args['headage'] = None
148 args['headage'] = None
149
149
150 # We assume callers of this revset add a topological sort on the
150 # We assume callers of this revset add a topological sort on the
151 # result. This means there is no benefit to making the revset lazy
151 # result. This means there is no benefit to making the revset lazy
152 # since the topological sort needs to consume all revs.
152 # since the topological sort needs to consume all revs.
153 #
153 #
154 # With this in mind, we build up the set manually instead of constructing
154 # With this in mind, we build up the set manually instead of constructing
155 # a complex revset. This enables faster execution.
155 # a complex revset. This enables faster execution.
156
156
157 # Mutable changesets (non-public) are the most important changesets
157 # Mutable changesets (non-public) are the most important changesets
158 # to return. ``not public()`` will also pull in obsolete changesets if
158 # to return. ``not public()`` will also pull in obsolete changesets if
159 # there is a non-obsolete changeset with obsolete ancestors. This is
159 # there is a non-obsolete changeset with obsolete ancestors. This is
160 # why we exclude obsolete changesets from this query.
160 # why we exclude obsolete changesets from this query.
161 rs = 'not public() and not obsolete()'
161 rs = 'not public() and not obsolete()'
162 rsargs = []
162 rsargs = []
163 if args['commitage']:
163 if args['commitage']:
164 rs += ' and date(%s)'
164 rs += ' and date(%s)'
165 rsargs.append(revsetlang.getstring(args['commitage'],
165 rsargs.append(revsetlang.getstring(args['commitage'],
166 _('commitage requires a string')))
166 _('commitage requires a string')))
167
167
168 mutable = repo.revs(rs, *rsargs)
168 mutable = repo.revs(rs, *rsargs)
169 relevant = revset.baseset(mutable)
169 relevant = revset.baseset(mutable)
170
170
171 # Add parents of mutable changesets to provide context.
171 # Add parents of mutable changesets to provide context.
172 relevant += repo.revs('parents(%ld)', mutable)
172 relevant += repo.revs('parents(%ld)', mutable)
173
173
174 # We also pull in (public) heads if they a) aren't closing a branch
174 # We also pull in (public) heads if they a) aren't closing a branch
175 # and b) are recent.
175 # and b) are recent.
176 rs = 'head() and not closed()'
176 rs = 'head() and not closed()'
177 rsargs = []
177 rsargs = []
178 if args['headage']:
178 if args['headage']:
179 rs += ' and date(%s)'
179 rs += ' and date(%s)'
180 rsargs.append(revsetlang.getstring(args['headage'],
180 rsargs.append(revsetlang.getstring(args['headage'],
181 _('headage requires a string')))
181 _('headage requires a string')))
182
182
183 relevant += repo.revs(rs, *rsargs)
183 relevant += repo.revs(rs, *rsargs)
184
184
185 # Add working directory parent.
185 # Add working directory parent.
186 wdirrev = repo['.'].rev()
186 wdirrev = repo['.'].rev()
187 if wdirrev != nullrev:
187 if wdirrev != nullrev:
188 relevant += revset.baseset({wdirrev})
188 relevant += revset.baseset({wdirrev})
189
189
190 return subset & relevant
190 return subset & relevant
191
191
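
The predicate above is consumed through the ordinary revset machinery, with the caller supplying the topological sort. A sketch of how showwork() below evaluates it, assuming ui and repo objects are already in scope (for example inside an extension command):

revs = repo.revs('sort(_underway(), topo)')    # callers add the topo sort
for ctx in (repo[r] for r in revs):
    ui.write('%s %s\n' % (ctx.hex()[:12],
                          ctx.description().splitlines()[0]))
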
192 @showview('work', fmtopic='work')
192 @showview('work', fmtopic='work')
193 def showwork(ui, repo, fm):
193 def showwork(ui, repo, fm):
194 """changesets that aren't finished"""
194 """changesets that aren't finished"""
195 # TODO support date-based limiting when calling revset.
195 # TODO support date-based limiting when calling revset.
196 revs = repo.revs('sort(_underway(), topo)')
196 revs = repo.revs('sort(_underway(), topo)')
197
197
198 revdag = graphmod.dagwalker(repo, revs)
198 revdag = graphmod.dagwalker(repo, revs)
199 displayer = cmdutil.changeset_templater(ui, repo, None, None,
199 tmpl = fm._t.load(fm._topic)
200 tmpl=fm._t.load(fm._topic),
200 displayer = cmdutil.makelogtemplater(ui, repo, tmpl, buffered=True)
201 mapfile=None, buffered=True)
202
201
203 ui.setconfig('experimental', 'graphshorten', True)
202 ui.setconfig('experimental', 'graphshorten', True)
204 cmdutil.displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges)
203 cmdutil.displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges)
205
204
206 # Adjust the docstring of the show command so it shows all registered views.
205 # Adjust the docstring of the show command so it shows all registered views.
207 # This is a bit hacky because it runs at the end of module load. When moved
206 # This is a bit hacky because it runs at the end of module load. When moved
208 # into core or when another extension wants to provide a view, we'll need
207 # into core or when another extension wants to provide a view, we'll need
209 # to do this more robustly.
208 # to do this more robustly.
210 # TODO make this more robust.
209 # TODO make this more robust.
211 def _updatedocstring():
210 def _updatedocstring():
212 longest = max(map(len, showview._table.keys()))
211 longest = max(map(len, showview._table.keys()))
213 entries = []
212 entries = []
214 for key in sorted(showview._table.keys()):
213 for key in sorted(showview._table.keys()):
215 entries.append(pycompat.sysstr(' %s %s' % (
214 entries.append(pycompat.sysstr(' %s %s' % (
216 key.ljust(longest), showview._table[key]._origdoc)))
215 key.ljust(longest), showview._table[key]._origdoc)))
217
216
218 cmdtable['show'][0].__doc__ = pycompat.sysstr('%s\n\n%s\n ') % (
217 cmdtable['show'][0].__doc__ = pycompat.sysstr('%s\n\n%s\n ') % (
219 cmdtable['show'][0].__doc__.rstrip(),
218 cmdtable['show'][0].__doc__.rstrip(),
220 pycompat.sysstr('\n\n').join(entries))
219 pycompat.sysstr('\n\n').join(entries))
221
220
222 _updatedocstring()
221 _updatedocstring()
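
Because the two-column rendering above interleaves the old and new lines of showwork(), the net effect of this changeset is easier to see in one piece. A sketch restating the new body, using the makelogtemplater helper this changeset factors out of changeset_templater (the renderwork name is invented; everything else mirrors the hunk above):

from mercurial import cmdutil, graphmod

def renderwork(ui, repo, fm):
    revs = repo.revs('sort(_underway(), topo)')
    revdag = graphmod.dagwalker(repo, revs)
    tmpl = fm._t.load(fm._topic)               # literal template text
    # new helper: build a changeset_templater from a literal template
    displayer = cmdutil.makelogtemplater(ui, repo, tmpl, buffered=True)
    ui.setconfig('experimental', 'graphshorten', True)
    cmdutil.displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges)
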
@@ -1,3582 +1,3587 b''
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import itertools
11 import itertools
12 import os
12 import os
13 import re
13 import re
14 import tempfile
14 import tempfile
15
15
16 from .i18n import _
16 from .i18n import _
17 from .node import (
17 from .node import (
18 hex,
18 hex,
19 nullid,
19 nullid,
20 nullrev,
20 nullrev,
21 short,
21 short,
22 )
22 )
23
23
24 from . import (
24 from . import (
25 bookmarks,
25 bookmarks,
26 changelog,
26 changelog,
27 copies,
27 copies,
28 crecord as crecordmod,
28 crecord as crecordmod,
29 encoding,
29 encoding,
30 error,
30 error,
31 formatter,
31 formatter,
32 graphmod,
32 graphmod,
33 lock as lockmod,
33 lock as lockmod,
34 match as matchmod,
34 match as matchmod,
35 obsolete,
35 obsolete,
36 patch,
36 patch,
37 pathutil,
37 pathutil,
38 phases,
38 phases,
39 pycompat,
39 pycompat,
40 registrar,
40 registrar,
41 repair,
41 repair,
42 revlog,
42 revlog,
43 revset,
43 revset,
44 scmutil,
44 scmutil,
45 smartset,
45 smartset,
46 templatekw,
46 templatekw,
47 templater,
47 templater,
48 util,
48 util,
49 vfs as vfsmod,
49 vfs as vfsmod,
50 )
50 )
51 stringio = util.stringio
51 stringio = util.stringio
52
52
53 # templates of common command options
53 # templates of common command options
54
54
55 dryrunopts = [
55 dryrunopts = [
56 ('n', 'dry-run', None,
56 ('n', 'dry-run', None,
57 _('do not perform actions, just print output')),
57 _('do not perform actions, just print output')),
58 ]
58 ]
59
59
60 remoteopts = [
60 remoteopts = [
61 ('e', 'ssh', '',
61 ('e', 'ssh', '',
62 _('specify ssh command to use'), _('CMD')),
62 _('specify ssh command to use'), _('CMD')),
63 ('', 'remotecmd', '',
63 ('', 'remotecmd', '',
64 _('specify hg command to run on the remote side'), _('CMD')),
64 _('specify hg command to run on the remote side'), _('CMD')),
65 ('', 'insecure', None,
65 ('', 'insecure', None,
66 _('do not verify server certificate (ignoring web.cacerts config)')),
66 _('do not verify server certificate (ignoring web.cacerts config)')),
67 ]
67 ]
68
68
69 walkopts = [
69 walkopts = [
70 ('I', 'include', [],
70 ('I', 'include', [],
71 _('include names matching the given patterns'), _('PATTERN')),
71 _('include names matching the given patterns'), _('PATTERN')),
72 ('X', 'exclude', [],
72 ('X', 'exclude', [],
73 _('exclude names matching the given patterns'), _('PATTERN')),
73 _('exclude names matching the given patterns'), _('PATTERN')),
74 ]
74 ]
75
75
76 commitopts = [
76 commitopts = [
77 ('m', 'message', '',
77 ('m', 'message', '',
78 _('use text as commit message'), _('TEXT')),
78 _('use text as commit message'), _('TEXT')),
79 ('l', 'logfile', '',
79 ('l', 'logfile', '',
80 _('read commit message from file'), _('FILE')),
80 _('read commit message from file'), _('FILE')),
81 ]
81 ]
82
82
83 commitopts2 = [
83 commitopts2 = [
84 ('d', 'date', '',
84 ('d', 'date', '',
85 _('record the specified date as commit date'), _('DATE')),
85 _('record the specified date as commit date'), _('DATE')),
86 ('u', 'user', '',
86 ('u', 'user', '',
87 _('record the specified user as committer'), _('USER')),
87 _('record the specified user as committer'), _('USER')),
88 ]
88 ]
89
89
90 # hidden for now
90 # hidden for now
91 formatteropts = [
91 formatteropts = [
92 ('T', 'template', '',
92 ('T', 'template', '',
93 _('display with template (EXPERIMENTAL)'), _('TEMPLATE')),
93 _('display with template (EXPERIMENTAL)'), _('TEMPLATE')),
94 ]
94 ]
95
95
96 templateopts = [
96 templateopts = [
97 ('', 'style', '',
97 ('', 'style', '',
98 _('display using template map file (DEPRECATED)'), _('STYLE')),
98 _('display using template map file (DEPRECATED)'), _('STYLE')),
99 ('T', 'template', '',
99 ('T', 'template', '',
100 _('display with template'), _('TEMPLATE')),
100 _('display with template'), _('TEMPLATE')),
101 ]
101 ]
102
102
103 logopts = [
103 logopts = [
104 ('p', 'patch', None, _('show patch')),
104 ('p', 'patch', None, _('show patch')),
105 ('g', 'git', None, _('use git extended diff format')),
105 ('g', 'git', None, _('use git extended diff format')),
106 ('l', 'limit', '',
106 ('l', 'limit', '',
107 _('limit number of changes displayed'), _('NUM')),
107 _('limit number of changes displayed'), _('NUM')),
108 ('M', 'no-merges', None, _('do not show merges')),
108 ('M', 'no-merges', None, _('do not show merges')),
109 ('', 'stat', None, _('output diffstat-style summary of changes')),
109 ('', 'stat', None, _('output diffstat-style summary of changes')),
110 ('G', 'graph', None, _("show the revision DAG")),
110 ('G', 'graph', None, _("show the revision DAG")),
111 ] + templateopts
111 ] + templateopts
112
112
113 diffopts = [
113 diffopts = [
114 ('a', 'text', None, _('treat all files as text')),
114 ('a', 'text', None, _('treat all files as text')),
115 ('g', 'git', None, _('use git extended diff format')),
115 ('g', 'git', None, _('use git extended diff format')),
116 ('', 'binary', None, _('generate binary diffs in git mode (default)')),
116 ('', 'binary', None, _('generate binary diffs in git mode (default)')),
117 ('', 'nodates', None, _('omit dates from diff headers'))
117 ('', 'nodates', None, _('omit dates from diff headers'))
118 ]
118 ]
119
119
120 diffwsopts = [
120 diffwsopts = [
121 ('w', 'ignore-all-space', None,
121 ('w', 'ignore-all-space', None,
122 _('ignore white space when comparing lines')),
122 _('ignore white space when comparing lines')),
123 ('b', 'ignore-space-change', None,
123 ('b', 'ignore-space-change', None,
124 _('ignore changes in the amount of white space')),
124 _('ignore changes in the amount of white space')),
125 ('B', 'ignore-blank-lines', None,
125 ('B', 'ignore-blank-lines', None,
126 _('ignore changes whose lines are all blank')),
126 _('ignore changes whose lines are all blank')),
127 ]
127 ]
128
128
129 diffopts2 = [
129 diffopts2 = [
130 ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
130 ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
131 ('p', 'show-function', None, _('show which function each change is in')),
131 ('p', 'show-function', None, _('show which function each change is in')),
132 ('', 'reverse', None, _('produce a diff that undoes the changes')),
132 ('', 'reverse', None, _('produce a diff that undoes the changes')),
133 ] + diffwsopts + [
133 ] + diffwsopts + [
134 ('U', 'unified', '',
134 ('U', 'unified', '',
135 _('number of lines of context to show'), _('NUM')),
135 _('number of lines of context to show'), _('NUM')),
136 ('', 'stat', None, _('output diffstat-style summary of changes')),
136 ('', 'stat', None, _('output diffstat-style summary of changes')),
137 ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
137 ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
138 ]
138 ]
139
139
140 mergetoolopts = [
140 mergetoolopts = [
141 ('t', 'tool', '', _('specify merge tool')),
141 ('t', 'tool', '', _('specify merge tool')),
142 ]
142 ]
143
143
144 similarityopts = [
144 similarityopts = [
145 ('s', 'similarity', '',
145 ('s', 'similarity', '',
146 _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
146 _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
147 ]
147 ]
148
148
149 subrepoopts = [
149 subrepoopts = [
150 ('S', 'subrepos', None,
150 ('S', 'subrepos', None,
151 _('recurse into subrepositories'))
151 _('recurse into subrepositories'))
152 ]
152 ]
153
153
154 debugrevlogopts = [
154 debugrevlogopts = [
155 ('c', 'changelog', False, _('open changelog')),
155 ('c', 'changelog', False, _('open changelog')),
156 ('m', 'manifest', False, _('open manifest')),
156 ('m', 'manifest', False, _('open manifest')),
157 ('', 'dir', '', _('open directory manifest')),
157 ('', 'dir', '', _('open directory manifest')),
158 ]
158 ]
159
159
160 # special string such that everything below this line will be ignored in the
160 # special string such that everything below this line will be ignored in the
161 # editor text
161 # editor text
162 _linebelow = "^HG: ------------------------ >8 ------------------------$"
162 _linebelow = "^HG: ------------------------ >8 ------------------------$"
163
163
164 def ishunk(x):
164 def ishunk(x):
165 hunkclasses = (crecordmod.uihunk, patch.recordhunk)
165 hunkclasses = (crecordmod.uihunk, patch.recordhunk)
166 return isinstance(x, hunkclasses)
166 return isinstance(x, hunkclasses)
167
167
168 def newandmodified(chunks, originalchunks):
168 def newandmodified(chunks, originalchunks):
169 newlyaddedandmodifiedfiles = set()
169 newlyaddedandmodifiedfiles = set()
170 for chunk in chunks:
170 for chunk in chunks:
171 if ishunk(chunk) and chunk.header.isnewfile() and chunk not in \
171 if ishunk(chunk) and chunk.header.isnewfile() and chunk not in \
172 originalchunks:
172 originalchunks:
173 newlyaddedandmodifiedfiles.add(chunk.header.filename())
173 newlyaddedandmodifiedfiles.add(chunk.header.filename())
174 return newlyaddedandmodifiedfiles
174 return newlyaddedandmodifiedfiles
175
175
176 def parsealiases(cmd):
176 def parsealiases(cmd):
177 return cmd.lstrip("^").split("|")
177 return cmd.lstrip("^").split("|")
178
178
179 def setupwrapcolorwrite(ui):
179 def setupwrapcolorwrite(ui):
180 # wrap ui.write so diff output can be labeled/colorized
180 # wrap ui.write so diff output can be labeled/colorized
181 def wrapwrite(orig, *args, **kw):
181 def wrapwrite(orig, *args, **kw):
182 label = kw.pop('label', '')
182 label = kw.pop('label', '')
183 for chunk, l in patch.difflabel(lambda: args):
183 for chunk, l in patch.difflabel(lambda: args):
184 orig(chunk, label=label + l)
184 orig(chunk, label=label + l)
185
185
186 oldwrite = ui.write
186 oldwrite = ui.write
187 def wrap(*args, **kwargs):
187 def wrap(*args, **kwargs):
188 return wrapwrite(oldwrite, *args, **kwargs)
188 return wrapwrite(oldwrite, *args, **kwargs)
189 setattr(ui, 'write', wrap)
189 setattr(ui, 'write', wrap)
190 return oldwrite
190 return oldwrite
191
191
192 def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
192 def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
193 if usecurses:
193 if usecurses:
194 if testfile:
194 if testfile:
195 recordfn = crecordmod.testdecorator(testfile,
195 recordfn = crecordmod.testdecorator(testfile,
196 crecordmod.testchunkselector)
196 crecordmod.testchunkselector)
197 else:
197 else:
198 recordfn = crecordmod.chunkselector
198 recordfn = crecordmod.chunkselector
199
199
200 return crecordmod.filterpatch(ui, originalhunks, recordfn, operation)
200 return crecordmod.filterpatch(ui, originalhunks, recordfn, operation)
201
201
202 else:
202 else:
203 return patch.filterpatch(ui, originalhunks, operation)
203 return patch.filterpatch(ui, originalhunks, operation)
204
204
205 def recordfilter(ui, originalhunks, operation=None):
205 def recordfilter(ui, originalhunks, operation=None):
206 """ Prompts the user to filter the originalhunks and return a list of
206 """ Prompts the user to filter the originalhunks and return a list of
207 selected hunks.
207 selected hunks.
208 *operation* is used to build ui messages indicating to the user what
208 *operation* is used to build ui messages indicating to the user what
209 kind of filtering they are doing: reverting, committing, shelving, etc.
209 kind of filtering they are doing: reverting, committing, shelving, etc.
210 (see patch.filterpatch).
210 (see patch.filterpatch).
211 """
211 """
212 usecurses = crecordmod.checkcurses(ui)
212 usecurses = crecordmod.checkcurses(ui)
213 testfile = ui.config('experimental', 'crecordtest', None)
213 testfile = ui.config('experimental', 'crecordtest', None)
214 oldwrite = setupwrapcolorwrite(ui)
214 oldwrite = setupwrapcolorwrite(ui)
215 try:
215 try:
216 newchunks, newopts = filterchunks(ui, originalhunks, usecurses,
216 newchunks, newopts = filterchunks(ui, originalhunks, usecurses,
217 testfile, operation)
217 testfile, operation)
218 finally:
218 finally:
219 ui.write = oldwrite
219 ui.write = oldwrite
220 return newchunks, newopts
220 return newchunks, newopts
221
221
222 def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
222 def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
223 filterfn, *pats, **opts):
223 filterfn, *pats, **opts):
224 from . import merge as mergemod
224 from . import merge as mergemod
225 opts = pycompat.byteskwargs(opts)
225 opts = pycompat.byteskwargs(opts)
226 if not ui.interactive():
226 if not ui.interactive():
227 if cmdsuggest:
227 if cmdsuggest:
228 msg = _('running non-interactively, use %s instead') % cmdsuggest
228 msg = _('running non-interactively, use %s instead') % cmdsuggest
229 else:
229 else:
230 msg = _('running non-interactively')
230 msg = _('running non-interactively')
231 raise error.Abort(msg)
231 raise error.Abort(msg)
232
232
233 # make sure username is set before going interactive
233 # make sure username is set before going interactive
234 if not opts.get('user'):
234 if not opts.get('user'):
235 ui.username() # raise exception, username not provided
235 ui.username() # raise exception, username not provided
236
236
237 def recordfunc(ui, repo, message, match, opts):
237 def recordfunc(ui, repo, message, match, opts):
238 """This is generic record driver.
238 """This is generic record driver.
239
239
240 Its job is to interactively filter local changes, and
240 Its job is to interactively filter local changes, and
241 accordingly prepare the working directory into a state in which the
241 accordingly prepare the working directory into a state in which the
242 job can be delegated to a non-interactive commit command such as
242 job can be delegated to a non-interactive commit command such as
243 'commit' or 'qrefresh'.
243 'commit' or 'qrefresh'.
244
244
245 After the actual job is done by the non-interactive command, the
245 After the actual job is done by the non-interactive command, the
246 working directory is restored to its original state.
246 working directory is restored to its original state.
247
247
248 In the end we'll record interesting changes, and everything else
248 In the end we'll record interesting changes, and everything else
249 will be left in place, so the user can continue working.
249 will be left in place, so the user can continue working.
250 """
250 """
251
251
252 checkunfinished(repo, commit=True)
252 checkunfinished(repo, commit=True)
253 wctx = repo[None]
253 wctx = repo[None]
254 merge = len(wctx.parents()) > 1
254 merge = len(wctx.parents()) > 1
255 if merge:
255 if merge:
256 raise error.Abort(_('cannot partially commit a merge '
256 raise error.Abort(_('cannot partially commit a merge '
257 '(use "hg commit" instead)'))
257 '(use "hg commit" instead)'))
258
258
259 def fail(f, msg):
259 def fail(f, msg):
260 raise error.Abort('%s: %s' % (f, msg))
260 raise error.Abort('%s: %s' % (f, msg))
261
261
262 force = opts.get('force')
262 force = opts.get('force')
263 if not force:
263 if not force:
264 vdirs = []
264 vdirs = []
265 match.explicitdir = vdirs.append
265 match.explicitdir = vdirs.append
266 match.bad = fail
266 match.bad = fail
267
267
268 status = repo.status(match=match)
268 status = repo.status(match=match)
269 if not force:
269 if not force:
270 repo.checkcommitpatterns(wctx, vdirs, match, status, fail)
270 repo.checkcommitpatterns(wctx, vdirs, match, status, fail)
271 diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
271 diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
272 diffopts.nodates = True
272 diffopts.nodates = True
273 diffopts.git = True
273 diffopts.git = True
274 diffopts.showfunc = True
274 diffopts.showfunc = True
275 originaldiff = patch.diff(repo, changes=status, opts=diffopts)
275 originaldiff = patch.diff(repo, changes=status, opts=diffopts)
276 originalchunks = patch.parsepatch(originaldiff)
276 originalchunks = patch.parsepatch(originaldiff)
277
277
278 # 1. filter patch, since we are intending to apply a subset of it
278 # 1. filter patch, since we are intending to apply a subset of it
279 try:
279 try:
280 chunks, newopts = filterfn(ui, originalchunks)
280 chunks, newopts = filterfn(ui, originalchunks)
281 except patch.PatchError as err:
281 except patch.PatchError as err:
282 raise error.Abort(_('error parsing patch: %s') % err)
282 raise error.Abort(_('error parsing patch: %s') % err)
283 opts.update(newopts)
283 opts.update(newopts)
284
284
285 # We need to keep a backup of files that have been newly added and
285 # We need to keep a backup of files that have been newly added and
286 # modified during the recording process because there is a previous
286 # modified during the recording process because there is a previous
287 # version without the edit in the workdir
287 # version without the edit in the workdir
288 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
288 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
289 contenders = set()
289 contenders = set()
290 for h in chunks:
290 for h in chunks:
291 try:
291 try:
292 contenders.update(set(h.files()))
292 contenders.update(set(h.files()))
293 except AttributeError:
293 except AttributeError:
294 pass
294 pass
295
295
296 changed = status.modified + status.added + status.removed
296 changed = status.modified + status.added + status.removed
297 newfiles = [f for f in changed if f in contenders]
297 newfiles = [f for f in changed if f in contenders]
298 if not newfiles:
298 if not newfiles:
299 ui.status(_('no changes to record\n'))
299 ui.status(_('no changes to record\n'))
300 return 0
300 return 0
301
301
302 modified = set(status.modified)
302 modified = set(status.modified)
303
303
304 # 2. backup changed files, so we can restore them in the end
304 # 2. backup changed files, so we can restore them in the end
305
305
306 if backupall:
306 if backupall:
307 tobackup = changed
307 tobackup = changed
308 else:
308 else:
309 tobackup = [f for f in newfiles if f in modified or f in \
309 tobackup = [f for f in newfiles if f in modified or f in \
310 newlyaddedandmodifiedfiles]
310 newlyaddedandmodifiedfiles]
311 backups = {}
311 backups = {}
312 if tobackup:
312 if tobackup:
313 backupdir = repo.vfs.join('record-backups')
313 backupdir = repo.vfs.join('record-backups')
314 try:
314 try:
315 os.mkdir(backupdir)
315 os.mkdir(backupdir)
316 except OSError as err:
316 except OSError as err:
317 if err.errno != errno.EEXIST:
317 if err.errno != errno.EEXIST:
318 raise
318 raise
319 try:
319 try:
320 # backup continues
320 # backup continues
321 for f in tobackup:
321 for f in tobackup:
322 fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
322 fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
323 dir=backupdir)
323 dir=backupdir)
324 os.close(fd)
324 os.close(fd)
325 ui.debug('backup %r as %r\n' % (f, tmpname))
325 ui.debug('backup %r as %r\n' % (f, tmpname))
326 util.copyfile(repo.wjoin(f), tmpname, copystat=True)
326 util.copyfile(repo.wjoin(f), tmpname, copystat=True)
327 backups[f] = tmpname
327 backups[f] = tmpname
328
328
329 fp = stringio()
329 fp = stringio()
330 for c in chunks:
330 for c in chunks:
331 fname = c.filename()
331 fname = c.filename()
332 if fname in backups:
332 if fname in backups:
333 c.write(fp)
333 c.write(fp)
334 dopatch = fp.tell()
334 dopatch = fp.tell()
335 fp.seek(0)
335 fp.seek(0)
336
336
337 # 2.5 optionally review / modify patch in text editor
337 # 2.5 optionally review / modify patch in text editor
338 if opts.get('review', False):
338 if opts.get('review', False):
339 patchtext = (crecordmod.diffhelptext
339 patchtext = (crecordmod.diffhelptext
340 + crecordmod.patchhelptext
340 + crecordmod.patchhelptext
341 + fp.read())
341 + fp.read())
342 reviewedpatch = ui.edit(patchtext, "",
342 reviewedpatch = ui.edit(patchtext, "",
343 extra={"suffix": ".diff"},
343 extra={"suffix": ".diff"},
344 repopath=repo.path)
344 repopath=repo.path)
345 fp.truncate(0)
345 fp.truncate(0)
346 fp.write(reviewedpatch)
346 fp.write(reviewedpatch)
347 fp.seek(0)
347 fp.seek(0)
348
348
349 [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
349 [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
350 # 3a. apply filtered patch to clean repo (clean)
350 # 3a. apply filtered patch to clean repo (clean)
351 if backups:
351 if backups:
352 # Equivalent to hg.revert
352 # Equivalent to hg.revert
353 m = scmutil.matchfiles(repo, backups.keys())
353 m = scmutil.matchfiles(repo, backups.keys())
354 mergemod.update(repo, repo.dirstate.p1(),
354 mergemod.update(repo, repo.dirstate.p1(),
355 False, True, matcher=m)
355 False, True, matcher=m)
356
356
357 # 3b. (apply)
357 # 3b. (apply)
358 if dopatch:
358 if dopatch:
359 try:
359 try:
360 ui.debug('applying patch\n')
360 ui.debug('applying patch\n')
361 ui.debug(fp.getvalue())
361 ui.debug(fp.getvalue())
362 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
362 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
363 except patch.PatchError as err:
363 except patch.PatchError as err:
364 raise error.Abort(str(err))
364 raise error.Abort(str(err))
365 del fp
365 del fp
366
366
367 # 4. We prepared working directory according to filtered
367 # 4. We prepared working directory according to filtered
368 # patch. Now is the time to delegate the job to
368 # patch. Now is the time to delegate the job to
369 # commit/qrefresh or the like!
369 # commit/qrefresh or the like!
370
370
371 # Make all of the pathnames absolute.
371 # Make all of the pathnames absolute.
372 newfiles = [repo.wjoin(nf) for nf in newfiles]
372 newfiles = [repo.wjoin(nf) for nf in newfiles]
373 return commitfunc(ui, repo, *newfiles, **opts)
373 return commitfunc(ui, repo, *newfiles, **opts)
374 finally:
374 finally:
375 # 5. finally restore backed-up files
375 # 5. finally restore backed-up files
376 try:
376 try:
377 dirstate = repo.dirstate
377 dirstate = repo.dirstate
378 for realname, tmpname in backups.iteritems():
378 for realname, tmpname in backups.iteritems():
379 ui.debug('restoring %r to %r\n' % (tmpname, realname))
379 ui.debug('restoring %r to %r\n' % (tmpname, realname))
380
380
381 if dirstate[realname] == 'n':
381 if dirstate[realname] == 'n':
382 # without normallookup, restoring timestamp
382 # without normallookup, restoring timestamp
383 # may cause partially committed files
383 # may cause partially committed files
384 # to be treated as unmodified
384 # to be treated as unmodified
385 dirstate.normallookup(realname)
385 dirstate.normallookup(realname)
386
386
387 # copystat=True here and above are a hack to trick any
387 # copystat=True here and above are a hack to trick any
388 # editors that have f open into thinking we haven't modified them.
388 # editors that have f open into thinking we haven't modified them.
389 #
389 #
390 # Also note that this is racy as an editor could notice the
390 # Also note that this is racy as an editor could notice the
391 # file's mtime before we've finished writing it.
391 # file's mtime before we've finished writing it.
392 util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
392 util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
393 os.unlink(tmpname)
393 os.unlink(tmpname)
394 if tobackup:
394 if tobackup:
395 os.rmdir(backupdir)
395 os.rmdir(backupdir)
396 except OSError:
396 except OSError:
397 pass
397 pass
398
398
399 def recordinwlock(ui, repo, message, match, opts):
399 def recordinwlock(ui, repo, message, match, opts):
400 with repo.wlock():
400 with repo.wlock():
401 return recordfunc(ui, repo, message, match, opts)
401 return recordfunc(ui, repo, message, match, opts)
402
402
403 return commit(ui, repo, recordinwlock, pats, opts)
403 return commit(ui, repo, recordinwlock, pats, opts)
404
404
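
The heart of recordfunc() is the backup-then-restore dance around the delegated commit (steps 2 and 5 above). A distilled sketch of that pattern, with the error handling and dirstate bookkeeping omitted (withbackups and its work callback are invented names; tempfile.mkstemp, util.copyfile and repo.wjoin are used exactly as above):

import os
import tempfile

from mercurial import util

def withbackups(repo, files, work):
    backupdir = repo.vfs.join('record-backups')
    if not os.path.isdir(backupdir):
        os.mkdir(backupdir)
    backups = {}
    try:
        for f in files:
            fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_') + '.',
                                           dir=backupdir)
            os.close(fd)
            util.copyfile(repo.wjoin(f), tmpname, copystat=True)
            backups[f] = tmpname
        return work()          # e.g. apply the filtered patch and commit
    finally:
        # restore the pristine working copies, as step 5 above does
        for realname, tmpname in backups.items():
            util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
            os.unlink(tmpname)
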
405 def findpossible(cmd, table, strict=False):
405 def findpossible(cmd, table, strict=False):
406 """
406 """
407 Return cmd -> (aliases, command table entry)
407 Return cmd -> (aliases, command table entry)
408 for each matching command.
408 for each matching command.
409 Return debug commands (or their aliases) only if no normal command matches.
409 Return debug commands (or their aliases) only if no normal command matches.
410 """
410 """
411 choice = {}
411 choice = {}
412 debugchoice = {}
412 debugchoice = {}
413
413
414 if cmd in table:
414 if cmd in table:
415 # short-circuit exact matches, "log" alias beats "^log|history"
415 # short-circuit exact matches, "log" alias beats "^log|history"
416 keys = [cmd]
416 keys = [cmd]
417 else:
417 else:
418 keys = table.keys()
418 keys = table.keys()
419
419
420 allcmds = []
420 allcmds = []
421 for e in keys:
421 for e in keys:
422 aliases = parsealiases(e)
422 aliases = parsealiases(e)
423 allcmds.extend(aliases)
423 allcmds.extend(aliases)
424 found = None
424 found = None
425 if cmd in aliases:
425 if cmd in aliases:
426 found = cmd
426 found = cmd
427 elif not strict:
427 elif not strict:
428 for a in aliases:
428 for a in aliases:
429 if a.startswith(cmd):
429 if a.startswith(cmd):
430 found = a
430 found = a
431 break
431 break
432 if found is not None:
432 if found is not None:
433 if aliases[0].startswith("debug") or found.startswith("debug"):
433 if aliases[0].startswith("debug") or found.startswith("debug"):
434 debugchoice[found] = (aliases, table[e])
434 debugchoice[found] = (aliases, table[e])
435 else:
435 else:
436 choice[found] = (aliases, table[e])
436 choice[found] = (aliases, table[e])
437
437
438 if not choice and debugchoice:
438 if not choice and debugchoice:
439 choice = debugchoice
439 choice = debugchoice
440
440
441 return choice, allcmds
441 return choice, allcmds
442
442
443 def findcmd(cmd, table, strict=True):
443 def findcmd(cmd, table, strict=True):
444 """Return (aliases, command table entry) for command string."""
444 """Return (aliases, command table entry) for command string."""
445 choice, allcmds = findpossible(cmd, table, strict)
445 choice, allcmds = findpossible(cmd, table, strict)
446
446
447 if cmd in choice:
447 if cmd in choice:
448 return choice[cmd]
448 return choice[cmd]
449
449
450 if len(choice) > 1:
450 if len(choice) > 1:
451 clist = sorted(choice)
451 clist = sorted(choice)
452 raise error.AmbiguousCommand(cmd, clist)
452 raise error.AmbiguousCommand(cmd, clist)
453
453
454 if choice:
454 if choice:
455 return choice.values()[0]
455 return choice.values()[0]
456
456
457 raise error.UnknownCommand(cmd, allcmds)
457 raise error.UnknownCommand(cmd, allcmds)
458
458
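
findpossible() and findcmd() are what give hg its command-abbreviation behaviour. A sketch of resolving a possibly abbreviated name from outside this module, assuming the main command table; the 'stat' input is just an example:

from mercurial import cmdutil, commands, error

try:
    aliases, entry = cmdutil.findcmd('stat', commands.table, strict=False)
except error.AmbiguousCommand:
    aliases, entry = None, None    # more than one command matches the prefix
except error.UnknownCommand:
    aliases, entry = None, None    # no command matches at all
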
459 def findrepo(p):
459 def findrepo(p):
460 while not os.path.isdir(os.path.join(p, ".hg")):
460 while not os.path.isdir(os.path.join(p, ".hg")):
461 oldp, p = p, os.path.dirname(p)
461 oldp, p = p, os.path.dirname(p)
462 if p == oldp:
462 if p == oldp:
463 return None
463 return None
464
464
465 return p
465 return p
466
466
467 def bailifchanged(repo, merge=True, hint=None):
467 def bailifchanged(repo, merge=True, hint=None):
468 """ enforce the precondition that working directory must be clean.
468 """ enforce the precondition that working directory must be clean.
469
469
470 'merge' can be set to false if a pending uncommitted merge should be
470 'merge' can be set to false if a pending uncommitted merge should be
471 ignored (such as when 'update --check' runs).
471 ignored (such as when 'update --check' runs).
472
472
473 'hint' is the usual hint given to Abort exception.
473 'hint' is the usual hint given to Abort exception.
474 """
474 """
475
475
476 if merge and repo.dirstate.p2() != nullid:
476 if merge and repo.dirstate.p2() != nullid:
477 raise error.Abort(_('outstanding uncommitted merge'), hint=hint)
477 raise error.Abort(_('outstanding uncommitted merge'), hint=hint)
478 modified, added, removed, deleted = repo.status()[:4]
478 modified, added, removed, deleted = repo.status()[:4]
479 if modified or added or removed or deleted:
479 if modified or added or removed or deleted:
480 raise error.Abort(_('uncommitted changes'), hint=hint)
480 raise error.Abort(_('uncommitted changes'), hint=hint)
481 ctx = repo[None]
481 ctx = repo[None]
482 for s in sorted(ctx.substate):
482 for s in sorted(ctx.substate):
483 ctx.sub(s).bailifchanged(hint=hint)
483 ctx.sub(s).bailifchanged(hint=hint)
484
484
485 def logmessage(ui, opts):
485 def logmessage(ui, opts):
486 """ get the log message according to -m and -l option """
486 """ get the log message according to -m and -l option """
487 message = opts.get('message')
487 message = opts.get('message')
488 logfile = opts.get('logfile')
488 logfile = opts.get('logfile')
489
489
490 if message and logfile:
490 if message and logfile:
491 raise error.Abort(_('options --message and --logfile are mutually '
491 raise error.Abort(_('options --message and --logfile are mutually '
492 'exclusive'))
492 'exclusive'))
493 if not message and logfile:
493 if not message and logfile:
494 try:
494 try:
495 if isstdiofilename(logfile):
495 if isstdiofilename(logfile):
496 message = ui.fin.read()
496 message = ui.fin.read()
497 else:
497 else:
498 message = '\n'.join(util.readfile(logfile).splitlines())
498 message = '\n'.join(util.readfile(logfile).splitlines())
499 except IOError as inst:
499 except IOError as inst:
500 raise error.Abort(_("can't read commit message '%s': %s") %
500 raise error.Abort(_("can't read commit message '%s': %s") %
501 (logfile, inst.strerror))
501 (logfile, inst.strerror))
502 return message
502 return message
503
503
504 def mergeeditform(ctxorbool, baseformname):
504 def mergeeditform(ctxorbool, baseformname):
505 """return appropriate editform name (referencing a committemplate)
505 """return appropriate editform name (referencing a committemplate)
506
506
507 'ctxorbool' is either a ctx to be committed, or a bool indicating whether
507 'ctxorbool' is either a ctx to be committed, or a bool indicating whether
508 merging is committed.
508 merging is committed.
509
509
510 This returns baseformname with '.merge' appended if it is a merge,
510 This returns baseformname with '.merge' appended if it is a merge,
511 otherwise '.normal' is appended.
511 otherwise '.normal' is appended.
512 """
512 """
513 if isinstance(ctxorbool, bool):
513 if isinstance(ctxorbool, bool):
514 if ctxorbool:
514 if ctxorbool:
515 return baseformname + ".merge"
515 return baseformname + ".merge"
516 elif 1 < len(ctxorbool.parents()):
516 elif 1 < len(ctxorbool.parents()):
517 return baseformname + ".merge"
517 return baseformname + ".merge"
518
518
519 return baseformname + ".normal"
519 return baseformname + ".normal"
520
520
521 def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
521 def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
522 editform='', **opts):
522 editform='', **opts):
523 """get appropriate commit message editor according to '--edit' option
523 """get appropriate commit message editor according to '--edit' option
524
524
525 'finishdesc' is a function to be called with the edited commit message
525 'finishdesc' is a function to be called with the edited commit message
526 (= 'description' of the new changeset) just after editing, but
526 (= 'description' of the new changeset) just after editing, but
527 before checking emptiness. It should return the actual text to be
527 before checking emptiness. It should return the actual text to be
528 stored into history. This allows the description to be changed before
528 stored into history. This allows the description to be changed before
529 storing.
529 storing.
530
530
531 'extramsg' is an extra message to be shown in the editor instead of
531 'extramsg' is an extra message to be shown in the editor instead of
532 the 'Leave message empty to abort commit' line. The 'HG: ' prefix and EOL
532 the 'Leave message empty to abort commit' line. The 'HG: ' prefix and EOL
533 are added automatically.
533 are added automatically.
534
534
535 'editform' is a dot-separated list of names, to distinguish
535 'editform' is a dot-separated list of names, to distinguish
536 the purpose of commit text editing.
536 the purpose of commit text editing.
537
537
538 'getcommiteditor' returns 'commitforceeditor' regardless of
538 'getcommiteditor' returns 'commitforceeditor' regardless of
539 'edit', if one of 'finishdesc' or 'extramsg' is specified, because
539 'edit', if one of 'finishdesc' or 'extramsg' is specified, because
540 they are specific for usage in MQ.
540 they are specific for usage in MQ.
541 """
541 """
542 if edit or finishdesc or extramsg:
542 if edit or finishdesc or extramsg:
543 return lambda r, c, s: commitforceeditor(r, c, s,
543 return lambda r, c, s: commitforceeditor(r, c, s,
544 finishdesc=finishdesc,
544 finishdesc=finishdesc,
545 extramsg=extramsg,
545 extramsg=extramsg,
546 editform=editform)
546 editform=editform)
547 elif editform:
547 elif editform:
548 return lambda r, c, s: commiteditor(r, c, s, editform=editform)
548 return lambda r, c, s: commiteditor(r, c, s, editform=editform)
549 else:
549 else:
550 return commiteditor
550 return commiteditor
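# A minimal usage sketch (mirroring the call made further down in tryimportone;
# the option values here are illustrative):
#
#   editor = getcommiteditor(editform='commit.normal', edit=True)
#   node = repo.commit(message, user, date, match=m, editor=editor)
#
# When neither 'edit', 'finishdesc', 'extramsg' nor 'editform' is given, the
# plain 'commiteditor' callable is returned unchanged.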
551
551
552 def loglimit(opts):
552 def loglimit(opts):
553 """get the log limit according to option -l/--limit"""
553 """get the log limit according to option -l/--limit"""
554 limit = opts.get('limit')
554 limit = opts.get('limit')
555 if limit:
555 if limit:
556 try:
556 try:
557 limit = int(limit)
557 limit = int(limit)
558 except ValueError:
558 except ValueError:
559 raise error.Abort(_('limit must be a positive integer'))
559 raise error.Abort(_('limit must be a positive integer'))
560 if limit <= 0:
560 if limit <= 0:
561 raise error.Abort(_('limit must be positive'))
561 raise error.Abort(_('limit must be positive'))
562 else:
562 else:
563 limit = None
563 limit = None
564 return limit
564 return limit
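# Behaviour sketch (opts values are illustrative):
#
#   loglimit({'limit': '3'})   -> 3
#   loglimit({'limit': ''})    -> None (no limit requested)
#   loglimit({'limit': 'x'})   -> error.Abort: limit must be a positive integer
#   loglimit({'limit': '0'})   -> error.Abort: limit must be positive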
565
565
566 def makefilename(repo, pat, node, desc=None,
566 def makefilename(repo, pat, node, desc=None,
567 total=None, seqno=None, revwidth=None, pathname=None):
567 total=None, seqno=None, revwidth=None, pathname=None):
568 node_expander = {
568 node_expander = {
569 'H': lambda: hex(node),
569 'H': lambda: hex(node),
570 'R': lambda: str(repo.changelog.rev(node)),
570 'R': lambda: str(repo.changelog.rev(node)),
571 'h': lambda: short(node),
571 'h': lambda: short(node),
572 'm': lambda: re.sub('[^\w]', '_', str(desc))
572 'm': lambda: re.sub('[^\w]', '_', str(desc))
573 }
573 }
574 expander = {
574 expander = {
575 '%': lambda: '%',
575 '%': lambda: '%',
576 'b': lambda: os.path.basename(repo.root),
576 'b': lambda: os.path.basename(repo.root),
577 }
577 }
578
578
579 try:
579 try:
580 if node:
580 if node:
581 expander.update(node_expander)
581 expander.update(node_expander)
582 if node:
582 if node:
583 expander['r'] = (lambda:
583 expander['r'] = (lambda:
584 str(repo.changelog.rev(node)).zfill(revwidth or 0))
584 str(repo.changelog.rev(node)).zfill(revwidth or 0))
585 if total is not None:
585 if total is not None:
586 expander['N'] = lambda: str(total)
586 expander['N'] = lambda: str(total)
587 if seqno is not None:
587 if seqno is not None:
588 expander['n'] = lambda: str(seqno)
588 expander['n'] = lambda: str(seqno)
589 if total is not None and seqno is not None:
589 if total is not None and seqno is not None:
590 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
590 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
591 if pathname is not None:
591 if pathname is not None:
592 expander['s'] = lambda: os.path.basename(pathname)
592 expander['s'] = lambda: os.path.basename(pathname)
593 expander['d'] = lambda: os.path.dirname(pathname) or '.'
593 expander['d'] = lambda: os.path.dirname(pathname) or '.'
594 expander['p'] = lambda: pathname
594 expander['p'] = lambda: pathname
595
595
596 newname = []
596 newname = []
597 patlen = len(pat)
597 patlen = len(pat)
598 i = 0
598 i = 0
599 while i < patlen:
599 while i < patlen:
600 c = pat[i:i + 1]
600 c = pat[i:i + 1]
601 if c == '%':
601 if c == '%':
602 i += 1
602 i += 1
603 c = pat[i:i + 1]
603 c = pat[i:i + 1]
604 c = expander[c]()
604 c = expander[c]()
605 newname.append(c)
605 newname.append(c)
606 i += 1
606 i += 1
607 return ''.join(newname)
607 return ''.join(newname)
608 except KeyError as inst:
608 except KeyError as inst:
609 raise error.Abort(_("invalid format spec '%%%s' in output filename") %
609 raise error.Abort(_("invalid format spec '%%%s' in output filename") %
610 inst.args[0])
610 inst.args[0])
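# Illustrative expansions of the supported format specifiers (the node, repo
# name and numbers below are made up; the specifiers come from the expander
# tables above):
#
#   makefilename(repo, 'hg-%h.patch', node)                    -> 'hg-a1b2c3d4e5f6.patch'
#   makefilename(repo, '%b-%r.diff', node, revwidth=3)         -> 'myrepo-007.diff'
#   makefilename(repo, '%n-of-%N.patch', node,
#                seqno=2, total=10)                             -> '02-of-10.patch'
#   '%%' always expands to a literal '%'.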
611
611
612 def isstdiofilename(pat):
612 def isstdiofilename(pat):
613 """True if the given pat looks like a filename denoting stdin/stdout"""
613 """True if the given pat looks like a filename denoting stdin/stdout"""
614 return not pat or pat == '-'
614 return not pat or pat == '-'
615
615
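# Wrapper returned by makefileobj() for stdin/stdout so that callers using the
# usual "with ..." / fp.close() pattern cannot accidentally close the ui's
# standard streams; close() and __exit__() are deliberately no-ops.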
616 class _unclosablefile(object):
616 class _unclosablefile(object):
617 def __init__(self, fp):
617 def __init__(self, fp):
618 self._fp = fp
618 self._fp = fp
619
619
620 def close(self):
620 def close(self):
621 pass
621 pass
622
622
623 def __iter__(self):
623 def __iter__(self):
624 return iter(self._fp)
624 return iter(self._fp)
625
625
626 def __getattr__(self, attr):
626 def __getattr__(self, attr):
627 return getattr(self._fp, attr)
627 return getattr(self._fp, attr)
628
628
629 def __enter__(self):
629 def __enter__(self):
630 return self
630 return self
631
631
632 def __exit__(self, exc_type, exc_value, exc_tb):
632 def __exit__(self, exc_type, exc_value, exc_tb):
633 pass
633 pass
634
634
635 def makefileobj(repo, pat, node=None, desc=None, total=None,
635 def makefileobj(repo, pat, node=None, desc=None, total=None,
636 seqno=None, revwidth=None, mode='wb', modemap=None,
636 seqno=None, revwidth=None, mode='wb', modemap=None,
637 pathname=None):
637 pathname=None):
638
638
639 writable = mode not in ('r', 'rb')
639 writable = mode not in ('r', 'rb')
640
640
641 if isstdiofilename(pat):
641 if isstdiofilename(pat):
642 if writable:
642 if writable:
643 fp = repo.ui.fout
643 fp = repo.ui.fout
644 else:
644 else:
645 fp = repo.ui.fin
645 fp = repo.ui.fin
646 return _unclosablefile(fp)
646 return _unclosablefile(fp)
647 fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
647 fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
648 if modemap is not None:
648 if modemap is not None:
649 mode = modemap.get(fn, mode)
649 mode = modemap.get(fn, mode)
650 if mode == 'wb':
650 if mode == 'wb':
651 modemap[fn] = 'ab'
651 modemap[fn] = 'ab'
652 return open(fn, mode)
652 return open(fn, mode)
653
653
654 def openrevlog(repo, cmd, file_, opts):
654 def openrevlog(repo, cmd, file_, opts):
655 """opens the changelog, manifest, a filelog or a given revlog"""
655 """opens the changelog, manifest, a filelog or a given revlog"""
656 cl = opts['changelog']
656 cl = opts['changelog']
657 mf = opts['manifest']
657 mf = opts['manifest']
658 dir = opts['dir']
658 dir = opts['dir']
659 msg = None
659 msg = None
660 if cl and mf:
660 if cl and mf:
661 msg = _('cannot specify --changelog and --manifest at the same time')
661 msg = _('cannot specify --changelog and --manifest at the same time')
662 elif cl and dir:
662 elif cl and dir:
663 msg = _('cannot specify --changelog and --dir at the same time')
663 msg = _('cannot specify --changelog and --dir at the same time')
664 elif cl or mf or dir:
664 elif cl or mf or dir:
665 if file_:
665 if file_:
666 msg = _('cannot specify filename with --changelog or --manifest')
666 msg = _('cannot specify filename with --changelog or --manifest')
667 elif not repo:
667 elif not repo:
668 msg = _('cannot specify --changelog or --manifest or --dir '
668 msg = _('cannot specify --changelog or --manifest or --dir '
669 'without a repository')
669 'without a repository')
670 if msg:
670 if msg:
671 raise error.Abort(msg)
671 raise error.Abort(msg)
672
672
673 r = None
673 r = None
674 if repo:
674 if repo:
675 if cl:
675 if cl:
676 r = repo.unfiltered().changelog
676 r = repo.unfiltered().changelog
677 elif dir:
677 elif dir:
678 if 'treemanifest' not in repo.requirements:
678 if 'treemanifest' not in repo.requirements:
679 raise error.Abort(_("--dir can only be used on repos with "
679 raise error.Abort(_("--dir can only be used on repos with "
680 "treemanifest enabled"))
680 "treemanifest enabled"))
681 dirlog = repo.manifestlog._revlog.dirlog(dir)
681 dirlog = repo.manifestlog._revlog.dirlog(dir)
682 if len(dirlog):
682 if len(dirlog):
683 r = dirlog
683 r = dirlog
684 elif mf:
684 elif mf:
685 r = repo.manifestlog._revlog
685 r = repo.manifestlog._revlog
686 elif file_:
686 elif file_:
687 filelog = repo.file(file_)
687 filelog = repo.file(file_)
688 if len(filelog):
688 if len(filelog):
689 r = filelog
689 r = filelog
690 if not r:
690 if not r:
691 if not file_:
691 if not file_:
692 raise error.CommandError(cmd, _('invalid arguments'))
692 raise error.CommandError(cmd, _('invalid arguments'))
693 if not os.path.isfile(file_):
693 if not os.path.isfile(file_):
694 raise error.Abort(_("revlog '%s' not found") % file_)
694 raise error.Abort(_("revlog '%s' not found") % file_)
695 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
695 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
696 file_[:-2] + ".i")
696 file_[:-2] + ".i")
697 return r
697 return r
698
698
699 def copy(ui, repo, pats, opts, rename=False):
699 def copy(ui, repo, pats, opts, rename=False):
700 # called with the repo lock held
700 # called with the repo lock held
701 #
701 #
702 # hgsep => pathname that uses "/" to separate directories
702 # hgsep => pathname that uses "/" to separate directories
703 # ossep => pathname that uses os.sep to separate directories
703 # ossep => pathname that uses os.sep to separate directories
704 cwd = repo.getcwd()
704 cwd = repo.getcwd()
705 targets = {}
705 targets = {}
706 after = opts.get("after")
706 after = opts.get("after")
707 dryrun = opts.get("dry_run")
707 dryrun = opts.get("dry_run")
708 wctx = repo[None]
708 wctx = repo[None]
709
709
710 def walkpat(pat):
710 def walkpat(pat):
711 srcs = []
711 srcs = []
712 if after:
712 if after:
713 badstates = '?'
713 badstates = '?'
714 else:
714 else:
715 badstates = '?r'
715 badstates = '?r'
716 m = scmutil.match(wctx, [pat], opts, globbed=True)
716 m = scmutil.match(wctx, [pat], opts, globbed=True)
717 for abs in wctx.walk(m):
717 for abs in wctx.walk(m):
718 state = repo.dirstate[abs]
718 state = repo.dirstate[abs]
719 rel = m.rel(abs)
719 rel = m.rel(abs)
720 exact = m.exact(abs)
720 exact = m.exact(abs)
721 if state in badstates:
721 if state in badstates:
722 if exact and state == '?':
722 if exact and state == '?':
723 ui.warn(_('%s: not copying - file is not managed\n') % rel)
723 ui.warn(_('%s: not copying - file is not managed\n') % rel)
724 if exact and state == 'r':
724 if exact and state == 'r':
725 ui.warn(_('%s: not copying - file has been marked for'
725 ui.warn(_('%s: not copying - file has been marked for'
726 ' remove\n') % rel)
726 ' remove\n') % rel)
727 continue
727 continue
728 # abs: hgsep
728 # abs: hgsep
729 # rel: ossep
729 # rel: ossep
730 srcs.append((abs, rel, exact))
730 srcs.append((abs, rel, exact))
731 return srcs
731 return srcs
732
732
733 # abssrc: hgsep
733 # abssrc: hgsep
734 # relsrc: ossep
734 # relsrc: ossep
735 # otarget: ossep
735 # otarget: ossep
736 def copyfile(abssrc, relsrc, otarget, exact):
736 def copyfile(abssrc, relsrc, otarget, exact):
737 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
737 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
738 if '/' in abstarget:
738 if '/' in abstarget:
739 # We cannot normalize abstarget itself, this would prevent
739 # We cannot normalize abstarget itself, this would prevent
740 # case only renames, like a => A.
740 # case only renames, like a => A.
741 abspath, absname = abstarget.rsplit('/', 1)
741 abspath, absname = abstarget.rsplit('/', 1)
742 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
742 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
743 reltarget = repo.pathto(abstarget, cwd)
743 reltarget = repo.pathto(abstarget, cwd)
744 target = repo.wjoin(abstarget)
744 target = repo.wjoin(abstarget)
745 src = repo.wjoin(abssrc)
745 src = repo.wjoin(abssrc)
746 state = repo.dirstate[abstarget]
746 state = repo.dirstate[abstarget]
747
747
748 scmutil.checkportable(ui, abstarget)
748 scmutil.checkportable(ui, abstarget)
749
749
750 # check for collisions
750 # check for collisions
751 prevsrc = targets.get(abstarget)
751 prevsrc = targets.get(abstarget)
752 if prevsrc is not None:
752 if prevsrc is not None:
753 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
753 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
754 (reltarget, repo.pathto(abssrc, cwd),
754 (reltarget, repo.pathto(abssrc, cwd),
755 repo.pathto(prevsrc, cwd)))
755 repo.pathto(prevsrc, cwd)))
756 return
756 return
757
757
758 # check for overwrites
758 # check for overwrites
759 exists = os.path.lexists(target)
759 exists = os.path.lexists(target)
760 samefile = False
760 samefile = False
761 if exists and abssrc != abstarget:
761 if exists and abssrc != abstarget:
762 if (repo.dirstate.normalize(abssrc) ==
762 if (repo.dirstate.normalize(abssrc) ==
763 repo.dirstate.normalize(abstarget)):
763 repo.dirstate.normalize(abstarget)):
764 if not rename:
764 if not rename:
765 ui.warn(_("%s: can't copy - same file\n") % reltarget)
765 ui.warn(_("%s: can't copy - same file\n") % reltarget)
766 return
766 return
767 exists = False
767 exists = False
768 samefile = True
768 samefile = True
769
769
770 if not after and exists or after and state in 'mn':
770 if not after and exists or after and state in 'mn':
771 if not opts['force']:
771 if not opts['force']:
772 if state in 'mn':
772 if state in 'mn':
773 msg = _('%s: not overwriting - file already committed\n')
773 msg = _('%s: not overwriting - file already committed\n')
774 if after:
774 if after:
775 flags = '--after --force'
775 flags = '--after --force'
776 else:
776 else:
777 flags = '--force'
777 flags = '--force'
778 if rename:
778 if rename:
779 hint = _('(hg rename %s to replace the file by '
779 hint = _('(hg rename %s to replace the file by '
780 'recording a rename)\n') % flags
780 'recording a rename)\n') % flags
781 else:
781 else:
782 hint = _('(hg copy %s to replace the file by '
782 hint = _('(hg copy %s to replace the file by '
783 'recording a copy)\n') % flags
783 'recording a copy)\n') % flags
784 else:
784 else:
785 msg = _('%s: not overwriting - file exists\n')
785 msg = _('%s: not overwriting - file exists\n')
786 if rename:
786 if rename:
787 hint = _('(hg rename --after to record the rename)\n')
787 hint = _('(hg rename --after to record the rename)\n')
788 else:
788 else:
789 hint = _('(hg copy --after to record the copy)\n')
789 hint = _('(hg copy --after to record the copy)\n')
790 ui.warn(msg % reltarget)
790 ui.warn(msg % reltarget)
791 ui.warn(hint)
791 ui.warn(hint)
792 return
792 return
793
793
794 if after:
794 if after:
795 if not exists:
795 if not exists:
796 if rename:
796 if rename:
797 ui.warn(_('%s: not recording move - %s does not exist\n') %
797 ui.warn(_('%s: not recording move - %s does not exist\n') %
798 (relsrc, reltarget))
798 (relsrc, reltarget))
799 else:
799 else:
800 ui.warn(_('%s: not recording copy - %s does not exist\n') %
800 ui.warn(_('%s: not recording copy - %s does not exist\n') %
801 (relsrc, reltarget))
801 (relsrc, reltarget))
802 return
802 return
803 elif not dryrun:
803 elif not dryrun:
804 try:
804 try:
805 if exists:
805 if exists:
806 os.unlink(target)
806 os.unlink(target)
807 targetdir = os.path.dirname(target) or '.'
807 targetdir = os.path.dirname(target) or '.'
808 if not os.path.isdir(targetdir):
808 if not os.path.isdir(targetdir):
809 os.makedirs(targetdir)
809 os.makedirs(targetdir)
810 if samefile:
810 if samefile:
811 tmp = target + "~hgrename"
811 tmp = target + "~hgrename"
812 os.rename(src, tmp)
812 os.rename(src, tmp)
813 os.rename(tmp, target)
813 os.rename(tmp, target)
814 else:
814 else:
815 util.copyfile(src, target)
815 util.copyfile(src, target)
816 srcexists = True
816 srcexists = True
817 except IOError as inst:
817 except IOError as inst:
818 if inst.errno == errno.ENOENT:
818 if inst.errno == errno.ENOENT:
819 ui.warn(_('%s: deleted in working directory\n') % relsrc)
819 ui.warn(_('%s: deleted in working directory\n') % relsrc)
820 srcexists = False
820 srcexists = False
821 else:
821 else:
822 ui.warn(_('%s: cannot copy - %s\n') %
822 ui.warn(_('%s: cannot copy - %s\n') %
823 (relsrc, inst.strerror))
823 (relsrc, inst.strerror))
824 return True # report a failure
824 return True # report a failure
825
825
826 if ui.verbose or not exact:
826 if ui.verbose or not exact:
827 if rename:
827 if rename:
828 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
828 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
829 else:
829 else:
830 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
830 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
831
831
832 targets[abstarget] = abssrc
832 targets[abstarget] = abssrc
833
833
834 # fix up dirstate
834 # fix up dirstate
835 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
835 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
836 dryrun=dryrun, cwd=cwd)
836 dryrun=dryrun, cwd=cwd)
837 if rename and not dryrun:
837 if rename and not dryrun:
838 if not after and srcexists and not samefile:
838 if not after and srcexists and not samefile:
839 repo.wvfs.unlinkpath(abssrc)
839 repo.wvfs.unlinkpath(abssrc)
840 wctx.forget([abssrc])
840 wctx.forget([abssrc])
841
841
842 # pat: ossep
842 # pat: ossep
843 # dest ossep
843 # dest ossep
844 # srcs: list of (hgsep, hgsep, ossep, bool)
844 # srcs: list of (hgsep, hgsep, ossep, bool)
845 # return: function that takes hgsep and returns ossep
845 # return: function that takes hgsep and returns ossep
846 def targetpathfn(pat, dest, srcs):
846 def targetpathfn(pat, dest, srcs):
847 if os.path.isdir(pat):
847 if os.path.isdir(pat):
848 abspfx = pathutil.canonpath(repo.root, cwd, pat)
848 abspfx = pathutil.canonpath(repo.root, cwd, pat)
849 abspfx = util.localpath(abspfx)
849 abspfx = util.localpath(abspfx)
850 if destdirexists:
850 if destdirexists:
851 striplen = len(os.path.split(abspfx)[0])
851 striplen = len(os.path.split(abspfx)[0])
852 else:
852 else:
853 striplen = len(abspfx)
853 striplen = len(abspfx)
854 if striplen:
854 if striplen:
855 striplen += len(pycompat.ossep)
855 striplen += len(pycompat.ossep)
856 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
856 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
857 elif destdirexists:
857 elif destdirexists:
858 res = lambda p: os.path.join(dest,
858 res = lambda p: os.path.join(dest,
859 os.path.basename(util.localpath(p)))
859 os.path.basename(util.localpath(p)))
860 else:
860 else:
861 res = lambda p: dest
861 res = lambda p: dest
862 return res
862 return res
863
863
864 # pat: ossep
864 # pat: ossep
865 # dest ossep
865 # dest ossep
866 # srcs: list of (hgsep, hgsep, ossep, bool)
866 # srcs: list of (hgsep, hgsep, ossep, bool)
867 # return: function that takes hgsep and returns ossep
867 # return: function that takes hgsep and returns ossep
868 def targetpathafterfn(pat, dest, srcs):
868 def targetpathafterfn(pat, dest, srcs):
869 if matchmod.patkind(pat):
869 if matchmod.patkind(pat):
870 # a mercurial pattern
870 # a mercurial pattern
871 res = lambda p: os.path.join(dest,
871 res = lambda p: os.path.join(dest,
872 os.path.basename(util.localpath(p)))
872 os.path.basename(util.localpath(p)))
873 else:
873 else:
874 abspfx = pathutil.canonpath(repo.root, cwd, pat)
874 abspfx = pathutil.canonpath(repo.root, cwd, pat)
875 if len(abspfx) < len(srcs[0][0]):
875 if len(abspfx) < len(srcs[0][0]):
876 # A directory. Either the target path contains the last
876 # A directory. Either the target path contains the last
877 # component of the source path or it does not.
877 # component of the source path or it does not.
878 def evalpath(striplen):
878 def evalpath(striplen):
879 score = 0
879 score = 0
880 for s in srcs:
880 for s in srcs:
881 t = os.path.join(dest, util.localpath(s[0])[striplen:])
881 t = os.path.join(dest, util.localpath(s[0])[striplen:])
882 if os.path.lexists(t):
882 if os.path.lexists(t):
883 score += 1
883 score += 1
884 return score
884 return score
885
885
886 abspfx = util.localpath(abspfx)
886 abspfx = util.localpath(abspfx)
887 striplen = len(abspfx)
887 striplen = len(abspfx)
888 if striplen:
888 if striplen:
889 striplen += len(pycompat.ossep)
889 striplen += len(pycompat.ossep)
890 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
890 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
891 score = evalpath(striplen)
891 score = evalpath(striplen)
892 striplen1 = len(os.path.split(abspfx)[0])
892 striplen1 = len(os.path.split(abspfx)[0])
893 if striplen1:
893 if striplen1:
894 striplen1 += len(pycompat.ossep)
894 striplen1 += len(pycompat.ossep)
895 if evalpath(striplen1) > score:
895 if evalpath(striplen1) > score:
896 striplen = striplen1
896 striplen = striplen1
897 res = lambda p: os.path.join(dest,
897 res = lambda p: os.path.join(dest,
898 util.localpath(p)[striplen:])
898 util.localpath(p)[striplen:])
899 else:
899 else:
900 # a file
900 # a file
901 if destdirexists:
901 if destdirexists:
902 res = lambda p: os.path.join(dest,
902 res = lambda p: os.path.join(dest,
903 os.path.basename(util.localpath(p)))
903 os.path.basename(util.localpath(p)))
904 else:
904 else:
905 res = lambda p: dest
905 res = lambda p: dest
906 return res
906 return res
907
907
908 pats = scmutil.expandpats(pats)
908 pats = scmutil.expandpats(pats)
909 if not pats:
909 if not pats:
910 raise error.Abort(_('no source or destination specified'))
910 raise error.Abort(_('no source or destination specified'))
911 if len(pats) == 1:
911 if len(pats) == 1:
912 raise error.Abort(_('no destination specified'))
912 raise error.Abort(_('no destination specified'))
913 dest = pats.pop()
913 dest = pats.pop()
914 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
914 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
915 if not destdirexists:
915 if not destdirexists:
916 if len(pats) > 1 or matchmod.patkind(pats[0]):
916 if len(pats) > 1 or matchmod.patkind(pats[0]):
917 raise error.Abort(_('with multiple sources, destination must be an '
917 raise error.Abort(_('with multiple sources, destination must be an '
918 'existing directory'))
918 'existing directory'))
919 if util.endswithsep(dest):
919 if util.endswithsep(dest):
920 raise error.Abort(_('destination %s is not a directory') % dest)
920 raise error.Abort(_('destination %s is not a directory') % dest)
921
921
922 tfn = targetpathfn
922 tfn = targetpathfn
923 if after:
923 if after:
924 tfn = targetpathafterfn
924 tfn = targetpathafterfn
925 copylist = []
925 copylist = []
926 for pat in pats:
926 for pat in pats:
927 srcs = walkpat(pat)
927 srcs = walkpat(pat)
928 if not srcs:
928 if not srcs:
929 continue
929 continue
930 copylist.append((tfn(pat, dest, srcs), srcs))
930 copylist.append((tfn(pat, dest, srcs), srcs))
931 if not copylist:
931 if not copylist:
932 raise error.Abort(_('no files to copy'))
932 raise error.Abort(_('no files to copy'))
933
933
934 errors = 0
934 errors = 0
935 for targetpath, srcs in copylist:
935 for targetpath, srcs in copylist:
936 for abssrc, relsrc, exact in srcs:
936 for abssrc, relsrc, exact in srcs:
937 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
937 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
938 errors += 1
938 errors += 1
939
939
940 if errors:
940 if errors:
941 ui.warn(_('(consider using --after)\n'))
941 ui.warn(_('(consider using --after)\n'))
942
942
943 return errors != 0
943 return errors != 0
944
944
945 ## facility to let extensions process additional data into an import patch
946 # list of identifiers to be executed in order
947 extrapreimport = [] # run before commit
947 extrapreimport = [] # run before commit
948 extrapostimport = [] # run after commit
948 extrapostimport = [] # run after commit
949 # mapping from identifier to actual import function
949 # mapping from identifier to actual import function
950 #
950 #
951 # 'preimport' are run before the commit is made and are provided the following
951 # 'preimport' are run before the commit is made and are provided the following
952 # arguments:
952 # arguments:
953 # - repo: the localrepository instance,
953 # - repo: the localrepository instance,
954 # - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
954 # - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
955 # - extra: the future extra dictionary of the changeset, please mutate it,
955 # - extra: the future extra dictionary of the changeset, please mutate it,
956 # - opts: the import options.
956 # - opts: the import options.
957 # XXX ideally, we would just pass a ctx ready to be computed; that would allow
958 # mutation of the in-memory commit and more. Feel free to rework the code to
959 # get there.
960 extrapreimportmap = {}
960 extrapreimportmap = {}
961 # 'postimport' are run after the commit is made and are provided the following
961 # 'postimport' are run after the commit is made and are provided the following
962 # argument:
962 # argument:
963 # - ctx: the changectx created by import.
963 # - ctx: the changectx created by import.
964 extrapostimportmap = {}
964 extrapostimportmap = {}
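# A registration sketch for an extension-provided pre-import hook (the 'ticket'
# identifier, header key and function body are hypothetical, not part of
# Mercurial):
#
#   def _recordticket(repo, patchdata, extra, opts):
#       # copy a header extracted from the patch into the changeset's extra
#       if 'ticket' in patchdata:
#           extra['ticket'] = patchdata['ticket']
#
#   extrapreimport.append('ticket')
#   extrapreimportmap['ticket'] = _recordticket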
965
965
966 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
966 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
967 """Utility function used by commands.import to import a single patch
967 """Utility function used by commands.import to import a single patch
968
968
969 This function is explicitly defined here to help the evolve extension to
969 This function is explicitly defined here to help the evolve extension to
970 wrap this part of the import logic.
970 wrap this part of the import logic.
971
971
972 The API is currently a bit ugly because it is a simple code translation from
973 the import command. Feel free to make it better.
974
974
975 :hunk: a patch (as a binary string)
975 :hunk: a patch (as a binary string)
976 :parents: nodes that will be parent of the created commit
976 :parents: nodes that will be parent of the created commit
977 :opts: the full dict of options passed to the import command
978 :msgs: list to save the commit message to.
979 (used in case we need to save it when failing)
980 :updatefunc: a function that updates a repo to a given node
981 updatefunc(<repo>, <node>)
982 """
982 """
983 # avoid cycle context -> subrepo -> cmdutil
983 # avoid cycle context -> subrepo -> cmdutil
984 from . import context
984 from . import context
985 extractdata = patch.extract(ui, hunk)
985 extractdata = patch.extract(ui, hunk)
986 tmpname = extractdata.get('filename')
986 tmpname = extractdata.get('filename')
987 message = extractdata.get('message')
987 message = extractdata.get('message')
988 user = opts.get('user') or extractdata.get('user')
988 user = opts.get('user') or extractdata.get('user')
989 date = opts.get('date') or extractdata.get('date')
989 date = opts.get('date') or extractdata.get('date')
990 branch = extractdata.get('branch')
990 branch = extractdata.get('branch')
991 nodeid = extractdata.get('nodeid')
991 nodeid = extractdata.get('nodeid')
992 p1 = extractdata.get('p1')
992 p1 = extractdata.get('p1')
993 p2 = extractdata.get('p2')
993 p2 = extractdata.get('p2')
994
994
995 nocommit = opts.get('no_commit')
995 nocommit = opts.get('no_commit')
996 importbranch = opts.get('import_branch')
996 importbranch = opts.get('import_branch')
997 update = not opts.get('bypass')
997 update = not opts.get('bypass')
998 strip = opts["strip"]
998 strip = opts["strip"]
999 prefix = opts["prefix"]
999 prefix = opts["prefix"]
1000 sim = float(opts.get('similarity') or 0)
1000 sim = float(opts.get('similarity') or 0)
1001 if not tmpname:
1001 if not tmpname:
1002 return (None, None, False)
1002 return (None, None, False)
1003
1003
1004 rejects = False
1004 rejects = False
1005
1005
1006 try:
1006 try:
1007 cmdline_message = logmessage(ui, opts)
1007 cmdline_message = logmessage(ui, opts)
1008 if cmdline_message:
1008 if cmdline_message:
1009 # pickup the cmdline msg
1009 # pickup the cmdline msg
1010 message = cmdline_message
1010 message = cmdline_message
1011 elif message:
1011 elif message:
1012 # pickup the patch msg
1012 # pickup the patch msg
1013 message = message.strip()
1013 message = message.strip()
1014 else:
1014 else:
1015 # launch the editor
1015 # launch the editor
1016 message = None
1016 message = None
1017 ui.debug('message:\n%s\n' % message)
1017 ui.debug('message:\n%s\n' % message)
1018
1018
1019 if len(parents) == 1:
1019 if len(parents) == 1:
1020 parents.append(repo[nullid])
1020 parents.append(repo[nullid])
1021 if opts.get('exact'):
1021 if opts.get('exact'):
1022 if not nodeid or not p1:
1022 if not nodeid or not p1:
1023 raise error.Abort(_('not a Mercurial patch'))
1023 raise error.Abort(_('not a Mercurial patch'))
1024 p1 = repo[p1]
1024 p1 = repo[p1]
1025 p2 = repo[p2 or nullid]
1025 p2 = repo[p2 or nullid]
1026 elif p2:
1026 elif p2:
1027 try:
1027 try:
1028 p1 = repo[p1]
1028 p1 = repo[p1]
1029 p2 = repo[p2]
1029 p2 = repo[p2]
1030 # Without any options, consider p2 only if the
1030 # Without any options, consider p2 only if the
1031 # patch is being applied on top of the recorded
1031 # patch is being applied on top of the recorded
1032 # first parent.
1032 # first parent.
1033 if p1 != parents[0]:
1033 if p1 != parents[0]:
1034 p1 = parents[0]
1034 p1 = parents[0]
1035 p2 = repo[nullid]
1035 p2 = repo[nullid]
1036 except error.RepoError:
1036 except error.RepoError:
1037 p1, p2 = parents
1037 p1, p2 = parents
1038 if p2.node() == nullid:
1038 if p2.node() == nullid:
1039 ui.warn(_("warning: import the patch as a normal revision\n"
1039 ui.warn(_("warning: import the patch as a normal revision\n"
1040 "(use --exact to import the patch as a merge)\n"))
1040 "(use --exact to import the patch as a merge)\n"))
1041 else:
1041 else:
1042 p1, p2 = parents
1042 p1, p2 = parents
1043
1043
1044 n = None
1044 n = None
1045 if update:
1045 if update:
1046 if p1 != parents[0]:
1046 if p1 != parents[0]:
1047 updatefunc(repo, p1.node())
1047 updatefunc(repo, p1.node())
1048 if p2 != parents[1]:
1048 if p2 != parents[1]:
1049 repo.setparents(p1.node(), p2.node())
1049 repo.setparents(p1.node(), p2.node())
1050
1050
1051 if opts.get('exact') or importbranch:
1051 if opts.get('exact') or importbranch:
1052 repo.dirstate.setbranch(branch or 'default')
1052 repo.dirstate.setbranch(branch or 'default')
1053
1053
1054 partial = opts.get('partial', False)
1054 partial = opts.get('partial', False)
1055 files = set()
1055 files = set()
1056 try:
1056 try:
1057 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
1057 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
1058 files=files, eolmode=None, similarity=sim / 100.0)
1058 files=files, eolmode=None, similarity=sim / 100.0)
1059 except patch.PatchError as e:
1059 except patch.PatchError as e:
1060 if not partial:
1060 if not partial:
1061 raise error.Abort(str(e))
1061 raise error.Abort(str(e))
1062 if partial:
1062 if partial:
1063 rejects = True
1063 rejects = True
1064
1064
1065 files = list(files)
1065 files = list(files)
1066 if nocommit:
1066 if nocommit:
1067 if message:
1067 if message:
1068 msgs.append(message)
1068 msgs.append(message)
1069 else:
1069 else:
1070 if opts.get('exact') or p2:
1070 if opts.get('exact') or p2:
1071 # If you got here, you either used --force and know what
1072 # you are doing, or used --exact or a merge patch while
1073 # being updated to its first parent.
1074 m = None
1074 m = None
1075 else:
1075 else:
1076 m = scmutil.matchfiles(repo, files or [])
1076 m = scmutil.matchfiles(repo, files or [])
1077 editform = mergeeditform(repo[None], 'import.normal')
1077 editform = mergeeditform(repo[None], 'import.normal')
1078 if opts.get('exact'):
1078 if opts.get('exact'):
1079 editor = None
1079 editor = None
1080 else:
1080 else:
1081 editor = getcommiteditor(editform=editform, **opts)
1081 editor = getcommiteditor(editform=editform, **opts)
1082 extra = {}
1082 extra = {}
1083 for idfunc in extrapreimport:
1083 for idfunc in extrapreimport:
1084 extrapreimportmap[idfunc](repo, extractdata, extra, opts)
1084 extrapreimportmap[idfunc](repo, extractdata, extra, opts)
1085 overrides = {}
1085 overrides = {}
1086 if partial:
1086 if partial:
1087 overrides[('ui', 'allowemptycommit')] = True
1087 overrides[('ui', 'allowemptycommit')] = True
1088 with repo.ui.configoverride(overrides, 'import'):
1088 with repo.ui.configoverride(overrides, 'import'):
1089 n = repo.commit(message, user,
1089 n = repo.commit(message, user,
1090 date, match=m,
1090 date, match=m,
1091 editor=editor, extra=extra)
1091 editor=editor, extra=extra)
1092 for idfunc in extrapostimport:
1092 for idfunc in extrapostimport:
1093 extrapostimportmap[idfunc](repo[n])
1093 extrapostimportmap[idfunc](repo[n])
1094 else:
1094 else:
1095 if opts.get('exact') or importbranch:
1095 if opts.get('exact') or importbranch:
1096 branch = branch or 'default'
1096 branch = branch or 'default'
1097 else:
1097 else:
1098 branch = p1.branch()
1098 branch = p1.branch()
1099 store = patch.filestore()
1099 store = patch.filestore()
1100 try:
1100 try:
1101 files = set()
1101 files = set()
1102 try:
1102 try:
1103 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
1103 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
1104 files, eolmode=None)
1104 files, eolmode=None)
1105 except patch.PatchError as e:
1105 except patch.PatchError as e:
1106 raise error.Abort(str(e))
1106 raise error.Abort(str(e))
1107 if opts.get('exact'):
1107 if opts.get('exact'):
1108 editor = None
1108 editor = None
1109 else:
1109 else:
1110 editor = getcommiteditor(editform='import.bypass')
1110 editor = getcommiteditor(editform='import.bypass')
1111 memctx = context.memctx(repo, (p1.node(), p2.node()),
1111 memctx = context.memctx(repo, (p1.node(), p2.node()),
1112 message,
1112 message,
1113 files=files,
1113 files=files,
1114 filectxfn=store,
1114 filectxfn=store,
1115 user=user,
1115 user=user,
1116 date=date,
1116 date=date,
1117 branch=branch,
1117 branch=branch,
1118 editor=editor)
1118 editor=editor)
1119 n = memctx.commit()
1119 n = memctx.commit()
1120 finally:
1120 finally:
1121 store.close()
1121 store.close()
1122 if opts.get('exact') and nocommit:
1122 if opts.get('exact') and nocommit:
1123 # --exact with --no-commit is still useful in that it does merge
1123 # --exact with --no-commit is still useful in that it does merge
1124 # and branch bits
1124 # and branch bits
1125 ui.warn(_("warning: can't check exact import with --no-commit\n"))
1125 ui.warn(_("warning: can't check exact import with --no-commit\n"))
1126 elif opts.get('exact') and hex(n) != nodeid:
1126 elif opts.get('exact') and hex(n) != nodeid:
1127 raise error.Abort(_('patch is damaged or loses information'))
1127 raise error.Abort(_('patch is damaged or loses information'))
1128 msg = _('applied to working directory')
1128 msg = _('applied to working directory')
1129 if n:
1129 if n:
1130 # i18n: refers to a short changeset id
1130 # i18n: refers to a short changeset id
1131 msg = _('created %s') % short(n)
1131 msg = _('created %s') % short(n)
1132 return (msg, n, rejects)
1132 return (msg, n, rejects)
1133 finally:
1133 finally:
1134 os.unlink(tmpname)
1134 os.unlink(tmpname)
1135
1135
1136 # facility to let extensions include additional data in an exported patch
1136 # facility to let extensions include additional data in an exported patch
1137 # list of identifiers to be executed in order
1137 # list of identifiers to be executed in order
1138 extraexport = []
1138 extraexport = []
1139 # mapping from identifier to actual export function
1139 # mapping from identifier to actual export function
1140 # the function has to return a string to be added to the header, or None
1141 # it is given two arguments (sequencenumber, changectx)
1141 # it is given two arguments (sequencenumber, changectx)
1142 extraexportmap = {}
1142 extraexportmap = {}
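# A registration sketch for an extension-provided export header (the 'series'
# identifier and header text are hypothetical):
#
#   def _seriesheader(seqno, ctx):
#       return 'Series-entry: %d' % seqno   # returning None emits no header
#
#   extraexport.append('series')
#   extraexportmap['series'] = _seriesheader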
1143
1143
1144 def _exportsingle(repo, ctx, match, switch_parent, rev, seqno, write, diffopts):
1144 def _exportsingle(repo, ctx, match, switch_parent, rev, seqno, write, diffopts):
1145 node = scmutil.binnode(ctx)
1145 node = scmutil.binnode(ctx)
1146 parents = [p.node() for p in ctx.parents() if p]
1146 parents = [p.node() for p in ctx.parents() if p]
1147 branch = ctx.branch()
1147 branch = ctx.branch()
1148 if switch_parent:
1148 if switch_parent:
1149 parents.reverse()
1149 parents.reverse()
1150
1150
1151 if parents:
1151 if parents:
1152 prev = parents[0]
1152 prev = parents[0]
1153 else:
1153 else:
1154 prev = nullid
1154 prev = nullid
1155
1155
1156 write("# HG changeset patch\n")
1156 write("# HG changeset patch\n")
1157 write("# User %s\n" % ctx.user())
1157 write("# User %s\n" % ctx.user())
1158 write("# Date %d %d\n" % ctx.date())
1158 write("# Date %d %d\n" % ctx.date())
1159 write("# %s\n" % util.datestr(ctx.date()))
1159 write("# %s\n" % util.datestr(ctx.date()))
1160 if branch and branch != 'default':
1160 if branch and branch != 'default':
1161 write("# Branch %s\n" % branch)
1161 write("# Branch %s\n" % branch)
1162 write("# Node ID %s\n" % hex(node))
1162 write("# Node ID %s\n" % hex(node))
1163 write("# Parent %s\n" % hex(prev))
1163 write("# Parent %s\n" % hex(prev))
1164 if len(parents) > 1:
1164 if len(parents) > 1:
1165 write("# Parent %s\n" % hex(parents[1]))
1165 write("# Parent %s\n" % hex(parents[1]))
1166
1166
1167 for headerid in extraexport:
1167 for headerid in extraexport:
1168 header = extraexportmap[headerid](seqno, ctx)
1168 header = extraexportmap[headerid](seqno, ctx)
1169 if header is not None:
1169 if header is not None:
1170 write('# %s\n' % header)
1170 write('# %s\n' % header)
1171 write(ctx.description().rstrip())
1171 write(ctx.description().rstrip())
1172 write("\n\n")
1172 write("\n\n")
1173
1173
1174 for chunk, label in patch.diffui(repo, prev, node, match, opts=diffopts):
1174 for chunk, label in patch.diffui(repo, prev, node, match, opts=diffopts):
1175 write(chunk, label=label)
1175 write(chunk, label=label)
1176
1176
1177 def export(repo, revs, fntemplate='hg-%h.patch', fp=None, switch_parent=False,
1177 def export(repo, revs, fntemplate='hg-%h.patch', fp=None, switch_parent=False,
1178 opts=None, match=None):
1178 opts=None, match=None):
1179 '''export changesets as hg patches
1179 '''export changesets as hg patches
1180
1180
1181 Args:
1181 Args:
1182 repo: The repository from which we're exporting revisions.
1182 repo: The repository from which we're exporting revisions.
1183 revs: A list of revisions to export as revision numbers.
1183 revs: A list of revisions to export as revision numbers.
1184 fntemplate: An optional string to use for generating patch file names.
1184 fntemplate: An optional string to use for generating patch file names.
1185 fp: An optional file-like object to which patches should be written.
1185 fp: An optional file-like object to which patches should be written.
1186 switch_parent: If True, show diffs against the second parent when it is
1187 not nullid. Default is False, which always shows the diff against p1.
1188 opts: diff options to use for generating the patch.
1188 opts: diff options to use for generating the patch.
1189 match: If specified, only export changes to files matching this matcher.
1189 match: If specified, only export changes to files matching this matcher.
1190
1190
1191 Returns:
1191 Returns:
1192 Nothing.
1192 Nothing.
1193
1193
1194 Side Effect:
1194 Side Effect:
1195 "HG Changeset Patch" data is emitted to one of the following
1195 "HG Changeset Patch" data is emitted to one of the following
1196 destinations:
1196 destinations:
1197 fp is specified: All revs are written to the specified
1197 fp is specified: All revs are written to the specified
1198 file-like object.
1198 file-like object.
1199 fntemplate specified: Each rev is written to a unique file named using
1199 fntemplate specified: Each rev is written to a unique file named using
1200 the given template.
1200 the given template.
1201 Neither fp nor template specified: All revs written to repo.ui.write()
1201 Neither fp nor template specified: All revs written to repo.ui.write()
1202 '''
1202 '''
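# Illustrative call (the revision list and file name template are made up):
#
#   export(repo, [7, 8, 9], fntemplate='feature-%n-of-%N.patch',
#          opts=patch.diffallopts(repo.ui))
#
# writes one patch file per revision, while export(repo, revs, fp=fp) streams
# all of them into a single file object.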
1203
1203
1204 total = len(revs)
1204 total = len(revs)
1205 revwidth = max(len(str(rev)) for rev in revs)
1205 revwidth = max(len(str(rev)) for rev in revs)
1206 filemode = {}
1206 filemode = {}
1207
1207
1208 write = None
1208 write = None
1209 dest = '<unnamed>'
1209 dest = '<unnamed>'
1210 if fp:
1210 if fp:
1211 dest = getattr(fp, 'name', dest)
1211 dest = getattr(fp, 'name', dest)
1212 def write(s, **kw):
1212 def write(s, **kw):
1213 fp.write(s)
1213 fp.write(s)
1214 elif not fntemplate:
1214 elif not fntemplate:
1215 write = repo.ui.write
1215 write = repo.ui.write
1216
1216
1217 for seqno, rev in enumerate(revs, 1):
1217 for seqno, rev in enumerate(revs, 1):
1218 ctx = repo[rev]
1218 ctx = repo[rev]
1219 fo = None
1219 fo = None
1220 if not fp and fntemplate:
1220 if not fp and fntemplate:
1221 desc_lines = ctx.description().rstrip().split('\n')
1221 desc_lines = ctx.description().rstrip().split('\n')
1222 desc = desc_lines[0] # Commit always has a first line.
1223 fo = makefileobj(repo, fntemplate, ctx.node(), desc=desc,
1223 fo = makefileobj(repo, fntemplate, ctx.node(), desc=desc,
1224 total=total, seqno=seqno, revwidth=revwidth,
1224 total=total, seqno=seqno, revwidth=revwidth,
1225 mode='wb', modemap=filemode)
1225 mode='wb', modemap=filemode)
1226 dest = fo.name
1226 dest = fo.name
1227 def write(s, **kw):
1227 def write(s, **kw):
1228 fo.write(s)
1228 fo.write(s)
1229 if not dest.startswith('<'):
1229 if not dest.startswith('<'):
1230 repo.ui.note("%s\n" % dest)
1230 repo.ui.note("%s\n" % dest)
1231 _exportsingle(
1231 _exportsingle(
1232 repo, ctx, match, switch_parent, rev, seqno, write, opts)
1232 repo, ctx, match, switch_parent, rev, seqno, write, opts)
1233 if fo is not None:
1233 if fo is not None:
1234 fo.close()
1234 fo.close()
1235
1235
1236 def diffordiffstat(ui, repo, diffopts, node1, node2, match,
1236 def diffordiffstat(ui, repo, diffopts, node1, node2, match,
1237 changes=None, stat=False, fp=None, prefix='',
1237 changes=None, stat=False, fp=None, prefix='',
1238 root='', listsubrepos=False):
1238 root='', listsubrepos=False):
1239 '''show diff or diffstat.'''
1239 '''show diff or diffstat.'''
1240 if fp is None:
1240 if fp is None:
1241 write = ui.write
1241 write = ui.write
1242 else:
1242 else:
1243 def write(s, **kw):
1243 def write(s, **kw):
1244 fp.write(s)
1244 fp.write(s)
1245
1245
1246 if root:
1246 if root:
1247 relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
1247 relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
1248 else:
1248 else:
1249 relroot = ''
1249 relroot = ''
1250 if relroot != '':
1250 if relroot != '':
1251 # XXX relative roots currently don't work if the root is within a
1251 # XXX relative roots currently don't work if the root is within a
1252 # subrepo
1252 # subrepo
1253 uirelroot = match.uipath(relroot)
1253 uirelroot = match.uipath(relroot)
1254 relroot += '/'
1254 relroot += '/'
1255 for matchroot in match.files():
1255 for matchroot in match.files():
1256 if not matchroot.startswith(relroot):
1256 if not matchroot.startswith(relroot):
1257 ui.warn(_('warning: %s not inside relative root %s\n') % (
1257 ui.warn(_('warning: %s not inside relative root %s\n') % (
1258 match.uipath(matchroot), uirelroot))
1258 match.uipath(matchroot), uirelroot))
1259
1259
1260 if stat:
1260 if stat:
1261 diffopts = diffopts.copy(context=0)
1261 diffopts = diffopts.copy(context=0)
1262 width = 80
1262 width = 80
1263 if not ui.plain():
1263 if not ui.plain():
1264 width = ui.termwidth()
1264 width = ui.termwidth()
1265 chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
1265 chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
1266 prefix=prefix, relroot=relroot)
1266 prefix=prefix, relroot=relroot)
1267 for chunk, label in patch.diffstatui(util.iterlines(chunks),
1267 for chunk, label in patch.diffstatui(util.iterlines(chunks),
1268 width=width):
1268 width=width):
1269 write(chunk, label=label)
1269 write(chunk, label=label)
1270 else:
1270 else:
1271 for chunk, label in patch.diffui(repo, node1, node2, match,
1271 for chunk, label in patch.diffui(repo, node1, node2, match,
1272 changes, diffopts, prefix=prefix,
1272 changes, diffopts, prefix=prefix,
1273 relroot=relroot):
1273 relroot=relroot):
1274 write(chunk, label=label)
1274 write(chunk, label=label)
1275
1275
1276 if listsubrepos:
1276 if listsubrepos:
1277 ctx1 = repo[node1]
1277 ctx1 = repo[node1]
1278 ctx2 = repo[node2]
1278 ctx2 = repo[node2]
1279 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
1279 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
1280 tempnode2 = node2
1280 tempnode2 = node2
1281 try:
1281 try:
1282 if node2 is not None:
1282 if node2 is not None:
1283 tempnode2 = ctx2.substate[subpath][1]
1283 tempnode2 = ctx2.substate[subpath][1]
1284 except KeyError:
1284 except KeyError:
1285 # A subrepo that existed in node1 was deleted between node1 and
1285 # A subrepo that existed in node1 was deleted between node1 and
1286 # node2 (inclusive). Thus, ctx2's substate won't contain that
1286 # node2 (inclusive). Thus, ctx2's substate won't contain that
1287 # subpath. The best we can do is to ignore it.
1287 # subpath. The best we can do is to ignore it.
1288 tempnode2 = None
1288 tempnode2 = None
1289 submatch = matchmod.subdirmatcher(subpath, match)
1289 submatch = matchmod.subdirmatcher(subpath, match)
1290 sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
1290 sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
1291 stat=stat, fp=fp, prefix=prefix)
1291 stat=stat, fp=fp, prefix=prefix)
1292
1292
1293 def _changesetlabels(ctx):
1293 def _changesetlabels(ctx):
1294 labels = ['log.changeset', 'changeset.%s' % ctx.phasestr()]
1294 labels = ['log.changeset', 'changeset.%s' % ctx.phasestr()]
1295 if ctx.obsolete():
1295 if ctx.obsolete():
1296 labels.append('changeset.obsolete')
1296 labels.append('changeset.obsolete')
1297 if ctx.troubled():
1297 if ctx.troubled():
1298 labels.append('changeset.troubled')
1298 labels.append('changeset.troubled')
1299 for trouble in ctx.troubles():
1299 for trouble in ctx.troubles():
1300 labels.append('trouble.%s' % trouble)
1300 labels.append('trouble.%s' % trouble)
1301 return ' '.join(labels)
1301 return ' '.join(labels)
1302
1302
1303 class changeset_printer(object):
1303 class changeset_printer(object):
1304 '''show changeset information when templating not requested.'''
1304 '''show changeset information when templating not requested.'''
1305
1305
1306 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1306 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1307 self.ui = ui
1307 self.ui = ui
1308 self.repo = repo
1308 self.repo = repo
1309 self.buffered = buffered
1309 self.buffered = buffered
1310 self.matchfn = matchfn
1310 self.matchfn = matchfn
1311 self.diffopts = diffopts
1311 self.diffopts = diffopts
1312 self.header = {}
1312 self.header = {}
1313 self.hunk = {}
1313 self.hunk = {}
1314 self.lastheader = None
1314 self.lastheader = None
1315 self.footer = None
1315 self.footer = None
1316
1316
1317 def flush(self, ctx):
1317 def flush(self, ctx):
1318 rev = ctx.rev()
1318 rev = ctx.rev()
1319 if rev in self.header:
1319 if rev in self.header:
1320 h = self.header[rev]
1320 h = self.header[rev]
1321 if h != self.lastheader:
1321 if h != self.lastheader:
1322 self.lastheader = h
1322 self.lastheader = h
1323 self.ui.write(h)
1323 self.ui.write(h)
1324 del self.header[rev]
1324 del self.header[rev]
1325 if rev in self.hunk:
1325 if rev in self.hunk:
1326 self.ui.write(self.hunk[rev])
1326 self.ui.write(self.hunk[rev])
1327 del self.hunk[rev]
1327 del self.hunk[rev]
1328 return 1
1328 return 1
1329 return 0
1329 return 0
1330
1330
1331 def close(self):
1331 def close(self):
1332 if self.footer:
1332 if self.footer:
1333 self.ui.write(self.footer)
1333 self.ui.write(self.footer)
1334
1334
1335 def show(self, ctx, copies=None, matchfn=None, **props):
1335 def show(self, ctx, copies=None, matchfn=None, **props):
1336 if self.buffered:
1336 if self.buffered:
1337 self.ui.pushbuffer(labeled=True)
1337 self.ui.pushbuffer(labeled=True)
1338 self._show(ctx, copies, matchfn, props)
1338 self._show(ctx, copies, matchfn, props)
1339 self.hunk[ctx.rev()] = self.ui.popbuffer()
1339 self.hunk[ctx.rev()] = self.ui.popbuffer()
1340 else:
1340 else:
1341 self._show(ctx, copies, matchfn, props)
1341 self._show(ctx, copies, matchfn, props)
1342
1342
1343 def _show(self, ctx, copies, matchfn, props):
1343 def _show(self, ctx, copies, matchfn, props):
1344 '''show a single changeset or file revision'''
1344 '''show a single changeset or file revision'''
1345 changenode = ctx.node()
1345 changenode = ctx.node()
1346 rev = ctx.rev()
1346 rev = ctx.rev()
1347 if self.ui.debugflag:
1347 if self.ui.debugflag:
1348 hexfunc = hex
1348 hexfunc = hex
1349 else:
1349 else:
1350 hexfunc = short
1350 hexfunc = short
1351 # as of now, wctx.node() and wctx.rev() return None, but we want to
1351 # as of now, wctx.node() and wctx.rev() return None, but we want to
1352 # show the same values as {node} and {rev} templatekw
1352 # show the same values as {node} and {rev} templatekw
1353 revnode = (scmutil.intrev(ctx), hexfunc(scmutil.binnode(ctx)))
1353 revnode = (scmutil.intrev(ctx), hexfunc(scmutil.binnode(ctx)))
1354
1354
1355 if self.ui.quiet:
1355 if self.ui.quiet:
1356 self.ui.write("%d:%s\n" % revnode, label='log.node')
1356 self.ui.write("%d:%s\n" % revnode, label='log.node')
1357 return
1357 return
1358
1358
1359 date = util.datestr(ctx.date())
1359 date = util.datestr(ctx.date())
1360
1360
1361 # i18n: column positioning for "hg log"
1361 # i18n: column positioning for "hg log"
1362 self.ui.write(_("changeset: %d:%s\n") % revnode,
1362 self.ui.write(_("changeset: %d:%s\n") % revnode,
1363 label=_changesetlabels(ctx))
1363 label=_changesetlabels(ctx))
1364
1364
1365 # branches are shown first before any other names due to backwards
1365 # branches are shown first before any other names due to backwards
1366 # compatibility
1366 # compatibility
1367 branch = ctx.branch()
1367 branch = ctx.branch()
1368 # don't show the default branch name
1368 # don't show the default branch name
1369 if branch != 'default':
1369 if branch != 'default':
1370 # i18n: column positioning for "hg log"
1370 # i18n: column positioning for "hg log"
1371 self.ui.write(_("branch: %s\n") % branch,
1371 self.ui.write(_("branch: %s\n") % branch,
1372 label='log.branch')
1372 label='log.branch')
1373
1373
1374 for nsname, ns in self.repo.names.iteritems():
1374 for nsname, ns in self.repo.names.iteritems():
1375 # branches has special logic already handled above, so here we just
1375 # branches has special logic already handled above, so here we just
1376 # skip it
1376 # skip it
1377 if nsname == 'branches':
1377 if nsname == 'branches':
1378 continue
1378 continue
1379 # we will use the templatename as the color name since those two
1379 # we will use the templatename as the color name since those two
1380 # should be the same
1380 # should be the same
1381 for name in ns.names(self.repo, changenode):
1381 for name in ns.names(self.repo, changenode):
1382 self.ui.write(ns.logfmt % name,
1382 self.ui.write(ns.logfmt % name,
1383 label='log.%s' % ns.colorname)
1383 label='log.%s' % ns.colorname)
1384 if self.ui.debugflag:
1384 if self.ui.debugflag:
1385 # i18n: column positioning for "hg log"
1385 # i18n: column positioning for "hg log"
1386 self.ui.write(_("phase: %s\n") % ctx.phasestr(),
1386 self.ui.write(_("phase: %s\n") % ctx.phasestr(),
1387 label='log.phase')
1387 label='log.phase')
1388 for pctx in scmutil.meaningfulparents(self.repo, ctx):
1388 for pctx in scmutil.meaningfulparents(self.repo, ctx):
1389 label = 'log.parent changeset.%s' % pctx.phasestr()
1389 label = 'log.parent changeset.%s' % pctx.phasestr()
1390 # i18n: column positioning for "hg log"
1390 # i18n: column positioning for "hg log"
1391 self.ui.write(_("parent: %d:%s\n")
1391 self.ui.write(_("parent: %d:%s\n")
1392 % (pctx.rev(), hexfunc(pctx.node())),
1392 % (pctx.rev(), hexfunc(pctx.node())),
1393 label=label)
1393 label=label)
1394
1394
1395 if self.ui.debugflag and rev is not None:
1395 if self.ui.debugflag and rev is not None:
1396 mnode = ctx.manifestnode()
1396 mnode = ctx.manifestnode()
1397 # i18n: column positioning for "hg log"
1397 # i18n: column positioning for "hg log"
1398 self.ui.write(_("manifest: %d:%s\n") %
1398 self.ui.write(_("manifest: %d:%s\n") %
1399 (self.repo.manifestlog._revlog.rev(mnode),
1399 (self.repo.manifestlog._revlog.rev(mnode),
1400 hex(mnode)),
1400 hex(mnode)),
1401 label='ui.debug log.manifest')
1401 label='ui.debug log.manifest')
1402 # i18n: column positioning for "hg log"
1402 # i18n: column positioning for "hg log"
1403 self.ui.write(_("user: %s\n") % ctx.user(),
1403 self.ui.write(_("user: %s\n") % ctx.user(),
1404 label='log.user')
1404 label='log.user')
1405 # i18n: column positioning for "hg log"
1405 # i18n: column positioning for "hg log"
1406 self.ui.write(_("date: %s\n") % date,
1406 self.ui.write(_("date: %s\n") % date,
1407 label='log.date')
1407 label='log.date')
1408
1408
1409 if ctx.troubled():
1409 if ctx.troubled():
1410 # i18n: column positioning for "hg log"
1410 # i18n: column positioning for "hg log"
1411 self.ui.write(_("trouble: %s\n") % ', '.join(ctx.troubles()),
1411 self.ui.write(_("trouble: %s\n") % ', '.join(ctx.troubles()),
1412 label='log.trouble')
1412 label='log.trouble')
1413
1413
1414 if self.ui.debugflag:
1414 if self.ui.debugflag:
1415 files = ctx.p1().status(ctx)[:3]
1415 files = ctx.p1().status(ctx)[:3]
1416 for key, value in zip([# i18n: column positioning for "hg log"
1416 for key, value in zip([# i18n: column positioning for "hg log"
1417 _("files:"),
1417 _("files:"),
1418 # i18n: column positioning for "hg log"
1418 # i18n: column positioning for "hg log"
1419 _("files+:"),
1419 _("files+:"),
1420 # i18n: column positioning for "hg log"
1420 # i18n: column positioning for "hg log"
1421 _("files-:")], files):
1421 _("files-:")], files):
1422 if value:
1422 if value:
1423 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
1423 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
1424 label='ui.debug log.files')
1424 label='ui.debug log.files')
1425 elif ctx.files() and self.ui.verbose:
1425 elif ctx.files() and self.ui.verbose:
1426 # i18n: column positioning for "hg log"
1426 # i18n: column positioning for "hg log"
1427 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
1427 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
1428 label='ui.note log.files')
1428 label='ui.note log.files')
1429 if copies and self.ui.verbose:
1429 if copies and self.ui.verbose:
1430 copies = ['%s (%s)' % c for c in copies]
1430 copies = ['%s (%s)' % c for c in copies]
1431 # i18n: column positioning for "hg log"
1431 # i18n: column positioning for "hg log"
1432 self.ui.write(_("copies: %s\n") % ' '.join(copies),
1432 self.ui.write(_("copies: %s\n") % ' '.join(copies),
1433 label='ui.note log.copies')
1433 label='ui.note log.copies')
1434
1434
1435 extra = ctx.extra()
1435 extra = ctx.extra()
1436 if extra and self.ui.debugflag:
1436 if extra and self.ui.debugflag:
1437 for key, value in sorted(extra.items()):
1437 for key, value in sorted(extra.items()):
1438 # i18n: column positioning for "hg log"
1438 # i18n: column positioning for "hg log"
1439 self.ui.write(_("extra: %s=%s\n")
1439 self.ui.write(_("extra: %s=%s\n")
1440 % (key, util.escapestr(value)),
1440 % (key, util.escapestr(value)),
1441 label='ui.debug log.extra')
1441 label='ui.debug log.extra')
1442
1442
1443 description = ctx.description().strip()
1443 description = ctx.description().strip()
1444 if description:
1444 if description:
1445 if self.ui.verbose:
1445 if self.ui.verbose:
1446 self.ui.write(_("description:\n"),
1446 self.ui.write(_("description:\n"),
1447 label='ui.note log.description')
1447 label='ui.note log.description')
1448 self.ui.write(description,
1448 self.ui.write(description,
1449 label='ui.note log.description')
1449 label='ui.note log.description')
1450 self.ui.write("\n\n")
1450 self.ui.write("\n\n")
1451 else:
1451 else:
1452 # i18n: column positioning for "hg log"
1452 # i18n: column positioning for "hg log"
1453 self.ui.write(_("summary: %s\n") %
1453 self.ui.write(_("summary: %s\n") %
1454 description.splitlines()[0],
1454 description.splitlines()[0],
1455 label='log.summary')
1455 label='log.summary')
1456 self.ui.write("\n")
1456 self.ui.write("\n")
1457
1457
1458 self.showpatch(ctx, matchfn)
1458 self.showpatch(ctx, matchfn)
1459
1459
1460 def showpatch(self, ctx, matchfn):
1460 def showpatch(self, ctx, matchfn):
1461 if not matchfn:
1461 if not matchfn:
1462 matchfn = self.matchfn
1462 matchfn = self.matchfn
1463 if matchfn:
1463 if matchfn:
1464 stat = self.diffopts.get('stat')
1464 stat = self.diffopts.get('stat')
1465 diff = self.diffopts.get('patch')
1465 diff = self.diffopts.get('patch')
1466 diffopts = patch.diffallopts(self.ui, self.diffopts)
1466 diffopts = patch.diffallopts(self.ui, self.diffopts)
1467 node = ctx.node()
1467 node = ctx.node()
1468 prev = ctx.p1().node()
1468 prev = ctx.p1().node()
1469 if stat:
1469 if stat:
1470 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1470 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1471 match=matchfn, stat=True)
1471 match=matchfn, stat=True)
1472 if diff:
1472 if diff:
1473 if stat:
1473 if stat:
1474 self.ui.write("\n")
1474 self.ui.write("\n")
1475 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1475 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1476 match=matchfn, stat=False)
1476 match=matchfn, stat=False)
1477 self.ui.write("\n")
1477 self.ui.write("\n")
1478
1478
1479 class jsonchangeset(changeset_printer):
1479 class jsonchangeset(changeset_printer):
1480 '''format changeset information.'''
1480 '''format changeset information.'''
1481
1481
1482 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1482 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1483 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1483 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1484 self.cache = {}
1484 self.cache = {}
1485 self._first = True
1485 self._first = True
1486
1486
1487 def close(self):
1487 def close(self):
1488 if not self._first:
1488 if not self._first:
1489 self.ui.write("\n]\n")
1489 self.ui.write("\n]\n")
1490 else:
1490 else:
1491 self.ui.write("[]\n")
1491 self.ui.write("[]\n")
1492
1492
1493 def _show(self, ctx, copies, matchfn, props):
1493 def _show(self, ctx, copies, matchfn, props):
1494 '''show a single changeset or file revision'''
1494 '''show a single changeset or file revision'''
1495 rev = ctx.rev()
1495 rev = ctx.rev()
1496 if rev is None:
1496 if rev is None:
1497 jrev = jnode = 'null'
1497 jrev = jnode = 'null'
1498 else:
1498 else:
1499 jrev = '%d' % rev
1499 jrev = '%d' % rev
1500 jnode = '"%s"' % hex(ctx.node())
1500 jnode = '"%s"' % hex(ctx.node())
1501 j = encoding.jsonescape
1501 j = encoding.jsonescape
1502
1502
1503 if self._first:
1503 if self._first:
1504 self.ui.write("[\n {")
1504 self.ui.write("[\n {")
1505 self._first = False
1505 self._first = False
1506 else:
1506 else:
1507 self.ui.write(",\n {")
1507 self.ui.write(",\n {")
1508
1508
1509 if self.ui.quiet:
1509 if self.ui.quiet:
1510 self.ui.write(('\n "rev": %s') % jrev)
1510 self.ui.write(('\n "rev": %s') % jrev)
1511 self.ui.write((',\n "node": %s') % jnode)
1511 self.ui.write((',\n "node": %s') % jnode)
1512 self.ui.write('\n }')
1512 self.ui.write('\n }')
1513 return
1513 return
1514
1514
1515 self.ui.write(('\n "rev": %s') % jrev)
1515 self.ui.write(('\n "rev": %s') % jrev)
1516 self.ui.write((',\n "node": %s') % jnode)
1516 self.ui.write((',\n "node": %s') % jnode)
1517 self.ui.write((',\n "branch": "%s"') % j(ctx.branch()))
1517 self.ui.write((',\n "branch": "%s"') % j(ctx.branch()))
1518 self.ui.write((',\n "phase": "%s"') % ctx.phasestr())
1518 self.ui.write((',\n "phase": "%s"') % ctx.phasestr())
1519 self.ui.write((',\n "user": "%s"') % j(ctx.user()))
1519 self.ui.write((',\n "user": "%s"') % j(ctx.user()))
1520 self.ui.write((',\n "date": [%d, %d]') % ctx.date())
1520 self.ui.write((',\n "date": [%d, %d]') % ctx.date())
1521 self.ui.write((',\n "desc": "%s"') % j(ctx.description()))
1521 self.ui.write((',\n "desc": "%s"') % j(ctx.description()))
1522
1522
1523 self.ui.write((',\n "bookmarks": [%s]') %
1523 self.ui.write((',\n "bookmarks": [%s]') %
1524 ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
1524 ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
1525 self.ui.write((',\n "tags": [%s]') %
1525 self.ui.write((',\n "tags": [%s]') %
1526 ", ".join('"%s"' % j(t) for t in ctx.tags()))
1526 ", ".join('"%s"' % j(t) for t in ctx.tags()))
1527 self.ui.write((',\n "parents": [%s]') %
1527 self.ui.write((',\n "parents": [%s]') %
1528 ", ".join('"%s"' % c.hex() for c in ctx.parents()))
1528 ", ".join('"%s"' % c.hex() for c in ctx.parents()))
1529
1529
1530 if self.ui.debugflag:
1530 if self.ui.debugflag:
1531 if rev is None:
1531 if rev is None:
1532 jmanifestnode = 'null'
1532 jmanifestnode = 'null'
1533 else:
1533 else:
1534 jmanifestnode = '"%s"' % hex(ctx.manifestnode())
1534 jmanifestnode = '"%s"' % hex(ctx.manifestnode())
1535 self.ui.write((',\n "manifest": %s') % jmanifestnode)
1535 self.ui.write((',\n "manifest": %s') % jmanifestnode)
1536
1536
1537 self.ui.write((',\n "extra": {%s}') %
1537 self.ui.write((',\n "extra": {%s}') %
1538 ", ".join('"%s": "%s"' % (j(k), j(v))
1538 ", ".join('"%s": "%s"' % (j(k), j(v))
1539 for k, v in ctx.extra().items()))
1539 for k, v in ctx.extra().items()))
1540
1540
1541 files = ctx.p1().status(ctx)
1541 files = ctx.p1().status(ctx)
1542 self.ui.write((',\n "modified": [%s]') %
1542 self.ui.write((',\n "modified": [%s]') %
1543 ", ".join('"%s"' % j(f) for f in files[0]))
1543 ", ".join('"%s"' % j(f) for f in files[0]))
1544 self.ui.write((',\n "added": [%s]') %
1544 self.ui.write((',\n "added": [%s]') %
1545 ", ".join('"%s"' % j(f) for f in files[1]))
1545 ", ".join('"%s"' % j(f) for f in files[1]))
1546 self.ui.write((',\n "removed": [%s]') %
1546 self.ui.write((',\n "removed": [%s]') %
1547 ", ".join('"%s"' % j(f) for f in files[2]))
1547 ", ".join('"%s"' % j(f) for f in files[2]))
1548
1548
1549 elif self.ui.verbose:
1549 elif self.ui.verbose:
1550 self.ui.write((',\n "files": [%s]') %
1550 self.ui.write((',\n "files": [%s]') %
1551 ", ".join('"%s"' % j(f) for f in ctx.files()))
1551 ", ".join('"%s"' % j(f) for f in ctx.files()))
1552
1552
1553 if copies:
1553 if copies:
1554 self.ui.write((',\n "copies": {%s}') %
1554 self.ui.write((',\n "copies": {%s}') %
1555 ", ".join('"%s": "%s"' % (j(k), j(v))
1555 ", ".join('"%s": "%s"' % (j(k), j(v))
1556 for k, v in copies))
1556 for k, v in copies))
1557
1557
1558 matchfn = self.matchfn
1558 matchfn = self.matchfn
1559 if matchfn:
1559 if matchfn:
1560 stat = self.diffopts.get('stat')
1560 stat = self.diffopts.get('stat')
1561 diff = self.diffopts.get('patch')
1561 diff = self.diffopts.get('patch')
1562 diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
1562 diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
1563 node, prev = ctx.node(), ctx.p1().node()
1563 node, prev = ctx.node(), ctx.p1().node()
1564 if stat:
1564 if stat:
1565 self.ui.pushbuffer()
1565 self.ui.pushbuffer()
1566 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1566 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1567 match=matchfn, stat=True)
1567 match=matchfn, stat=True)
1568 self.ui.write((',\n "diffstat": "%s"')
1568 self.ui.write((',\n "diffstat": "%s"')
1569 % j(self.ui.popbuffer()))
1569 % j(self.ui.popbuffer()))
1570 if diff:
1570 if diff:
1571 self.ui.pushbuffer()
1571 self.ui.pushbuffer()
1572 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1572 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1573 match=matchfn, stat=False)
1573 match=matchfn, stat=False)
1574 self.ui.write((',\n "diff": "%s"') % j(self.ui.popbuffer()))
1574 self.ui.write((',\n "diff": "%s"') % j(self.ui.popbuffer()))
1575
1575
1576 self.ui.write("\n }")
1576 self.ui.write("\n }")
1577
1577
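# Illustrative sketch (not part of the original module): the jsonchangeset
# printer above is what "hg log -Tjson" ends up using (show_changeset() below
# returns it when the literal template is exactly 'json').  Driven one entry
# at a time, it streams a JSON array: "[\n {" before the first entry, ",\n {"
# before each later one, and close() finally writes "]\n" (or "[]\n" when
# nothing was shown).  The helper name below is invented for this example.
def _demo_json_log(ui, repo):
    displayer = jsonchangeset(ui, repo, matchfn=None, diffopts={}, buffered=False)
    for rev in repo.revs('last(all(), 2)'):
        displayer.show(repo[rev])
    displayer.close()
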
class changeset_templater(changeset_printer):
    '''format changeset information.'''

    def __init__(self, ui, repo, matchfn, diffopts, tmpl, mapfile, buffered):
        changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
        self.t = formatter.loadtemplater(ui, 'changeset', (tmpl, mapfile),
                                         cache=templatekw.defaulttempl)
        self._counter = itertools.count()
        self.cache = {}

        # find correct templates for current mode
        tmplmodes = [
            (True, None),
            (self.ui.verbose, 'verbose'),
            (self.ui.quiet, 'quiet'),
            (self.ui.debugflag, 'debug'),
        ]

        self._parts = {'header': '', 'footer': '', 'changeset': 'changeset',
                       'docheader': '', 'docfooter': ''}
        for mode, postfix in tmplmodes:
            for t in self._parts:
                cur = t
                if postfix:
                    cur += "_" + postfix
                if mode and cur in self.t:
                    self._parts[t] = cur

        if self._parts['docheader']:
            self.ui.write(templater.stringify(self.t(self._parts['docheader'])))

    def close(self):
        if self._parts['docfooter']:
            if not self.footer:
                self.footer = ""
            self.footer += templater.stringify(self.t(self._parts['docfooter']))
        return super(changeset_templater, self).close()

    def _show(self, ctx, copies, matchfn, props):
        '''show a single changeset or file revision'''
        props = props.copy()
        props.update(templatekw.keywords)
        props['templ'] = self.t
        props['ctx'] = ctx
        props['repo'] = self.repo
        props['ui'] = self.repo.ui
        props['index'] = next(self._counter)
        props['revcache'] = {'copies': copies}
        props['cache'] = self.cache
        props = pycompat.strkwargs(props)

        # write header
        if self._parts['header']:
            h = templater.stringify(self.t(self._parts['header'], **props))
            if self.buffered:
                self.header[ctx.rev()] = h
            else:
                if self.lastheader != h:
                    self.lastheader = h
                    self.ui.write(h)

        # write changeset metadata, then patch if requested
        key = self._parts['changeset']
        self.ui.write(templater.stringify(self.t(key, **props)))
        self.showpatch(ctx, matchfn)

        if self._parts['footer']:
            if not self.footer:
                self.footer = templater.stringify(
                    self.t(self._parts['footer'], **props))

def _lookuplogtemplate(ui, tmpl, style):
    """Find the template matching the given template spec or style

    See formatter.lookuptemplate() for details.
    """

    # ui settings
    if not tmpl and not style: # template are stronger than style
        tmpl = ui.config('ui', 'logtemplate')
        if tmpl:
            return templater.unquotestring(tmpl), None
        else:
            style = util.expandpath(ui.config('ui', 'style', ''))

    if not tmpl and style:
        mapfile = style
        if not os.path.split(mapfile)[0]:
            mapname = (templater.templatepath('map-cmdline.' + mapfile)
                       or templater.templatepath(mapfile))
            if mapname:
                mapfile = mapname
        return None, mapfile

    if not tmpl:
        return None, None

    return formatter.lookuptemplate(ui, 'changeset', tmpl)

def makelogtemplater(ui, repo, tmpl, buffered=False):
    """Create a changeset_templater from a literal template 'tmpl'"""
    return changeset_templater(ui, repo, matchfn=None, diffopts={},
                               tmpl=tmpl, mapfile=None, buffered=buffered)

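# Illustrative sketch (not part of the original module): how a caller could use
# makelogtemplater() above to render a few revisions with a literal template
# string instead of a style file.  The helper name and the template text are
# invented for this example; 'ui' and 'repo' are assumed to be the usual
# Mercurial objects handed to a command.
def _demo_literal_template(ui, repo):
    displayer = makelogtemplater(ui, repo,
                                 tmpl='{rev}:{node|short} {desc|firstline}\n')
    for rev in repo.revs('.'):          # render the working directory parent
        displayer.show(repo[rev])
    displayer.close()
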
def show_changeset(ui, repo, opts, buffered=False):
    """show one changeset using template or regular display.

    Display format will be the first non-empty hit of:
    1. option 'template'
    2. option 'style'
    3. [ui] setting 'logtemplate'
    4. [ui] setting 'style'
    If all of these values are either unset or the empty string,
    regular display via changeset_printer() is done.
    """
    # options
    matchfn = None
    if opts.get('patch') or opts.get('stat'):
        matchfn = scmutil.matchall(repo)

    if opts.get('template') == 'json':
        return jsonchangeset(ui, repo, matchfn, opts, buffered)

    spec = _lookuplogtemplate(ui, opts.get('template'), opts.get('style'))
    tmpl, mapfile = spec

    if not tmpl and not mapfile:
        return changeset_printer(ui, repo, matchfn, opts, buffered)

    return changeset_templater(ui, repo, matchfn, opts, tmpl, mapfile, buffered)

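# Illustrative sketch (assumption, not part of the original module): how the
# precedence documented above plays out when a log-like command builds its
# displayer.  The opts dicts and the helper name are invented for this example.
def _demo_show_changeset(ui, repo):
    # an explicit --template normally yields a changeset_templater
    templated = show_changeset(ui, repo, {'template': '{rev} {desc|firstline}\n'})
    # --template json is special-cased to the JSON printer
    jsondisplayer = show_changeset(ui, repo, {'template': 'json'})
    # with no template or style configured anywhere, the plain printer is used
    plain = show_changeset(ui, repo, {})
    return templated, jsondisplayer, plain
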
def showmarker(fm, marker, index=None):
    """utility function to display obsolescence marker in a readable way

    To be used by debug function."""
    if index is not None:
        fm.write('index', '%i ', index)
    fm.write('precnode', '%s ', hex(marker.precnode()))
    succs = marker.succnodes()
    fm.condwrite(succs, 'succnodes', '%s ',
                 fm.formatlist(map(hex, succs), name='node'))
    fm.write('flag', '%X ', marker.flags())
    parents = marker.parentnodes()
    if parents is not None:
        fm.write('parentnodes', '{%s} ',
                 fm.formatlist(map(hex, parents), name='node', sep=', '))
    fm.write('date', '(%s) ', fm.formatdate(marker.date()))
    meta = marker.metadata().copy()
    meta.pop('date', None)
    fm.write('metadata', '{%s}', fm.formatdict(meta, fmt='%r: %r', sep=', '))
    fm.plain('\n')

def finddate(ui, repo, date):
    """Find the tipmost changeset that matches the given date spec"""

    df = util.matchdate(date)
    m = scmutil.matchall(repo)
    results = {}

    def prep(ctx, fns):
        d = ctx.date()
        if df(d[0]):
            results[ctx.rev()] = d

    for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
        rev = ctx.rev()
        if rev in results:
            ui.status(_("found revision %s from %s\n") %
                      (rev, util.datestr(results[rev])))
            return '%d' % rev

    raise error.Abort(_("revision matching date not found"))

def increasingwindows(windowsize=8, sizelimit=512):
    while True:
        yield windowsize
        if windowsize < sizelimit:
            windowsize *= 2

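# Illustrative note (not part of the original module): the generator above
# doubles the window until it reaches sizelimit and then keeps yielding that
# cap, so the default sequence is 8, 16, 32, ..., 512, 512, ...  A quick
# sanity check, with an invented helper name:
def _demo_increasingwindows():
    gen = increasingwindows()
    sizes = [next(gen) for _ in range(9)]
    assert sizes == [8, 16, 32, 64, 128, 256, 512, 512, 512]
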
class FileWalkError(Exception):
    pass

1755 def walkfilerevs(repo, match, follow, revs, fncache):
1760 def walkfilerevs(repo, match, follow, revs, fncache):
1756 '''Walks the file history for the matched files.
1761 '''Walks the file history for the matched files.
1757
1762
1758 Returns the changeset revs that are involved in the file history.
1763 Returns the changeset revs that are involved in the file history.
1759
1764
1760 Throws FileWalkError if the file history can't be walked using
1765 Throws FileWalkError if the file history can't be walked using
1761 filelogs alone.
1766 filelogs alone.
1762 '''
1767 '''
1763 wanted = set()
1768 wanted = set()
1764 copies = []
1769 copies = []
1765 minrev, maxrev = min(revs), max(revs)
1770 minrev, maxrev = min(revs), max(revs)
1766 def filerevgen(filelog, last):
1771 def filerevgen(filelog, last):
1767 """
1772 """
1768 Only files, no patterns. Check the history of each file.
1773 Only files, no patterns. Check the history of each file.
1769
1774
1770 Examines filelog entries within minrev, maxrev linkrev range
1775 Examines filelog entries within minrev, maxrev linkrev range
1771 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
1776 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
1772 tuples in backwards order
1777 tuples in backwards order
1773 """
1778 """
1774 cl_count = len(repo)
1779 cl_count = len(repo)
1775 revs = []
1780 revs = []
1776 for j in xrange(0, last + 1):
1781 for j in xrange(0, last + 1):
1777 linkrev = filelog.linkrev(j)
1782 linkrev = filelog.linkrev(j)
1778 if linkrev < minrev:
1783 if linkrev < minrev:
1779 continue
1784 continue
1780 # only yield rev for which we have the changelog, it can
1785 # only yield rev for which we have the changelog, it can
1781 # happen while doing "hg log" during a pull or commit
1786 # happen while doing "hg log" during a pull or commit
1782 if linkrev >= cl_count:
1787 if linkrev >= cl_count:
1783 break
1788 break
1784
1789
1785 parentlinkrevs = []
1790 parentlinkrevs = []
1786 for p in filelog.parentrevs(j):
1791 for p in filelog.parentrevs(j):
1787 if p != nullrev:
1792 if p != nullrev:
1788 parentlinkrevs.append(filelog.linkrev(p))
1793 parentlinkrevs.append(filelog.linkrev(p))
1789 n = filelog.node(j)
1794 n = filelog.node(j)
1790 revs.append((linkrev, parentlinkrevs,
1795 revs.append((linkrev, parentlinkrevs,
1791 follow and filelog.renamed(n)))
1796 follow and filelog.renamed(n)))
1792
1797
1793 return reversed(revs)
1798 return reversed(revs)
1794 def iterfiles():
1799 def iterfiles():
1795 pctx = repo['.']
1800 pctx = repo['.']
1796 for filename in match.files():
1801 for filename in match.files():
1797 if follow:
1802 if follow:
1798 if filename not in pctx:
1803 if filename not in pctx:
1799 raise error.Abort(_('cannot follow file not in parent '
1804 raise error.Abort(_('cannot follow file not in parent '
1800 'revision: "%s"') % filename)
1805 'revision: "%s"') % filename)
1801 yield filename, pctx[filename].filenode()
1806 yield filename, pctx[filename].filenode()
1802 else:
1807 else:
1803 yield filename, None
1808 yield filename, None
1804 for filename_node in copies:
1809 for filename_node in copies:
1805 yield filename_node
1810 yield filename_node
1806
1811
1807 for file_, node in iterfiles():
1812 for file_, node in iterfiles():
1808 filelog = repo.file(file_)
1813 filelog = repo.file(file_)
1809 if not len(filelog):
1814 if not len(filelog):
1810 if node is None:
1815 if node is None:
1811 # A zero count may be a directory or deleted file, so
1816 # A zero count may be a directory or deleted file, so
1812 # try to find matching entries on the slow path.
1817 # try to find matching entries on the slow path.
1813 if follow:
1818 if follow:
1814 raise error.Abort(
1819 raise error.Abort(
1815 _('cannot follow nonexistent file: "%s"') % file_)
1820 _('cannot follow nonexistent file: "%s"') % file_)
1816 raise FileWalkError("Cannot walk via filelog")
1821 raise FileWalkError("Cannot walk via filelog")
1817 else:
1822 else:
1818 continue
1823 continue
1819
1824
1820 if node is None:
1825 if node is None:
1821 last = len(filelog) - 1
1826 last = len(filelog) - 1
1822 else:
1827 else:
1823 last = filelog.rev(node)
1828 last = filelog.rev(node)
1824
1829
1825 # keep track of all ancestors of the file
1830 # keep track of all ancestors of the file
1826 ancestors = {filelog.linkrev(last)}
1831 ancestors = {filelog.linkrev(last)}
1827
1832
1828 # iterate from latest to oldest revision
1833 # iterate from latest to oldest revision
1829 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
1834 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
1830 if not follow:
1835 if not follow:
1831 if rev > maxrev:
1836 if rev > maxrev:
1832 continue
1837 continue
1833 else:
1838 else:
1834 # Note that last might not be the first interesting
1839 # Note that last might not be the first interesting
1835 # rev to us:
1840 # rev to us:
1836 # if the file has been changed after maxrev, we'll
1841 # if the file has been changed after maxrev, we'll
1837 # have linkrev(last) > maxrev, and we still need
1842 # have linkrev(last) > maxrev, and we still need
1838 # to explore the file graph
1843 # to explore the file graph
1839 if rev not in ancestors:
1844 if rev not in ancestors:
1840 continue
1845 continue
1841 # XXX insert 1327 fix here
1846 # XXX insert 1327 fix here
1842 if flparentlinkrevs:
1847 if flparentlinkrevs:
1843 ancestors.update(flparentlinkrevs)
1848 ancestors.update(flparentlinkrevs)
1844
1849
1845 fncache.setdefault(rev, []).append(file_)
1850 fncache.setdefault(rev, []).append(file_)
1846 wanted.add(rev)
1851 wanted.add(rev)
1847 if copied:
1852 if copied:
1848 copies.append(copied)
1853 copies.append(copied)
1849
1854
1850 return wanted
1855 return wanted
1851
1856
1852 class _followfilter(object):
1857 class _followfilter(object):
1853 def __init__(self, repo, onlyfirst=False):
1858 def __init__(self, repo, onlyfirst=False):
1854 self.repo = repo
1859 self.repo = repo
1855 self.startrev = nullrev
1860 self.startrev = nullrev
1856 self.roots = set()
1861 self.roots = set()
1857 self.onlyfirst = onlyfirst
1862 self.onlyfirst = onlyfirst
1858
1863
1859 def match(self, rev):
1864 def match(self, rev):
1860 def realparents(rev):
1865 def realparents(rev):
1861 if self.onlyfirst:
1866 if self.onlyfirst:
1862 return self.repo.changelog.parentrevs(rev)[0:1]
1867 return self.repo.changelog.parentrevs(rev)[0:1]
1863 else:
1868 else:
1864 return filter(lambda x: x != nullrev,
1869 return filter(lambda x: x != nullrev,
1865 self.repo.changelog.parentrevs(rev))
1870 self.repo.changelog.parentrevs(rev))
1866
1871
1867 if self.startrev == nullrev:
1872 if self.startrev == nullrev:
1868 self.startrev = rev
1873 self.startrev = rev
1869 return True
1874 return True
1870
1875
1871 if rev > self.startrev:
1876 if rev > self.startrev:
1872 # forward: all descendants
1877 # forward: all descendants
1873 if not self.roots:
1878 if not self.roots:
1874 self.roots.add(self.startrev)
1879 self.roots.add(self.startrev)
1875 for parent in realparents(rev):
1880 for parent in realparents(rev):
1876 if parent in self.roots:
1881 if parent in self.roots:
1877 self.roots.add(rev)
1882 self.roots.add(rev)
1878 return True
1883 return True
1879 else:
1884 else:
1880 # backwards: all parents
1885 # backwards: all parents
1881 if not self.roots:
1886 if not self.roots:
1882 self.roots.update(realparents(self.startrev))
1887 self.roots.update(realparents(self.startrev))
1883 if rev in self.roots:
1888 if rev in self.roots:
1884 self.roots.remove(rev)
1889 self.roots.remove(rev)
1885 self.roots.update(realparents(rev))
1890 self.roots.update(realparents(rev))
1886 return True
1891 return True
1887
1892
1888 return False
1893 return False
1889
1894
1890 def walkchangerevs(repo, match, opts, prepare):
1895 def walkchangerevs(repo, match, opts, prepare):
1891 '''Iterate over files and the revs in which they changed.
1896 '''Iterate over files and the revs in which they changed.
1892
1897
1893 Callers most commonly need to iterate backwards over the history
1898 Callers most commonly need to iterate backwards over the history
1894 in which they are interested. Doing so has awful (quadratic-looking)
1899 in which they are interested. Doing so has awful (quadratic-looking)
1895 performance, so we use iterators in a "windowed" way.
1900 performance, so we use iterators in a "windowed" way.
1896
1901
1897 We walk a window of revisions in the desired order. Within the
1902 We walk a window of revisions in the desired order. Within the
1898 window, we first walk forwards to gather data, then in the desired
1903 window, we first walk forwards to gather data, then in the desired
1899 order (usually backwards) to display it.
1904 order (usually backwards) to display it.
1900
1905
1901 This function returns an iterator yielding contexts. Before
1906 This function returns an iterator yielding contexts. Before
1902 yielding each context, the iterator will first call the prepare
1907 yielding each context, the iterator will first call the prepare
1903 function on each context in the window in forward order.'''
1908 function on each context in the window in forward order.'''
1904
1909
1905 follow = opts.get('follow') or opts.get('follow_first')
1910 follow = opts.get('follow') or opts.get('follow_first')
1906 revs = _logrevs(repo, opts)
1911 revs = _logrevs(repo, opts)
1907 if not revs:
1912 if not revs:
1908 return []
1913 return []
1909 wanted = set()
1914 wanted = set()
1910 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
1915 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
1911 opts.get('removed'))
1916 opts.get('removed'))
1912 fncache = {}
1917 fncache = {}
1913 change = repo.changectx
1918 change = repo.changectx
1914
1919
1915 # First step is to fill wanted, the set of revisions that we want to yield.
1920 # First step is to fill wanted, the set of revisions that we want to yield.
1916 # When it does not induce extra cost, we also fill fncache for revisions in
1921 # When it does not induce extra cost, we also fill fncache for revisions in
1917 # wanted: a cache of filenames that were changed (ctx.files()) and that
1922 # wanted: a cache of filenames that were changed (ctx.files()) and that
1918 # match the file filtering conditions.
1923 # match the file filtering conditions.
1919
1924
1920 if match.always():
1925 if match.always():
1921 # No files, no patterns. Display all revs.
1926 # No files, no patterns. Display all revs.
1922 wanted = revs
1927 wanted = revs
1923 elif not slowpath:
1928 elif not slowpath:
1924 # We only have to read through the filelog to find wanted revisions
1929 # We only have to read through the filelog to find wanted revisions
1925
1930
1926 try:
1931 try:
1927 wanted = walkfilerevs(repo, match, follow, revs, fncache)
1932 wanted = walkfilerevs(repo, match, follow, revs, fncache)
1928 except FileWalkError:
1933 except FileWalkError:
1929 slowpath = True
1934 slowpath = True
1930
1935
1931 # We decided to fall back to the slowpath because at least one
1936 # We decided to fall back to the slowpath because at least one
1932 # of the paths was not a file. Check to see if at least one of them
1937 # of the paths was not a file. Check to see if at least one of them
1933 # existed in history, otherwise simply return
1938 # existed in history, otherwise simply return
1934 for path in match.files():
1939 for path in match.files():
1935 if path == '.' or path in repo.store:
1940 if path == '.' or path in repo.store:
1936 break
1941 break
1937 else:
1942 else:
1938 return []
1943 return []
1939
1944
1940 if slowpath:
1945 if slowpath:
1941 # We have to read the changelog to match filenames against
1946 # We have to read the changelog to match filenames against
1942 # changed files
1947 # changed files
1943
1948
1944 if follow:
1949 if follow:
1945 raise error.Abort(_('can only follow copies/renames for explicit '
1950 raise error.Abort(_('can only follow copies/renames for explicit '
1946 'filenames'))
1951 'filenames'))
1947
1952
1948 # The slow path checks files modified in every changeset.
1953 # The slow path checks files modified in every changeset.
1949 # This is really slow on large repos, so compute the set lazily.
1954 # This is really slow on large repos, so compute the set lazily.
1950 class lazywantedset(object):
1955 class lazywantedset(object):
1951 def __init__(self):
1956 def __init__(self):
1952 self.set = set()
1957 self.set = set()
1953 self.revs = set(revs)
1958 self.revs = set(revs)
1954
1959
1955 # No need to worry about locality here because it will be accessed
1960 # No need to worry about locality here because it will be accessed
1956 # in the same order as the increasing window below.
1961 # in the same order as the increasing window below.
1957 def __contains__(self, value):
1962 def __contains__(self, value):
1958 if value in self.set:
1963 if value in self.set:
1959 return True
1964 return True
1960 elif not value in self.revs:
1965 elif not value in self.revs:
1961 return False
1966 return False
1962 else:
1967 else:
1963 self.revs.discard(value)
1968 self.revs.discard(value)
1964 ctx = change(value)
1969 ctx = change(value)
1965 matches = filter(match, ctx.files())
1970 matches = filter(match, ctx.files())
1966 if matches:
1971 if matches:
1967 fncache[value] = matches
1972 fncache[value] = matches
1968 self.set.add(value)
1973 self.set.add(value)
1969 return True
1974 return True
1970 return False
1975 return False
1971
1976
1972 def discard(self, value):
1977 def discard(self, value):
1973 self.revs.discard(value)
1978 self.revs.discard(value)
1974 self.set.discard(value)
1979 self.set.discard(value)
1975
1980
1976 wanted = lazywantedset()
1981 wanted = lazywantedset()
1977
1982
1978 # it might be worthwhile to do this in the iterator if the rev range
1983 # it might be worthwhile to do this in the iterator if the rev range
1979 # is descending and the prune args are all within that range
1984 # is descending and the prune args are all within that range
1980 for rev in opts.get('prune', ()):
1985 for rev in opts.get('prune', ()):
1981 rev = repo[rev].rev()
1986 rev = repo[rev].rev()
1982 ff = _followfilter(repo)
1987 ff = _followfilter(repo)
1983 stop = min(revs[0], revs[-1])
1988 stop = min(revs[0], revs[-1])
1984 for x in xrange(rev, stop - 1, -1):
1989 for x in xrange(rev, stop - 1, -1):
1985 if ff.match(x):
1990 if ff.match(x):
1986 wanted = wanted - [x]
1991 wanted = wanted - [x]
1987
1992
1988 # Now that wanted is correctly initialized, we can iterate over the
1993 # Now that wanted is correctly initialized, we can iterate over the
1989 # revision range, yielding only revisions in wanted.
1994 # revision range, yielding only revisions in wanted.
1990 def iterate():
1995 def iterate():
1991 if follow and match.always():
1996 if follow and match.always():
1992 ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
1997 ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
1993 def want(rev):
1998 def want(rev):
1994 return ff.match(rev) and rev in wanted
1999 return ff.match(rev) and rev in wanted
1995 else:
2000 else:
1996 def want(rev):
2001 def want(rev):
1997 return rev in wanted
2002 return rev in wanted
1998
2003
1999 it = iter(revs)
2004 it = iter(revs)
2000 stopiteration = False
2005 stopiteration = False
2001 for windowsize in increasingwindows():
2006 for windowsize in increasingwindows():
2002 nrevs = []
2007 nrevs = []
2003 for i in xrange(windowsize):
2008 for i in xrange(windowsize):
2004 rev = next(it, None)
2009 rev = next(it, None)
2005 if rev is None:
2010 if rev is None:
2006 stopiteration = True
2011 stopiteration = True
2007 break
2012 break
2008 elif want(rev):
2013 elif want(rev):
2009 nrevs.append(rev)
2014 nrevs.append(rev)
2010 for rev in sorted(nrevs):
2015 for rev in sorted(nrevs):
2011 fns = fncache.get(rev)
2016 fns = fncache.get(rev)
2012 ctx = change(rev)
2017 ctx = change(rev)
2013 if not fns:
2018 if not fns:
2014 def fns_generator():
2019 def fns_generator():
2015 for f in ctx.files():
2020 for f in ctx.files():
2016 if match(f):
2021 if match(f):
2017 yield f
2022 yield f
2018 fns = fns_generator()
2023 fns = fns_generator()
2019 prepare(ctx, fns)
2024 prepare(ctx, fns)
2020 for rev in nrevs:
2025 for rev in nrevs:
2021 yield change(rev)
2026 yield change(rev)
2022
2027
2023 if stopiteration:
2028 if stopiteration:
2024 break
2029 break
2025
2030
2026 return iterate()
2031 return iterate()
2027
2032
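# Illustrative sketch (assumption, not part of the original module): a minimal
# caller of walkchangerevs() above, mirroring how finddate() uses it -- gather
# data in the forward "prepare" pass, then consume the contexts yielded in the
# requested (usually reverse) order.  Names below are invented for this example.
def _demo_walkchangerevs(ui, repo):
    m = scmutil.matchall(repo)
    touched = {}
    def prepare(ctx, fns):
        touched[ctx.rev()] = list(fns)
    for ctx in walkchangerevs(repo, m, {'rev': None}, prepare):
        ui.write("%d touched %d file(s)\n"
                 % (ctx.rev(), len(touched[ctx.rev()])))
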
2028 def _makefollowlogfilematcher(repo, files, followfirst):
2033 def _makefollowlogfilematcher(repo, files, followfirst):
2029 # When displaying a revision with --patch --follow FILE, we have
2034 # When displaying a revision with --patch --follow FILE, we have
2030 # to know which file of the revision must be diffed. With
2035 # to know which file of the revision must be diffed. With
2031 # --follow, we want the names of the ancestors of FILE in the
2036 # --follow, we want the names of the ancestors of FILE in the
2032 # revision, stored in "fcache". "fcache" is populated by
2037 # revision, stored in "fcache". "fcache" is populated by
2033 # reproducing the graph traversal already done by --follow revset
2038 # reproducing the graph traversal already done by --follow revset
2034 # and relating revs to file names (which is not "correct" but
2039 # and relating revs to file names (which is not "correct" but
2035 # good enough).
2040 # good enough).
2036 fcache = {}
2041 fcache = {}
2037 fcacheready = [False]
2042 fcacheready = [False]
2038 pctx = repo['.']
2043 pctx = repo['.']
2039
2044
2040 def populate():
2045 def populate():
2041 for fn in files:
2046 for fn in files:
2042 fctx = pctx[fn]
2047 fctx = pctx[fn]
2043 fcache.setdefault(fctx.introrev(), set()).add(fctx.path())
2048 fcache.setdefault(fctx.introrev(), set()).add(fctx.path())
2044 for c in fctx.ancestors(followfirst=followfirst):
2049 for c in fctx.ancestors(followfirst=followfirst):
2045 fcache.setdefault(c.rev(), set()).add(c.path())
2050 fcache.setdefault(c.rev(), set()).add(c.path())
2046
2051
2047 def filematcher(rev):
2052 def filematcher(rev):
2048 if not fcacheready[0]:
2053 if not fcacheready[0]:
2049 # Lazy initialization
2054 # Lazy initialization
2050 fcacheready[0] = True
2055 fcacheready[0] = True
2051 populate()
2056 populate()
2052 return scmutil.matchfiles(repo, fcache.get(rev, []))
2057 return scmutil.matchfiles(repo, fcache.get(rev, []))
2053
2058
2054 return filematcher
2059 return filematcher
2055
2060
2056 def _makenofollowlogfilematcher(repo, pats, opts):
2061 def _makenofollowlogfilematcher(repo, pats, opts):
2057 '''hook for extensions to override the filematcher for non-follow cases'''
2062 '''hook for extensions to override the filematcher for non-follow cases'''
2058 return None
2063 return None
2059
2064
2060 def _makelogrevset(repo, pats, opts, revs):
2065 def _makelogrevset(repo, pats, opts, revs):
2061 """Return (expr, filematcher) where expr is a revset string built
2066 """Return (expr, filematcher) where expr is a revset string built
2062 from log options and file patterns or None. If --stat or --patch
2067 from log options and file patterns or None. If --stat or --patch
2063 are not passed filematcher is None. Otherwise it is a callable
2068 are not passed filematcher is None. Otherwise it is a callable
2064 taking a revision number and returning a match objects filtering
2069 taking a revision number and returning a match objects filtering
2065 the files to be detailed when displaying the revision.
2070 the files to be detailed when displaying the revision.
2066 """
2071 """
2067 opt2revset = {
2072 opt2revset = {
2068 'no_merges': ('not merge()', None),
2073 'no_merges': ('not merge()', None),
2069 'only_merges': ('merge()', None),
2074 'only_merges': ('merge()', None),
2070 '_ancestors': ('ancestors(%(val)s)', None),
2075 '_ancestors': ('ancestors(%(val)s)', None),
2071 '_fancestors': ('_firstancestors(%(val)s)', None),
2076 '_fancestors': ('_firstancestors(%(val)s)', None),
2072 '_descendants': ('descendants(%(val)s)', None),
2077 '_descendants': ('descendants(%(val)s)', None),
2073 '_fdescendants': ('_firstdescendants(%(val)s)', None),
2078 '_fdescendants': ('_firstdescendants(%(val)s)', None),
2074 '_matchfiles': ('_matchfiles(%(val)s)', None),
2079 '_matchfiles': ('_matchfiles(%(val)s)', None),
2075 'date': ('date(%(val)r)', None),
2080 'date': ('date(%(val)r)', None),
2076 'branch': ('branch(%(val)r)', ' or '),
2081 'branch': ('branch(%(val)r)', ' or '),
2077 '_patslog': ('filelog(%(val)r)', ' or '),
2082 '_patslog': ('filelog(%(val)r)', ' or '),
2078 '_patsfollow': ('follow(%(val)r)', ' or '),
2083 '_patsfollow': ('follow(%(val)r)', ' or '),
2079 '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
2084 '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
2080 'keyword': ('keyword(%(val)r)', ' or '),
2085 'keyword': ('keyword(%(val)r)', ' or '),
2081 'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
2086 'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
2082 'user': ('user(%(val)r)', ' or '),
2087 'user': ('user(%(val)r)', ' or '),
2083 }
2088 }
2084
2089
2085 opts = dict(opts)
2090 opts = dict(opts)
2086 # follow or not follow?
2091 # follow or not follow?
2087 follow = opts.get('follow') or opts.get('follow_first')
2092 follow = opts.get('follow') or opts.get('follow_first')
2088 if opts.get('follow_first'):
2093 if opts.get('follow_first'):
2089 followfirst = 1
2094 followfirst = 1
2090 else:
2095 else:
2091 followfirst = 0
2096 followfirst = 0
2092 # --follow with FILE behavior depends on revs...
2097 # --follow with FILE behavior depends on revs...
2093 it = iter(revs)
2098 it = iter(revs)
2094 startrev = next(it)
2099 startrev = next(it)
2095 followdescendants = startrev < next(it, startrev)
2100 followdescendants = startrev < next(it, startrev)
2096
2101
2097 # branch and only_branch are really aliases and must be handled at
2102 # branch and only_branch are really aliases and must be handled at
2098 # the same time
2103 # the same time
2099 opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
2104 opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
2100 opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
2105 opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
2101 # pats/include/exclude are passed to match.match() directly in
2106 # pats/include/exclude are passed to match.match() directly in
2102 # _matchfiles() revset but walkchangerevs() builds its matcher with
2107 # _matchfiles() revset but walkchangerevs() builds its matcher with
2103 # scmutil.match(). The difference is input pats are globbed on
2108 # scmutil.match(). The difference is input pats are globbed on
2104 # platforms without shell expansion (windows).
2109 # platforms without shell expansion (windows).
2105 wctx = repo[None]
2110 wctx = repo[None]
2106 match, pats = scmutil.matchandpats(wctx, pats, opts)
2111 match, pats = scmutil.matchandpats(wctx, pats, opts)
2107 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
2112 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
2108 opts.get('removed'))
2113 opts.get('removed'))
2109 if not slowpath:
2114 if not slowpath:
2110 for f in match.files():
2115 for f in match.files():
2111 if follow and f not in wctx:
2116 if follow and f not in wctx:
2112 # If the file exists, it may be a directory, so let it
2117 # If the file exists, it may be a directory, so let it
2113 # take the slow path.
2118 # take the slow path.
2114 if os.path.exists(repo.wjoin(f)):
2119 if os.path.exists(repo.wjoin(f)):
2115 slowpath = True
2120 slowpath = True
2116 continue
2121 continue
2117 else:
2122 else:
2118 raise error.Abort(_('cannot follow file not in parent '
2123 raise error.Abort(_('cannot follow file not in parent '
2119 'revision: "%s"') % f)
2124 'revision: "%s"') % f)
2120 filelog = repo.file(f)
2125 filelog = repo.file(f)
2121 if not filelog:
2126 if not filelog:
2122 # A zero count may be a directory or deleted file, so
2127 # A zero count may be a directory or deleted file, so
2123 # try to find matching entries on the slow path.
2128 # try to find matching entries on the slow path.
2124 if follow:
2129 if follow:
2125 raise error.Abort(
2130 raise error.Abort(
2126 _('cannot follow nonexistent file: "%s"') % f)
2131 _('cannot follow nonexistent file: "%s"') % f)
2127 slowpath = True
2132 slowpath = True
2128
2133
2129 # We decided to fall back to the slowpath because at least one
2134 # We decided to fall back to the slowpath because at least one
2130 # of the paths was not a file. Check to see if at least one of them
2135 # of the paths was not a file. Check to see if at least one of them
2131 # existed in history - in that case, we'll continue down the
2136 # existed in history - in that case, we'll continue down the
2132 # slowpath; otherwise, we can turn off the slowpath
2137 # slowpath; otherwise, we can turn off the slowpath
2133 if slowpath:
2138 if slowpath:
2134 for path in match.files():
2139 for path in match.files():
2135 if path == '.' or path in repo.store:
2140 if path == '.' or path in repo.store:
2136 break
2141 break
2137 else:
2142 else:
2138 slowpath = False
2143 slowpath = False
2139
2144
2140 fpats = ('_patsfollow', '_patsfollowfirst')
2145 fpats = ('_patsfollow', '_patsfollowfirst')
2141 fnopats = (('_ancestors', '_fancestors'),
2146 fnopats = (('_ancestors', '_fancestors'),
2142 ('_descendants', '_fdescendants'))
2147 ('_descendants', '_fdescendants'))
2143 if slowpath:
2148 if slowpath:
2144 # See walkchangerevs() slow path.
2149 # See walkchangerevs() slow path.
2145 #
2150 #
2146 # pats/include/exclude cannot be represented as separate
2151 # pats/include/exclude cannot be represented as separate
2147 # revset expressions as their filtering logic applies at file
2152 # revset expressions as their filtering logic applies at file
2148 # level. For instance "-I a -X a" matches a revision touching
2153 # level. For instance "-I a -X a" matches a revision touching
2149 # "a" and "b" while "file(a) and not file(b)" does
2154 # "a" and "b" while "file(a) and not file(b)" does
2150 # not. Besides, filesets are evaluated against the working
2155 # not. Besides, filesets are evaluated against the working
2151 # directory.
2156 # directory.
2152 matchargs = ['r:', 'd:relpath']
2157 matchargs = ['r:', 'd:relpath']
2153 for p in pats:
2158 for p in pats:
2154 matchargs.append('p:' + p)
2159 matchargs.append('p:' + p)
2155 for p in opts.get('include', []):
2160 for p in opts.get('include', []):
2156 matchargs.append('i:' + p)
2161 matchargs.append('i:' + p)
2157 for p in opts.get('exclude', []):
2162 for p in opts.get('exclude', []):
2158 matchargs.append('x:' + p)
2163 matchargs.append('x:' + p)
2159 matchargs = ','.join(('%r' % p) for p in matchargs)
2164 matchargs = ','.join(('%r' % p) for p in matchargs)
2160 opts['_matchfiles'] = matchargs
2165 opts['_matchfiles'] = matchargs
2161 if follow:
2166 if follow:
2162 opts[fnopats[0][followfirst]] = '.'
2167 opts[fnopats[0][followfirst]] = '.'
2163 else:
2168 else:
2164 if follow:
2169 if follow:
2165 if pats:
2170 if pats:
2166 # follow() revset interprets its file argument as a
2171 # follow() revset interprets its file argument as a
2167 # manifest entry, so use match.files(), not pats.
2172 # manifest entry, so use match.files(), not pats.
2168 opts[fpats[followfirst]] = list(match.files())
2173 opts[fpats[followfirst]] = list(match.files())
2169 else:
2174 else:
2170 op = fnopats[followdescendants][followfirst]
2175 op = fnopats[followdescendants][followfirst]
2171 opts[op] = 'rev(%d)' % startrev
2176 opts[op] = 'rev(%d)' % startrev
2172 else:
2177 else:
2173 opts['_patslog'] = list(pats)
2178 opts['_patslog'] = list(pats)
2174
2179
2175 filematcher = None
2180 filematcher = None
2176 if opts.get('patch') or opts.get('stat'):
2181 if opts.get('patch') or opts.get('stat'):
2177 # When following files, track renames via a special matcher.
2182 # When following files, track renames via a special matcher.
2178 # If we're forced to take the slowpath it means we're following
2183 # If we're forced to take the slowpath it means we're following
2179 # at least one pattern/directory, so don't bother with rename tracking.
2184 # at least one pattern/directory, so don't bother with rename tracking.
2180 if follow and not match.always() and not slowpath:
2185 if follow and not match.always() and not slowpath:
2181 # _makefollowlogfilematcher expects its files argument to be
2186 # _makefollowlogfilematcher expects its files argument to be
2182 # relative to the repo root, so use match.files(), not pats.
2187 # relative to the repo root, so use match.files(), not pats.
2183 filematcher = _makefollowlogfilematcher(repo, match.files(),
2188 filematcher = _makefollowlogfilematcher(repo, match.files(),
2184 followfirst)
2189 followfirst)
2185 else:
2190 else:
2186 filematcher = _makenofollowlogfilematcher(repo, pats, opts)
2191 filematcher = _makenofollowlogfilematcher(repo, pats, opts)
2187 if filematcher is None:
2192 if filematcher is None:
2188 filematcher = lambda rev: match
2193 filematcher = lambda rev: match
2189
2194
2190 expr = []
2195 expr = []
2191 for op, val in sorted(opts.iteritems()):
2196 for op, val in sorted(opts.iteritems()):
2192 if not val:
2197 if not val:
2193 continue
2198 continue
2194 if op not in opt2revset:
2199 if op not in opt2revset:
2195 continue
2200 continue
2196 revop, andor = opt2revset[op]
2201 revop, andor = opt2revset[op]
2197 if '%(val)' not in revop:
2202 if '%(val)' not in revop:
2198 expr.append(revop)
2203 expr.append(revop)
2199 else:
2204 else:
2200 if not isinstance(val, list):
2205 if not isinstance(val, list):
2201 e = revop % {'val': val}
2206 e = revop % {'val': val}
2202 else:
2207 else:
2203 e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
2208 e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
2204 expr.append(e)
2209 expr.append(e)
2205
2210
2206 if expr:
2211 if expr:
2207 expr = '(' + ' and '.join(expr) + ')'
2212 expr = '(' + ' and '.join(expr) + ')'
2208 else:
2213 else:
2209 expr = None
2214 expr = None
2210 return expr, filematcher
2215 return expr, filematcher
2211
2216
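# Illustrative sketch (assumption, not part of the original module): on the
# fast path, the revset string assembled by _makelogrevset() above is just the
# matching opt2revset fragments joined with "and".  The helper name and the
# option values are invented for this example; the comment shows the shape of
# the expected result rather than a guaranteed exact string.
def _demo_makelogrevset(repo):
    revs = smartset.spanset(repo)
    expr, filematcher = _makelogrevset(repo, [], {'user': ['alice'],
                                                  'no_merges': True}, revs)
    # expr is roughly "(not merge() and (user('alice')))"
    return expr, filematcher
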
2212 def _logrevs(repo, opts):
2217 def _logrevs(repo, opts):
2213 # Default --rev value depends on --follow but --follow behavior
2218 # Default --rev value depends on --follow but --follow behavior
2214 # depends on revisions resolved from --rev...
2219 # depends on revisions resolved from --rev...
2215 follow = opts.get('follow') or opts.get('follow_first')
2220 follow = opts.get('follow') or opts.get('follow_first')
2216 if opts.get('rev'):
2221 if opts.get('rev'):
2217 revs = scmutil.revrange(repo, opts['rev'])
2222 revs = scmutil.revrange(repo, opts['rev'])
2218 elif follow and repo.dirstate.p1() == nullid:
2223 elif follow and repo.dirstate.p1() == nullid:
2219 revs = smartset.baseset()
2224 revs = smartset.baseset()
2220 elif follow:
2225 elif follow:
2221 revs = repo.revs('reverse(:.)')
2226 revs = repo.revs('reverse(:.)')
2222 else:
2227 else:
2223 revs = smartset.spanset(repo)
2228 revs = smartset.spanset(repo)
2224 revs.reverse()
2229 revs.reverse()
2225 return revs
2230 return revs
2226
2231
2227 def getgraphlogrevs(repo, pats, opts):
2232 def getgraphlogrevs(repo, pats, opts):
2228 """Return (revs, expr, filematcher) where revs is an iterable of
2233 """Return (revs, expr, filematcher) where revs is an iterable of
2229 revision numbers, expr is a revset string built from log options
2234 revision numbers, expr is a revset string built from log options
2230 and file patterns or None, and used to filter 'revs'. If --stat or
2235 and file patterns or None, and used to filter 'revs'. If --stat or
2231 --patch are not passed filematcher is None. Otherwise it is a
2236 --patch are not passed filematcher is None. Otherwise it is a
2232 callable taking a revision number and returning a match objects
2237 callable taking a revision number and returning a match objects
2233 filtering the files to be detailed when displaying the revision.
2238 filtering the files to be detailed when displaying the revision.
2234 """
2239 """
2235 limit = loglimit(opts)
2240 limit = loglimit(opts)
2236 revs = _logrevs(repo, opts)
2241 revs = _logrevs(repo, opts)
2237 if not revs:
2242 if not revs:
2238 return smartset.baseset(), None, None
2243 return smartset.baseset(), None, None
2239 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2244 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2240 if opts.get('rev'):
2245 if opts.get('rev'):
2241 # User-specified revs might be unsorted, but don't sort before
2246 # User-specified revs might be unsorted, but don't sort before
2242 # _makelogrevset because it might depend on the order of revs
2247 # _makelogrevset because it might depend on the order of revs
2243 if not (revs.isdescending() or revs.istopo()):
2248 if not (revs.isdescending() or revs.istopo()):
2244 revs.sort(reverse=True)
2249 revs.sort(reverse=True)
2245 if expr:
2250 if expr:
2246 matcher = revset.match(repo.ui, expr, order=revset.followorder)
2251 matcher = revset.match(repo.ui, expr, order=revset.followorder)
2247 revs = matcher(repo, revs)
2252 revs = matcher(repo, revs)
2248 if limit is not None:
2253 if limit is not None:
2249 limitedrevs = []
2254 limitedrevs = []
2250 for idx, rev in enumerate(revs):
2255 for idx, rev in enumerate(revs):
2251 if idx >= limit:
2256 if idx >= limit:
2252 break
2257 break
2253 limitedrevs.append(rev)
2258 limitedrevs.append(rev)
2254 revs = smartset.baseset(limitedrevs)
2259 revs = smartset.baseset(limitedrevs)
2255
2260
2256 return revs, expr, filematcher
2261 return revs, expr, filematcher
2257
2262
2258 def getlogrevs(repo, pats, opts):
2263 def getlogrevs(repo, pats, opts):
2259 """Return (revs, expr, filematcher) where revs is an iterable of
2264 """Return (revs, expr, filematcher) where revs is an iterable of
2260 revision numbers, expr is a revset string built from log options
2265 revision numbers, expr is a revset string built from log options
2261 and file patterns or None, and used to filter 'revs'. If --stat or
2266 and file patterns or None, and used to filter 'revs'. If --stat or
2262 --patch are not passed filematcher is None. Otherwise it is a
2267 --patch are not passed filematcher is None. Otherwise it is a
2263 callable taking a revision number and returning a match objects
2268 callable taking a revision number and returning a match objects
2264 filtering the files to be detailed when displaying the revision.
2269 filtering the files to be detailed when displaying the revision.
2265 """
2270 """
2266 limit = loglimit(opts)
2271 limit = loglimit(opts)
2267 revs = _logrevs(repo, opts)
2272 revs = _logrevs(repo, opts)
2268 if not revs:
2273 if not revs:
2269 return smartset.baseset([]), None, None
2274 return smartset.baseset([]), None, None
2270 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2275 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2271 if expr:
2276 if expr:
2272 matcher = revset.match(repo.ui, expr, order=revset.followorder)
2277 matcher = revset.match(repo.ui, expr, order=revset.followorder)
2273 revs = matcher(repo, revs)
2278 revs = matcher(repo, revs)
2274 if limit is not None:
2279 if limit is not None:
2275 limitedrevs = []
2280 limitedrevs = []
2276 for idx, r in enumerate(revs):
2281 for idx, r in enumerate(revs):
2277 if limit <= idx:
2282 if limit <= idx:
2278 break
2283 break
2279 limitedrevs.append(r)
2284 limitedrevs.append(r)
2280 revs = smartset.baseset(limitedrevs)
2285 revs = smartset.baseset(limitedrevs)
2281
2286
2282 return revs, expr, filematcher
2287 return revs, expr, filematcher
2283
2288
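# Illustrative sketch (assumption, not part of the original module): the usual
# consumer pattern for getlogrevs() above -- resolve the revisions and revset
# once, then ask the returned filematcher which files to detail per revision.
# The helper name and options are invented for this example.
def _demo_getlogrevs(ui, repo):
    revs, expr, filematcher = getlogrevs(repo, [], {'rev': ['tip'],
                                                    'patch': True})
    for rev in revs:
        ctx = repo[rev]
        files = filematcher(rev).files()   # empty list means "no restriction"
        ui.write("%d:%s %d file pattern(s)\n" % (rev, ctx, len(files)))
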
def _graphnodeformatter(ui, displayer):
    spec = ui.config('ui', 'graphnodetemplate')
    if not spec:
        return templatekw.showgraphnode # fast path for "{graphnode}"

    spec = templater.unquotestring(spec)
    templ = formatter.maketemplater(ui, 'graphnode', spec)
    cache = {}
    if isinstance(displayer, changeset_templater):
        cache = displayer.cache # reuse cache of slow templates
    props = templatekw.keywords.copy()
    props['templ'] = templ
    props['cache'] = cache
    def formatnode(repo, ctx):
        props['ctx'] = ctx
        props['repo'] = repo
        props['ui'] = repo.ui
        props['revcache'] = {}
        return templater.stringify(templ('graphnode', **props))
    return formatnode

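# Illustrative note (added for clarity, not part of the original module): the
# spec consulted above comes from the user's configuration; a hypothetical
# example value would be
#
#   [ui]
#   graphnodetemplate = {ifeq(phase, 'secret', 'S', graphnode)}
#
# When the option is unset, the fast path above returns
# templatekw.showgraphnode and every node keeps its default character.
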
def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None,
                 filematcher=None):
    formatnode = _graphnodeformatter(ui, displayer)
    state = graphmod.asciistate()
    styles = state['styles']

    # only set graph styling if HGPLAIN is not set.
    if ui.plain('graph'):
        # set all edge styles to |, the default pre-3.8 behaviour
        styles.update(dict.fromkeys(styles, '|'))
    else:
        edgetypes = {
            'parent': graphmod.PARENT,
            'grandparent': graphmod.GRANDPARENT,
            'missing': graphmod.MISSINGPARENT
        }
        for name, key in edgetypes.items():
            # experimental config: experimental.graphstyle.*
            styles[key] = ui.config('experimental', 'graphstyle.%s' % name,
                                    styles[key])
            if not styles[key]:
                styles[key] = None

    # experimental config: experimental.graphshorten
    state['graphshorten'] = ui.configbool('experimental', 'graphshorten')

    for rev, type, ctx, parents in dag:
        char = formatnode(repo, ctx)
        copies = None
        if getrenamed and ctx.rev():
            copies = []
            for fn in ctx.files():
                rename = getrenamed(fn, ctx.rev())
                if rename:
                    copies.append((fn, rename[0]))
        revmatchfn = None
        if filematcher is not None:
            revmatchfn = filematcher(ctx.rev())
        displayer.show(ctx, copies=copies, matchfn=revmatchfn)
        lines = displayer.hunk.pop(rev).split('\n')
        if not lines[-1]:
            del lines[-1]
        displayer.flush(ctx)
        edges = edgefn(type, char, lines, state, rev, parents)
        for type, char, lines, coldata in edges:
            graphmod.ascii(ui, state, type, char, lines, coldata)
    displayer.close()

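# Illustrative note (added for clarity, not part of the original module): the
# styling loop above reads one experimental config key per edge type, so a
# user could set, for example,
#
#   [experimental]
#   graphstyle.parent = |
#   graphstyle.grandparent = :
#   graphstyle.missing =
#
# the values shown here are only examples; an empty value is normalized to
# None by the code above.
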
def graphlog(ui, repo, pats, opts):
    # Parameters are identical to log command ones
    revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
    revdag = graphmod.dagwalker(repo, revs)

    getrenamed = None
    if opts.get('copies'):
        endrev = None
        if opts.get('rev'):
            endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
        getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)

    ui.pager('log')
    displayer = show_changeset(ui, repo, opts, buffered=True)
    displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges, getrenamed,
                 filematcher)

def checkunsupportedgraphflags(pats, opts):
    for op in ["newest_first"]:
        if op in opts and opts[op]:
            raise error.Abort(_("-G/--graph option is incompatible with --%s")
                              % op.replace("_", "-"))

def graphrevs(repo, nodes, opts):
    limit = loglimit(opts)
    nodes.reverse()
    if limit is not None:
        nodes = nodes[:limit]
    return graphmod.nodes(repo, nodes)

def add(ui, repo, match, prefix, explicitonly, **opts):
    join = lambda f: os.path.join(prefix, f)
    bad = []

    badfn = lambda x, y: bad.append(x) or match.bad(x, y)
    names = []
    wctx = repo[None]
    cca = None
    abort, warn = scmutil.checkportabilityalert(ui)
    if abort or warn:
        cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)

    badmatch = matchmod.badmatch(match, badfn)
    dirstate = repo.dirstate
    # We don't want to just call wctx.walk here, since it would return a lot of
    # clean files, which we aren't interested in and takes time.
    for f in sorted(dirstate.walk(badmatch, sorted(wctx.substate),
                                  True, False, full=False)):
        exact = match.exact(f)
        if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
            if cca:
                cca(f)
            names.append(f)
            if ui.verbose or not exact:
                ui.status(_('adding %s\n') % match.rel(f))

    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, match)
            if opts.get(r'subrepos'):
                bad.extend(sub.add(ui, submatch, prefix, False, **opts))
            else:
                bad.extend(sub.add(ui, submatch, prefix, True, **opts))
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not opts.get(r'dry_run'):
        rejected = wctx.add(names, prefix)
        bad.extend(f for f in rejected if f in match.files())
    return bad

def addwebdirpath(repo, serverpath, webconf):
    webconf[serverpath] = repo.root
    repo.ui.debug('adding %s = %s\n' % (serverpath, repo.root))

    for r in repo.revs('filelog("path:.hgsub")'):
        ctx = repo[r]
        for subpath in ctx.substate:
            ctx.sub(subpath).addwebdirpath(serverpath, webconf)

def forget(ui, repo, match, prefix, explicitonly):
    join = lambda f: os.path.join(prefix, f)
    bad = []
    badfn = lambda x, y: bad.append(x) or match.bad(x, y)
    wctx = repo[None]
    forgot = []

    s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
    forget = sorted(s.modified + s.added + s.deleted + s.clean)
    if explicitonly:
        forget = [f for f in forget if match.exact(f)]

    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, match)
            subbad, subforgot = sub.forget(submatch, prefix)
            bad.extend([subpath + '/' + f for f in subbad])
            forgot.extend([subpath + '/' + f for f in subforgot])
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not explicitonly:
        for f in match.files():
            if f not in repo.dirstate and not repo.wvfs.isdir(f):
                if f not in forgot:
                    if repo.wvfs.exists(f):
                        # Don't complain if the exact case match wasn't given.
                        # But don't do this until after checking 'forgot', so
                        # that subrepo files aren't normalized, and this op is
                        # purely from data cached by the status walk above.
                        if repo.dirstate.normalize(f) in repo.dirstate:
                            continue
                        ui.warn(_('not removing %s: '
                                  'file is already untracked\n')
                                % match.rel(f))
                    bad.append(f)

    for f in forget:
        if ui.verbose or not match.exact(f):
            ui.status(_('removing %s\n') % match.rel(f))

    rejected = wctx.forget(forget, prefix)
    bad.extend(f for f in rejected if f in match.files())
    forgot.extend(f for f in forget if f not in rejected)
    return bad, forgot

def files(ui, ctx, m, fm, fmt, subrepos):
    rev = ctx.rev()
    ret = 1
    ds = ctx.repo().dirstate

    for f in ctx.matches(m):
        if rev is None and ds[f] == 'r':
            continue
        fm.startitem()
        if ui.verbose:
            fc = ctx[f]
            fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
        fm.data(abspath=f)
        fm.write('path', fmt, m.rel(f))
        ret = 0

    for subpath in sorted(ctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if (subrepos or m.exact(subpath) or any(submatch.files())):
            sub = ctx.sub(subpath)
            try:
                recurse = m.exact(subpath) or subrepos
                if sub.printfiles(ui, submatch, fm, fmt, recurse) == 0:
                    ret = 0
            except error.LookupError:
                ui.status(_("skipping missing subrepository: %s\n")
                          % m.abs(subpath))

    return ret

def remove(ui, repo, m, prefix, after, force, subrepos, warnings=None):
    join = lambda f: os.path.join(prefix, f)
    ret = 0
    s = repo.status(match=m, clean=True)
    modified, added, deleted, clean = s[0], s[1], s[3], s[6]

    wctx = repo[None]

    if warnings is None:
        warnings = []
        warn = True
    else:
        warn = False

    subs = sorted(wctx.substate)
    total = len(subs)
    count = 0
    for subpath in subs:
        count += 1
        submatch = matchmod.subdirmatcher(subpath, m)
        if subrepos or m.exact(subpath) or any(submatch.files()):
            ui.progress(_('searching'), count, total=total, unit=_('subrepos'))
            sub = wctx.sub(subpath)
            try:
                if sub.removefiles(submatch, prefix, after, force, subrepos,
                                   warnings):
                    ret = 1
            except error.LookupError:
                warnings.append(_("skipping missing subrepository: %s\n")
                                % join(subpath))
    ui.progress(_('searching'), None)

    # warn about failure to delete explicit files/dirs
    deleteddirs = util.dirs(deleted)
    files = m.files()
    total = len(files)
    count = 0
    for f in files:
        def insubrepo():
            for subpath in wctx.substate:
                if f.startswith(subpath + '/'):
                    return True
            return False

        count += 1
        ui.progress(_('deleting'), count, total=total, unit=_('files'))
        isdir = f in deleteddirs or wctx.hasdir(f)
        if (f in repo.dirstate or isdir or f == '.'
            or insubrepo() or f in subs):
            continue

        if repo.wvfs.exists(f):
            if repo.wvfs.isdir(f):
                warnings.append(_('not removing %s: no tracked files\n')
                                % m.rel(f))
            else:
                warnings.append(_('not removing %s: file is untracked\n')
                                % m.rel(f))
            # missing files will generate a warning elsewhere
            ret = 1
    ui.progress(_('deleting'), None)

    if force:
        list = modified + deleted + clean + added
    elif after:
        list = deleted
        remaining = modified + added + clean
        total = len(remaining)
        count = 0
        for f in remaining:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            warnings.append(_('not removing %s: file still exists\n')
                            % m.rel(f))
            ret = 1
        ui.progress(_('skipping'), None)
    else:
        list = deleted + clean
        total = len(modified) + len(added)
        count = 0
        for f in modified:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            warnings.append(_('not removing %s: file is modified (use -f'
                              ' to force removal)\n') % m.rel(f))
            ret = 1
        for f in added:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            warnings.append(_("not removing %s: file has been marked for add"
                              " (use 'hg forget' to undo add)\n") % m.rel(f))
            ret = 1
        ui.progress(_('skipping'), None)

    list = sorted(list)
    total = len(list)
    count = 0
    for f in list:
        count += 1
        if ui.verbose or not m.exact(f):
            ui.progress(_('deleting'), count, total=total, unit=_('files'))
            ui.status(_('removing %s\n') % m.rel(f))
    ui.progress(_('deleting'), None)

    with repo.wlock():
        if not after:
            for f in list:
                if f in added:
                    continue # we never unlink added files on remove
                repo.wvfs.unlinkpath(f, ignoremissing=True)
        repo[None].forget(list)

    if warn:
        for warning in warnings:
            ui.warn(warning)

    return ret

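# Descriptive note (added for clarity, not part of the original module): when
# remove() recurses into subrepositories via sub.removefiles() it passes its
# own `warnings` list down, so nested calls append to the shared list and only
# the outermost call (the one that created the list, warn=True) prints them.
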
def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
    err = 1

    def write(path):
        filename = None
        if fntemplate:
            filename = makefilename(repo, fntemplate, ctx.node(),
                                    pathname=os.path.join(prefix, path))
        with formatter.maybereopen(basefm, filename, opts) as fm:
            data = ctx[path].data()
            if opts.get('decode'):
                data = repo.wwritedata(path, data)
            fm.startitem()
            fm.write('data', '%s', data)
            fm.data(abspath=path, path=matcher.rel(path))

    # Automation often uses hg cat on single files, so special case it
    # for performance to avoid the cost of parsing the manifest.
    if len(matcher.files()) == 1 and not matcher.anypats():
        file = matcher.files()[0]
        mfl = repo.manifestlog
        mfnode = ctx.manifestnode()
        try:
            if mfnode and mfl[mfnode].find(file)[0]:
                write(file)
                return 0
        except KeyError:
            pass

    for abs in ctx.walk(matcher):
        write(abs)
        err = 0

    for subpath in sorted(ctx.substate):
        sub = ctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, matcher)

            if not sub.cat(submatch, basefm, fntemplate,
                           os.path.join(prefix, sub._path), **opts):
                err = 0
        except error.RepoLookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % os.path.join(prefix, subpath))

    return err

def commit(ui, repo, commitfunc, pats, opts):
    '''commit the specified files or all outstanding changes'''
    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)
    message = logmessage(ui, opts)
    matcher = scmutil.match(repo[None], pats, opts)

    # extract addremove carefully -- this function can be called from a command
    # that doesn't support addremove
    if opts.get('addremove'):
        if scmutil.addremove(repo, matcher, "", opts) != 0:
            raise error.Abort(
                _("failed to mark all new/missing files as added/removed"))

    return commitfunc(ui, repo, message, matcher, opts)

def samefile(f, ctx1, ctx2):
    if f in ctx1.manifest():
        a = ctx1.filectx(f)
        if f in ctx2.manifest():
            b = ctx2.filectx(f)
            return (not a.cmp(b)
                    and a.flags() == b.flags())
        else:
            return False
    else:
        return f not in ctx2.manifest()

def amend(ui, repo, commitfunc, old, extra, pats, opts):
    # avoid cycle context -> subrepo -> cmdutil
    from . import context

    # amend will reuse the existing user if not specified, but the obsolete
    # marker creation requires that the current user's name is specified.
    if obsolete.isenabled(repo, obsolete.createmarkersopt):
        ui.username() # raise exception if username not set

    ui.note(_('amending changeset %s\n') % old)
    base = old.p1()
    createmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt)

    wlock = lock = newid = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        with repo.transaction('amend') as tr:
            # See if we got a message from -m or -l, if not, open the editor
            # with the message of the changeset to amend
            message = logmessage(ui, opts)
            # ensure logfile does not conflict with later enforcement of the
            # message. potential logfile content has been processed by
            # `logmessage` anyway.
            opts.pop('logfile')
            # First, do a regular commit to record all changes in the working
            # directory (if there are any)
            ui.callhooks = False
            activebookmark = repo._bookmarks.active
            try:
                repo._bookmarks.active = None
                opts['message'] = 'temporary amend commit for %s' % old
                node = commit(ui, repo, commitfunc, pats, opts)
            finally:
                repo._bookmarks.active = activebookmark
                repo._bookmarks.recordchange(tr)
                ui.callhooks = True
            ctx = repo[node]

            # Participating changesets:
            #
            # node/ctx o - new (intermediate) commit that contains changes
            #          |   from working dir to go into amending commit
            #          |   (or a workingctx if there were no changes)
            #          |
            # old      o - changeset to amend
            #          |
            # base     o - parent of amending changeset

            # Update extra dict from amended commit (e.g. to preserve graft
            # source)
            extra.update(old.extra())

            # Also update it from the intermediate commit or from the wctx
            extra.update(ctx.extra())

            if len(old.parents()) > 1:
                # ctx.files() isn't reliable for merges, so fall back to the
                # slower repo.status() method
                files = set([fn for st in repo.status(base, old)[:3]
                             for fn in st])
            else:
                files = set(old.files())

            # Second, we use either the commit we just did, or if there were no
            # changes the parent of the working directory as the version of the
            # files in the final amend commit
            if node:
                ui.note(_('copying changeset %s to %s\n') % (ctx, base))

                user = ctx.user()
                date = ctx.date()
                # Recompute copies (avoid recording a -> b -> a)
                copied = copies.pathcopies(base, ctx)
                if old.p2:
                    copied.update(copies.pathcopies(old.p2(), ctx))

                # Prune files which were reverted by the updates: if old
                # introduced file X and our intermediate commit, node,
                # renamed that file, then those two files are the same and
                # we can discard X from our list of files. Likewise if X
                # was deleted, it's no longer relevant
                files.update(ctx.files())
                files = [f for f in files if not samefile(f, ctx, base)]

                def filectxfn(repo, ctx_, path):
                    try:
                        fctx = ctx[path]
                        flags = fctx.flags()
                        mctx = context.memfilectx(repo,
                                                  fctx.path(), fctx.data(),
                                                  islink='l' in flags,
                                                  isexec='x' in flags,
                                                  copied=copied.get(path))
                        return mctx
                    except KeyError:
                        return None
            else:
                ui.note(_('copying changeset %s to %s\n') % (old, base))

                # Use version of files as in the old cset
                def filectxfn(repo, ctx_, path):
                    try:
                        return old.filectx(path)
                    except KeyError:
                        return None

            user = opts.get('user') or old.user()
            date = opts.get('date') or old.date()
            editform = mergeeditform(old, 'commit.amend')
            editor = getcommiteditor(editform=editform, **opts)
            if not message:
                editor = getcommiteditor(edit=True, editform=editform)
                message = old.description()

            pureextra = extra.copy()
            extra['amend_source'] = old.hex()

            new = context.memctx(repo,
                                 parents=[base.node(), old.p2().node()],
                                 text=message,
                                 files=files,
                                 filectxfn=filectxfn,
                                 user=user,
                                 date=date,
                                 extra=extra,
                                 editor=editor)

            newdesc = changelog.stripdesc(new.description())
            if ((not node)
                and newdesc == old.description()
                and user == old.user()
                and date == old.date()
                and pureextra == old.extra()):
                # nothing changed. continuing here would create a new node
                # anyway because of the amend_source noise.
                #
                # This is not what we expect from amend.
                return old.node()

            ph = repo.ui.config('phases', 'new-commit', phases.draft)
            try:
                if opts.get('secret'):
                    commitphase = 'secret'
                else:
                    commitphase = old.phase()
                repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
                newid = repo.commitctx(new)
            finally:
                repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
            if newid != old.node():
                # Reroute the working copy parent to the new changeset
                repo.setparents(newid, nullid)

                # Move bookmarks from old parent to amend commit
                bms = repo.nodebookmarks(old.node())
                if bms:
                    marks = repo._bookmarks
                    for bm in bms:
                        ui.debug('moving bookmarks %r from %s to %s\n' %
                                 (marks, old.hex(), hex(newid)))
                        marks[bm] = newid
                    marks.recordchange(tr)
            #commit the whole amend process
            if createmarkers:
                # mark the new changeset as successor of the rewritten one
                new = repo[newid]
                obs = [(old, (new,))]
                if node:
                    obs.append((ctx, ()))

                obsolete.createmarkers(repo, obs, operation='amend')
        if not createmarkers and newid != old.node():
            # Strip the intermediate commit (if there was one) and the amended
            # commit
            if node:
                ui.note(_('stripping intermediate changeset %s\n') % ctx)
            ui.note(_('stripping amended changeset %s\n') % old)
            repair.strip(ui, repo, old.node(), topic='amend-backup')
    finally:
        lockmod.release(lock, wlock)
    return newid

def commiteditor(repo, ctx, subs, editform=''):
    if ctx.description():
        return ctx.description()
    return commitforceeditor(repo, ctx, subs, editform=editform,
                             unchangedmessagedetection=True)

def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
                      editform='', unchangedmessagedetection=False):
    if not extramsg:
        extramsg = _("Leave message empty to abort commit.")

    forms = [e for e in editform.split('.') if e]
    forms.insert(0, 'changeset')
    templatetext = None
    while forms:
        tmpl = repo.ui.config('committemplate', '.'.join(forms))
        if tmpl:
            tmpl = templater.unquotestring(tmpl)
            templatetext = committext = buildcommittemplate(
                repo, ctx, subs, extramsg, tmpl)
            break
        forms.pop()
    else:
        committext = buildcommittext(repo, ctx, subs, extramsg)

    # run editor in the repository root
    olddir = pycompat.getcwd()
    os.chdir(repo.root)

    # make in-memory changes visible to external process
    tr = repo.currenttransaction()
    repo.dirstate.write(tr)
    pending = tr and tr.writepending() and repo.root

    editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
                              editform=editform, pending=pending,
                              repopath=repo.path)
    text = editortext

    # strip away anything below this special string (used for editors that want
    # to display the diff)
    stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
    if stripbelow:
        text = text[:stripbelow.start()]

    text = re.sub("(?m)^HG:.*(\n|$)", "", text)
    os.chdir(olddir)

    if finishdesc:
        text = finishdesc(text)
    if not text.strip():
        raise error.Abort(_("empty commit message"))
    if unchangedmessagedetection and editortext == templatetext:
        raise error.Abort(_("commit message unchanged"))

    return text

def buildcommittemplate(repo, ctx, subs, extramsg, tmpl):
    ui = repo.ui
    tmpl, mapfile = _lookuplogtemplate(ui, tmpl, None)

    t = changeset_templater(ui, repo, None, {}, tmpl, mapfile, False)

    for k, v in repo.ui.configitems('committemplate'):
        if k != 'changeset':
            t.t.cache[k] = v

    if not extramsg:
        extramsg = '' # ensure that extramsg is string

    ui.pushbuffer()
    t.show(ctx, extramsg=extramsg)
    return ui.popbuffer()

def hgprefix(msg):
    return "\n".join(["HG: %s" % a for a in msg.split("\n") if a])

def buildcommittext(repo, ctx, subs, extramsg):
    edittext = []
    modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
    if ctx.description():
        edittext.append(ctx.description())
    edittext.append("")
    edittext.append("") # Empty line between message and comments.
    edittext.append(hgprefix(_("Enter commit message."
                               " Lines beginning with 'HG:' are removed.")))
    edittext.append(hgprefix(extramsg))
    edittext.append("HG: --")
    edittext.append(hgprefix(_("user: %s") % ctx.user()))
    if ctx.p2():
        edittext.append(hgprefix(_("branch merge")))
    if ctx.branch():
        edittext.append(hgprefix(_("branch '%s'") % ctx.branch()))
    if bookmarks.isactivewdirparent(repo):
        edittext.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
    edittext.extend([hgprefix(_("subrepo %s") % s) for s in subs])
    edittext.extend([hgprefix(_("added %s") % f) for f in added])
    edittext.extend([hgprefix(_("changed %s") % f) for f in modified])
    edittext.extend([hgprefix(_("removed %s") % f) for f in removed])
    if not added and not modified and not removed:
        edittext.append(hgprefix(_("no files changed")))
    edittext.append("")

    return "\n".join(edittext)

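# Illustrative sketch (added for clarity, not part of the original module): for
# a working copy with one modified file, the text built above looks roughly
# like
#
#   <existing description, if any>
#
#
#   HG: Enter commit message. Lines beginning with 'HG:' are removed.
#   HG: Leave message empty to abort commit.
#   HG: --
#   HG: user: Example User <user@example.com>
#   HG: branch 'default'
#   HG: changed somefile.py
#
# where the user, branch and file names are hypothetical placeholders; the
# real values come from the context and the extramsg passed in.
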
def commitstatus(repo, node, branch, bheads=None, opts=None):
    if opts is None:
        opts = {}
    ctx = repo[node]
    parents = ctx.parents()

    if (not opts.get('amend') and bheads and node not in bheads and not
        [x for x in parents if x.node() in bheads and x.branch() == branch]):
        repo.ui.status(_('created new head\n'))
        # The message is not printed for initial roots. For the other
        # changesets, it is printed in the following situations:
        #
        # Par column: for the 2 parents with ...
        #   N: null or no parent
        #   B: parent is on another named branch
        #   C: parent is a regular non head changeset
        #   H: parent was a branch head of the current branch
        # Msg column: whether we print "created new head" message
        # In the following, it is assumed that there already exists some
        # initial branch heads of the current branch, otherwise nothing is
        # printed anyway.
        #
        # Par Msg Comment
        # N N  y  additional topo root
        #
        # B N  y  additional branch root
        # C N  y  additional topo head
        # H N  n  usual case
        #
        # B B  y  weird additional branch root
        # C B  y  branch merge
        # H B  n  merge with named branch
        #
        # C C  y  additional head from merge
        # C H  n  merge with a head
        #
        # H H  n  head merge: head count decreases

    if not opts.get('close_branch'):
        for r in parents:
            if r.closesbranch() and r.branch() == branch:
                repo.ui.status(_('reopening closed branch head %d\n') % r)

    if repo.ui.debugflag:
        repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
    elif repo.ui.verbose:
        repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))

def postcommitstatus(repo, pats, opts):
    return repo.status(match=scmutil.match(repo[None], pats, opts))

3046 def revert(ui, repo, ctx, parents, *pats, **opts):
3051 def revert(ui, repo, ctx, parents, *pats, **opts):
3047 parent, p2 = parents
3052 parent, p2 = parents
3048 node = ctx.node()
3053 node = ctx.node()
3049
3054
3050 mf = ctx.manifest()
3055 mf = ctx.manifest()
3051 if node == p2:
3056 if node == p2:
3052 parent = p2
3057 parent = p2
3053
3058
3054 # need all matching names in dirstate and manifest of target rev,
3059 # need all matching names in dirstate and manifest of target rev,
3055 # so have to walk both. do not print errors if files exist in one
3060 # so have to walk both. do not print errors if files exist in one
3056 # but not other. in both cases, filesets should be evaluated against
3061 # but not other. in both cases, filesets should be evaluated against
3057 # workingctx to get consistent result (issue4497). this means 'set:**'
3062 # workingctx to get consistent result (issue4497). this means 'set:**'
3058 # cannot be used to select missing files from target rev.
3063 # cannot be used to select missing files from target rev.
3059
3064
3060 # `names` is a mapping for all elements in working copy and target revision
3065 # `names` is a mapping for all elements in working copy and target revision
3061 # The mapping is in the form:
3066 # The mapping is in the form:
3062 # <asb path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
3067 # <asb path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
3063 names = {}
3068 names = {}
3064
3069
3065 with repo.wlock():
3070 with repo.wlock():
3066 ## filling of the `names` mapping
3071 ## filling of the `names` mapping
3067 # walk dirstate to fill `names`
3072 # walk dirstate to fill `names`
3068
3073
3069 interactive = opts.get('interactive', False)
3074 interactive = opts.get('interactive', False)
3070 wctx = repo[None]
3075 wctx = repo[None]
3071 m = scmutil.match(wctx, pats, opts)
3076 m = scmutil.match(wctx, pats, opts)
3072
3077
3073 # we'll need this later
3078 # we'll need this later
3074 targetsubs = sorted(s for s in wctx.substate if m(s))
3079 targetsubs = sorted(s for s in wctx.substate if m(s))
3075
3080
3076 if not m.always():
3081 if not m.always():
3077 matcher = matchmod.badmatch(m, lambda x, y: False)
3082 matcher = matchmod.badmatch(m, lambda x, y: False)
3078 for abs in wctx.walk(matcher):
3083 for abs in wctx.walk(matcher):
3079 names[abs] = m.rel(abs), m.exact(abs)
3084 names[abs] = m.rel(abs), m.exact(abs)
3080
3085
3081 # walk target manifest to fill `names`
3086 # walk target manifest to fill `names`
3082
3087
3083 def badfn(path, msg):
3088 def badfn(path, msg):
3084 if path in names:
3089 if path in names:
3085 return
3090 return
3086 if path in ctx.substate:
3091 if path in ctx.substate:
3087 return
3092 return
3088 path_ = path + '/'
3093 path_ = path + '/'
3089 for f in names:
3094 for f in names:
3090 if f.startswith(path_):
3095 if f.startswith(path_):
3091 return
3096 return
3092 ui.warn("%s: %s\n" % (m.rel(path), msg))
3097 ui.warn("%s: %s\n" % (m.rel(path), msg))
3093
3098
3094 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
3099 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
3095 if abs not in names:
3100 if abs not in names:
3096 names[abs] = m.rel(abs), m.exact(abs)
3101 names[abs] = m.rel(abs), m.exact(abs)
3097
3102
3098 # Find status of all file in `names`.
3103 # Find status of all file in `names`.
3099 m = scmutil.matchfiles(repo, names)
3104 m = scmutil.matchfiles(repo, names)
3100
3105
3101 changes = repo.status(node1=node, match=m,
3106 changes = repo.status(node1=node, match=m,
3102 unknown=True, ignored=True, clean=True)
3107 unknown=True, ignored=True, clean=True)
3103 else:
3108 else:
3104 changes = repo.status(node1=node, match=m)
3109 changes = repo.status(node1=node, match=m)
3105 for kind in changes:
3110 for kind in changes:
3106 for abs in kind:
3111 for abs in kind:
3107 names[abs] = m.rel(abs), m.exact(abs)
3112 names[abs] = m.rel(abs), m.exact(abs)
3108
3113
3109 m = scmutil.matchfiles(repo, names)
3114 m = scmutil.matchfiles(repo, names)
3110
3115
3111 modified = set(changes.modified)
3116 modified = set(changes.modified)
3112 added = set(changes.added)
3117 added = set(changes.added)
3113 removed = set(changes.removed)
3118 removed = set(changes.removed)
3114 _deleted = set(changes.deleted)
3119 _deleted = set(changes.deleted)
3115 unknown = set(changes.unknown)
3120 unknown = set(changes.unknown)
3116 unknown.update(changes.ignored)
3121 unknown.update(changes.ignored)
3117 clean = set(changes.clean)
3122 clean = set(changes.clean)
3118 modadded = set()
3123 modadded = set()
3119
3124
3120 # We need to account for the state of the file in the dirstate,
3125 # We need to account for the state of the file in the dirstate,
3121 # even when we revert against something else than parent. This will
3126 # even when we revert against something else than parent. This will
3122 # slightly alter the behavior of revert (doing back up or not, delete
3127 # slightly alter the behavior of revert (doing back up or not, delete
3123 # or just forget etc).
3128 # or just forget etc).
3124 if parent == node:
3129 if parent == node:
3125 dsmodified = modified
3130 dsmodified = modified
3126 dsadded = added
3131 dsadded = added
3127 dsremoved = removed
3132 dsremoved = removed
3128 # store all local modifications, useful later for rename detection
3133 # store all local modifications, useful later for rename detection
3129 localchanges = dsmodified | dsadded
3134 localchanges = dsmodified | dsadded
3130 modified, added, removed = set(), set(), set()
3135 modified, added, removed = set(), set(), set()
3131 else:
3136 else:
3132 changes = repo.status(node1=parent, match=m)
3137 changes = repo.status(node1=parent, match=m)
3133 dsmodified = set(changes.modified)
3138 dsmodified = set(changes.modified)
3134 dsadded = set(changes.added)
3139 dsadded = set(changes.added)
3135 dsremoved = set(changes.removed)
3140 dsremoved = set(changes.removed)
3136 # store all local modifications, useful later for rename detection
3141 # store all local modifications, useful later for rename detection
3137 localchanges = dsmodified | dsadded
3142 localchanges = dsmodified | dsadded
3138
3143
3139 # only take removes between wc and target into account
3144 # only take removes between wc and target into account
3140 clean |= dsremoved - removed
3145 clean |= dsremoved - removed
3141 dsremoved &= removed
3146 dsremoved &= removed
3142 # distinguish between dirstate removes and others
3147 # distinguish between dirstate removes and others
3143 removed -= dsremoved
3148 removed -= dsremoved
3144
3149
3145 modadded = added & dsmodified
3150 modadded = added & dsmodified
3146 added -= modadded
3151 added -= modadded
3147
3152
3148 # tell newly modified files apart.
3153 # tell newly modified files apart.
3149 dsmodified &= modified
3154 dsmodified &= modified
3150 dsmodified |= modified & dsadded # dirstate added may need backup
3155 dsmodified |= modified & dsadded # dirstate added may need backup
3151 modified -= dsmodified
3156 modified -= dsmodified
3152
3157
3153 # We need to wait for some post-processing to update this set
3158 # We need to wait for some post-processing to update this set
3154 # before making the distinction. The dirstate will be used for
3159 # before making the distinction. The dirstate will be used for
3155 # that purpose.
3160 # that purpose.
3156 dsadded = added
3161 dsadded = added
3157
3162
3158 # in case of merge, files that are actually added can be reported as
3163 # in case of merge, files that are actually added can be reported as
3159 # modified; we need to post-process the result
3164 # modified; we need to post-process the result
3160 if p2 != nullid:
3165 if p2 != nullid:
3161 mergeadd = set(dsmodified)
3166 mergeadd = set(dsmodified)
3162 for path in dsmodified:
3167 for path in dsmodified:
3163 if path in mf:
3168 if path in mf:
3164 mergeadd.remove(path)
3169 mergeadd.remove(path)
3165 dsadded |= mergeadd
3170 dsadded |= mergeadd
3166 dsmodified -= mergeadd
3171 dsmodified -= mergeadd
3167
3172
3168 # if f is a rename, update `names` to also revert the source
3173 # if f is a rename, update `names` to also revert the source
3169 cwd = repo.getcwd()
3174 cwd = repo.getcwd()
3170 for f in localchanges:
3175 for f in localchanges:
3171 src = repo.dirstate.copied(f)
3176 src = repo.dirstate.copied(f)
3172 # XXX should we check for rename down to target node?
3177 # XXX should we check for rename down to target node?
3173 if src and src not in names and repo.dirstate[src] == 'r':
3178 if src and src not in names and repo.dirstate[src] == 'r':
3174 dsremoved.add(src)
3179 dsremoved.add(src)
3175 names[src] = (repo.pathto(src, cwd), True)
3180 names[src] = (repo.pathto(src, cwd), True)
3176
3181
3177 # determine the exact nature of the deleted files
3182 # determine the exact nature of the deleted files
3178 deladded = set(_deleted)
3183 deladded = set(_deleted)
3179 for path in _deleted:
3184 for path in _deleted:
3180 if path in mf:
3185 if path in mf:
3181 deladded.remove(path)
3186 deladded.remove(path)
3182 deleted = _deleted - deladded
3187 deleted = _deleted - deladded
3183
3188
3184 # distinguish between files to forget and the others
3189 # distinguish between files to forget and the others
3185 added = set()
3190 added = set()
3186 for abs in dsadded:
3191 for abs in dsadded:
3187 if repo.dirstate[abs] != 'a':
3192 if repo.dirstate[abs] != 'a':
3188 added.add(abs)
3193 added.add(abs)
3189 dsadded -= added
3194 dsadded -= added
3190
3195
3191 for abs in deladded:
3196 for abs in deladded:
3192 if repo.dirstate[abs] == 'a':
3197 if repo.dirstate[abs] == 'a':
3193 dsadded.add(abs)
3198 dsadded.add(abs)
3194 deladded -= dsadded
3199 deladded -= dsadded
3195
3200
3196 # For files marked as removed, we check if an unknown file is present at
3201 # For files marked as removed, we check if an unknown file is present at
3197 # the same path. If such a file exists it may need to be backed up.
3202 # the same path. If such a file exists it may need to be backed up.
3198 # Making the distinction at this stage helps keep the backup
3203 # Making the distinction at this stage helps keep the backup
3199 # logic simpler.
3204 # logic simpler.
3200 removunk = set()
3205 removunk = set()
3201 for abs in removed:
3206 for abs in removed:
3202 target = repo.wjoin(abs)
3207 target = repo.wjoin(abs)
3203 if os.path.lexists(target):
3208 if os.path.lexists(target):
3204 removunk.add(abs)
3209 removunk.add(abs)
3205 removed -= removunk
3210 removed -= removunk
3206
3211
3207 dsremovunk = set()
3212 dsremovunk = set()
3208 for abs in dsremoved:
3213 for abs in dsremoved:
3209 target = repo.wjoin(abs)
3214 target = repo.wjoin(abs)
3210 if os.path.lexists(target):
3215 if os.path.lexists(target):
3211 dsremovunk.add(abs)
3216 dsremovunk.add(abs)
3212 dsremoved -= dsremovunk
3217 dsremoved -= dsremovunk
3213
3218
3214 # actions to be actually performed by revert
3219 # actions to be actually performed by revert
3215 # (<list of files>, <message>) tuple
3220 # (<list of files>, <message>) tuple
3216 actions = {'revert': ([], _('reverting %s\n')),
3221 actions = {'revert': ([], _('reverting %s\n')),
3217 'add': ([], _('adding %s\n')),
3222 'add': ([], _('adding %s\n')),
3218 'remove': ([], _('removing %s\n')),
3223 'remove': ([], _('removing %s\n')),
3219 'drop': ([], _('removing %s\n')),
3224 'drop': ([], _('removing %s\n')),
3220 'forget': ([], _('forgetting %s\n')),
3225 'forget': ([], _('forgetting %s\n')),
3221 'undelete': ([], _('undeleting %s\n')),
3226 'undelete': ([], _('undeleting %s\n')),
3222 'noop': (None, _('no changes needed to %s\n')),
3227 'noop': (None, _('no changes needed to %s\n')),
3223 'unknown': (None, _('file not managed: %s\n')),
3228 'unknown': (None, _('file not managed: %s\n')),
3224 }
3229 }
3225
3230
3226 # "constants" that convey the backup strategy.
3231 # "constants" that convey the backup strategy.
3227 # All are set to `discard` if `no-backup` is set, to avoid checking
3232 # All are set to `discard` if `no-backup` is set, to avoid checking
3228 # no_backup lower in the code.
3233 # no_backup lower in the code.
3229 # These values are ordered for comparison purposes
3234 # These values are ordered for comparison purposes
3230 backupinteractive = 3 # do backup if interactively modified
3235 backupinteractive = 3 # do backup if interactively modified
3231 backup = 2 # unconditionally do backup
3236 backup = 2 # unconditionally do backup
3232 check = 1 # check if the existing file differs from target
3237 check = 1 # check if the existing file differs from target
3233 discard = 0 # never do backup
3238 discard = 0 # never do backup
3234 if opts.get('no_backup'):
3239 if opts.get('no_backup'):
3235 backupinteractive = backup = check = discard
3240 backupinteractive = backup = check = discard
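# Editorial aside (not part of the original file): a hedged worked example of
# how these ordered constants feed the backup decision further down. For a
# file whose strategy is `check` (1), the test `backup <= dobackup` is False,
# so a .orig copy is written only when wctx[abs].cmp(ctx[abs]) reports that
# the file on disk really differs from the target; for `backup` (2) the
# comparison is short-circuited and the copy is always made; for `discard`
# (0) the `if dobackup:` branch is skipped entirely.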
3236 if interactive:
3241 if interactive:
3237 dsmodifiedbackup = backupinteractive
3242 dsmodifiedbackup = backupinteractive
3238 else:
3243 else:
3239 dsmodifiedbackup = backup
3244 dsmodifiedbackup = backup
3240 tobackup = set()
3245 tobackup = set()
3241
3246
3242 backupanddel = actions['remove']
3247 backupanddel = actions['remove']
3243 if not opts.get('no_backup'):
3248 if not opts.get('no_backup'):
3244 backupanddel = actions['drop']
3249 backupanddel = actions['drop']
3245
3250
3246 disptable = (
3251 disptable = (
3247 # dispatch table:
3252 # dispatch table:
3248 # file state
3253 # file state
3249 # action
3254 # action
3250 # make backup
3255 # make backup
3251
3256
3252 ## Sets whose results will change files on disk
3257 ## Sets whose results will change files on disk
3253 # Modified compared to target, no local change
3258 # Modified compared to target, no local change
3254 (modified, actions['revert'], discard),
3259 (modified, actions['revert'], discard),
3255 # Modified compared to target, but local file is deleted
3260 # Modified compared to target, but local file is deleted
3256 (deleted, actions['revert'], discard),
3261 (deleted, actions['revert'], discard),
3257 # Modified compared to target, local change
3262 # Modified compared to target, local change
3258 (dsmodified, actions['revert'], dsmodifiedbackup),
3263 (dsmodified, actions['revert'], dsmodifiedbackup),
3259 # Added since target
3264 # Added since target
3260 (added, actions['remove'], discard),
3265 (added, actions['remove'], discard),
3261 # Added in working directory
3266 # Added in working directory
3262 (dsadded, actions['forget'], discard),
3267 (dsadded, actions['forget'], discard),
3263 # Added since target, have local modification
3268 # Added since target, have local modification
3264 (modadded, backupanddel, backup),
3269 (modadded, backupanddel, backup),
3265 # Added since target but file is missing in working directory
3270 # Added since target but file is missing in working directory
3266 (deladded, actions['drop'], discard),
3271 (deladded, actions['drop'], discard),
3267 # Removed since target, before working copy parent
3272 # Removed since target, before working copy parent
3268 (removed, actions['add'], discard),
3273 (removed, actions['add'], discard),
3269 # Same as `removed` but an unknown file exists at the same path
3274 # Same as `removed` but an unknown file exists at the same path
3270 (removunk, actions['add'], check),
3275 (removunk, actions['add'], check),
3271 # Removed since target, marked as such in working copy parent
3276 # Removed since target, marked as such in working copy parent
3272 (dsremoved, actions['undelete'], discard),
3277 (dsremoved, actions['undelete'], discard),
3273 # Same as `dsremoved` but an unknown file exists at the same path
3278 # Same as `dsremoved` but an unknown file exists at the same path
3274 (dsremovunk, actions['undelete'], check),
3279 (dsremovunk, actions['undelete'], check),
3275 ## the following sets do not result in any file changes
3280 ## the following sets do not result in any file changes
3276 # File with no modification
3281 # File with no modification
3277 (clean, actions['noop'], discard),
3282 (clean, actions['noop'], discard),
3278 # Existing file, not tracked anywhere
3283 # Existing file, not tracked anywhere
3279 (unknown, actions['unknown'], discard),
3284 (unknown, actions['unknown'], discard),
3280 )
3285 )
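# Editorial aside (not part of the original file): a hedged trace of one
# lookup in the table above. A path that appears only in the `modified` set
# matches the first entry, so the loop below appends it to
# actions['revert'][0] with the `discard` strategy: it is rewritten from the
# target revision and no .orig backup is created.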
3281
3286
3282 for abs, (rel, exact) in sorted(names.items()):
3287 for abs, (rel, exact) in sorted(names.items()):
3283 # target file to be touched on disk (relative to cwd)
3288 # target file to be touched on disk (relative to cwd)
3284 target = repo.wjoin(abs)
3289 target = repo.wjoin(abs)
3285 # search for the entry in the dispatch table.
3290 # search for the entry in the dispatch table.
3286 # if the file is in any of these sets, it was touched in the working
3291 # if the file is in any of these sets, it was touched in the working
3287 # directory parent and we are sure it needs to be reverted.
3292 # directory parent and we are sure it needs to be reverted.
3288 for table, (xlist, msg), dobackup in disptable:
3293 for table, (xlist, msg), dobackup in disptable:
3289 if abs not in table:
3294 if abs not in table:
3290 continue
3295 continue
3291 if xlist is not None:
3296 if xlist is not None:
3292 xlist.append(abs)
3297 xlist.append(abs)
3293 if dobackup:
3298 if dobackup:
3294 # If in interactive mode, don't automatically create
3299 # If in interactive mode, don't automatically create
3295 # .orig files (issue4793)
3300 # .orig files (issue4793)
3296 if dobackup == backupinteractive:
3301 if dobackup == backupinteractive:
3297 tobackup.add(abs)
3302 tobackup.add(abs)
3298 elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
3303 elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
3299 bakname = scmutil.origpath(ui, repo, rel)
3304 bakname = scmutil.origpath(ui, repo, rel)
3300 ui.note(_('saving current version of %s as %s\n') %
3305 ui.note(_('saving current version of %s as %s\n') %
3301 (rel, bakname))
3306 (rel, bakname))
3302 if not opts.get('dry_run'):
3307 if not opts.get('dry_run'):
3303 if interactive:
3308 if interactive:
3304 util.copyfile(target, bakname)
3309 util.copyfile(target, bakname)
3305 else:
3310 else:
3306 util.rename(target, bakname)
3311 util.rename(target, bakname)
3307 if ui.verbose or not exact:
3312 if ui.verbose or not exact:
3308 if not isinstance(msg, basestring):
3313 if not isinstance(msg, basestring):
3309 msg = msg(abs)
3314 msg = msg(abs)
3310 ui.status(msg % rel)
3315 ui.status(msg % rel)
3311 elif exact:
3316 elif exact:
3312 ui.warn(msg % rel)
3317 ui.warn(msg % rel)
3313 break
3318 break
3314
3319
3315 if not opts.get('dry_run'):
3320 if not opts.get('dry_run'):
3316 needdata = ('revert', 'add', 'undelete')
3321 needdata = ('revert', 'add', 'undelete')
3317 _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
3322 _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
3318 _performrevert(repo, parents, ctx, actions, interactive, tobackup)
3323 _performrevert(repo, parents, ctx, actions, interactive, tobackup)
3319
3324
3320 if targetsubs:
3325 if targetsubs:
3321 # Revert the subrepos on the revert list
3326 # Revert the subrepos on the revert list
3322 for sub in targetsubs:
3327 for sub in targetsubs:
3323 try:
3328 try:
3324 wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
3329 wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
3325 except KeyError:
3330 except KeyError:
3326 raise error.Abort("subrepository '%s' does not exist in %s!"
3331 raise error.Abort("subrepository '%s' does not exist in %s!"
3327 % (sub, short(ctx.node())))
3332 % (sub, short(ctx.node())))
3328
3333
3329 def _revertprefetch(repo, ctx, *files):
3334 def _revertprefetch(repo, ctx, *files):
3330 """Let extensions changing the storage layer prefetch content"""
3335 """Let extensions changing the storage layer prefetch content"""
3331 pass
3336 pass
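A hedged sketch (not part of this change) of how a storage extension might use
the hook point above; extensions.wrapfunction is an existing Mercurial API,
while the prefetch call itself is a hypothetical storage-layer method:

    from mercurial import cmdutil, extensions

    def _wrappedrevertprefetch(orig, repo, ctx, *files):
        # batch-fetch file contents before revert starts touching the disk
        repo.myprefetch(ctx, files)  # hypothetical storage-layer call
        return orig(repo, ctx, *files)

    def uisetup(ui):
        extensions.wrapfunction(cmdutil, '_revertprefetch',
                                _wrappedrevertprefetch)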
3332
3337
3333 def _performrevert(repo, parents, ctx, actions, interactive=False,
3338 def _performrevert(repo, parents, ctx, actions, interactive=False,
3334 tobackup=None):
3339 tobackup=None):
3335 """function that actually performs all the actions computed for revert
3340 """function that actually performs all the actions computed for revert
3336
3341
3337 This is an independent function to let extensions plug in and react to
3342 This is an independent function to let extensions plug in and react to
3338 the imminent revert.
3343 the imminent revert.
3339
3344
3340 Make sure you have the working directory locked when calling this function.
3345 Make sure you have the working directory locked when calling this function.
3341 """
3346 """
3342 parent, p2 = parents
3347 parent, p2 = parents
3343 node = ctx.node()
3348 node = ctx.node()
3344 excluded_files = []
3349 excluded_files = []
3345 matcher_opts = {"exclude": excluded_files}
3350 matcher_opts = {"exclude": excluded_files}
3346
3351
3347 def checkout(f):
3352 def checkout(f):
3348 fc = ctx[f]
3353 fc = ctx[f]
3349 repo.wwrite(f, fc.data(), fc.flags())
3354 repo.wwrite(f, fc.data(), fc.flags())
3350
3355
3351 def doremove(f):
3356 def doremove(f):
3352 try:
3357 try:
3353 repo.wvfs.unlinkpath(f)
3358 repo.wvfs.unlinkpath(f)
3354 except OSError:
3359 except OSError:
3355 pass
3360 pass
3356 repo.dirstate.remove(f)
3361 repo.dirstate.remove(f)
3357
3362
3358 audit_path = pathutil.pathauditor(repo.root)
3363 audit_path = pathutil.pathauditor(repo.root)
3359 for f in actions['forget'][0]:
3364 for f in actions['forget'][0]:
3360 if interactive:
3365 if interactive:
3361 choice = repo.ui.promptchoice(
3366 choice = repo.ui.promptchoice(
3362 _("forget added file %s (Yn)?$$ &Yes $$ &No") % f)
3367 _("forget added file %s (Yn)?$$ &Yes $$ &No") % f)
3363 if choice == 0:
3368 if choice == 0:
3364 repo.dirstate.drop(f)
3369 repo.dirstate.drop(f)
3365 else:
3370 else:
3366 excluded_files.append(repo.wjoin(f))
3371 excluded_files.append(repo.wjoin(f))
3367 else:
3372 else:
3368 repo.dirstate.drop(f)
3373 repo.dirstate.drop(f)
3369 for f in actions['remove'][0]:
3374 for f in actions['remove'][0]:
3370 audit_path(f)
3375 audit_path(f)
3371 if interactive:
3376 if interactive:
3372 choice = repo.ui.promptchoice(
3377 choice = repo.ui.promptchoice(
3373 _("remove added file %s (Yn)?$$ &Yes $$ &No") % f)
3378 _("remove added file %s (Yn)?$$ &Yes $$ &No") % f)
3374 if choice == 0:
3379 if choice == 0:
3375 doremove(f)
3380 doremove(f)
3376 else:
3381 else:
3377 excluded_files.append(repo.wjoin(f))
3382 excluded_files.append(repo.wjoin(f))
3378 else:
3383 else:
3379 doremove(f)
3384 doremove(f)
3380 for f in actions['drop'][0]:
3385 for f in actions['drop'][0]:
3381 audit_path(f)
3386 audit_path(f)
3382 repo.dirstate.remove(f)
3387 repo.dirstate.remove(f)
3383
3388
3384 normal = None
3389 normal = None
3385 if node == parent:
3390 if node == parent:
3386 # We're reverting to our parent. If possible, we'd like status
3391 # We're reverting to our parent. If possible, we'd like status
3387 # to report the file as clean. We have to use normallookup for
3392 # to report the file as clean. We have to use normallookup for
3388 # merges to avoid losing information about merged/dirty files.
3393 # merges to avoid losing information about merged/dirty files.
3389 if p2 != nullid:
3394 if p2 != nullid:
3390 normal = repo.dirstate.normallookup
3395 normal = repo.dirstate.normallookup
3391 else:
3396 else:
3392 normal = repo.dirstate.normal
3397 normal = repo.dirstate.normal
3393
3398
3394 newlyaddedandmodifiedfiles = set()
3399 newlyaddedandmodifiedfiles = set()
3395 if interactive:
3400 if interactive:
3396 # Prompt the user for changes to revert
3401 # Prompt the user for changes to revert
3397 torevert = [repo.wjoin(f) for f in actions['revert'][0]]
3402 torevert = [repo.wjoin(f) for f in actions['revert'][0]]
3398 m = scmutil.match(ctx, torevert, matcher_opts)
3403 m = scmutil.match(ctx, torevert, matcher_opts)
3399 diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
3404 diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
3400 diffopts.nodates = True
3405 diffopts.nodates = True
3401 diffopts.git = True
3406 diffopts.git = True
3402 operation = 'discard'
3407 operation = 'discard'
3403 reversehunks = True
3408 reversehunks = True
3404 if node != parent:
3409 if node != parent:
3405 operation = 'revert'
3410 operation = 'revert'
3406 reversehunks = repo.ui.configbool('experimental',
3411 reversehunks = repo.ui.configbool('experimental',
3407 'revertalternateinteractivemode',
3412 'revertalternateinteractivemode',
3408 True)
3413 True)
3409 if reversehunks:
3414 if reversehunks:
3410 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3415 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3411 else:
3416 else:
3412 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3417 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3413 originalchunks = patch.parsepatch(diff)
3418 originalchunks = patch.parsepatch(diff)
3414
3419
3415 try:
3420 try:
3416
3421
3417 chunks, opts = recordfilter(repo.ui, originalchunks,
3422 chunks, opts = recordfilter(repo.ui, originalchunks,
3418 operation=operation)
3423 operation=operation)
3419 if reversehunks:
3424 if reversehunks:
3420 chunks = patch.reversehunks(chunks)
3425 chunks = patch.reversehunks(chunks)
3421
3426
3422 except patch.PatchError as err:
3427 except patch.PatchError as err:
3423 raise error.Abort(_('error parsing patch: %s') % err)
3428 raise error.Abort(_('error parsing patch: %s') % err)
3424
3429
3425 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
3430 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
3426 if tobackup is None:
3431 if tobackup is None:
3427 tobackup = set()
3432 tobackup = set()
3428 # Apply changes
3433 # Apply changes
3429 fp = stringio()
3434 fp = stringio()
3430 for c in chunks:
3435 for c in chunks:
3431 # Create a backup file only if this hunk should be backed up
3436 # Create a backup file only if this hunk should be backed up
3432 if ishunk(c) and c.header.filename() in tobackup:
3437 if ishunk(c) and c.header.filename() in tobackup:
3433 abs = c.header.filename()
3438 abs = c.header.filename()
3434 target = repo.wjoin(abs)
3439 target = repo.wjoin(abs)
3435 bakname = scmutil.origpath(repo.ui, repo, m.rel(abs))
3440 bakname = scmutil.origpath(repo.ui, repo, m.rel(abs))
3436 util.copyfile(target, bakname)
3441 util.copyfile(target, bakname)
3437 tobackup.remove(abs)
3442 tobackup.remove(abs)
3438 c.write(fp)
3443 c.write(fp)
3439 dopatch = fp.tell()
3444 dopatch = fp.tell()
3440 fp.seek(0)
3445 fp.seek(0)
3441 if dopatch:
3446 if dopatch:
3442 try:
3447 try:
3443 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3448 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3444 except patch.PatchError as err:
3449 except patch.PatchError as err:
3445 raise error.Abort(str(err))
3450 raise error.Abort(str(err))
3446 del fp
3451 del fp
3447 else:
3452 else:
3448 for f in actions['revert'][0]:
3453 for f in actions['revert'][0]:
3449 checkout(f)
3454 checkout(f)
3450 if normal:
3455 if normal:
3451 normal(f)
3456 normal(f)
3452
3457
3453 for f in actions['add'][0]:
3458 for f in actions['add'][0]:
3454 # Don't check out modified files; they are already created by the diff
3459 # Don't check out modified files; they are already created by the diff
3455 if f not in newlyaddedandmodifiedfiles:
3460 if f not in newlyaddedandmodifiedfiles:
3456 checkout(f)
3461 checkout(f)
3457 repo.dirstate.add(f)
3462 repo.dirstate.add(f)
3458
3463
3459 normal = repo.dirstate.normallookup
3464 normal = repo.dirstate.normallookup
3460 if node == parent and p2 == nullid:
3465 if node == parent and p2 == nullid:
3461 normal = repo.dirstate.normal
3466 normal = repo.dirstate.normal
3462 for f in actions['undelete'][0]:
3467 for f in actions['undelete'][0]:
3463 checkout(f)
3468 checkout(f)
3464 normal(f)
3469 normal(f)
3465
3470
3466 copied = copies.pathcopies(repo[parent], ctx)
3471 copied = copies.pathcopies(repo[parent], ctx)
3467
3472
3468 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3473 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3469 if f in copied:
3474 if f in copied:
3470 repo.dirstate.copy(copied[f], f)
3475 repo.dirstate.copy(copied[f], f)
3471
3476
3472 class command(registrar.command):
3477 class command(registrar.command):
3473 def _doregister(self, func, name, *args, **kwargs):
3478 def _doregister(self, func, name, *args, **kwargs):
3474 func._deprecatedregistrar = True # flag for deprecwarn in extensions.py
3479 func._deprecatedregistrar = True # flag for deprecwarn in extensions.py
3475 return super(command, self)._doregister(func, name, *args, **kwargs)
3480 return super(command, self)._doregister(func, name, *args, **kwargs)
3476
3481
3477 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3482 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3478 # commands.outgoing. "missing" is "missing" of the result of
3483 # commands.outgoing. "missing" is "missing" of the result of
3479 # "findcommonoutgoing()"
3484 # "findcommonoutgoing()"
3480 outgoinghooks = util.hooks()
3485 outgoinghooks = util.hooks()
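A hedged example of the shape such a hook takes; the function name is invented,
and registration via hooks.add() is shown as an extension would typically do
from its uisetup():

    def _showoutgoingcount(ui, repo, otherpeer, opts, missing):
        # "missing": nodes absent from the remote (see comment above)
        ui.note('%d outgoing changesets\n' % len(missing))

    # cmdutil.outgoinghooks.add('myext', _showoutgoingcount)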
3481
3486
3482 # a list of (ui, repo) functions called by commands.summary
3487 # a list of (ui, repo) functions called by commands.summary
3483 summaryhooks = util.hooks()
3488 summaryhooks = util.hooks()
3484
3489
3485 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3490 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3486 #
3491 #
3487 # functions should return tuple of booleans below, if 'changes' is None:
3492 # functions should return tuple of booleans below, if 'changes' is None:
3488 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3493 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3489 #
3494 #
3490 # otherwise, 'changes' is a tuple of tuples below:
3495 # otherwise, 'changes' is a tuple of tuples below:
3491 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3496 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3492 # - (desturl, destbranch, destpeer, outgoing)
3497 # - (desturl, destbranch, destpeer, outgoing)
3493 summaryremotehooks = util.hooks()
3498 summaryremotehooks = util.hooks()
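A hedged sketch of a summary remote hook following the contract documented
above; the function name is invented and only the fields named in the comment
are used:

    def _mysummaryremote(ui, repo, opts, changes):
        if changes is None:
            # first pass: (need incoming info, need outgoing info)
            return (False, True)
        source, dest = changes
        desturl, destbranch, destpeer, outgoing = dest
        ui.status('summary would push to: %s\n' % desturl)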
3494
3499
3495 # A list of state files kept by multistep operations like graft.
3500 # A list of state files kept by multistep operations like graft.
3496 # Since graft cannot be aborted, it is considered 'clearable' by update.
3501 # Since graft cannot be aborted, it is considered 'clearable' by update.
3497 # note: bisect is intentionally excluded
3502 # note: bisect is intentionally excluded
3498 # (state file, clearable, allowcommit, error, hint)
3503 # (state file, clearable, allowcommit, error, hint)
3499 unfinishedstates = [
3504 unfinishedstates = [
3500 ('graftstate', True, False, _('graft in progress'),
3505 ('graftstate', True, False, _('graft in progress'),
3501 _("use 'hg graft --continue' or 'hg update' to abort")),
3506 _("use 'hg graft --continue' or 'hg update' to abort")),
3502 ('updatestate', True, False, _('last update was interrupted'),
3507 ('updatestate', True, False, _('last update was interrupted'),
3503 _("use 'hg update' to get a consistent checkout"))
3508 _("use 'hg update' to get a consistent checkout"))
3504 ]
3509 ]
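As a hedged illustration, an extension that keeps its own state file could
append an entry using the (state file, clearable, allowcommit, error, hint)
layout documented above; 'foostate' and the messages are invented:

    unfinishedstates.append(
        ('foostate', False, False, _('foo in progress'),
         _("use 'hg foo --continue' or 'hg foo --abort'")))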
3505
3510
3506 def checkunfinished(repo, commit=False):
3511 def checkunfinished(repo, commit=False):
3507 '''Look for an unfinished multistep operation, like graft, and abort
3512 '''Look for an unfinished multistep operation, like graft, and abort
3508 if found. It's probably good to check this right before
3513 if found. It's probably good to check this right before
3509 bailifchanged().
3514 bailifchanged().
3510 '''
3515 '''
3511 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3516 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3512 if commit and allowcommit:
3517 if commit and allowcommit:
3513 continue
3518 continue
3514 if repo.vfs.exists(f):
3519 if repo.vfs.exists(f):
3515 raise error.Abort(msg, hint=hint)
3520 raise error.Abort(msg, hint=hint)
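A hedged usage sketch based on the docstring above; cmdutil.bailifchanged() is
an existing helper, while the surrounding command body is invented:

    from mercurial import cmdutil

    def mycommand(ui, repo, *pats, **opts):
        cmdutil.checkunfinished(repo)
        cmdutil.bailifchanged(repo)
        # safe to proceed with the destructive operation here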
3516
3521
3517 def clearunfinished(repo):
3522 def clearunfinished(repo):
3518 '''Check for unfinished operations (as above), and clear the ones
3523 '''Check for unfinished operations (as above), and clear the ones
3519 that are clearable.
3524 that are clearable.
3520 '''
3525 '''
3521 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3526 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3522 if not clearable and repo.vfs.exists(f):
3527 if not clearable and repo.vfs.exists(f):
3523 raise error.Abort(msg, hint=hint)
3528 raise error.Abort(msg, hint=hint)
3524 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3529 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3525 if clearable and repo.vfs.exists(f):
3530 if clearable and repo.vfs.exists(f):
3526 util.unlink(repo.vfs.join(f))
3531 util.unlink(repo.vfs.join(f))
3527
3532
3528 afterresolvedstates = [
3533 afterresolvedstates = [
3529 ('graftstate',
3534 ('graftstate',
3530 _('hg graft --continue')),
3535 _('hg graft --continue')),
3531 ]
3536 ]
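The same extension could advertise how to resume its operation; a hedged,
invented example entry:

    afterresolvedstates.append(
        ('foostate', _('hg foo --continue')))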
3532
3537
3533 def howtocontinue(repo):
3538 def howtocontinue(repo):
3534 '''Check for an unfinished operation and return the command to finish
3539 '''Check for an unfinished operation and return the command to finish
3535 it.
3540 it.
3536
3541
3537 afterresolvedstates tuples define a .hg/{file} and the corresponding
3542 afterresolvedstates tuples define a .hg/{file} and the corresponding
3538 command needed to finish it.
3543 command needed to finish it.
3539
3544
3540 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3545 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3541 a boolean.
3546 a boolean.
3542 '''
3547 '''
3543 contmsg = _("continue: %s")
3548 contmsg = _("continue: %s")
3544 for f, msg in afterresolvedstates:
3549 for f, msg in afterresolvedstates:
3545 if repo.vfs.exists(f):
3550 if repo.vfs.exists(f):
3546 return contmsg % msg, True
3551 return contmsg % msg, True
3547 workingctx = repo[None]
3552 workingctx = repo[None]
3548 dirty = any(repo.status()) or any(workingctx.sub(s).dirty()
3553 dirty = any(repo.status()) or any(workingctx.sub(s).dirty()
3549 for s in workingctx.substate)
3554 for s in workingctx.substate)
3550 if dirty:
3555 if dirty:
3551 return contmsg % _("hg commit"), False
3556 return contmsg % _("hg commit"), False
3552 return None, None
3557 return None, None
3553
3558
3554 def checkafterresolved(repo):
3559 def checkafterresolved(repo):
3555 '''Inform the user about the next action after completing hg resolve
3560 '''Inform the user about the next action after completing hg resolve
3556
3561
3557 If there's a matching afterresolvedstates, howtocontinue will yield
3562 If there's a matching afterresolvedstates, howtocontinue will yield
3558 repo.ui.warn as the reporter.
3563 repo.ui.warn as the reporter.
3559
3564
3560 Otherwise, it will yield repo.ui.note.
3565 Otherwise, it will yield repo.ui.note.
3561 '''
3566 '''
3562 msg, warning = howtocontinue(repo)
3567 msg, warning = howtocontinue(repo)
3563 if msg is not None:
3568 if msg is not None:
3564 if warning:
3569 if warning:
3565 repo.ui.warn("%s\n" % msg)
3570 repo.ui.warn("%s\n" % msg)
3566 else:
3571 else:
3567 repo.ui.note("%s\n" % msg)
3572 repo.ui.note("%s\n" % msg)
3568
3573
3569 def wrongtooltocontinue(repo, task):
3574 def wrongtooltocontinue(repo, task):
3570 '''Raise an abort suggesting how to properly continue if there is an
3575 '''Raise an abort suggesting how to properly continue if there is an
3571 active task.
3576 active task.
3572
3577
3573 Uses howtocontinue() to find the active task.
3578 Uses howtocontinue() to find the active task.
3574
3579
3575 If there's no task (repo.ui.note for 'hg commit'), it does not offer
3580 If there's no task (repo.ui.note for 'hg commit'), it does not offer
3576 a hint.
3581 a hint.
3577 '''
3582 '''
3578 after = howtocontinue(repo)
3583 after = howtocontinue(repo)
3579 hint = None
3584 hint = None
3580 if after[1]:
3585 if after[1]:
3581 hint = after[0]
3586 hint = after[0]
3582 raise error.Abort(_('no %s in progress') % task, hint=hint)
3587 raise error.Abort(_('no %s in progress') % task, hint=hint)
@@ -1,2206 +1,2204 b''
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import difflib
10 import difflib
11 import errno
11 import errno
12 import operator
12 import operator
13 import os
13 import os
14 import random
14 import random
15 import socket
15 import socket
16 import string
16 import string
17 import sys
17 import sys
18 import tempfile
18 import tempfile
19 import time
19 import time
20
20
21 from .i18n import _
21 from .i18n import _
22 from .node import (
22 from .node import (
23 bin,
23 bin,
24 hex,
24 hex,
25 nullhex,
25 nullhex,
26 nullid,
26 nullid,
27 nullrev,
27 nullrev,
28 short,
28 short,
29 )
29 )
30 from . import (
30 from . import (
31 bundle2,
31 bundle2,
32 changegroup,
32 changegroup,
33 cmdutil,
33 cmdutil,
34 color,
34 color,
35 context,
35 context,
36 dagparser,
36 dagparser,
37 dagutil,
37 dagutil,
38 encoding,
38 encoding,
39 error,
39 error,
40 exchange,
40 exchange,
41 extensions,
41 extensions,
42 filemerge,
42 filemerge,
43 fileset,
43 fileset,
44 formatter,
44 formatter,
45 hg,
45 hg,
46 localrepo,
46 localrepo,
47 lock as lockmod,
47 lock as lockmod,
48 merge as mergemod,
48 merge as mergemod,
49 obsolete,
49 obsolete,
50 phases,
50 phases,
51 policy,
51 policy,
52 pvec,
52 pvec,
53 pycompat,
53 pycompat,
54 registrar,
54 registrar,
55 repair,
55 repair,
56 revlog,
56 revlog,
57 revset,
57 revset,
58 revsetlang,
58 revsetlang,
59 scmutil,
59 scmutil,
60 setdiscovery,
60 setdiscovery,
61 simplemerge,
61 simplemerge,
62 smartset,
62 smartset,
63 sslutil,
63 sslutil,
64 streamclone,
64 streamclone,
65 templater,
65 templater,
66 treediscovery,
66 treediscovery,
67 upgrade,
67 upgrade,
68 util,
68 util,
69 vfs as vfsmod,
69 vfs as vfsmod,
70 )
70 )
71
71
72 release = lockmod.release
72 release = lockmod.release
73
73
74 command = registrar.command()
74 command = registrar.command()
75
75
76 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
76 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
77 def debugancestor(ui, repo, *args):
77 def debugancestor(ui, repo, *args):
78 """find the ancestor revision of two revisions in a given index"""
78 """find the ancestor revision of two revisions in a given index"""
79 if len(args) == 3:
79 if len(args) == 3:
80 index, rev1, rev2 = args
80 index, rev1, rev2 = args
81 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
81 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
82 lookup = r.lookup
82 lookup = r.lookup
83 elif len(args) == 2:
83 elif len(args) == 2:
84 if not repo:
84 if not repo:
85 raise error.Abort(_('there is no Mercurial repository here '
85 raise error.Abort(_('there is no Mercurial repository here '
86 '(.hg not found)'))
86 '(.hg not found)'))
87 rev1, rev2 = args
87 rev1, rev2 = args
88 r = repo.changelog
88 r = repo.changelog
89 lookup = repo.lookup
89 lookup = repo.lookup
90 else:
90 else:
91 raise error.Abort(_('either two or three arguments required'))
91 raise error.Abort(_('either two or three arguments required'))
92 a = r.ancestor(lookup(rev1), lookup(rev2))
92 a = r.ancestor(lookup(rev1), lookup(rev2))
93 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
93 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
94
94
95 @command('debugapplystreamclonebundle', [], 'FILE')
95 @command('debugapplystreamclonebundle', [], 'FILE')
96 def debugapplystreamclonebundle(ui, repo, fname):
96 def debugapplystreamclonebundle(ui, repo, fname):
97 """apply a stream clone bundle file"""
97 """apply a stream clone bundle file"""
98 f = hg.openpath(ui, fname)
98 f = hg.openpath(ui, fname)
99 gen = exchange.readbundle(ui, f, fname)
99 gen = exchange.readbundle(ui, f, fname)
100 gen.apply(repo)
100 gen.apply(repo)
101
101
102 @command('debugbuilddag',
102 @command('debugbuilddag',
103 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
103 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
104 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
104 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
105 ('n', 'new-file', None, _('add new file at each rev'))],
105 ('n', 'new-file', None, _('add new file at each rev'))],
106 _('[OPTION]... [TEXT]'))
106 _('[OPTION]... [TEXT]'))
107 def debugbuilddag(ui, repo, text=None,
107 def debugbuilddag(ui, repo, text=None,
108 mergeable_file=False,
108 mergeable_file=False,
109 overwritten_file=False,
109 overwritten_file=False,
110 new_file=False):
110 new_file=False):
111 """builds a repo with a given DAG from scratch in the current empty repo
111 """builds a repo with a given DAG from scratch in the current empty repo
112
112
113 The description of the DAG is read from stdin if not given on the
113 The description of the DAG is read from stdin if not given on the
114 command line.
114 command line.
115
115
116 Elements:
116 Elements:
117
117
118 - "+n" is a linear run of n nodes based on the current default parent
118 - "+n" is a linear run of n nodes based on the current default parent
119 - "." is a single node based on the current default parent
119 - "." is a single node based on the current default parent
120 - "$" resets the default parent to null (implied at the start);
120 - "$" resets the default parent to null (implied at the start);
121 otherwise the default parent is always the last node created
121 otherwise the default parent is always the last node created
122 - "<p" sets the default parent to the backref p
122 - "<p" sets the default parent to the backref p
123 - "*p" is a fork at parent p, which is a backref
123 - "*p" is a fork at parent p, which is a backref
124 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
124 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
125 - "/p2" is a merge of the preceding node and p2
125 - "/p2" is a merge of the preceding node and p2
126 - ":tag" defines a local tag for the preceding node
126 - ":tag" defines a local tag for the preceding node
127 - "@branch" sets the named branch for subsequent nodes
127 - "@branch" sets the named branch for subsequent nodes
128 - "#...\\n" is a comment up to the end of the line
128 - "#...\\n" is a comment up to the end of the line
129
129
130 Whitespace between the above elements is ignored.
130 Whitespace between the above elements is ignored.
131
131
132 A backref is either
132 A backref is either
133
133
134 - a number n, which references the node curr-n, where curr is the current
134 - a number n, which references the node curr-n, where curr is the current
135 node, or
135 node, or
136 - the name of a local tag you placed earlier using ":tag", or
136 - the name of a local tag you placed earlier using ":tag", or
137 - empty to denote the default parent.
137 - empty to denote the default parent.
138
138
139 All string-valued elements are either strictly alphanumeric, or must
139 All string-valued elements are either strictly alphanumeric, or must
140 be enclosed in double quotes ("..."), with "\\" as escape character.
140 be enclosed in double quotes ("..."), with "\\" as escape character.
141 """
141 """
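# Editorial aside (not part of the original source): a hedged example of the
# syntax described above. The text
#
#   +2 :base *base +1 /base
#
# creates two linear revisions, tags the second one 'base', forks a new child
# off 'base', adds one more revision on that fork, and finally merges it back
# with 'base'. dagparser.parsedag() turns such text into the 'n' (node),
# 'l' (local tag) and 'a' (branch) events consumed by the loops below.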
142
142
143 if text is None:
143 if text is None:
144 ui.status(_("reading DAG from stdin\n"))
144 ui.status(_("reading DAG from stdin\n"))
145 text = ui.fin.read()
145 text = ui.fin.read()
146
146
147 cl = repo.changelog
147 cl = repo.changelog
148 if len(cl) > 0:
148 if len(cl) > 0:
149 raise error.Abort(_('repository is not empty'))
149 raise error.Abort(_('repository is not empty'))
150
150
151 # determine number of revs in DAG
151 # determine number of revs in DAG
152 total = 0
152 total = 0
153 for type, data in dagparser.parsedag(text):
153 for type, data in dagparser.parsedag(text):
154 if type == 'n':
154 if type == 'n':
155 total += 1
155 total += 1
156
156
157 if mergeable_file:
157 if mergeable_file:
158 linesperrev = 2
158 linesperrev = 2
159 # make a file with k lines per rev
159 # make a file with k lines per rev
160 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
160 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
161 initialmergedlines.append("")
161 initialmergedlines.append("")
162
162
163 tags = []
163 tags = []
164
164
165 wlock = lock = tr = None
165 wlock = lock = tr = None
166 try:
166 try:
167 wlock = repo.wlock()
167 wlock = repo.wlock()
168 lock = repo.lock()
168 lock = repo.lock()
169 tr = repo.transaction("builddag")
169 tr = repo.transaction("builddag")
170
170
171 at = -1
171 at = -1
172 atbranch = 'default'
172 atbranch = 'default'
173 nodeids = []
173 nodeids = []
174 id = 0
174 id = 0
175 ui.progress(_('building'), id, unit=_('revisions'), total=total)
175 ui.progress(_('building'), id, unit=_('revisions'), total=total)
176 for type, data in dagparser.parsedag(text):
176 for type, data in dagparser.parsedag(text):
177 if type == 'n':
177 if type == 'n':
178 ui.note(('node %s\n' % str(data)))
178 ui.note(('node %s\n' % str(data)))
179 id, ps = data
179 id, ps = data
180
180
181 files = []
181 files = []
182 fctxs = {}
182 fctxs = {}
183
183
184 p2 = None
184 p2 = None
185 if mergeable_file:
185 if mergeable_file:
186 fn = "mf"
186 fn = "mf"
187 p1 = repo[ps[0]]
187 p1 = repo[ps[0]]
188 if len(ps) > 1:
188 if len(ps) > 1:
189 p2 = repo[ps[1]]
189 p2 = repo[ps[1]]
190 pa = p1.ancestor(p2)
190 pa = p1.ancestor(p2)
191 base, local, other = [x[fn].data() for x in (pa, p1,
191 base, local, other = [x[fn].data() for x in (pa, p1,
192 p2)]
192 p2)]
193 m3 = simplemerge.Merge3Text(base, local, other)
193 m3 = simplemerge.Merge3Text(base, local, other)
194 ml = [l.strip() for l in m3.merge_lines()]
194 ml = [l.strip() for l in m3.merge_lines()]
195 ml.append("")
195 ml.append("")
196 elif at > 0:
196 elif at > 0:
197 ml = p1[fn].data().split("\n")
197 ml = p1[fn].data().split("\n")
198 else:
198 else:
199 ml = initialmergedlines
199 ml = initialmergedlines
200 ml[id * linesperrev] += " r%i" % id
200 ml[id * linesperrev] += " r%i" % id
201 mergedtext = "\n".join(ml)
201 mergedtext = "\n".join(ml)
202 files.append(fn)
202 files.append(fn)
203 fctxs[fn] = context.memfilectx(repo, fn, mergedtext)
203 fctxs[fn] = context.memfilectx(repo, fn, mergedtext)
204
204
205 if overwritten_file:
205 if overwritten_file:
206 fn = "of"
206 fn = "of"
207 files.append(fn)
207 files.append(fn)
208 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
208 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
209
209
210 if new_file:
210 if new_file:
211 fn = "nf%i" % id
211 fn = "nf%i" % id
212 files.append(fn)
212 files.append(fn)
213 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
213 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
214 if len(ps) > 1:
214 if len(ps) > 1:
215 if not p2:
215 if not p2:
216 p2 = repo[ps[1]]
216 p2 = repo[ps[1]]
217 for fn in p2:
217 for fn in p2:
218 if fn.startswith("nf"):
218 if fn.startswith("nf"):
219 files.append(fn)
219 files.append(fn)
220 fctxs[fn] = p2[fn]
220 fctxs[fn] = p2[fn]
221
221
222 def fctxfn(repo, cx, path):
222 def fctxfn(repo, cx, path):
223 return fctxs.get(path)
223 return fctxs.get(path)
224
224
225 if len(ps) == 0 or ps[0] < 0:
225 if len(ps) == 0 or ps[0] < 0:
226 pars = [None, None]
226 pars = [None, None]
227 elif len(ps) == 1:
227 elif len(ps) == 1:
228 pars = [nodeids[ps[0]], None]
228 pars = [nodeids[ps[0]], None]
229 else:
229 else:
230 pars = [nodeids[p] for p in ps]
230 pars = [nodeids[p] for p in ps]
231 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
231 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
232 date=(id, 0),
232 date=(id, 0),
233 user="debugbuilddag",
233 user="debugbuilddag",
234 extra={'branch': atbranch})
234 extra={'branch': atbranch})
235 nodeid = repo.commitctx(cx)
235 nodeid = repo.commitctx(cx)
236 nodeids.append(nodeid)
236 nodeids.append(nodeid)
237 at = id
237 at = id
238 elif type == 'l':
238 elif type == 'l':
239 id, name = data
239 id, name = data
240 ui.note(('tag %s\n' % name))
240 ui.note(('tag %s\n' % name))
241 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
241 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
242 elif type == 'a':
242 elif type == 'a':
243 ui.note(('branch %s\n' % data))
243 ui.note(('branch %s\n' % data))
244 atbranch = data
244 atbranch = data
245 ui.progress(_('building'), id, unit=_('revisions'), total=total)
245 ui.progress(_('building'), id, unit=_('revisions'), total=total)
246 tr.close()
246 tr.close()
247
247
248 if tags:
248 if tags:
249 repo.vfs.write("localtags", "".join(tags))
249 repo.vfs.write("localtags", "".join(tags))
250 finally:
250 finally:
251 ui.progress(_('building'), None)
251 ui.progress(_('building'), None)
252 release(tr, lock, wlock)
252 release(tr, lock, wlock)
253
253
254 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
254 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
255 indent_string = ' ' * indent
255 indent_string = ' ' * indent
256 if all:
256 if all:
257 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
257 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
258 % indent_string)
258 % indent_string)
259
259
260 def showchunks(named):
260 def showchunks(named):
261 ui.write("\n%s%s\n" % (indent_string, named))
261 ui.write("\n%s%s\n" % (indent_string, named))
262 chain = None
262 chain = None
263 for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
263 for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
264 node = chunkdata['node']
264 node = chunkdata['node']
265 p1 = chunkdata['p1']
265 p1 = chunkdata['p1']
266 p2 = chunkdata['p2']
266 p2 = chunkdata['p2']
267 cs = chunkdata['cs']
267 cs = chunkdata['cs']
268 deltabase = chunkdata['deltabase']
268 deltabase = chunkdata['deltabase']
269 delta = chunkdata['delta']
269 delta = chunkdata['delta']
270 ui.write("%s%s %s %s %s %s %s\n" %
270 ui.write("%s%s %s %s %s %s %s\n" %
271 (indent_string, hex(node), hex(p1), hex(p2),
271 (indent_string, hex(node), hex(p1), hex(p2),
272 hex(cs), hex(deltabase), len(delta)))
272 hex(cs), hex(deltabase), len(delta)))
273 chain = node
273 chain = node
274
274
275 chunkdata = gen.changelogheader()
275 chunkdata = gen.changelogheader()
276 showchunks("changelog")
276 showchunks("changelog")
277 chunkdata = gen.manifestheader()
277 chunkdata = gen.manifestheader()
278 showchunks("manifest")
278 showchunks("manifest")
279 for chunkdata in iter(gen.filelogheader, {}):
279 for chunkdata in iter(gen.filelogheader, {}):
280 fname = chunkdata['filename']
280 fname = chunkdata['filename']
281 showchunks(fname)
281 showchunks(fname)
282 else:
282 else:
283 if isinstance(gen, bundle2.unbundle20):
283 if isinstance(gen, bundle2.unbundle20):
284 raise error.Abort(_('use debugbundle2 for this file'))
284 raise error.Abort(_('use debugbundle2 for this file'))
285 chunkdata = gen.changelogheader()
285 chunkdata = gen.changelogheader()
286 chain = None
286 chain = None
287 for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
287 for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
288 node = chunkdata['node']
288 node = chunkdata['node']
289 ui.write("%s%s\n" % (indent_string, hex(node)))
289 ui.write("%s%s\n" % (indent_string, hex(node)))
290 chain = node
290 chain = node
291
291
292 def _debugobsmarkers(ui, data, all=None, indent=0, **opts):
292 def _debugobsmarkers(ui, data, all=None, indent=0, **opts):
293 """display version and markers contained in 'data'"""
293 """display version and markers contained in 'data'"""
294 indent_string = ' ' * indent
294 indent_string = ' ' * indent
295 try:
295 try:
296 version, markers = obsolete._readmarkers(data)
296 version, markers = obsolete._readmarkers(data)
297 except error.UnknownVersion as exc:
297 except error.UnknownVersion as exc:
298 msg = "%sunsupported version: %s (%d bytes)\n"
298 msg = "%sunsupported version: %s (%d bytes)\n"
299 msg %= indent_string, exc.version, len(data)
299 msg %= indent_string, exc.version, len(data)
300 ui.write(msg)
300 ui.write(msg)
301 else:
301 else:
302 msg = "%sversion: %s (%d bytes)\n"
302 msg = "%sversion: %s (%d bytes)\n"
303 msg %= indent_string, version, len(data)
303 msg %= indent_string, version, len(data)
304 ui.write(msg)
304 ui.write(msg)
305 fm = ui.formatter('debugobsolete', opts)
305 fm = ui.formatter('debugobsolete', opts)
306 for rawmarker in sorted(markers):
306 for rawmarker in sorted(markers):
307 m = obsolete.marker(None, rawmarker)
307 m = obsolete.marker(None, rawmarker)
308 fm.startitem()
308 fm.startitem()
309 fm.plain(indent_string)
309 fm.plain(indent_string)
310 cmdutil.showmarker(fm, m)
310 cmdutil.showmarker(fm, m)
311 fm.end()
311 fm.end()
312
312
313 def _debugbundle2(ui, gen, all=None, **opts):
313 def _debugbundle2(ui, gen, all=None, **opts):
314 """lists the contents of a bundle2"""
314 """lists the contents of a bundle2"""
315 if not isinstance(gen, bundle2.unbundle20):
315 if not isinstance(gen, bundle2.unbundle20):
316 raise error.Abort(_('not a bundle2 file'))
316 raise error.Abort(_('not a bundle2 file'))
317 ui.write(('Stream params: %s\n' % repr(gen.params)))
317 ui.write(('Stream params: %s\n' % repr(gen.params)))
318 parttypes = opts.get('part_type', [])
318 parttypes = opts.get('part_type', [])
319 for part in gen.iterparts():
319 for part in gen.iterparts():
320 if parttypes and part.type not in parttypes:
320 if parttypes and part.type not in parttypes:
321 continue
321 continue
322 ui.write('%s -- %r\n' % (part.type, repr(part.params)))
322 ui.write('%s -- %r\n' % (part.type, repr(part.params)))
323 if part.type == 'changegroup':
323 if part.type == 'changegroup':
324 version = part.params.get('version', '01')
324 version = part.params.get('version', '01')
325 cg = changegroup.getunbundler(version, part, 'UN')
325 cg = changegroup.getunbundler(version, part, 'UN')
326 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
326 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
327 if part.type == 'obsmarkers':
327 if part.type == 'obsmarkers':
328 _debugobsmarkers(ui, part.read(), all=all, indent=4, **opts)
328 _debugobsmarkers(ui, part.read(), all=all, indent=4, **opts)
329
329
330 @command('debugbundle',
330 @command('debugbundle',
331 [('a', 'all', None, _('show all details')),
331 [('a', 'all', None, _('show all details')),
332 ('', 'part-type', [], _('show only the named part type')),
332 ('', 'part-type', [], _('show only the named part type')),
333 ('', 'spec', None, _('print the bundlespec of the bundle'))],
333 ('', 'spec', None, _('print the bundlespec of the bundle'))],
334 _('FILE'),
334 _('FILE'),
335 norepo=True)
335 norepo=True)
336 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
336 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
337 """lists the contents of a bundle"""
337 """lists the contents of a bundle"""
338 with hg.openpath(ui, bundlepath) as f:
338 with hg.openpath(ui, bundlepath) as f:
339 if spec:
339 if spec:
340 spec = exchange.getbundlespec(ui, f)
340 spec = exchange.getbundlespec(ui, f)
341 ui.write('%s\n' % spec)
341 ui.write('%s\n' % spec)
342 return
342 return
343
343
344 gen = exchange.readbundle(ui, f, bundlepath)
344 gen = exchange.readbundle(ui, f, bundlepath)
345 if isinstance(gen, bundle2.unbundle20):
345 if isinstance(gen, bundle2.unbundle20):
346 return _debugbundle2(ui, gen, all=all, **opts)
346 return _debugbundle2(ui, gen, all=all, **opts)
347 _debugchangegroup(ui, gen, all=all, **opts)
347 _debugchangegroup(ui, gen, all=all, **opts)
348
348
349 @command('debugcheckstate', [], '')
349 @command('debugcheckstate', [], '')
350 def debugcheckstate(ui, repo):
350 def debugcheckstate(ui, repo):
351 """validate the correctness of the current dirstate"""
351 """validate the correctness of the current dirstate"""
352 parent1, parent2 = repo.dirstate.parents()
352 parent1, parent2 = repo.dirstate.parents()
353 m1 = repo[parent1].manifest()
353 m1 = repo[parent1].manifest()
354 m2 = repo[parent2].manifest()
354 m2 = repo[parent2].manifest()
355 errors = 0
355 errors = 0
356 for f in repo.dirstate:
356 for f in repo.dirstate:
357 state = repo.dirstate[f]
357 state = repo.dirstate[f]
358 if state in "nr" and f not in m1:
358 if state in "nr" and f not in m1:
359 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
359 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
360 errors += 1
360 errors += 1
361 if state in "a" and f in m1:
361 if state in "a" and f in m1:
362 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
362 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
363 errors += 1
363 errors += 1
364 if state in "m" and f not in m1 and f not in m2:
364 if state in "m" and f not in m1 and f not in m2:
365 ui.warn(_("%s in state %s, but not in either manifest\n") %
365 ui.warn(_("%s in state %s, but not in either manifest\n") %
366 (f, state))
366 (f, state))
367 errors += 1
367 errors += 1
368 for f in m1:
368 for f in m1:
369 state = repo.dirstate[f]
369 state = repo.dirstate[f]
370 if state not in "nrm":
370 if state not in "nrm":
371 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
371 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
372 errors += 1
372 errors += 1
373 if errors:
373 if errors:
374 error = _(".hg/dirstate inconsistent with current parent's manifest")
374 error = _(".hg/dirstate inconsistent with current parent's manifest")
375 raise error.Abort(error)
375 raise error.Abort(error)
376
376
377 @command('debugcolor',
377 @command('debugcolor',
378 [('', 'style', None, _('show all configured styles'))],
378 [('', 'style', None, _('show all configured styles'))],
379 'hg debugcolor')
379 'hg debugcolor')
380 def debugcolor(ui, repo, **opts):
380 def debugcolor(ui, repo, **opts):
381 """show available colors, effects or styles"""
381 """show available colors, effects or styles"""
382 ui.write(('color mode: %s\n') % ui._colormode)
382 ui.write(('color mode: %s\n') % ui._colormode)
383 if opts.get('style'):
383 if opts.get('style'):
384 return _debugdisplaystyle(ui)
384 return _debugdisplaystyle(ui)
385 else:
385 else:
386 return _debugdisplaycolor(ui)
386 return _debugdisplaycolor(ui)
387
387
388 def _debugdisplaycolor(ui):
388 def _debugdisplaycolor(ui):
389 ui = ui.copy()
389 ui = ui.copy()
390 ui._styles.clear()
390 ui._styles.clear()
391 for effect in color._activeeffects(ui).keys():
391 for effect in color._activeeffects(ui).keys():
392 ui._styles[effect] = effect
392 ui._styles[effect] = effect
393 if ui._terminfoparams:
393 if ui._terminfoparams:
394 for k, v in ui.configitems('color'):
394 for k, v in ui.configitems('color'):
395 if k.startswith('color.'):
395 if k.startswith('color.'):
396 ui._styles[k] = k[6:]
396 ui._styles[k] = k[6:]
397 elif k.startswith('terminfo.'):
397 elif k.startswith('terminfo.'):
398 ui._styles[k] = k[9:]
398 ui._styles[k] = k[9:]
399 ui.write(_('available colors:\n'))
399 ui.write(_('available colors:\n'))
400 # sort labels with a '_' after the others to group the '_background' entries.
400 # sort labels with a '_' after the others to group the '_background' entries.
401 items = sorted(ui._styles.items(),
401 items = sorted(ui._styles.items(),
402 key=lambda i: ('_' in i[0], i[0], i[1]))
402 key=lambda i: ('_' in i[0], i[0], i[1]))
403 for colorname, label in items:
403 for colorname, label in items:
404 ui.write(('%s\n') % colorname, label=label)
404 ui.write(('%s\n') % colorname, label=label)
405
405
406 def _debugdisplaystyle(ui):
406 def _debugdisplaystyle(ui):
407 ui.write(_('available style:\n'))
407 ui.write(_('available style:\n'))
408 width = max(len(s) for s in ui._styles)
408 width = max(len(s) for s in ui._styles)
409 for label, effects in sorted(ui._styles.items()):
409 for label, effects in sorted(ui._styles.items()):
410 ui.write('%s' % label, label=label)
410 ui.write('%s' % label, label=label)
411 if effects:
411 if effects:
412 # 50
412 # 50
413 ui.write(': ')
413 ui.write(': ')
414 ui.write(' ' * (max(0, width - len(label))))
414 ui.write(' ' * (max(0, width - len(label))))
415 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
415 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
416 ui.write('\n')
416 ui.write('\n')
417
417
418 @command('debugcreatestreamclonebundle', [], 'FILE')
418 @command('debugcreatestreamclonebundle', [], 'FILE')
419 def debugcreatestreamclonebundle(ui, repo, fname):
419 def debugcreatestreamclonebundle(ui, repo, fname):
420 """create a stream clone bundle file
420 """create a stream clone bundle file
421
421
422 Stream bundles are special bundles that are essentially archives of
422 Stream bundles are special bundles that are essentially archives of
423 revlog files. They are commonly used for cloning very quickly.
423 revlog files. They are commonly used for cloning very quickly.
424 """
424 """
425 # TODO we may want to turn this into an abort when this functionality
425 # TODO we may want to turn this into an abort when this functionality
426 # is moved into `hg bundle`.
426 # is moved into `hg bundle`.
427 if phases.hassecret(repo):
427 if phases.hassecret(repo):
428 ui.warn(_('(warning: stream clone bundle will contain secret '
428 ui.warn(_('(warning: stream clone bundle will contain secret '
429 'revisions)\n'))
429 'revisions)\n'))
430
430
431 requirements, gen = streamclone.generatebundlev1(repo)
431 requirements, gen = streamclone.generatebundlev1(repo)
432 changegroup.writechunks(ui, gen, fname)
432 changegroup.writechunks(ui, gen, fname)
433
433
434 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
434 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
435
435
@command('debugdag',
    [('t', 'tags', None, _('use tags as labels')),
    ('b', 'branches', None, _('annotate with branch names')),
    ('', 'dots', None, _('use dots for runs')),
    ('s', 'spaces', None, _('separate elements by spaces'))],
    _('[OPTION]... [FILE [REV]...]'),
    optionalrepo=True)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
                             file_)
        revs = set((int(r) for r in revs))
        def events():
            for r in rlog:
                yield 'n', (r, list(p for p in rlog.parentrevs(r)
                                    if p != -1))
                if r in revs:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)
        def events():
            b = "default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        yield 'a', newb
                        b = newb
                yield 'n', (r, list(p for p in cl.parentrevs(r)
                                    if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield 'l', (r, l)
    else:
        raise error.Abort(_('need repo for changelog dag'))

    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
        ui.write("\n")

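# Illustrative invocations (a sketch, assuming a standard repository layout;
# actual output depends on the repository contents): the DAG text produced
# above can be generated for the current changelog, or for one revlog index:
#
#   $ hg debugdag --tags --branches
#   $ hg debugdag .hg/store/00changelog.i
#
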
@command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
        if rev is not None:
            raise error.CommandError('debugdata', _('invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError('debugdata', _('invalid arguments'))
    r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
    try:
        ui.write(r.revision(r.lookup(rev), raw=True))
    except KeyError:
        raise error.Abort(_('invalid revision identifier %s') % rev)

@command('debugdate',
    [('e', 'extended', None, _('try extended date formats'))],
    _('[-e] DATE [RANGE]'),
    norepo=True, optionalrepo=True)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        d = util.parsedate(date, util.extendeddateformats)
    else:
        d = util.parsedate(date)
    ui.write(("internal: %s %s\n") % d)
    ui.write(("standard: %s\n") % util.datestr(d))
    if range:
        m = util.matchdate(range)
        ui.write(("match: %s\n") % m(d[0]))

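# Illustrative invocation (a sketch; the exact numbers depend on the date
# given and on the timezone offset): util.parsedate returns a
# (unixtime, tzoffset) pair, which is what the "internal:" line prints.
#
#   $ hg debugdate '2006-02-01 13:00:30 -0500'
#   internal: <unixtime> <tzoffset>
#   standard: Wed Feb 01 13:00:30 2006 -0500
#
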
@command('debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``:       revision number
    :``chainid``:   delta chain identifier (numbered by unique base)
    :``chainlen``:  delta chain length to this revision
    :``prevrev``:   previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``:  compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                    (new delta chains typically start at ratio 2.00)
    :``lindist``:   linear distance from base revision in delta chain to end
                    of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                    how much unrelated data is needed to load this delta chain
    """
    r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
    index = r.index
    generaldelta = r.version & revlog.FLAG_GENERALDELTA

    def revinfo(rev):
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            if e[3] == e[5]:
                deltatype = 'p1'
            elif e[3] == e[6]:
                deltatype = 'p2'
            elif e[3] == rev - 1:
                deltatype = 'prev'
            elif e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'other'
        else:
            if e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter('debugdeltachain', opts)

    fm.plain('    rev  chain# chainlen     prev   delta       '
             'size    rawsize  chainsize     ratio   lindist extradist '
             'extraratio\n')

    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = r.start(chainbase)
        revstart = r.start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        chainratio = float(chainsize) / float(uncomp)
        extraratio = float(extradist) / float(chainsize)

        fm.startitem()
        fm.write('rev chainid chainlen prevrev deltatype compsize '
                 'uncompsize chainsize chainratio lindist extradist '
                 'extraratio',
                 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f\n',
                 rev, chainid, len(chain), prevrev, deltatype, comp,
                 uncomp, chainsize, chainratio, lineardist, extradist,
                 extraratio,
                 rev=rev, chainid=chainid, chainlen=len(chain),
                 prevrev=prevrev, deltatype=deltatype, compsize=comp,
                 uncompsize=uncomp, chainsize=chainsize,
                 chainratio=chainratio, lindist=lineardist,
                 extradist=extradist, extraratio=extraratio)

    fm.end()

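# Illustrative templatized invocation (a sketch; the keyword names are the
# ones documented in the docstring above, the values depend on the revlog
# being inspected). cmdutil.formatteropts provides -T/--template, so the
# fixed-width columns can be replaced by a custom layout:
#
#   $ hg debugdeltachain -m -T '{rev} {chainid} {chainlen} {deltatype}\n'
#
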
@command('debugdirstate|debugstate',
    [('', 'nodates', None, _('do not display the saved mtime')),
    ('', 'datesort', None, _('sort by saved mtime'))],
    _('[OPTION]...'))
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    nodates = opts.get('nodates')
    datesort = opts.get('datesort')

    timestr = ""
    if datesort:
        keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
    else:
        keyfunc = None # sort by filename
    for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
        if ent[3] == -1:
            timestr = 'unset               '
        elif nodates:
            timestr = 'set                 '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
                                    time.localtime(ent[3]))
        if ent[1] & 0o20000:
            mode = 'lnk'
        else:
            mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))

@command('debugdiscovery',
    [('', 'old', None, _('use old-style discovery')),
    ('', 'nonheads', None,
     _('use old-style discovery with non-heads included')),
    ] + cmdutil.remoteopts,
    _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
                                      opts.get('branch'))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(12323)

    def doit(localheads, remoteheads, remote=remote):
        if opts.get('old'):
            if localheads:
                raise error.Abort('cannot use localheads with old style '
                                  'discovery')
            if not util.safehasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                ui.write(("unpruned common: %s\n") %
                         " ".join(sorted(short(n) for n in common)))
                dag = dagutil.revlogdag(repo.changelog)
                all = dag.ancestorset(dag.internalizeall(common))
                common = dag.externalizeall(dag.headsetofconnecteds(all))
        else:
            common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
        common = set(common)
        rheads = set(hds)
        lheads = set(repo.heads())
        ui.write(("common heads: %s\n") %
                 " ".join(sorted(short(n) for n in common)))
        if lheads <= common:
            ui.write(("local is subset\n"))
        elif rheads <= common:
            ui.write(("remote is subset\n"))

    serverlogs = opts.get('serverlog')
    if serverlogs:
        for filename in serverlogs:
            with open(filename, 'r') as logfile:
                line = logfile.readline()
                while line:
                    parts = line.strip().split(';')
                    op = parts[1]
                    if op == 'cg':
                        pass
                    elif op == 'cgss':
                        doit(parts[2].split(' '), parts[3].split(' '))
                    elif op == 'unb':
                        doit(parts[3].split(' '), parts[2].split(' '))
                    line = logfile.readline()
    else:
        remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
                                                 opts.get('remote_head'))
        localrevs = opts.get('local_head')
        doit(localrevs, remoterevs)

@command('debugextensions', cmdutil.formatteropts, [], norepo=True)
def debugextensions(ui, **opts):
    '''show information about active extensions'''
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter('debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = pycompat.fsencode(extmod.__file__)
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', '').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write('name', '%s\n', extname)
        else:
            fm.write('name', '%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain('\n')
            elif not exttestedwith:
                fm.plain(_(' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(' (%s!)\n' % lasttestedversion)

        fm.condwrite(ui.verbose and extsource, 'source',
                     _('  location: %s\n'), extsource or "")

        if ui.verbose:
            fm.plain(_('  bundled: %s\n') % ['no', 'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
                     _('  tested with: %s\n'),
                     fm.formatlist(exttestedwith, name='ver'))

        fm.condwrite(ui.verbose and extbuglink, 'buglink',
                     _('  bug reporting: %s\n'), extbuglink or "")

    fm.end()

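# Illustrative invocation (a sketch): because the output above goes through a
# formatter, the same per-extension data can also be rendered as JSON:
#
#   $ hg debugextensions -v -T json
#
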
@command('debugfileset',
    [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
    _('[-r REV] FILESPEC'))
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    ctx = scmutil.revsingle(repo, opts.get('rev'), None)
    if ui.verbose:
        tree = fileset.parse(expr)
        ui.note(fileset.prettyformat(tree), "\n")

    for f in ctx.getfileset(expr):
        ui.write("%s\n" % f)

@command('debugfsinfo', [], _('[PATH]'), norepo=True)
def debugfsinfo(ui, path="."):
    """show information detected about current filesystem"""
    ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
    ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
    ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
    ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
    casesensitive = '(unknown)'
    try:
        with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
            casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
    except OSError:
        pass
    ui.write(('case-sensitive: %s\n') % casesensitive)

@command('debuggetbundle',
    [('H', 'head', [], _('id of head node'), _('ID')),
    ('C', 'common', [], _('id of common node'), _('ID')),
    ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
    _('REPO FILE [-H|-C ID]...'),
    norepo=True)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable('getbundle'):
        raise error.Abort("getbundle() not supported by target repository")
    args = {}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args['bundlecaps'] = None
    bundle = repo.getbundle('debug', **args)

    bundletype = opts.get('type', 'bzip2').lower()
    btypes = {'none': 'HG10UN',
              'bzip2': 'HG10BZ',
              'gzip': 'HG10GZ',
              'bundle2': 'HG20'}
    bundletype = btypes.get(bundletype)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_('unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)

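# Illustrative invocation (a sketch; <40-hex-node> stands for a full
# 40-character hex node id, as required by the docstring above, and the URL is
# a placeholder):
#
#   $ hg debuggetbundle http://example.com/repo out.hg -H <40-hex-node> -t bzip2
#
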
@command('debugignore', [], '[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write("%s\n" % repr(ignore))
    else:
        for f in files:
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != '.':
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    for p in util.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_("%s is ignored\n") % f)
                else:
                    ui.write(_("%s is ignored because of "
                               "containing folder %s\n")
                             % (f, ignored))
                ignorefile, lineno, line = ignoredata
                ui.write(_("(ignore rule in %s, line %d: '%s')\n")
                         % (ignorefile, lineno, line))
            else:
                ui.write(_("%s is not ignored\n") % f)

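# Illustrative invocations (a sketch, assuming a repository with an .hgignore;
# the path argument is a placeholder):
#
#   $ hg debugignore                # print the combined ignore matcher
#   $ hg debugignore build/output.o # report whether the path is ignored and why
#
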
@command('debugindex', cmdutil.debugrevlogopts +
    [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
    _('[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True)
def debugindex(ui, repo, file_=None, **opts):
    """dump the contents of an index file"""
    r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
    format = opts.get('format', 0)
    if format not in (0, 1):
        raise error.Abort(_("unknown format %d") % format)

    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    if generaldelta:
        basehdr = ' delta'
    else:
        basehdr = '  base'

    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        idlen = len(shortfn(r.node(i)))
        break

    if format == 0:
        ui.write(("   rev    offset  length " + basehdr + " linkrev"
                  " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
    elif format == 1:
        ui.write(("   rev flag   offset   length"
                  "     size " + basehdr + "   link     p1     p2"
                  " %s\n") % "nodeid".rjust(idlen))

    for i in r:
        node = r.node(i)
        if generaldelta:
            base = r.deltaparent(i)
        else:
            base = r.chainbase(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                pp = [nullid, nullid]
            ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                    i, r.start(i), r.length(i), base, r.linkrev(i),
                    shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
        elif format == 1:
            pr = r.parentrevs(i)
            ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
                    i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
                    base, r.linkrev(i), pr[0], pr[1], shortfn(node)))

@command('debugindexdot', cmdutil.debugrevlogopts,
    _('-c|-m|FILE'), optionalrepo=True)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
    ui.write(("digraph G {\n"))
    for i in r:
        node = r.node(i)
        pp = r.parents(node)
        ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
        if pp[1] != nullid:
            ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
    ui.write("}\n")

@command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
def debuginstall(ui, **opts):
    '''test Mercurial installation

    Returns 0 on success.
    '''

    def writetemp(contents):
        (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
        f = os.fdopen(fd, pycompat.sysstr("wb"))
        f.write(contents)
        f.close()
        return name

    problems = 0

    fm = ui.formatter('debuginstall', opts)
    fm.startitem()

    # encoding
    fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
    err = None
    try:
        encoding.fromlocal("test")
    except error.Abort as inst:
        err = inst
        problems += 1
    fm.condwrite(err, 'encodingerror', _(" %s\n"
                 " (check that your locale is properly set)\n"), err)

    # Python
    fm.write('pythonexe', _("checking Python executable (%s)\n"),
             pycompat.sysexecutable)
    fm.write('pythonver', _("checking Python version (%s)\n"),
             ("%d.%d.%d" % sys.version_info[:3]))
    fm.write('pythonlib', _("checking Python lib (%s)...\n"),
             os.path.dirname(pycompat.fsencode(os.__file__)))

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add('sni')

    fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
             fm.formatlist(sorted(security), name='protocol',
                           fmt='%s', sep=','))

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if 'tls1.2' not in security:
        fm.plain(_('  TLS 1.2 not supported by Python install; '
                   'network connections lack modern security\n'))
    if 'sni' not in security:
        fm.plain(_('  SNI not supported by Python install; may have '
                   'connectivity issues with some servers\n'))

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write('hgver', _("checking Mercurial version (%s)\n"),
             hgver.split('+')[0])
    fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
             '+'.join(hgver.split('+')[1:]))

    # compiled modules
    fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
             policy.policy)
    fm.write('hgmodules', _("checking installed modules (%s)...\n"),
             os.path.dirname(pycompat.fsencode(__file__)))

    if policy.policy in ('c', 'allow'):
        err = None
        try:
            from .cext import (
                base85,
                bdiff,
                mpatch,
                osutil,
            )
            dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
        except Exception as inst:
            err = inst
            problems += 1
        fm.condwrite(err, 'extensionserror', " %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write('compengines', _('checking registered compression engines (%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines),
                           name='compengine', fmt='%s', sep=', '))
    fm.write('compenginesavail', _('checking available compression engines '
                                   '(%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines
                                  if e.available()),
                           name='compengine', fmt='%s', sep=', '))
    wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
    fm.write('compenginesserver', _('checking available compression engines '
                                    'for wire protocol (%s)\n'),
             fm.formatlist([e.name() for e in wirecompengines
                            if e.wireprotosupport()],
                           name='compengine', fmt='%s', sep=', '))

    # templates
    p = templater.templatepaths()
    fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
    fm.condwrite(not p, '', _(" no template directories found\n"))
    if p:
        m = templater.templatepath("map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = inst
                p = None
            fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
        else:
            p = None
        fm.condwrite(p, 'defaulttemplate',
                     _("checking default template (%s)\n"), m)
        fm.condwrite(not m, 'defaulttemplatenotfound',
                     _(" template '%s' not found\n"), "default")
    if not p:
        problems += 1
    fm.condwrite(not p, '',
                 _(" (templates seem to have been installed incorrectly)\n"))

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    fm.write('editor', _("checking commit editor... (%s)\n"), editor)
    cmdpath = util.findexe(pycompat.shlexsplit(editor)[0])
    fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
                 _(" No commit editor set and can't find %s in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editor == 'vi' and editor)
    fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
                 _(" Can't find editor '%s' in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editor)
    if not cmdpath and editor != 'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = e
        problems += 1

    fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
    fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
                 " (specify a username in your configuration file)\n"), err)

    fm.condwrite(not problems, '',
                 _("no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(problems, 'problems',
                 _("%d problems detected,"
                   " please check your install!\n"), problems)
    fm.end()

    return problems

@command('debugknown', [], _('REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable('known'):
        raise error.Abort("known() not supported by target repository")
    flags = repo.known([bin(s) for s in ids])
    ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))

@command('debuglabelcomplete', [], _('LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    debugnamecomplete(ui, repo, *args)

@command('debuglocks',
    [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
    ('W', 'force-wlock', None,
     _('free the working state lock (DANGEROUS)'))],
    _('[OPTION]...'))
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Returns 0 if no locks are held.

    """

    if opts.get('force_lock'):
        repo.svfs.unlink('lock')
    if opts.get('force_wlock'):
        repo.vfs.unlink('wlock')
    if opts.get('force_lock') or opts.get('force_wlock'):
        return 0

    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            l.release()
        else:
            try:
                stat = vfs.lstat(name)
                age = now - stat.st_mtime
                user = util.username(stat.st_uid)
                locker = vfs.readlock(name)
                if ":" in locker:
                    host, pid = locker.split(':')
                    if host == socket.gethostname():
                        locker = 'user %s, process %s' % (user, pid)
                    else:
                        locker = 'user %s, process %s, host %s' \
                                 % (user, pid, host)
                ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
                return 1
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise

        ui.write(("%-6s free\n") % (name + ":"))
        return 0

    held += report(repo.svfs, "lock", repo.lock)
    held += report(repo.vfs, "wlock", repo.wlock)

    return held

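# Illustrative invocations (a sketch): with no options the held/free state of
# 'lock' and 'wlock' is reported as shown above; -L/-W forcibly remove the
# lock files, which is dangerous on a repository that is actually in use.
#
#   $ hg debuglocks
#   $ hg debuglocks -W
#
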
@command('debugmergestate', [], '')
def debugmergestate(ui, repo, *args):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""
    def _hashornull(h):
        if h == nullhex:
            return 'null'
        else:
            return h

    def printrecords(version):
        ui.write(('* version %s records\n') % version)
        if version == 1:
            records = v1records
        else:
            records = v2records

        for rtype, record in records:
            # pretty print some record types
            if rtype == 'L':
                ui.write(('local: %s\n') % record)
            elif rtype == 'O':
                ui.write(('other: %s\n') % record)
            elif rtype == 'm':
                driver, mdstate = record.split('\0', 1)
                ui.write(('merge driver: %s (state "%s")\n')
                         % (driver, mdstate))
            elif rtype in 'FDC':
                r = record.split('\0')
                f, state, hash, lfile, afile, anode, ofile = r[0:7]
                if version == 1:
                    onode = 'not stored in v1 format'
                    flags = r[7]
                else:
                    onode, flags = r[7:9]
                ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
                         % (f, rtype, state, _hashornull(hash)))
                ui.write(('  local path: %s (flags "%s")\n') % (lfile, flags))
                ui.write(('  ancestor path: %s (node %s)\n')
                         % (afile, _hashornull(anode)))
                ui.write(('  other path: %s (node %s)\n')
                         % (ofile, _hashornull(onode)))
            elif rtype == 'f':
                filename, rawextras = record.split('\0', 1)
                extras = rawextras.split('\0')
                i = 0
                extrastrings = []
                while i < len(extras):
                    extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
                    i += 2

                ui.write(('file extras: %s (%s)\n')
                         % (filename, ', '.join(extrastrings)))
            elif rtype == 'l':
                labels = record.split('\0', 2)
                labels = [l for l in labels if len(l) > 0]
                ui.write(('labels:\n'))
                ui.write(('  local: %s\n' % labels[0]))
                ui.write(('  other: %s\n' % labels[1]))
                if len(labels) > 2:
                    ui.write(('  base: %s\n' % labels[2]))
            else:
                ui.write(('unrecognized entry: %s\t%s\n')
                         % (rtype, record.replace('\0', '\t')))

    # Avoid mergestate.read() since it may raise an exception for unsupported
    # merge state records. We shouldn't be doing this, but this is OK since this
    # command is pretty low-level.
    ms = mergemod.mergestate(repo)

    # sort so that reasonable information is on top
    v1records = ms._readrecordsv1()
    v2records = ms._readrecordsv2()
    order = 'LOml'
    def key(r):
        idx = order.find(r[0])
        if idx == -1:
            return (1, r[1])
        else:
            return (0, idx)
    v1records.sort(key=key)
    v2records.sort(key=key)

    if not v1records and not v2records:
        ui.write(('no merge state found\n'))
    elif not v2records:
        ui.note(('no version 2 merge state\n'))
        printrecords(1)
    elif ms._v1v2match(v1records, v2records):
        ui.note(('v1 and v2 states match: using v2\n'))
        printrecords(2)
    else:
        ui.note(('v1 and v2 states mismatch: using v1\n'))
        printrecords(1)
        if ui.verbose:
            printrecords(2)

@command('debugnamecomplete', [], _('NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    names = set()
    # since we previously only listed open branches, we will handle that
    # specially (after this for loop)
    for name, ns in repo.names.iteritems():
        if name != 'branches':
            names.update(ns.listnames(repo))
    names.update(tag for (tag, heads, tip, closed)
                 in repo.branchmap().iterbranches() if not closed)
    completions = set()
    if not args:
        args = ['']
    for a in args:
        completions.update(n for n in names if n.startswith(a))
    ui.write('\n'.join(sorted(completions)))
    ui.write('\n')

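# Hypothetical usage sketch (an editorial note, not part of the original
# module): the command below both creates and inspects markers. Assuming two
# changesets whose full hex node ids are OLD and NEW, one might run:
#
#   hg debugobsolete --index        # list markers together with their indices
#   hg debugobsolete OLD NEW        # record that OLD was rewritten into NEW
#   hg debugobsolete --delete 0     # drop the marker at index 0
#
# OLD and NEW are placeholders; real invocations need full 40-digit hashes.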
@command('debugobsolete',
        [('', 'flags', 0, _('markers flag')),
         ('', 'record-parents', False,
          _('record parent information for the precursor')),
         ('r', 'rev', [], _('display markers relevant to REV')),
         ('', 'exclusive', False, _('restrict display to markers only '
                                    'relevant to REV')),
         ('', 'index', False, _('display index of the marker')),
         ('', 'delete', [], _('delete markers specified by indices')),
        ] + cmdutil.commitopts2 + cmdutil.formatteropts,
         _('[OBSOLETED [REPLACEMENT ...]]'))
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    def parsenodeid(s):
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.Abort('changeset references must be full hexadecimal '
                              'node identifiers')

    if opts.get('delete'):
        indices = []
        for v in opts.get('delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.Abort(_('invalid index value: %r') % v,
                                  hint=_('use integers for indices'))

        if repo.currenttransaction():
            raise error.Abort(_('cannot delete obsmarkers in the middle '
                                'of a transaction.'))

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_('deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        if opts['rev']:
            raise error.Abort('cannot select revision when creating marker')
        metadata = {}
        metadata['user'] = opts['user'] or ui.username()
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction('debugobsolete')
            try:
                date = opts.get('date')
                if date:
                    date = util.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts['record_parents']:
                    if prec not in repo.unfiltered():
                        raise error.Abort('cannot use --record-parents on '
                                          'unknown changesets')
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(tr, prec, succs, opts['flags'],
                                     parents=parents, date=date,
                                     metadata=metadata, ui=ui)
                tr.close()
            except ValueError as exc:
                raise error.Abort(_('bad obsmarker input: %s') % exc)
            finally:
                tr.release()
        finally:
            l.release()
    else:
        if opts['rev']:
            revs = scmutil.revrange(repo, opts['rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(obsolete.getmarkers(repo, nodes=nodes,
                                               exclusive=opts['exclusive']))
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsolete.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get('rev') and opts.get('index'):
            markerstoiter = obsolete.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter('debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # A marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set of
                # markers we want to display (markers). This can happen if
                # both --index and --rev options are provided, and thus we
                # need to iterate over all of the markers to get the correct
                # indices, but only display the ones that are relevant to the
                # --rev value.
                continue
            fm.startitem()
            ind = i if opts.get('index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()

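# Illustrative example (file names are assumptions, not taken from the
# original source): with tracked files "src/main.py" and "src/util/helpers.py"
# a shell completion hook could call
#
#   hg debugpathcomplete src/         ->  src/main.py and src/util
#   hg debugpathcomplete --full src/  ->  both full paths
#
# since completion normally stops at the next path segment unless --full is
# given.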
@command('debugpathcomplete',
         [('f', 'full', None, _('complete an entire path')),
          ('n', 'normal', None, _('show only normal files')),
          ('a', 'added', None, _('show only added files')),
          ('r', 'removed', None, _('show only removed files'))],
         _('FILESPEC...'))
def debugpathcomplete(ui, repo, *specs, **opts):
    '''complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used.'''

    def complete(path, acceptable):
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += '/'
        spec = spec[len(rootdir):]
        fixpaths = pycompat.ossep != '/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, '/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.iteritems():
            if f.startswith(spec) and st[0] in acceptable:
                if fixpaths:
                    f = f.replace('/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    acceptable = ''
    if opts['normal']:
        acceptable += 'nm'
    if opts['added']:
        acceptable += 'a'
    if opts['removed']:
        acceptable += 'r'
    cwd = repo.getcwd()
    if not specs:
        specs = ['.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or 'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write('\n')

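# Hedged usage sketch (the file names are made up for illustration): to see
# which tool would be picked for a binary and a text file without actually
# running a merge, one could run
#
#   hg debugpickmergetool image.png source.c
#
# and expect output in the "FILE = MERGETOOL" style described in the
# docstring below.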
@command('debugpickmergetool',
        [('r', 'rev', '', _('check for files in this revision'), _('REV')),
         ('', 'changedelete', None, _('emulate merging change and delete')),
        ] + cmdutil.walkopts + cmdutil.mergetoolopts,
        _('[PATTERN]...'),
        inferrepo=True)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for the specified file

    As described in :hg:`help merge-tools`, Mercurial examines the
    configurations below in this order to decide which merge tool is
    chosen for the specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reasons only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out the examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actually updating to it.

    With --debug, this command also shows warning messages while matching
    against ``merge-patterns`` and so on. It is recommended to use this
    option with explicit file patterns and/or -I/-X options, because this
    option increases the amount of output per file according to the
    configurations in hgrc.

    With -v/--verbose, this command shows the configurations below first
    (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If a merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such a case, the information
    above is useful to understand why that merge tool was chosen.
    """
    overrides = {}
    if opts['tool']:
        overrides[('ui', 'forcemerge')] = opts['tool']
        ui.note(('with --tool %r\n') % (opts['tool']))

    with ui.configoverride(overrides, 'debugmergepatterns'):
        hgmerge = encoding.environ.get("HGMERGE")
        if hgmerge is not None:
            ui.note(('with HGMERGE=%r\n') % (hgmerge))
        uimerge = ui.config("ui", "merge")
        if uimerge:
            ui.note(('with ui.merge=%r\n') % (uimerge))

        ctx = scmutil.revsingle(repo, opts.get('rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts['changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            try:
                if not ui.debugflag:
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(repo, ui, path,
                                                     fctx.isbinary(),
                                                     'l' in fctx.flags(),
                                                     changedelete)
            finally:
                if not ui.debugflag:
                    ui.popbuffer()
            ui.write(('%s = %s\n') % (path, tool))

@command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''

    target = hg.peer(ui, {}, repopath)
    if keyinfo:
        key, old, new = keyinfo
        r = target.pushkey(namespace, key, old, new)
        ui.status(str(r) + '\n')
        return not r
    else:
        for k, v in sorted(target.listkeys(namespace).iteritems()):
            ui.write("%s\t%s\n" % (util.escapestr(k),
                                   util.escapestr(v)))

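# Editorial gloss, not taken from the original source: to the best of our
# reading of the pvec module, the relation symbols printed below mean
# "=" identical vectors, ">" a descends from b, "<" a is an ancestor of b,
# and "|" the two revisions are unrelated.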
@command('debugpvec', [], _('A B'))
def debugpvec(ui, repo, a, b=None):
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = "="
    elif pa > pb:
        rel = ">"
    elif pa < pb:
        rel = "<"
    elif pa | pb:
        rel = "|"
    ui.write(_("a: %s\n") % pa)
    ui.write(_("b: %s\n") % pb)
    ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
             (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
              pa.distance(pb), rel))

@command('debugrebuilddirstate|debugrebuildstate',
    [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
     ('', 'minimal', None, _('only rebuild files that are inconsistent with '
                             'the working copy parent')),
    ],
    _('[-r REV]'))
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look for the given revision

    If no revision is specified, the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to
    be tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            manifestonly = manifestfiles - dirstatefiles
            dsonly = dirstatefiles - manifestfiles
            dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)

@command('debugrebuildfncache', [], '')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    repair.rebuildfncache(ui, repo)

@command('debugrename',
    [('r', 'rev', '', _('revision to debug'), _('REV'))],
    _('[-r REV] FILE'))
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    ctx = scmutil.revsingle(repo, opts.get('rev'))
    m = scmutil.match(ctx, (file1,) + pats, opts)
    for abs in ctx.walk(m):
        fctx = ctx[abs]
        o = fctx.filelog().renamed(fctx.filenode())
        rel = m.rel(abs)
        if o:
            ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
        else:
            ui.write(_("%s not renamed\n") % rel)

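# Example invocations (a sketch; the -c/-m flags come from
# cmdutil.debugrevlogopts and select the changelog or the manifest):
#
#   hg debugrevlog -c          # statistics for the changelog
#   hg debugrevlog -m          # statistics for the manifest
#   hg debugrevlog FILE        # statistics for a file's filelog
#   hg debugrevlog -c --dump   # raw per-revision index data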
@command('debugrevlog', cmdutil.debugrevlogopts +
    [('d', 'dump', False, _('dump index data'))],
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)

    if opts.get("dump"):
        numrevs = len(r)
        ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
                  " rawsize totalsize compression heads chainlen\n"))
        ts = 0
        heads = set()

        for rev in xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                     "%11d %5d %8d\n" %
                     (rev, p1, p2, r.start(rev), r.end(rev),
                      r.start(dbase), r.start(cbase),
                      r.start(p1), r.start(p2),
                      rs, ts, compression, len(heads), clen))
        return 0

    v = r.version
    format = v & 0xFFFF
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append('inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append('generaldelta')
    if not flags:
        flags = ['(none)']

    nummerges = 0
    numfull = 0
    numprev = 0
    nump1 = 0
    nump2 = 0
    numother = 0
    nump1prev = 0
    nump2prev = 0
    chainlengths = []

    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            chainlengths.append(0)
            numfull += 1
            addsize(size, fullsize)
        else:
            chainlengths.append(chainlengths[delta] + 1)
            addsize(size, deltasize)
            if delta == rev - 1:
                numprev += 1
                if delta == p1:
                    nump1prev += 1
                elif delta == p2:
                    nump2prev += 1
            elif delta == p1:
                nump1 += 1
            elif delta == p2:
                nump2 += 1
            elif delta != nullrev:
                numother += 1

        # Obtain data on the raw chunks in the revlog.
        segment = r._getsegmentforrevs(rev, rev)[1]
        if segment:
            chunktype = segment[0]
        else:
            chunktype = 'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, deltasize):
        if size[0] is None:
            size[0] = 0

    numdeltas = numrevs - numfull
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    fullsize[2] /= numfull
    deltatotal = deltasize[2]
    if numrevs - numfull > 0:
        deltasize[2] /= numrevs - numfull
    totalsize = fulltotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    basedfmtstr = '%%%dd\n'
    basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        return basedfmtstr % len(str(max))
    def pcfmtstr(max, padding=0):
        return basepcfmtstr % (len(str(max)), ' ' * padding)

    def pcfmt(value, total):
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

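    # Worked example for the helpers above (editorial note, not original
    # code): pcfmt(25, 100) returns (25, 25.0), which a format string built
    # by pcfmtstr renders roughly as "25 (25.00%)"; dfmtstr only pads the
    # integer column to the width of its largest value.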
    ui.write(('format : %d\n') % format)
    ui.write(('flags  : %s\n') % ', '.join(flags))

    ui.write('\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.write(('revisions     : ') + fmt2 % numrevs)
    ui.write(('    merges    : ') + fmt % pcfmt(nummerges, numrevs))
    ui.write(('    normal    : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
    ui.write(('revisions     : ') + fmt2 % numrevs)
    ui.write(('    full      : ') + fmt % pcfmt(numfull, numrevs))
    ui.write(('    deltas    : ') + fmt % pcfmt(numdeltas, numrevs))
    ui.write(('revision size : ') + fmt2 % totalsize)
    ui.write(('    full      : ') + fmt % pcfmt(fulltotal, totalsize))
    ui.write(('    deltas    : ') + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        if chunktype == 'empty':
            return '    %s     : ' % chunktype
        elif chunktype in string.ascii_letters:
            return '    0x%s (%s)  : ' % (hex(chunktype), chunktype)
        else:
            return '    0x%s      : ' % hex(chunktype)

    ui.write('\n')
    ui.write(('chunks        : ') + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.write(('chunks size   : ') + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write('\n')
    fmt = dfmtstr(max(avgchainlen, compratio))
    ui.write(('avg chain length  : ') + fmt % avgchainlen)
    ui.write(('max chain length  : ') + fmt % maxchainlen)
    ui.write(('compression ratio : ') + fmt % compratio)

    if format > 0:
        ui.write('\n')
        ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
                 % tuple(datasize))
        ui.write(('full revision size (min/max/avg)     : %d / %d / %d\n')
                 % tuple(fullsize))
        ui.write(('delta size (min/max/avg)             : %d / %d / %d\n')
                 % tuple(deltasize))

    if numdeltas > 0:
        ui.write('\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.write(('deltas against prev  : ') + fmt % pcfmt(numprev, numdeltas))
        if numprev > 0:
            ui.write(('    where prev = p1  : ') + fmt2 % pcfmt(nump1prev,
                                                                numprev))
            ui.write(('    where prev = p2  : ') + fmt2 % pcfmt(nump2prev,
                                                                numprev))
            ui.write(('    other            : ') + fmt2 % pcfmt(numoprev,
                                                                numprev))
        if gdelta:
            ui.write(('deltas against p1    : ')
                     + fmt % pcfmt(nump1, numdeltas))
            ui.write(('deltas against p2    : ')
                     + fmt % pcfmt(nump2, numdeltas))
            ui.write(('deltas against other : ') + fmt % pcfmt(numother,
                                                               numdeltas))

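# Hypothetical example (the revset text and flags are illustrative only):
#
#   hg debugrevspec -p all 'keyword(bug) and not merge()'
#
# prints the parsed, expanded, concatenated, analyzed and optimized trees in
# turn, followed by the matching revisions.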
@command('debugrevspec',
    [('', 'optimize', None,
      _('print parsed tree after optimizing (DEPRECATED)')),
     ('', 'show-revs', True, _('print list of result revisions (default)')),
     ('s', 'show-set', None, _('print internal representation of result set')),
     ('p', 'show-stage', [],
      _('print parsed tree at the given stage'), _('NAME')),
     ('', 'no-optimized', False, _('evaluate tree without optimization')),
     ('', 'verify-optimized', False, _('verify optimized result')),
    ],
    ('REVSPEC'))
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use the -p/--show-stage option to print the parsed tree at the given
    stages. Use -p all to print the tree at every stage.

    Use the --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the
    unoptimized one. Returns 1 if the optimized result differs.
    """
    stages = [
        ('parsed', lambda tree: tree),
        ('expanded', lambda tree: revsetlang.expandaliases(ui, tree)),
        ('concatenated', revsetlang.foldconcat),
        ('analyzed', revsetlang.analyze),
        ('optimized', revsetlang.optimize),
    ]
    if opts['no_optimized']:
        stages = stages[:-1]
    if opts['verify_optimized'] and opts['no_optimized']:
        raise error.Abort(_('cannot use --verify-optimized with '
                            '--no-optimized'))
    stagenames = set(n for n, f in stages)

    showalways = set()
    showchanged = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add('parsed')
        showchanged.update(['expanded', 'concatenated'])
        if opts['optimize']:
            showalways.add('optimized')
    if opts['show_stage'] and opts['optimize']:
        raise error.Abort(_('cannot use --optimize with --show-stage'))
    if opts['show_stage'] == ['all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_('invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=repo.__contains__)
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts['show_stage'] or n != 'parsed':
                ui.write(("* %s:\n") % n)
            ui.write(revsetlang.prettyformat(tree), "\n")
            printedtree = tree

    if opts['verify_optimized']:
        arevs = revset.makematcher(treebystage['analyzed'])(repo)
        brevs = revset.makematcher(treebystage['optimized'])(repo)
        if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
            ui.write(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
            ui.write(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        ui.write(('--- analyzed\n'), label='diff.file_a')
        ui.write(('+++ optimized\n'), label='diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write('-%s\n' % c, label='diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write('+%s\n' % c, label='diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(' %s\n' % c)
        return 1

    func = revset.makematcher(tree)
    revs = func(repo)
    if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
        ui.write(("* set:\n"), smartset.prettyformat(revs), "\n")
    if not opts['show_revs']:
        return
    for c in revs:
        ui.write("%s\n" % c)

@command('debugsetparents', [], _('REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care. For example, neither the working directory nor the
    dirstate is updated, so file status may be incorrect after running this
    command.

    Returns 0 on success.
    """

    r1 = scmutil.revsingle(repo, rev1).node()
    r2 = scmutil.revsingle(repo, rev2, 'null').node()

    with repo.wlock():
        repo.setparents(r1, r2)

@command('debugsub',
    [('r', 'rev', '',
      _('revision to check'), _('REV'))],
    _('[-r REV] [REV]'))
def debugsub(ui, repo, rev=None):
    ctx = scmutil.revsingle(repo, rev, None)
    for k, v in sorted(ctx.substate.items()):
        ui.write(('path %s\n') % k)
        ui.write((' source   %s\n') % v[0])
        ui.write((' revision %s\n') % v[1])

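# Editorial usage note (not from the original source): running
#
#   hg debugsuccessorssets 'obsolete()'
#
# is one plausible way to inspect how every obsolete changeset was rewritten,
# since the command accepts any revset and prints one block per revision.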
@command('debugsuccessorssets',
    [],
    _('[REV]'))
def debugsuccessorssets(ui, repo, *revs):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only.

    In most cases a changeset A has a single successors set containing a
    single successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors is called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    ctx2str = str
    node2str = short
    if ui.debug():
        def ctx2str(ctx):
            return ctx.hex()
        node2str = hex
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write('%s\n' % ctx2str(ctx))
        for succsset in obsolete.successorssets(repo, ctx.node(), cache):
            if succsset:
                ui.write('    ')
                ui.write(node2str(succsset[0]))
                for node in succsset[1:]:
                    ui.write(' ')
                    ui.write(node2str(node))
            ui.write('\n')

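# Hedged example (the template text is illustrative): with a repository
# checked out, something like
#
#   hg debugtemplate -r . '{rev}:{node|short} {desc|firstline}\n'
#
# exercises the log-template path below, while omitting -r exercises the
# generic-template path.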
2084 @command('debugtemplate',
2084 @command('debugtemplate',
2085 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2085 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2086 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2086 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2087 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2087 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2088 optionalrepo=True)
2088 optionalrepo=True)
2089 def debugtemplate(ui, repo, tmpl, **opts):
2089 def debugtemplate(ui, repo, tmpl, **opts):
2090 """parse and apply a template
2090 """parse and apply a template
2091
2091
2092 If -r/--rev is given, the template is processed as a log template and
2092 If -r/--rev is given, the template is processed as a log template and
2093 applied to the given changesets. Otherwise, it is processed as a generic
2093 applied to the given changesets. Otherwise, it is processed as a generic
2094 template.
2094 template.
2095
2095
2096 Use --verbose to print the parsed tree.
2096 Use --verbose to print the parsed tree.
2097 """
2097 """
2098 revs = None
2098 revs = None
2099 if opts['rev']:
2099 if opts['rev']:
2100 if repo is None:
2100 if repo is None:
2101 raise error.RepoError(_('there is no Mercurial repository here '
2101 raise error.RepoError(_('there is no Mercurial repository here '
2102 '(.hg not found)'))
2102 '(.hg not found)'))
2103 revs = scmutil.revrange(repo, opts['rev'])
2103 revs = scmutil.revrange(repo, opts['rev'])
2104
2104
2105 props = {}
2105 props = {}
2106 for d in opts['define']:
2106 for d in opts['define']:
2107 try:
2107 try:
2108 k, v = (e.strip() for e in d.split('=', 1))
2108 k, v = (e.strip() for e in d.split('=', 1))
2109 if not k or k == 'ui':
2109 if not k or k == 'ui':
2110 raise ValueError
2110 raise ValueError
2111 props[k] = v
2111 props[k] = v
2112 except ValueError:
2112 except ValueError:
2113 raise error.Abort(_('malformed keyword definition: %s') % d)
2113 raise error.Abort(_('malformed keyword definition: %s') % d)
2114
2114
2115 if ui.verbose:
2115 if ui.verbose:
2116 aliases = ui.configitems('templatealias')
2116 aliases = ui.configitems('templatealias')
2117 tree = templater.parse(tmpl)
2117 tree = templater.parse(tmpl)
2118 ui.note(templater.prettyformat(tree), '\n')
2118 ui.note(templater.prettyformat(tree), '\n')
2119 newtree = templater.expandaliases(tree, aliases)
2119 newtree = templater.expandaliases(tree, aliases)
2120 if newtree != tree:
2120 if newtree != tree:
2121 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2121 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2122
2122
2123 mapfile = None
2124 if revs is None:
2123 if revs is None:
2125 k = 'debugtemplate'
2124 k = 'debugtemplate'
2126 t = formatter.maketemplater(ui, k, tmpl)
2125 t = formatter.maketemplater(ui, k, tmpl)
2127 ui.write(templater.stringify(t(k, ui=ui, **props)))
2126 ui.write(templater.stringify(t(k, ui=ui, **props)))
2128 else:
2127 else:
2129 displayer = cmdutil.changeset_templater(ui, repo, None, opts, tmpl,
2128 displayer = cmdutil.makelogtemplater(ui, repo, tmpl)
2130 mapfile, buffered=False)
2131 for r in revs:
2129 for r in revs:
2132 displayer.show(repo[r], **props)
2130 displayer.show(repo[r], **props)
2133 displayer.close()
2131 displayer.close()
2134
2132
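The else branch above is where this change lands: the open-coded changeset_templater(...) call, with its always-None mapfile, is replaced by the new cmdutil.makelogtemplater() helper. A minimal sketch of rendering a literal log template through the same helper, assuming the (ui, repo, tmpl) signature used above; print_short_log is a hypothetical name:

def print_short_log(ui, repo, revs):
    # build a changeset templater directly from a literal template string
    tmpl = '{rev}:{node|short} {desc|firstline}\n'
    displayer = cmdutil.makelogtemplater(ui, repo, tmpl)
    for r in revs:
        displayer.show(repo[r])
    displayer.close()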
2135 @command('debugupdatecaches', [])
2133 @command('debugupdatecaches', [])
2136 def debugupdatecaches(ui, repo, *pats, **opts):
2134 def debugupdatecaches(ui, repo, *pats, **opts):
2137 """warm all known caches in the repository"""
2135 """warm all known caches in the repository"""
2138 with repo.wlock():
2136 with repo.wlock():
2139 with repo.lock():
2137 with repo.lock():
2140 repo.updatecaches()
2138 repo.updatecaches()
2141
2139
2142 @command('debugupgraderepo', [
2140 @command('debugupgraderepo', [
2143 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2141 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2144 ('', 'run', False, _('performs an upgrade')),
2142 ('', 'run', False, _('performs an upgrade')),
2145 ])
2143 ])
2146 def debugupgraderepo(ui, repo, run=False, optimize=None):
2144 def debugupgraderepo(ui, repo, run=False, optimize=None):
2147 """upgrade a repository to use different features
2145 """upgrade a repository to use different features
2148
2146
2149 If no arguments are specified, the repository is evaluated for upgrade
2147 If no arguments are specified, the repository is evaluated for upgrade
2150 and a list of problems and potential optimizations is printed.
2148 and a list of problems and potential optimizations is printed.
2151
2149
2152 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2150 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2153 can be influenced via additional arguments. More details will be provided
2151 can be influenced via additional arguments. More details will be provided
2154 by the command output when run without ``--run``.
2152 by the command output when run without ``--run``.
2155
2153
2156 During the upgrade, the repository will be locked and no writes will be
2154 During the upgrade, the repository will be locked and no writes will be
2157 allowed.
2155 allowed.
2158
2156
2159 At the end of the upgrade, the repository may not be readable while new
2157 At the end of the upgrade, the repository may not be readable while new
2160 repository data is swapped in. This window will be as long as it takes to
2158 repository data is swapped in. This window will be as long as it takes to
2161 rename some directories inside the ``.hg`` directory. On most machines, this
2159 rename some directories inside the ``.hg`` directory. On most machines, this
2162 should complete almost instantaneously and the chances of a consumer being
2160 should complete almost instantaneously and the chances of a consumer being
2163 unable to access the repository should be low.
2161 unable to access the repository should be low.
2164 """
2162 """
2165 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2163 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2166
2164
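As the docstring explains, running without --run only evaluates the repository. A minimal sketch of driving the same entry point from code, assuming only the upgrade.upgraderepo(ui, repo, run=..., optimize=...) signature visible above; report_upgrade_opportunities is a hypothetical name:

def report_upgrade_opportunities(ui, repo):
    # run=False prints detected deficiencies and optional optimizations
    # without rewriting any repository data
    return upgrade.upgraderepo(ui, repo, run=False, optimize=None)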
2167 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2165 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2168 inferrepo=True)
2166 inferrepo=True)
2169 def debugwalk(ui, repo, *pats, **opts):
2167 def debugwalk(ui, repo, *pats, **opts):
2170 """show how files match on given patterns"""
2168 """show how files match on given patterns"""
2171 m = scmutil.match(repo[None], pats, opts)
2169 m = scmutil.match(repo[None], pats, opts)
2172 ui.write(('matcher: %r\n' % m))
2170 ui.write(('matcher: %r\n' % m))
2173 items = list(repo[None].walk(m))
2171 items = list(repo[None].walk(m))
2174 if not items:
2172 if not items:
2175 return
2173 return
2176 f = lambda fn: fn
2174 f = lambda fn: fn
2177 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2175 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2178 f = lambda fn: util.normpath(fn)
2176 f = lambda fn: util.normpath(fn)
2179 fmt = 'f %%-%ds %%-%ds %%s' % (
2177 fmt = 'f %%-%ds %%-%ds %%s' % (
2180 max([len(abs) for abs in items]),
2178 max([len(abs) for abs in items]),
2181 max([len(m.rel(abs)) for abs in items]))
2179 max([len(m.rel(abs)) for abs in items]))
2182 for abs in items:
2180 for abs in items:
2183 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2181 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2184 ui.write("%s\n" % line.rstrip())
2182 ui.write("%s\n" % line.rstrip())
2185
2183
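debugwalk above is essentially a demonstration of the matcher API: scmutil.match() turns command-line patterns into a matcher and ctx.walk() enumerates the matching files. A minimal sketch of reusing that pair outside the debug command, with matching_files as a hypothetical helper name:

def matching_files(repo, pats):
    wctx = repo[None]                   # working-directory context
    m = scmutil.match(wctx, pats, {})   # matcher from patterns, default options
    return sorted(wctx.walk(m))         # filenames known to the wdir that match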
2186 @command('debugwireargs',
2184 @command('debugwireargs',
2187 [('', 'three', '', 'three'),
2185 [('', 'three', '', 'three'),
2188 ('', 'four', '', 'four'),
2186 ('', 'four', '', 'four'),
2189 ('', 'five', '', 'five'),
2187 ('', 'five', '', 'five'),
2190 ] + cmdutil.remoteopts,
2188 ] + cmdutil.remoteopts,
2191 _('REPO [OPTIONS]... [ONE [TWO]]'),
2189 _('REPO [OPTIONS]... [ONE [TWO]]'),
2192 norepo=True)
2190 norepo=True)
2193 def debugwireargs(ui, repopath, *vals, **opts):
2191 def debugwireargs(ui, repopath, *vals, **opts):
2194 repo = hg.peer(ui, opts, repopath)
2192 repo = hg.peer(ui, opts, repopath)
2195 for opt in cmdutil.remoteopts:
2193 for opt in cmdutil.remoteopts:
2196 del opts[opt[1]]
2194 del opts[opt[1]]
2197 args = {}
2195 args = {}
2198 for k, v in opts.iteritems():
2196 for k, v in opts.iteritems():
2199 if v:
2197 if v:
2200 args[k] = v
2198 args[k] = v
2201 # run twice to check that we don't mess up the stream for the next command
2199 # run twice to check that we don't mess up the stream for the next command
2202 res1 = repo.debugwireargs(*vals, **args)
2200 res1 = repo.debugwireargs(*vals, **args)
2203 res2 = repo.debugwireargs(*vals, **args)
2201 res2 = repo.debugwireargs(*vals, **args)
2204 ui.write("%s\n" % res1)
2202 ui.write("%s\n" % res1)
2205 if res1 != res2:
2203 if res1 != res2:
2206 ui.warn("%s\n" % res2)
2204 ui.warn("%s\n" % res2)