walkchangerevs: drop ui arg
Matt Mackall
r9665:1de5ebfa default

The requested changes are too big and the content was truncated; the cmdutil.py portion below stops partway through the file.
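
The change drops the ui argument from cmdutil.walkchangerevs, so callers now pass only the repository, the match object, the options dict, and the prepare callback. A minimal before/after sketch of the call site updated in churn.py below (the loop is quoted from the diff; how walkchangerevs obtains a ui afterwards is not visible in the truncated cmdutil.py hunk and is assumed here to be repo.ui):

    # before: ui passed explicitly to walkchangerevs
    for ctx in cmdutil.walkchangerevs(ui, repo, m, opts, prep):
        continue

    # after: ui dropped from the call (assumed to be taken from repo.ui internally)
    for ctx in cmdutil.walkchangerevs(repo, m, opts, prep):
        continue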

@@ -1,175 +1,175 @@
1 # churn.py - create a graph of revisions count grouped by template
1 # churn.py - create a graph of revisions count grouped by template
2 #
2 #
3 # Copyright 2006 Josef "Jeff" Sipek <jeffpc@josefsipek.net>
3 # Copyright 2006 Josef "Jeff" Sipek <jeffpc@josefsipek.net>
4 # Copyright 2008 Alexander Solovyov <piranha@piranha.org.ua>
4 # Copyright 2008 Alexander Solovyov <piranha@piranha.org.ua>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2, incorporated herein by reference.
7 # GNU General Public License version 2, incorporated herein by reference.
8
8
9 '''command to display statistics about repository history'''
9 '''command to display statistics about repository history'''
10
10
11 from mercurial.i18n import _
11 from mercurial.i18n import _
12 from mercurial import patch, cmdutil, util, templater
12 from mercurial import patch, cmdutil, util, templater
13 import sys, os
13 import sys, os
14 import time, datetime
14 import time, datetime
15
15
16 def maketemplater(ui, repo, tmpl):
16 def maketemplater(ui, repo, tmpl):
17 tmpl = templater.parsestring(tmpl, quoted=False)
17 tmpl = templater.parsestring(tmpl, quoted=False)
18 try:
18 try:
19 t = cmdutil.changeset_templater(ui, repo, False, None, None, False)
19 t = cmdutil.changeset_templater(ui, repo, False, None, None, False)
20 except SyntaxError, inst:
20 except SyntaxError, inst:
21 raise util.Abort(inst.args[0])
21 raise util.Abort(inst.args[0])
22 t.use_template(tmpl)
22 t.use_template(tmpl)
23 return t
23 return t
24
24
25 def changedlines(ui, repo, ctx1, ctx2, fns):
25 def changedlines(ui, repo, ctx1, ctx2, fns):
26 lines = 0
26 lines = 0
27 fmatch = cmdutil.matchfiles(repo, fns)
27 fmatch = cmdutil.matchfiles(repo, fns)
28 diff = ''.join(patch.diff(repo, ctx1.node(), ctx2.node(), fmatch))
28 diff = ''.join(patch.diff(repo, ctx1.node(), ctx2.node(), fmatch))
29 for l in diff.split('\n'):
29 for l in diff.split('\n'):
30 if (l.startswith("+") and not l.startswith("+++ ") or
30 if (l.startswith("+") and not l.startswith("+++ ") or
31 l.startswith("-") and not l.startswith("--- ")):
31 l.startswith("-") and not l.startswith("--- ")):
32 lines += 1
32 lines += 1
33 return lines
33 return lines
34
34
35 def countrate(ui, repo, amap, *pats, **opts):
35 def countrate(ui, repo, amap, *pats, **opts):
36 """Calculate stats"""
36 """Calculate stats"""
37 if opts.get('dateformat'):
37 if opts.get('dateformat'):
38 def getkey(ctx):
38 def getkey(ctx):
39 t, tz = ctx.date()
39 t, tz = ctx.date()
40 date = datetime.datetime(*time.gmtime(float(t) - tz)[:6])
40 date = datetime.datetime(*time.gmtime(float(t) - tz)[:6])
41 return date.strftime(opts['dateformat'])
41 return date.strftime(opts['dateformat'])
42 else:
42 else:
43 tmpl = opts.get('template', '{author|email}')
43 tmpl = opts.get('template', '{author|email}')
44 tmpl = maketemplater(ui, repo, tmpl)
44 tmpl = maketemplater(ui, repo, tmpl)
45 def getkey(ctx):
45 def getkey(ctx):
46 ui.pushbuffer()
46 ui.pushbuffer()
47 tmpl.show(ctx)
47 tmpl.show(ctx)
48 return ui.popbuffer()
48 return ui.popbuffer()
49
49
50 count = pct = 0
50 count = pct = 0
51 rate = {}
51 rate = {}
52 df = False
52 df = False
53 if opts.get('date'):
53 if opts.get('date'):
54 df = util.matchdate(opts['date'])
54 df = util.matchdate(opts['date'])
55
55
56 m = cmdutil.match(repo, pats, opts)
56 m = cmdutil.match(repo, pats, opts)
57 def prep(ctx, fns):
57 def prep(ctx, fns):
58 rev = ctx.rev()
58 rev = ctx.rev()
59 if df and not df(ctx.date()[0]): # doesn't match date format
59 if df and not df(ctx.date()[0]): # doesn't match date format
60 return
60 return
61
61
62 key = getkey(ctx)
62 key = getkey(ctx)
63 key = amap.get(key, key) # alias remap
63 key = amap.get(key, key) # alias remap
64 if opts.get('changesets'):
64 if opts.get('changesets'):
65 rate[key] = rate.get(key, 0) + 1
65 rate[key] = rate.get(key, 0) + 1
66 else:
66 else:
67 parents = ctx.parents()
67 parents = ctx.parents()
68 if len(parents) > 1:
68 if len(parents) > 1:
69 ui.note(_('Revision %d is a merge, ignoring...\n') % (rev,))
69 ui.note(_('Revision %d is a merge, ignoring...\n') % (rev,))
70 return
70 return
71
71
72 ctx1 = parents[0]
72 ctx1 = parents[0]
73 lines = changedlines(ui, repo, ctx1, ctx, fns)
73 lines = changedlines(ui, repo, ctx1, ctx, fns)
74 rate[key] = rate.get(key, 0) + lines
74 rate[key] = rate.get(key, 0) + lines
75
75
76 if opts.get('progress'):
76 if opts.get('progress'):
77 count += 1
77 count += 1
78 newpct = int(100.0 * count / max(len(repo), 1))
78 newpct = int(100.0 * count / max(len(repo), 1))
79 if pct < newpct:
79 if pct < newpct:
80 pct = newpct
80 pct = newpct
81 ui.write("\r" + _("generating stats: %d%%") % pct)
81 ui.write("\r" + _("generating stats: %d%%") % pct)
82 sys.stdout.flush()
82 sys.stdout.flush()
83
83
84 for ctx in cmdutil.walkchangerevs(ui, repo, m, opts, prep):
84 for ctx in cmdutil.walkchangerevs(repo, m, opts, prep):
85 continue
85 continue
86
86
87 if opts.get('progress'):
87 if opts.get('progress'):
88 ui.write("\r")
88 ui.write("\r")
89 sys.stdout.flush()
89 sys.stdout.flush()
90
90
91 return rate
91 return rate
92
92
93
93
94 def churn(ui, repo, *pats, **opts):
94 def churn(ui, repo, *pats, **opts):
95 '''histogram of changes to the repository
95 '''histogram of changes to the repository
96
96
97 This command will display a histogram representing the number
97 This command will display a histogram representing the number
98 of changed lines or revisions, grouped according to the given
98 of changed lines or revisions, grouped according to the given
99 template. The default template will group changes by author.
99 template. The default template will group changes by author.
100 The --dateformat option may be used to group the results by
100 The --dateformat option may be used to group the results by
101 date instead.
101 date instead.
102
102
103 Statistics are based on the number of changed lines, or
103 Statistics are based on the number of changed lines, or
104 alternatively the number of matching revisions if the
104 alternatively the number of matching revisions if the
105 --changesets option is specified.
105 --changesets option is specified.
106
106
107 Examples::
107 Examples::
108
108
109 # display count of changed lines for every committer
109 # display count of changed lines for every committer
110 hg churn -t '{author|email}'
110 hg churn -t '{author|email}'
111
111
112 # display daily activity graph
112 # display daily activity graph
113 hg churn -f '%H' -s -c
113 hg churn -f '%H' -s -c
114
114
115 # display activity of developers by month
115 # display activity of developers by month
116 hg churn -f '%Y-%m' -s -c
116 hg churn -f '%Y-%m' -s -c
117
117
118 # display count of lines changed in every year
118 # display count of lines changed in every year
119 hg churn -f '%Y' -s
119 hg churn -f '%Y' -s
120
120
121 It is possible to map alternate email addresses to a main address
121 It is possible to map alternate email addresses to a main address
122 by providing a file using the following format::
122 by providing a file using the following format::
123
123
124 <alias email> <actual email>
124 <alias email> <actual email>
125
125
126 Such a file may be specified with the --aliases option, otherwise
126 Such a file may be specified with the --aliases option, otherwise
127 a .hgchurn file will be looked for in the working directory root.
127 a .hgchurn file will be looked for in the working directory root.
128 '''
128 '''
129 def pad(s, l):
129 def pad(s, l):
130 return (s + " " * l)[:l]
130 return (s + " " * l)[:l]
131
131
132 amap = {}
132 amap = {}
133 aliases = opts.get('aliases')
133 aliases = opts.get('aliases')
134 if not aliases and os.path.exists(repo.wjoin('.hgchurn')):
134 if not aliases and os.path.exists(repo.wjoin('.hgchurn')):
135 aliases = repo.wjoin('.hgchurn')
135 aliases = repo.wjoin('.hgchurn')
136 if aliases:
136 if aliases:
137 for l in open(aliases, "r"):
137 for l in open(aliases, "r"):
138 l = l.strip()
138 l = l.strip()
139 alias, actual = l.split()
139 alias, actual = l.split()
140 amap[alias] = actual
140 amap[alias] = actual
141
141
142 rate = countrate(ui, repo, amap, *pats, **opts).items()
142 rate = countrate(ui, repo, amap, *pats, **opts).items()
143 if not rate:
143 if not rate:
144 return
144 return
145
145
146 sortkey = ((not opts.get('sort')) and (lambda x: -x[1]) or None)
146 sortkey = ((not opts.get('sort')) and (lambda x: -x[1]) or None)
147 rate.sort(key=sortkey)
147 rate.sort(key=sortkey)
148
148
149 # Be careful not to have a zero maxcount (issue833)
149 # Be careful not to have a zero maxcount (issue833)
150 maxcount = float(max(v for k, v in rate)) or 1.0
150 maxcount = float(max(v for k, v in rate)) or 1.0
151 maxname = max(len(k) for k, v in rate)
151 maxname = max(len(k) for k, v in rate)
152
152
153 ttywidth = util.termwidth()
153 ttywidth = util.termwidth()
154 ui.debug("assuming %i character terminal\n" % ttywidth)
154 ui.debug("assuming %i character terminal\n" % ttywidth)
155 width = ttywidth - maxname - 2 - 6 - 2 - 2
155 width = ttywidth - maxname - 2 - 6 - 2 - 2
156
156
157 for date, count in rate:
157 for date, count in rate:
158 print "%s %6d %s" % (pad(date, maxname), count,
158 print "%s %6d %s" % (pad(date, maxname), count,
159 "*" * int(count * width / maxcount))
159 "*" * int(count * width / maxcount))
160
160
161
161
162 cmdtable = {
162 cmdtable = {
163 "churn":
163 "churn":
164 (churn,
164 (churn,
165 [('r', 'rev', [], _('count rate for the specified revision or range')),
165 [('r', 'rev', [], _('count rate for the specified revision or range')),
166 ('d', 'date', '', _('count rate for revisions matching date spec')),
166 ('d', 'date', '', _('count rate for revisions matching date spec')),
167 ('t', 'template', '{author|email}', _('template to group changesets')),
167 ('t', 'template', '{author|email}', _('template to group changesets')),
168 ('f', 'dateformat', '',
168 ('f', 'dateformat', '',
169 _('strftime-compatible format for grouping by date')),
169 _('strftime-compatible format for grouping by date')),
170 ('c', 'changesets', False, _('count rate by number of changesets')),
170 ('c', 'changesets', False, _('count rate by number of changesets')),
171 ('s', 'sort', False, _('sort by key (default: sort by count)')),
171 ('s', 'sort', False, _('sort by key (default: sort by count)')),
172 ('', 'aliases', '', _('file with email aliases')),
172 ('', 'aliases', '', _('file with email aliases')),
173 ('', 'progress', None, _('show progress'))],
173 ('', 'progress', None, _('show progress'))],
174 _("hg churn [-d DATE] [-r REV] [--aliases FILE] [--progress] [FILE]")),
174 _("hg churn [-d DATE] [-r REV] [--aliases FILE] [--progress] [FILE]")),
175 }
175 }
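
The cmdutil.py hunk that follows is cut off before it reaches walkchangerevs itself, so the updated definition is not shown here. Going only by the commit message and the call-site change above, the new signature presumably drops ui and reaches the ui object through the repository; a hypothetical sketch (the parameter names and the repo.ui detail are assumptions, not taken from this diff):

    def walkchangerevs(repo, match, opts, prepare):
        # assumed: ui is no longer a parameter and is derived from the repo
        ui = repo.ui
        # ... revision-walking logic not shown in the truncated diff ...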
@@ -1,1282 +1,1281 @@
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2, incorporated herein by reference.
6 # GNU General Public License version 2, incorporated herein by reference.
7
7
8 from node import hex, nullid, nullrev, short
8 from node import hex, nullid, nullrev, short
9 from i18n import _
9 from i18n import _
10 import os, sys, errno, re, glob
10 import os, sys, errno, re, glob
11 import mdiff, bdiff, util, templater, patch, error, encoding
11 import mdiff, bdiff, util, templater, patch, error, encoding
12 import match as _match
12 import match as _match
13
13
14 revrangesep = ':'
14 revrangesep = ':'
15
15
16 def findpossible(cmd, table, strict=False):
16 def findpossible(cmd, table, strict=False):
17 """
17 """
18 Return cmd -> (aliases, command table entry)
18 Return cmd -> (aliases, command table entry)
19 for each matching command.
19 for each matching command.
20 Return debug commands (or their aliases) only if no normal command matches.
20 Return debug commands (or their aliases) only if no normal command matches.
21 """
21 """
22 choice = {}
22 choice = {}
23 debugchoice = {}
23 debugchoice = {}
24 for e in table.keys():
24 for e in table.keys():
25 aliases = e.lstrip("^").split("|")
25 aliases = e.lstrip("^").split("|")
26 found = None
26 found = None
27 if cmd in aliases:
27 if cmd in aliases:
28 found = cmd
28 found = cmd
29 elif not strict:
29 elif not strict:
30 for a in aliases:
30 for a in aliases:
31 if a.startswith(cmd):
31 if a.startswith(cmd):
32 found = a
32 found = a
33 break
33 break
34 if found is not None:
34 if found is not None:
35 if aliases[0].startswith("debug") or found.startswith("debug"):
35 if aliases[0].startswith("debug") or found.startswith("debug"):
36 debugchoice[found] = (aliases, table[e])
36 debugchoice[found] = (aliases, table[e])
37 else:
37 else:
38 choice[found] = (aliases, table[e])
38 choice[found] = (aliases, table[e])
39
39
40 if not choice and debugchoice:
40 if not choice and debugchoice:
41 choice = debugchoice
41 choice = debugchoice
42
42
43 return choice
43 return choice
44
44
45 def findcmd(cmd, table, strict=True):
45 def findcmd(cmd, table, strict=True):
46 """Return (aliases, command table entry) for command string."""
46 """Return (aliases, command table entry) for command string."""
47 choice = findpossible(cmd, table, strict)
47 choice = findpossible(cmd, table, strict)
48
48
49 if cmd in choice:
49 if cmd in choice:
50 return choice[cmd]
50 return choice[cmd]
51
51
52 if len(choice) > 1:
52 if len(choice) > 1:
53 clist = choice.keys()
53 clist = choice.keys()
54 clist.sort()
54 clist.sort()
55 raise error.AmbiguousCommand(cmd, clist)
55 raise error.AmbiguousCommand(cmd, clist)
56
56
57 if choice:
57 if choice:
58 return choice.values()[0]
58 return choice.values()[0]
59
59
60 raise error.UnknownCommand(cmd)
60 raise error.UnknownCommand(cmd)
61
61
62 def bail_if_changed(repo):
62 def bail_if_changed(repo):
63 if repo.dirstate.parents()[1] != nullid:
63 if repo.dirstate.parents()[1] != nullid:
64 raise util.Abort(_('outstanding uncommitted merge'))
64 raise util.Abort(_('outstanding uncommitted merge'))
65 modified, added, removed, deleted = repo.status()[:4]
65 modified, added, removed, deleted = repo.status()[:4]
66 if modified or added or removed or deleted:
66 if modified or added or removed or deleted:
67 raise util.Abort(_("outstanding uncommitted changes"))
67 raise util.Abort(_("outstanding uncommitted changes"))
68
68
69 def logmessage(opts):
69 def logmessage(opts):
70 """ get the log message according to -m and -l option """
70 """ get the log message according to -m and -l option """
71 message = opts.get('message')
71 message = opts.get('message')
72 logfile = opts.get('logfile')
72 logfile = opts.get('logfile')
73
73
74 if message and logfile:
74 if message and logfile:
75 raise util.Abort(_('options --message and --logfile are mutually '
75 raise util.Abort(_('options --message and --logfile are mutually '
76 'exclusive'))
76 'exclusive'))
77 if not message and logfile:
77 if not message and logfile:
78 try:
78 try:
79 if logfile == '-':
79 if logfile == '-':
80 message = sys.stdin.read()
80 message = sys.stdin.read()
81 else:
81 else:
82 message = open(logfile).read()
82 message = open(logfile).read()
83 except IOError, inst:
83 except IOError, inst:
84 raise util.Abort(_("can't read commit message '%s': %s") %
84 raise util.Abort(_("can't read commit message '%s': %s") %
85 (logfile, inst.strerror))
85 (logfile, inst.strerror))
86 return message
86 return message
87
87
88 def loglimit(opts):
88 def loglimit(opts):
89 """get the log limit according to option -l/--limit"""
89 """get the log limit according to option -l/--limit"""
90 limit = opts.get('limit')
90 limit = opts.get('limit')
91 if limit:
91 if limit:
92 try:
92 try:
93 limit = int(limit)
93 limit = int(limit)
94 except ValueError:
94 except ValueError:
95 raise util.Abort(_('limit must be a positive integer'))
95 raise util.Abort(_('limit must be a positive integer'))
96 if limit <= 0: raise util.Abort(_('limit must be positive'))
96 if limit <= 0: raise util.Abort(_('limit must be positive'))
97 else:
97 else:
98 limit = sys.maxint
98 limit = sys.maxint
99 return limit
99 return limit
100
100
101 def remoteui(src, opts):
101 def remoteui(src, opts):
102 'build a remote ui from ui or repo and opts'
102 'build a remote ui from ui or repo and opts'
103 if hasattr(src, 'baseui'): # looks like a repository
103 if hasattr(src, 'baseui'): # looks like a repository
104 dst = src.baseui.copy() # drop repo-specific config
104 dst = src.baseui.copy() # drop repo-specific config
105 src = src.ui # copy target options from repo
105 src = src.ui # copy target options from repo
106 else: # assume it's a global ui object
106 else: # assume it's a global ui object
107 dst = src.copy() # keep all global options
107 dst = src.copy() # keep all global options
108
108
109 # copy ssh-specific options
109 # copy ssh-specific options
110 for o in 'ssh', 'remotecmd':
110 for o in 'ssh', 'remotecmd':
111 v = opts.get(o) or src.config('ui', o)
111 v = opts.get(o) or src.config('ui', o)
112 if v:
112 if v:
113 dst.setconfig("ui", o, v)
113 dst.setconfig("ui", o, v)
114 # copy bundle-specific options
114 # copy bundle-specific options
115 r = src.config('bundle', 'mainreporoot')
115 r = src.config('bundle', 'mainreporoot')
116 if r:
116 if r:
117 dst.setconfig('bundle', 'mainreporoot', r)
117 dst.setconfig('bundle', 'mainreporoot', r)
118
118
119 return dst
119 return dst
120
120
121 def revpair(repo, revs):
121 def revpair(repo, revs):
122 '''return pair of nodes, given list of revisions. second item can
122 '''return pair of nodes, given list of revisions. second item can
123 be None, meaning use working dir.'''
123 be None, meaning use working dir.'''
124
124
125 def revfix(repo, val, defval):
125 def revfix(repo, val, defval):
126 if not val and val != 0 and defval is not None:
126 if not val and val != 0 and defval is not None:
127 val = defval
127 val = defval
128 return repo.lookup(val)
128 return repo.lookup(val)
129
129
130 if not revs:
130 if not revs:
131 return repo.dirstate.parents()[0], None
131 return repo.dirstate.parents()[0], None
132 end = None
132 end = None
133 if len(revs) == 1:
133 if len(revs) == 1:
134 if revrangesep in revs[0]:
134 if revrangesep in revs[0]:
135 start, end = revs[0].split(revrangesep, 1)
135 start, end = revs[0].split(revrangesep, 1)
136 start = revfix(repo, start, 0)
136 start = revfix(repo, start, 0)
137 end = revfix(repo, end, len(repo) - 1)
137 end = revfix(repo, end, len(repo) - 1)
138 else:
138 else:
139 start = revfix(repo, revs[0], None)
139 start = revfix(repo, revs[0], None)
140 elif len(revs) == 2:
140 elif len(revs) == 2:
141 if revrangesep in revs[0] or revrangesep in revs[1]:
141 if revrangesep in revs[0] or revrangesep in revs[1]:
142 raise util.Abort(_('too many revisions specified'))
142 raise util.Abort(_('too many revisions specified'))
143 start = revfix(repo, revs[0], None)
143 start = revfix(repo, revs[0], None)
144 end = revfix(repo, revs[1], None)
144 end = revfix(repo, revs[1], None)
145 else:
145 else:
146 raise util.Abort(_('too many revisions specified'))
146 raise util.Abort(_('too many revisions specified'))
147 return start, end
147 return start, end
148
148
149 def revrange(repo, revs):
149 def revrange(repo, revs):
150 """Yield revision as strings from a list of revision specifications."""
150 """Yield revision as strings from a list of revision specifications."""
151
151
152 def revfix(repo, val, defval):
152 def revfix(repo, val, defval):
153 if not val and val != 0 and defval is not None:
153 if not val and val != 0 and defval is not None:
154 return defval
154 return defval
155 return repo.changelog.rev(repo.lookup(val))
155 return repo.changelog.rev(repo.lookup(val))
156
156
157 seen, l = set(), []
157 seen, l = set(), []
158 for spec in revs:
158 for spec in revs:
159 if revrangesep in spec:
159 if revrangesep in spec:
160 start, end = spec.split(revrangesep, 1)
160 start, end = spec.split(revrangesep, 1)
161 start = revfix(repo, start, 0)
161 start = revfix(repo, start, 0)
162 end = revfix(repo, end, len(repo) - 1)
162 end = revfix(repo, end, len(repo) - 1)
163 step = start > end and -1 or 1
163 step = start > end and -1 or 1
164 for rev in xrange(start, end+step, step):
164 for rev in xrange(start, end+step, step):
165 if rev in seen:
165 if rev in seen:
166 continue
166 continue
167 seen.add(rev)
167 seen.add(rev)
168 l.append(rev)
168 l.append(rev)
169 else:
169 else:
170 rev = revfix(repo, spec, None)
170 rev = revfix(repo, spec, None)
171 if rev in seen:
171 if rev in seen:
172 continue
172 continue
173 seen.add(rev)
173 seen.add(rev)
174 l.append(rev)
174 l.append(rev)
175
175
176 return l
176 return l
177
177
178 def make_filename(repo, pat, node,
178 def make_filename(repo, pat, node,
179 total=None, seqno=None, revwidth=None, pathname=None):
179 total=None, seqno=None, revwidth=None, pathname=None):
180 node_expander = {
180 node_expander = {
181 'H': lambda: hex(node),
181 'H': lambda: hex(node),
182 'R': lambda: str(repo.changelog.rev(node)),
182 'R': lambda: str(repo.changelog.rev(node)),
183 'h': lambda: short(node),
183 'h': lambda: short(node),
184 }
184 }
185 expander = {
185 expander = {
186 '%': lambda: '%',
186 '%': lambda: '%',
187 'b': lambda: os.path.basename(repo.root),
187 'b': lambda: os.path.basename(repo.root),
188 }
188 }
189
189
190 try:
190 try:
191 if node:
191 if node:
192 expander.update(node_expander)
192 expander.update(node_expander)
193 if node:
193 if node:
194 expander['r'] = (lambda:
194 expander['r'] = (lambda:
195 str(repo.changelog.rev(node)).zfill(revwidth or 0))
195 str(repo.changelog.rev(node)).zfill(revwidth or 0))
196 if total is not None:
196 if total is not None:
197 expander['N'] = lambda: str(total)
197 expander['N'] = lambda: str(total)
198 if seqno is not None:
198 if seqno is not None:
199 expander['n'] = lambda: str(seqno)
199 expander['n'] = lambda: str(seqno)
200 if total is not None and seqno is not None:
200 if total is not None and seqno is not None:
201 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
201 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
202 if pathname is not None:
202 if pathname is not None:
203 expander['s'] = lambda: os.path.basename(pathname)
203 expander['s'] = lambda: os.path.basename(pathname)
204 expander['d'] = lambda: os.path.dirname(pathname) or '.'
204 expander['d'] = lambda: os.path.dirname(pathname) or '.'
205 expander['p'] = lambda: pathname
205 expander['p'] = lambda: pathname
206
206
207 newname = []
207 newname = []
208 patlen = len(pat)
208 patlen = len(pat)
209 i = 0
209 i = 0
210 while i < patlen:
210 while i < patlen:
211 c = pat[i]
211 c = pat[i]
212 if c == '%':
212 if c == '%':
213 i += 1
213 i += 1
214 c = pat[i]
214 c = pat[i]
215 c = expander[c]()
215 c = expander[c]()
216 newname.append(c)
216 newname.append(c)
217 i += 1
217 i += 1
218 return ''.join(newname)
218 return ''.join(newname)
219 except KeyError, inst:
219 except KeyError, inst:
220 raise util.Abort(_("invalid format spec '%%%s' in output filename") %
220 raise util.Abort(_("invalid format spec '%%%s' in output filename") %
221 inst.args[0])
221 inst.args[0])
222
222
223 def make_file(repo, pat, node=None,
223 def make_file(repo, pat, node=None,
224 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
224 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
225
225
226 writable = 'w' in mode or 'a' in mode
226 writable = 'w' in mode or 'a' in mode
227
227
228 if not pat or pat == '-':
228 if not pat or pat == '-':
229 return writable and sys.stdout or sys.stdin
229 return writable and sys.stdout or sys.stdin
230 if hasattr(pat, 'write') and writable:
230 if hasattr(pat, 'write') and writable:
231 return pat
231 return pat
232 if hasattr(pat, 'read') and 'r' in mode:
232 if hasattr(pat, 'read') and 'r' in mode:
233 return pat
233 return pat
234 return open(make_filename(repo, pat, node, total, seqno, revwidth,
234 return open(make_filename(repo, pat, node, total, seqno, revwidth,
235 pathname),
235 pathname),
236 mode)
236 mode)
237
237
238 def expandpats(pats):
238 def expandpats(pats):
239 if not util.expandglobs:
239 if not util.expandglobs:
240 return list(pats)
240 return list(pats)
241 ret = []
241 ret = []
242 for p in pats:
242 for p in pats:
243 kind, name = _match._patsplit(p, None)
243 kind, name = _match._patsplit(p, None)
244 if kind is None:
244 if kind is None:
245 try:
245 try:
246 globbed = glob.glob(name)
246 globbed = glob.glob(name)
247 except re.error:
247 except re.error:
248 globbed = [name]
248 globbed = [name]
249 if globbed:
249 if globbed:
250 ret.extend(globbed)
250 ret.extend(globbed)
251 continue
251 continue
252 ret.append(p)
252 ret.append(p)
253 return ret
253 return ret
254
254
255 def match(repo, pats=[], opts={}, globbed=False, default='relpath'):
255 def match(repo, pats=[], opts={}, globbed=False, default='relpath'):
256 if not globbed and default == 'relpath':
256 if not globbed and default == 'relpath':
257 pats = expandpats(pats or [])
257 pats = expandpats(pats or [])
258 m = _match.match(repo.root, repo.getcwd(), pats,
258 m = _match.match(repo.root, repo.getcwd(), pats,
259 opts.get('include'), opts.get('exclude'), default)
259 opts.get('include'), opts.get('exclude'), default)
260 def badfn(f, msg):
260 def badfn(f, msg):
261 repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
261 repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
262 m.bad = badfn
262 m.bad = badfn
263 return m
263 return m
264
264
265 def matchall(repo):
265 def matchall(repo):
266 return _match.always(repo.root, repo.getcwd())
266 return _match.always(repo.root, repo.getcwd())
267
267
268 def matchfiles(repo, files):
268 def matchfiles(repo, files):
269 return _match.exact(repo.root, repo.getcwd(), files)
269 return _match.exact(repo.root, repo.getcwd(), files)
270
270
271 def findrenames(repo, added, removed, threshold):
271 def findrenames(repo, added, removed, threshold):
272 '''find renamed files -- yields (before, after, score) tuples'''
272 '''find renamed files -- yields (before, after, score) tuples'''
273 ctx = repo['.']
273 ctx = repo['.']
274 for a in added:
274 for a in added:
275 aa = repo.wread(a)
275 aa = repo.wread(a)
276 bestname, bestscore = None, threshold
276 bestname, bestscore = None, threshold
277 for r in removed:
277 for r in removed:
278 if r not in ctx:
278 if r not in ctx:
279 continue
279 continue
280 rr = ctx.filectx(r).data()
280 rr = ctx.filectx(r).data()
281
281
282 # bdiff.blocks() returns blocks of matching lines
282 # bdiff.blocks() returns blocks of matching lines
283 # count the number of bytes in each
283 # count the number of bytes in each
284 equal = 0
284 equal = 0
285 alines = mdiff.splitnewlines(aa)
285 alines = mdiff.splitnewlines(aa)
286 matches = bdiff.blocks(aa, rr)
286 matches = bdiff.blocks(aa, rr)
287 for x1,x2,y1,y2 in matches:
287 for x1,x2,y1,y2 in matches:
288 for line in alines[x1:x2]:
288 for line in alines[x1:x2]:
289 equal += len(line)
289 equal += len(line)
290
290
291 lengths = len(aa) + len(rr)
291 lengths = len(aa) + len(rr)
292 if lengths:
292 if lengths:
293 myscore = equal*2.0 / lengths
293 myscore = equal*2.0 / lengths
294 if myscore >= bestscore:
294 if myscore >= bestscore:
295 bestname, bestscore = r, myscore
295 bestname, bestscore = r, myscore
296 if bestname:
296 if bestname:
297 yield bestname, a, bestscore
297 yield bestname, a, bestscore
298
298
299 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
299 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
300 if dry_run is None:
300 if dry_run is None:
301 dry_run = opts.get('dry_run')
301 dry_run = opts.get('dry_run')
302 if similarity is None:
302 if similarity is None:
303 similarity = float(opts.get('similarity') or 0)
303 similarity = float(opts.get('similarity') or 0)
304 # we'd use status here, except handling of symlinks and ignore is tricky
304 # we'd use status here, except handling of symlinks and ignore is tricky
305 added, unknown, deleted, removed = [], [], [], []
305 added, unknown, deleted, removed = [], [], [], []
306 audit_path = util.path_auditor(repo.root)
306 audit_path = util.path_auditor(repo.root)
307 m = match(repo, pats, opts)
307 m = match(repo, pats, opts)
308 for abs in repo.walk(m):
308 for abs in repo.walk(m):
309 target = repo.wjoin(abs)
309 target = repo.wjoin(abs)
310 good = True
310 good = True
311 try:
311 try:
312 audit_path(abs)
312 audit_path(abs)
313 except:
313 except:
314 good = False
314 good = False
315 rel = m.rel(abs)
315 rel = m.rel(abs)
316 exact = m.exact(abs)
316 exact = m.exact(abs)
317 if good and abs not in repo.dirstate:
317 if good and abs not in repo.dirstate:
318 unknown.append(abs)
318 unknown.append(abs)
319 if repo.ui.verbose or not exact:
319 if repo.ui.verbose or not exact:
320 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
320 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
321 elif repo.dirstate[abs] != 'r' and (not good or not util.lexists(target)
321 elif repo.dirstate[abs] != 'r' and (not good or not util.lexists(target)
322 or (os.path.isdir(target) and not os.path.islink(target))):
322 or (os.path.isdir(target) and not os.path.islink(target))):
323 deleted.append(abs)
323 deleted.append(abs)
324 if repo.ui.verbose or not exact:
324 if repo.ui.verbose or not exact:
325 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
325 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
326 # for finding renames
326 # for finding renames
327 elif repo.dirstate[abs] == 'r':
327 elif repo.dirstate[abs] == 'r':
328 removed.append(abs)
328 removed.append(abs)
329 elif repo.dirstate[abs] == 'a':
329 elif repo.dirstate[abs] == 'a':
330 added.append(abs)
330 added.append(abs)
331 if not dry_run:
331 if not dry_run:
332 repo.remove(deleted)
332 repo.remove(deleted)
333 repo.add(unknown)
333 repo.add(unknown)
334 if similarity > 0:
334 if similarity > 0:
335 for old, new, score in findrenames(repo, added + unknown,
335 for old, new, score in findrenames(repo, added + unknown,
336 removed + deleted, similarity):
336 removed + deleted, similarity):
337 if repo.ui.verbose or not m.exact(old) or not m.exact(new):
337 if repo.ui.verbose or not m.exact(old) or not m.exact(new):
338 repo.ui.status(_('recording removal of %s as rename to %s '
338 repo.ui.status(_('recording removal of %s as rename to %s '
339 '(%d%% similar)\n') %
339 '(%d%% similar)\n') %
340 (m.rel(old), m.rel(new), score * 100))
340 (m.rel(old), m.rel(new), score * 100))
341 if not dry_run:
341 if not dry_run:
342 repo.copy(old, new)
342 repo.copy(old, new)
343
343
344 def copy(ui, repo, pats, opts, rename=False):
344 def copy(ui, repo, pats, opts, rename=False):
345 # called with the repo lock held
345 # called with the repo lock held
346 #
346 #
347 # hgsep => pathname that uses "/" to separate directories
347 # hgsep => pathname that uses "/" to separate directories
348 # ossep => pathname that uses os.sep to separate directories
348 # ossep => pathname that uses os.sep to separate directories
349 cwd = repo.getcwd()
349 cwd = repo.getcwd()
350 targets = {}
350 targets = {}
351 after = opts.get("after")
351 after = opts.get("after")
352 dryrun = opts.get("dry_run")
352 dryrun = opts.get("dry_run")
353
353
354 def walkpat(pat):
354 def walkpat(pat):
355 srcs = []
355 srcs = []
356 m = match(repo, [pat], opts, globbed=True)
356 m = match(repo, [pat], opts, globbed=True)
357 for abs in repo.walk(m):
357 for abs in repo.walk(m):
358 state = repo.dirstate[abs]
358 state = repo.dirstate[abs]
359 rel = m.rel(abs)
359 rel = m.rel(abs)
360 exact = m.exact(abs)
360 exact = m.exact(abs)
361 if state in '?r':
361 if state in '?r':
362 if exact and state == '?':
362 if exact and state == '?':
363 ui.warn(_('%s: not copying - file is not managed\n') % rel)
363 ui.warn(_('%s: not copying - file is not managed\n') % rel)
364 if exact and state == 'r':
364 if exact and state == 'r':
365 ui.warn(_('%s: not copying - file has been marked for'
365 ui.warn(_('%s: not copying - file has been marked for'
366 ' remove\n') % rel)
366 ' remove\n') % rel)
367 continue
367 continue
368 # abs: hgsep
368 # abs: hgsep
369 # rel: ossep
369 # rel: ossep
370 srcs.append((abs, rel, exact))
370 srcs.append((abs, rel, exact))
371 return srcs
371 return srcs
372
372
373 # abssrc: hgsep
373 # abssrc: hgsep
374 # relsrc: ossep
374 # relsrc: ossep
375 # otarget: ossep
375 # otarget: ossep
376 def copyfile(abssrc, relsrc, otarget, exact):
376 def copyfile(abssrc, relsrc, otarget, exact):
377 abstarget = util.canonpath(repo.root, cwd, otarget)
377 abstarget = util.canonpath(repo.root, cwd, otarget)
378 reltarget = repo.pathto(abstarget, cwd)
378 reltarget = repo.pathto(abstarget, cwd)
379 target = repo.wjoin(abstarget)
379 target = repo.wjoin(abstarget)
380 src = repo.wjoin(abssrc)
380 src = repo.wjoin(abssrc)
381 state = repo.dirstate[abstarget]
381 state = repo.dirstate[abstarget]
382
382
383 # check for collisions
383 # check for collisions
384 prevsrc = targets.get(abstarget)
384 prevsrc = targets.get(abstarget)
385 if prevsrc is not None:
385 if prevsrc is not None:
386 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
386 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
387 (reltarget, repo.pathto(abssrc, cwd),
387 (reltarget, repo.pathto(abssrc, cwd),
388 repo.pathto(prevsrc, cwd)))
388 repo.pathto(prevsrc, cwd)))
389 return
389 return
390
390
391 # check for overwrites
391 # check for overwrites
392 exists = os.path.exists(target)
392 exists = os.path.exists(target)
393 if not after and exists or after and state in 'mn':
393 if not after and exists or after and state in 'mn':
394 if not opts['force']:
394 if not opts['force']:
395 ui.warn(_('%s: not overwriting - file exists\n') %
395 ui.warn(_('%s: not overwriting - file exists\n') %
396 reltarget)
396 reltarget)
397 return
397 return
398
398
399 if after:
399 if after:
400 if not exists:
400 if not exists:
401 return
401 return
402 elif not dryrun:
402 elif not dryrun:
403 try:
403 try:
404 if exists:
404 if exists:
405 os.unlink(target)
405 os.unlink(target)
406 targetdir = os.path.dirname(target) or '.'
406 targetdir = os.path.dirname(target) or '.'
407 if not os.path.isdir(targetdir):
407 if not os.path.isdir(targetdir):
408 os.makedirs(targetdir)
408 os.makedirs(targetdir)
409 util.copyfile(src, target)
409 util.copyfile(src, target)
410 except IOError, inst:
410 except IOError, inst:
411 if inst.errno == errno.ENOENT:
411 if inst.errno == errno.ENOENT:
412 ui.warn(_('%s: deleted in working copy\n') % relsrc)
412 ui.warn(_('%s: deleted in working copy\n') % relsrc)
413 else:
413 else:
414 ui.warn(_('%s: cannot copy - %s\n') %
414 ui.warn(_('%s: cannot copy - %s\n') %
415 (relsrc, inst.strerror))
415 (relsrc, inst.strerror))
416 return True # report a failure
416 return True # report a failure
417
417
418 if ui.verbose or not exact:
418 if ui.verbose or not exact:
419 if rename:
419 if rename:
420 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
420 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
421 else:
421 else:
422 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
422 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
423
423
424 targets[abstarget] = abssrc
424 targets[abstarget] = abssrc
425
425
426 # fix up dirstate
426 # fix up dirstate
427 origsrc = repo.dirstate.copied(abssrc) or abssrc
427 origsrc = repo.dirstate.copied(abssrc) or abssrc
428 if abstarget == origsrc: # copying back a copy?
428 if abstarget == origsrc: # copying back a copy?
429 if state not in 'mn' and not dryrun:
429 if state not in 'mn' and not dryrun:
430 repo.dirstate.normallookup(abstarget)
430 repo.dirstate.normallookup(abstarget)
431 else:
431 else:
432 if repo.dirstate[origsrc] == 'a' and origsrc == abssrc:
432 if repo.dirstate[origsrc] == 'a' and origsrc == abssrc:
433 if not ui.quiet:
433 if not ui.quiet:
434 ui.warn(_("%s has not been committed yet, so no copy "
434 ui.warn(_("%s has not been committed yet, so no copy "
435 "data will be stored for %s.\n")
435 "data will be stored for %s.\n")
436 % (repo.pathto(origsrc, cwd), reltarget))
436 % (repo.pathto(origsrc, cwd), reltarget))
437 if repo.dirstate[abstarget] in '?r' and not dryrun:
437 if repo.dirstate[abstarget] in '?r' and not dryrun:
438 repo.add([abstarget])
438 repo.add([abstarget])
439 elif not dryrun:
439 elif not dryrun:
440 repo.copy(origsrc, abstarget)
440 repo.copy(origsrc, abstarget)
441
441
442 if rename and not dryrun:
442 if rename and not dryrun:
443 repo.remove([abssrc], not after)
443 repo.remove([abssrc], not after)
444
444
445 # pat: ossep
445 # pat: ossep
446 # dest ossep
446 # dest ossep
447 # srcs: list of (hgsep, hgsep, ossep, bool)
447 # srcs: list of (hgsep, hgsep, ossep, bool)
448 # return: function that takes hgsep and returns ossep
448 # return: function that takes hgsep and returns ossep
449 def targetpathfn(pat, dest, srcs):
449 def targetpathfn(pat, dest, srcs):
450 if os.path.isdir(pat):
450 if os.path.isdir(pat):
451 abspfx = util.canonpath(repo.root, cwd, pat)
451 abspfx = util.canonpath(repo.root, cwd, pat)
452 abspfx = util.localpath(abspfx)
452 abspfx = util.localpath(abspfx)
453 if destdirexists:
453 if destdirexists:
454 striplen = len(os.path.split(abspfx)[0])
454 striplen = len(os.path.split(abspfx)[0])
455 else:
455 else:
456 striplen = len(abspfx)
456 striplen = len(abspfx)
457 if striplen:
457 if striplen:
458 striplen += len(os.sep)
458 striplen += len(os.sep)
459 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
459 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
460 elif destdirexists:
460 elif destdirexists:
461 res = lambda p: os.path.join(dest,
461 res = lambda p: os.path.join(dest,
462 os.path.basename(util.localpath(p)))
462 os.path.basename(util.localpath(p)))
463 else:
463 else:
464 res = lambda p: dest
464 res = lambda p: dest
465 return res
465 return res
466
466
467 # pat: ossep
467 # pat: ossep
468 # dest ossep
468 # dest ossep
469 # srcs: list of (hgsep, hgsep, ossep, bool)
469 # srcs: list of (hgsep, hgsep, ossep, bool)
470 # return: function that takes hgsep and returns ossep
470 # return: function that takes hgsep and returns ossep
471 def targetpathafterfn(pat, dest, srcs):
471 def targetpathafterfn(pat, dest, srcs):
472 if _match.patkind(pat):
472 if _match.patkind(pat):
473 # a mercurial pattern
473 # a mercurial pattern
474 res = lambda p: os.path.join(dest,
474 res = lambda p: os.path.join(dest,
475 os.path.basename(util.localpath(p)))
475 os.path.basename(util.localpath(p)))
476 else:
476 else:
477 abspfx = util.canonpath(repo.root, cwd, pat)
477 abspfx = util.canonpath(repo.root, cwd, pat)
478 if len(abspfx) < len(srcs[0][0]):
478 if len(abspfx) < len(srcs[0][0]):
479 # A directory. Either the target path contains the last
479 # A directory. Either the target path contains the last
480 # component of the source path or it does not.
480 # component of the source path or it does not.
481 def evalpath(striplen):
481 def evalpath(striplen):
482 score = 0
482 score = 0
483 for s in srcs:
483 for s in srcs:
484 t = os.path.join(dest, util.localpath(s[0])[striplen:])
484 t = os.path.join(dest, util.localpath(s[0])[striplen:])
485 if os.path.exists(t):
485 if os.path.exists(t):
486 score += 1
486 score += 1
487 return score
487 return score
488
488
489 abspfx = util.localpath(abspfx)
489 abspfx = util.localpath(abspfx)
490 striplen = len(abspfx)
490 striplen = len(abspfx)
491 if striplen:
491 if striplen:
492 striplen += len(os.sep)
492 striplen += len(os.sep)
493 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
493 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
494 score = evalpath(striplen)
494 score = evalpath(striplen)
495 striplen1 = len(os.path.split(abspfx)[0])
495 striplen1 = len(os.path.split(abspfx)[0])
496 if striplen1:
496 if striplen1:
497 striplen1 += len(os.sep)
497 striplen1 += len(os.sep)
498 if evalpath(striplen1) > score:
498 if evalpath(striplen1) > score:
499 striplen = striplen1
499 striplen = striplen1
500 res = lambda p: os.path.join(dest,
500 res = lambda p: os.path.join(dest,
501 util.localpath(p)[striplen:])
501 util.localpath(p)[striplen:])
502 else:
502 else:
503 # a file
503 # a file
504 if destdirexists:
504 if destdirexists:
505 res = lambda p: os.path.join(dest,
505 res = lambda p: os.path.join(dest,
506 os.path.basename(util.localpath(p)))
506 os.path.basename(util.localpath(p)))
507 else:
507 else:
508 res = lambda p: dest
508 res = lambda p: dest
509 return res
509 return res
510
510
511
511
512 pats = expandpats(pats)
512 pats = expandpats(pats)
513 if not pats:
513 if not pats:
514 raise util.Abort(_('no source or destination specified'))
514 raise util.Abort(_('no source or destination specified'))
515 if len(pats) == 1:
515 if len(pats) == 1:
516 raise util.Abort(_('no destination specified'))
516 raise util.Abort(_('no destination specified'))
517 dest = pats.pop()
517 dest = pats.pop()
518 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
518 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
519 if not destdirexists:
519 if not destdirexists:
520 if len(pats) > 1 or _match.patkind(pats[0]):
520 if len(pats) > 1 or _match.patkind(pats[0]):
521 raise util.Abort(_('with multiple sources, destination must be an '
521 raise util.Abort(_('with multiple sources, destination must be an '
522 'existing directory'))
522 'existing directory'))
523 if util.endswithsep(dest):
523 if util.endswithsep(dest):
524 raise util.Abort(_('destination %s is not a directory') % dest)
524 raise util.Abort(_('destination %s is not a directory') % dest)
525
525
526 tfn = targetpathfn
526 tfn = targetpathfn
527 if after:
527 if after:
528 tfn = targetpathafterfn
528 tfn = targetpathafterfn
529 copylist = []
529 copylist = []
530 for pat in pats:
530 for pat in pats:
531 srcs = walkpat(pat)
531 srcs = walkpat(pat)
532 if not srcs:
532 if not srcs:
533 continue
533 continue
534 copylist.append((tfn(pat, dest, srcs), srcs))
534 copylist.append((tfn(pat, dest, srcs), srcs))
535 if not copylist:
535 if not copylist:
536 raise util.Abort(_('no files to copy'))
536 raise util.Abort(_('no files to copy'))
537
537
538 errors = 0
538 errors = 0
539 for targetpath, srcs in copylist:
539 for targetpath, srcs in copylist:
540 for abssrc, relsrc, exact in srcs:
540 for abssrc, relsrc, exact in srcs:
541 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
541 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
542 errors += 1
542 errors += 1
543
543
544 if errors:
544 if errors:
545 ui.warn(_('(consider using --after)\n'))
545 ui.warn(_('(consider using --after)\n'))
546
546
547 return errors
547 return errors
548
548
549 def service(opts, parentfn=None, initfn=None, runfn=None, logfile=None,
549 def service(opts, parentfn=None, initfn=None, runfn=None, logfile=None,
550 runargs=None):
550 runargs=None):
551 '''Run a command as a service.'''
551 '''Run a command as a service.'''
552
552
553 if opts['daemon'] and not opts['daemon_pipefds']:
553 if opts['daemon'] and not opts['daemon_pipefds']:
554 rfd, wfd = os.pipe()
554 rfd, wfd = os.pipe()
555 if not runargs:
555 if not runargs:
556 runargs = sys.argv[:]
556 runargs = sys.argv[:]
557 runargs.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
557 runargs.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
558 # Don't pass --cwd to the child process, because we've already
558 # Don't pass --cwd to the child process, because we've already
559 # changed directory.
559 # changed directory.
560 for i in xrange(1,len(runargs)):
560 for i in xrange(1,len(runargs)):
561 if runargs[i].startswith('--cwd='):
561 if runargs[i].startswith('--cwd='):
562 del runargs[i]
562 del runargs[i]
563 break
563 break
564 elif runargs[i].startswith('--cwd'):
564 elif runargs[i].startswith('--cwd'):
565 del runargs[i:i+2]
565 del runargs[i:i+2]
566 break
566 break
567 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
567 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
568 runargs[0], runargs)
568 runargs[0], runargs)
569 os.close(wfd)
569 os.close(wfd)
570 os.read(rfd, 1)
570 os.read(rfd, 1)
571 if parentfn:
571 if parentfn:
572 return parentfn(pid)
572 return parentfn(pid)
573 else:
573 else:
574 os._exit(0)
574 os._exit(0)
575
575
576 if initfn:
576 if initfn:
577 initfn()
577 initfn()
578
578
579 if opts['pid_file']:
579 if opts['pid_file']:
580 fp = open(opts['pid_file'], 'w')
580 fp = open(opts['pid_file'], 'w')
581 fp.write(str(os.getpid()) + '\n')
581 fp.write(str(os.getpid()) + '\n')
582 fp.close()
582 fp.close()
583
583
584 if opts['daemon_pipefds']:
584 if opts['daemon_pipefds']:
585 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
585 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
586 os.close(rfd)
586 os.close(rfd)
587 try:
587 try:
588 os.setsid()
588 os.setsid()
589 except AttributeError:
589 except AttributeError:
590 pass
590 pass
591 os.write(wfd, 'y')
591 os.write(wfd, 'y')
592 os.close(wfd)
592 os.close(wfd)
593 sys.stdout.flush()
593 sys.stdout.flush()
594 sys.stderr.flush()
594 sys.stderr.flush()
595
595
596 nullfd = os.open(util.nulldev, os.O_RDWR)
596 nullfd = os.open(util.nulldev, os.O_RDWR)
597 logfilefd = nullfd
597 logfilefd = nullfd
598 if logfile:
598 if logfile:
599 logfilefd = os.open(logfile, os.O_RDWR | os.O_CREAT | os.O_APPEND)
599 logfilefd = os.open(logfile, os.O_RDWR | os.O_CREAT | os.O_APPEND)
600 os.dup2(nullfd, 0)
600 os.dup2(nullfd, 0)
601 os.dup2(logfilefd, 1)
601 os.dup2(logfilefd, 1)
602 os.dup2(logfilefd, 2)
602 os.dup2(logfilefd, 2)
603 if nullfd not in (0, 1, 2):
603 if nullfd not in (0, 1, 2):
604 os.close(nullfd)
604 os.close(nullfd)
605 if logfile and logfilefd not in (0, 1, 2):
605 if logfile and logfilefd not in (0, 1, 2):
606 os.close(logfilefd)
606 os.close(logfilefd)
607
607
608 if runfn:
608 if runfn:
609 return runfn()
609 return runfn()
610
610
611 class changeset_printer(object):
611 class changeset_printer(object):
612 '''show changeset information when templating not requested.'''
612 '''show changeset information when templating not requested.'''
613
613
614 def __init__(self, ui, repo, patch, diffopts, buffered):
614 def __init__(self, ui, repo, patch, diffopts, buffered):
615 self.ui = ui
615 self.ui = ui
616 self.repo = repo
616 self.repo = repo
617 self.buffered = buffered
617 self.buffered = buffered
618 self.patch = patch
618 self.patch = patch
619 self.diffopts = diffopts
619 self.diffopts = diffopts
620 self.header = {}
620 self.header = {}
621 self.hunk = {}
621 self.hunk = {}
622 self.lastheader = None
622 self.lastheader = None
623
623
624 def flush(self, rev):
624 def flush(self, rev):
625 if rev in self.header:
625 if rev in self.header:
626 h = self.header[rev]
626 h = self.header[rev]
627 if h != self.lastheader:
627 if h != self.lastheader:
628 self.lastheader = h
628 self.lastheader = h
629 self.ui.write(h)
629 self.ui.write(h)
630 del self.header[rev]
630 del self.header[rev]
631 if rev in self.hunk:
631 if rev in self.hunk:
632 self.ui.write(self.hunk[rev])
632 self.ui.write(self.hunk[rev])
633 del self.hunk[rev]
633 del self.hunk[rev]
634 return 1
634 return 1
635 return 0
635 return 0
636
636
637 def show(self, ctx, copies=(), **props):
637 def show(self, ctx, copies=(), **props):
638 if self.buffered:
638 if self.buffered:
639 self.ui.pushbuffer()
639 self.ui.pushbuffer()
640 self._show(ctx, copies, props)
640 self._show(ctx, copies, props)
641 self.hunk[ctx.rev()] = self.ui.popbuffer()
641 self.hunk[ctx.rev()] = self.ui.popbuffer()
642 else:
642 else:
643 self._show(ctx, copies, props)
643 self._show(ctx, copies, props)
644
644
645 def _show(self, ctx, copies, props):
645 def _show(self, ctx, copies, props):
646 '''show a single changeset or file revision'''
646 '''show a single changeset or file revision'''
647 changenode = ctx.node()
647 changenode = ctx.node()
648 rev = ctx.rev()
648 rev = ctx.rev()
649
649
650 if self.ui.quiet:
650 if self.ui.quiet:
651 self.ui.write("%d:%s\n" % (rev, short(changenode)))
651 self.ui.write("%d:%s\n" % (rev, short(changenode)))
652 return
652 return
653
653
654 log = self.repo.changelog
654 log = self.repo.changelog
655 date = util.datestr(ctx.date())
655 date = util.datestr(ctx.date())
656
656
657 hexfunc = self.ui.debugflag and hex or short
657 hexfunc = self.ui.debugflag and hex or short
658
658
659 parents = [(p, hexfunc(log.node(p)))
659 parents = [(p, hexfunc(log.node(p)))
660 for p in self._meaningful_parentrevs(log, rev)]
660 for p in self._meaningful_parentrevs(log, rev)]
661
661
662 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))
662 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))
663
663
664 branch = ctx.branch()
664 branch = ctx.branch()
665 # don't show the default branch name
665 # don't show the default branch name
666 if branch != 'default':
666 if branch != 'default':
667 branch = encoding.tolocal(branch)
667 branch = encoding.tolocal(branch)
668 self.ui.write(_("branch: %s\n") % branch)
668 self.ui.write(_("branch: %s\n") % branch)
669 for tag in self.repo.nodetags(changenode):
669 for tag in self.repo.nodetags(changenode):
670 self.ui.write(_("tag: %s\n") % tag)
670 self.ui.write(_("tag: %s\n") % tag)
671 for parent in parents:
671 for parent in parents:
672 self.ui.write(_("parent: %d:%s\n") % parent)
672 self.ui.write(_("parent: %d:%s\n") % parent)
673
673
674 if self.ui.debugflag:
674 if self.ui.debugflag:
675 mnode = ctx.manifestnode()
675 mnode = ctx.manifestnode()
676 self.ui.write(_("manifest: %d:%s\n") %
676 self.ui.write(_("manifest: %d:%s\n") %
677 (self.repo.manifest.rev(mnode), hex(mnode)))
677 (self.repo.manifest.rev(mnode), hex(mnode)))
678 self.ui.write(_("user: %s\n") % ctx.user())
678 self.ui.write(_("user: %s\n") % ctx.user())
679 self.ui.write(_("date: %s\n") % date)
679 self.ui.write(_("date: %s\n") % date)
680
680
681 if self.ui.debugflag:
681 if self.ui.debugflag:
682 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
682 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
683 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
683 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
684 files):
684 files):
685 if value:
685 if value:
686 self.ui.write("%-12s %s\n" % (key, " ".join(value)))
686 self.ui.write("%-12s %s\n" % (key, " ".join(value)))
687 elif ctx.files() and self.ui.verbose:
687 elif ctx.files() and self.ui.verbose:
688 self.ui.write(_("files: %s\n") % " ".join(ctx.files()))
688 self.ui.write(_("files: %s\n") % " ".join(ctx.files()))
689 if copies and self.ui.verbose:
689 if copies and self.ui.verbose:
690 copies = ['%s (%s)' % c for c in copies]
690 copies = ['%s (%s)' % c for c in copies]
691 self.ui.write(_("copies: %s\n") % ' '.join(copies))
691 self.ui.write(_("copies: %s\n") % ' '.join(copies))
692
692
693 extra = ctx.extra()
693 extra = ctx.extra()
694 if extra and self.ui.debugflag:
694 if extra and self.ui.debugflag:
695 for key, value in sorted(extra.items()):
695 for key, value in sorted(extra.items()):
696 self.ui.write(_("extra: %s=%s\n")
696 self.ui.write(_("extra: %s=%s\n")
697 % (key, value.encode('string_escape')))
697 % (key, value.encode('string_escape')))
698
698
699 description = ctx.description().strip()
699 description = ctx.description().strip()
700 if description:
700 if description:
701 if self.ui.verbose:
701 if self.ui.verbose:
702 self.ui.write(_("description:\n"))
702 self.ui.write(_("description:\n"))
703 self.ui.write(description)
703 self.ui.write(description)
704 self.ui.write("\n\n")
704 self.ui.write("\n\n")
705 else:
705 else:
706 self.ui.write(_("summary: %s\n") %
706 self.ui.write(_("summary: %s\n") %
707 description.splitlines()[0])
707 description.splitlines()[0])
708 self.ui.write("\n")
708 self.ui.write("\n")
709
709
710 self.showpatch(changenode)
710 self.showpatch(changenode)
711
711
712 def showpatch(self, node):
712 def showpatch(self, node):
713 if self.patch:
713 if self.patch:
714 prev = self.repo.changelog.parents(node)[0]
714 prev = self.repo.changelog.parents(node)[0]
715 chunks = patch.diff(self.repo, prev, node, match=self.patch,
715 chunks = patch.diff(self.repo, prev, node, match=self.patch,
716 opts=patch.diffopts(self.ui, self.diffopts))
716 opts=patch.diffopts(self.ui, self.diffopts))
717 for chunk in chunks:
717 for chunk in chunks:
718 self.ui.write(chunk)
718 self.ui.write(chunk)
719 self.ui.write("\n")
719 self.ui.write("\n")
720
720
721 def _meaningful_parentrevs(self, log, rev):
721 def _meaningful_parentrevs(self, log, rev):
722 """Return list of meaningful (or all if debug) parentrevs for rev.
722 """Return list of meaningful (or all if debug) parentrevs for rev.
723
723
724 For merges (two non-nullrev revisions) both parents are meaningful.
724 For merges (two non-nullrev revisions) both parents are meaningful.
725 Otherwise the first parent revision is considered meaningful if it
725 Otherwise the first parent revision is considered meaningful if it
726 is not the preceding revision.
726 is not the preceding revision.
727 """
727 """
728 parents = log.parentrevs(rev)
728 parents = log.parentrevs(rev)
729 if not self.ui.debugflag and parents[1] == nullrev:
729 if not self.ui.debugflag and parents[1] == nullrev:
730 if parents[0] >= rev - 1:
730 if parents[0] >= rev - 1:
731 parents = []
731 parents = []
732 else:
732 else:
733 parents = [parents[0]]
733 parents = [parents[0]]
734 return parents
734 return parents
735
735
736
736
737 class changeset_templater(changeset_printer):
737 class changeset_templater(changeset_printer):
738 '''format changeset information.'''
738 '''format changeset information.'''
739
739
740 def __init__(self, ui, repo, patch, diffopts, mapfile, buffered):
740 def __init__(self, ui, repo, patch, diffopts, mapfile, buffered):
741 changeset_printer.__init__(self, ui, repo, patch, diffopts, buffered)
741 changeset_printer.__init__(self, ui, repo, patch, diffopts, buffered)
742 formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
742 formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
743 self.t = templater.templater(mapfile, {'formatnode': formatnode},
743 self.t = templater.templater(mapfile, {'formatnode': formatnode},
744 cache={
744 cache={
745 'parent': '{rev}:{node|formatnode} ',
745 'parent': '{rev}:{node|formatnode} ',
746 'manifest': '{rev}:{node|formatnode}',
746 'manifest': '{rev}:{node|formatnode}',
747 'filecopy': '{name} ({source})'})
747 'filecopy': '{name} ({source})'})
748 # Cache mapping from rev to a tuple with tag date, tag
748 # Cache mapping from rev to a tuple with tag date, tag
749 # distance and tag name
749 # distance and tag name
750 self._latesttagcache = {-1: (0, 0, 'null')}
750 self._latesttagcache = {-1: (0, 0, 'null')}
751
751
752 def use_template(self, t):
752 def use_template(self, t):
753 '''set template string to use'''
753 '''set template string to use'''
754 self.t.cache['changeset'] = t
754 self.t.cache['changeset'] = t
755
755
756 def _meaningful_parentrevs(self, ctx):
756 def _meaningful_parentrevs(self, ctx):
757 """Return list of meaningful (or all if debug) parentrevs for rev.
757 """Return list of meaningful (or all if debug) parentrevs for rev.
758 """
758 """
759 parents = ctx.parents()
759 parents = ctx.parents()
760 if len(parents) > 1:
760 if len(parents) > 1:
761 return parents
761 return parents
762 if self.ui.debugflag:
762 if self.ui.debugflag:
763 return [parents[0], self.repo['null']]
763 return [parents[0], self.repo['null']]
764 if parents[0].rev() >= ctx.rev() - 1:
764 if parents[0].rev() >= ctx.rev() - 1:
765 return []
765 return []
766 return parents
766 return parents
767
767
768 def _latesttaginfo(self, rev):
768 def _latesttaginfo(self, rev):
769 '''return date, distance and name for the latest tag of rev'''
769 '''return date, distance and name for the latest tag of rev'''
770 todo = [rev]
770 todo = [rev]
771 while todo:
771 while todo:
772 rev = todo.pop()
772 rev = todo.pop()
773 if rev in self._latesttagcache:
773 if rev in self._latesttagcache:
774 continue
774 continue
775 ctx = self.repo[rev]
775 ctx = self.repo[rev]
776 tags = [t for t in ctx.tags() if self.repo.tagtype(t) == 'global']
776 tags = [t for t in ctx.tags() if self.repo.tagtype(t) == 'global']
777 if tags:
777 if tags:
778 self._latesttagcache[rev] = ctx.date()[0], 0, ':'.join(sorted(tags))
778 self._latesttagcache[rev] = ctx.date()[0], 0, ':'.join(sorted(tags))
779 continue
779 continue
780 try:
780 try:
781 # The tuples are laid out so the right one can be found by comparison.
781 # The tuples are laid out so the right one can be found by comparison.
782 pdate, pdist, ptag = max(
782 pdate, pdist, ptag = max(
783 self._latesttagcache[p.rev()] for p in ctx.parents())
783 self._latesttagcache[p.rev()] for p in ctx.parents())
784 except KeyError:
784 except KeyError:
785 # Cache miss - recurse
785 # Cache miss - recurse
786 todo.append(rev)
786 todo.append(rev)
787 todo.extend(p.rev() for p in ctx.parents())
787 todo.extend(p.rev() for p in ctx.parents())
788 continue
788 continue
789 self._latesttagcache[rev] = pdate, pdist + 1, ptag
789 self._latesttagcache[rev] = pdate, pdist + 1, ptag
790 return self._latesttagcache[rev]
790 return self._latesttagcache[rev]
791
791
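# Illustration, not part of the changeset: the cache above backs the
# {latesttag} and {latesttagdistance} keywords registered in defprops
# below, e.g.
#
#   hg log -r . --template '{latesttag}+{latesttagdistance}\n'
#
# prints something like "1.3.1+7": the nearest global tag among the
# ancestors and how many changesets separate it from the current one.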
792 def _show(self, ctx, copies, props):
792 def _show(self, ctx, copies, props):
793 '''show a single changeset or file revision'''
793 '''show a single changeset or file revision'''
794
794
795 def showlist(name, values, plural=None, **args):
795 def showlist(name, values, plural=None, **args):
796 '''expand set of values.
796 '''expand set of values.
797 name is name of key in template map.
797 name is name of key in template map.
798 values is list of strings or dicts.
798 values is list of strings or dicts.
799 plural is plural of name, if not simply name + 's'.
799 plural is plural of name, if not simply name + 's'.
800
800
801 expansion works like this, given name 'foo'.
801 expansion works like this, given name 'foo'.
802
802
803 if values is empty, expand 'no_foos'.
803 if values is empty, expand 'no_foos'.
804
804
805 if 'foo' not in template map, return values as a string,
805 if 'foo' not in template map, return values as a string,
806 joined by space.
806 joined by space.
807
807
808 expand 'start_foos'.
808 expand 'start_foos'.
809
809
810 for each value, expand 'foo'. if 'last_foo' in template
810 for each value, expand 'foo'. if 'last_foo' in template
811 map, expand it instead of 'foo' for last key.
811 map, expand it instead of 'foo' for last key.
812
812
813 expand 'end_foos'.
813 expand 'end_foos'.
814 '''
814 '''
815 if plural: names = plural
815 if plural: names = plural
816 else: names = name + 's'
816 else: names = name + 's'
817 if not values:
817 if not values:
818 noname = 'no_' + names
818 noname = 'no_' + names
819 if noname in self.t:
819 if noname in self.t:
820 yield self.t(noname, **args)
820 yield self.t(noname, **args)
821 return
821 return
822 if name not in self.t:
822 if name not in self.t:
823 if isinstance(values[0], str):
823 if isinstance(values[0], str):
824 yield ' '.join(values)
824 yield ' '.join(values)
825 else:
825 else:
826 for v in values:
826 for v in values:
827 yield dict(v, **args)
827 yield dict(v, **args)
828 return
828 return
829 startname = 'start_' + names
829 startname = 'start_' + names
830 if startname in self.t:
830 if startname in self.t:
831 yield self.t(startname, **args)
831 yield self.t(startname, **args)
832 vargs = args.copy()
832 vargs = args.copy()
833 def one(v, tag=name):
833 def one(v, tag=name):
834 try:
834 try:
835 vargs.update(v)
835 vargs.update(v)
836 except (AttributeError, ValueError):
836 except (AttributeError, ValueError):
837 try:
837 try:
838 for a, b in v:
838 for a, b in v:
839 vargs[a] = b
839 vargs[a] = b
840 except ValueError:
840 except ValueError:
841 vargs[name] = v
841 vargs[name] = v
842 return self.t(tag, **vargs)
842 return self.t(tag, **vargs)
843 lastname = 'last_' + name
843 lastname = 'last_' + name
844 if lastname in self.t:
844 if lastname in self.t:
845 last = values.pop()
845 last = values.pop()
846 else:
846 else:
847 last = None
847 last = None
848 for v in values:
848 for v in values:
849 yield one(v)
849 yield one(v)
850 if last is not None:
850 if last is not None:
851 yield one(last, tag=lastname)
851 yield one(last, tag=lastname)
852 endname = 'end_' + names
852 endname = 'end_' + names
853 if endname in self.t:
853 if endname in self.t:
854 yield self.t(endname, **args)
854 yield self.t(endname, **args)
855
855
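# Illustration, not part of the changeset: for name 'file' and values
# ['a', 'b'], a style map containing
#
#   start_files = 'files: '
#   file = '{file} '
#   last_file = '{file}\n'
#
# makes showlist('file', ['a', 'b']) expand to "files: a b\n"; without a
# 'file' entry in the map the fallback is simply "a b".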
856 def showbranches(**args):
856 def showbranches(**args):
857 branch = ctx.branch()
857 branch = ctx.branch()
858 if branch != 'default':
858 if branch != 'default':
859 branch = encoding.tolocal(branch)
859 branch = encoding.tolocal(branch)
860 return showlist('branch', [branch], plural='branches', **args)
860 return showlist('branch', [branch], plural='branches', **args)
861
861
862 def showparents(**args):
862 def showparents(**args):
863 parents = [[('rev', p.rev()), ('node', p.hex())]
863 parents = [[('rev', p.rev()), ('node', p.hex())]
864 for p in self._meaningful_parentrevs(ctx)]
864 for p in self._meaningful_parentrevs(ctx)]
865 return showlist('parent', parents, **args)
865 return showlist('parent', parents, **args)
866
866
867 def showtags(**args):
867 def showtags(**args):
868 return showlist('tag', ctx.tags(), **args)
868 return showlist('tag', ctx.tags(), **args)
869
869
870 def showextras(**args):
870 def showextras(**args):
871 for key, value in sorted(ctx.extra().items()):
871 for key, value in sorted(ctx.extra().items()):
872 args = args.copy()
872 args = args.copy()
873 args.update(dict(key=key, value=value))
873 args.update(dict(key=key, value=value))
874 yield self.t('extra', **args)
874 yield self.t('extra', **args)
875
875
876 def showcopies(**args):
876 def showcopies(**args):
877 c = [{'name': x[0], 'source': x[1]} for x in copies]
877 c = [{'name': x[0], 'source': x[1]} for x in copies]
878 return showlist('file_copy', c, plural='file_copies', **args)
878 return showlist('file_copy', c, plural='file_copies', **args)
879
879
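# Illustration, not part of the changeset: a map entry such as
# file_copy = '{name} ({source})' renders a changeset that copied 'a'
# to 'b' as "b (a)" whenever copies are passed in (e.g. hg log --copies).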
880 files = []
880 files = []
881 def getfiles():
881 def getfiles():
882 if not files:
882 if not files:
883 files[:] = self.repo.status(ctx.parents()[0].node(),
883 files[:] = self.repo.status(ctx.parents()[0].node(),
884 ctx.node())[:3]
884 ctx.node())[:3]
885 return files
885 return files
886 def showfiles(**args):
886 def showfiles(**args):
887 return showlist('file', ctx.files(), **args)
887 return showlist('file', ctx.files(), **args)
888 def showmods(**args):
888 def showmods(**args):
889 return showlist('file_mod', getfiles()[0], **args)
889 return showlist('file_mod', getfiles()[0], **args)
890 def showadds(**args):
890 def showadds(**args):
891 return showlist('file_add', getfiles()[1], **args)
891 return showlist('file_add', getfiles()[1], **args)
892 def showdels(**args):
892 def showdels(**args):
893 return showlist('file_del', getfiles()[2], **args)
893 return showlist('file_del', getfiles()[2], **args)
894 def showmanifest(**args):
894 def showmanifest(**args):
895 args = args.copy()
895 args = args.copy()
896 args.update(dict(rev=self.repo.manifest.rev(ctx.changeset()[0]),
896 args.update(dict(rev=self.repo.manifest.rev(ctx.changeset()[0]),
897 node=hex(ctx.changeset()[0])))
897 node=hex(ctx.changeset()[0])))
898 return self.t('manifest', **args)
898 return self.t('manifest', **args)
899
899
900 def showdiffstat(**args):
900 def showdiffstat(**args):
901 diff = patch.diff(self.repo, ctx.parents()[0].node(), ctx.node())
901 diff = patch.diff(self.repo, ctx.parents()[0].node(), ctx.node())
902 files, adds, removes = 0, 0, 0
902 files, adds, removes = 0, 0, 0
903 for i in patch.diffstatdata(util.iterlines(diff)):
903 for i in patch.diffstatdata(util.iterlines(diff)):
904 files += 1
904 files += 1
905 adds += i[1]
905 adds += i[1]
906 removes += i[2]
906 removes += i[2]
907 return '%s: +%s/-%s' % (files, adds, removes)
907 return '%s: +%s/-%s' % (files, adds, removes)
908
908
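# Illustration, not part of the changeset: {diffstat} renders as
# "<files>: +<added>/-<removed>", e.g. a changeset touching 3 files with
# 12 lines added and 4 removed shows up as "3: +12/-4".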
909 def showlatesttag(**args):
909 def showlatesttag(**args):
910 return self._latesttaginfo(ctx.rev())[2]
910 return self._latesttaginfo(ctx.rev())[2]
911 def showlatesttagdistance(**args):
911 def showlatesttagdistance(**args):
912 return self._latesttaginfo(ctx.rev())[1]
912 return self._latesttaginfo(ctx.rev())[1]
913
913
914 defprops = {
914 defprops = {
915 'author': ctx.user(),
915 'author': ctx.user(),
916 'branches': showbranches,
916 'branches': showbranches,
917 'date': ctx.date(),
917 'date': ctx.date(),
918 'desc': ctx.description().strip(),
918 'desc': ctx.description().strip(),
919 'file_adds': showadds,
919 'file_adds': showadds,
920 'file_dels': showdels,
920 'file_dels': showdels,
921 'file_mods': showmods,
921 'file_mods': showmods,
922 'files': showfiles,
922 'files': showfiles,
923 'file_copies': showcopies,
923 'file_copies': showcopies,
924 'manifest': showmanifest,
924 'manifest': showmanifest,
925 'node': ctx.hex(),
925 'node': ctx.hex(),
926 'parents': showparents,
926 'parents': showparents,
927 'rev': ctx.rev(),
927 'rev': ctx.rev(),
928 'tags': showtags,
928 'tags': showtags,
929 'extras': showextras,
929 'extras': showextras,
930 'diffstat': showdiffstat,
930 'diffstat': showdiffstat,
931 'latesttag': showlatesttag,
931 'latesttag': showlatesttag,
932 'latesttagdistance': showlatesttagdistance,
932 'latesttagdistance': showlatesttagdistance,
933 }
933 }
934 props = props.copy()
934 props = props.copy()
935 props.update(defprops)
935 props.update(defprops)
936
936
937 # find correct templates for current mode
937 # find correct templates for current mode
938
938
939 tmplmodes = [
939 tmplmodes = [
940 (True, None),
940 (True, None),
941 (self.ui.verbose, 'verbose'),
941 (self.ui.verbose, 'verbose'),
942 (self.ui.quiet, 'quiet'),
942 (self.ui.quiet, 'quiet'),
943 (self.ui.debugflag, 'debug'),
943 (self.ui.debugflag, 'debug'),
944 ]
944 ]
945
945
946 types = {'header': '', 'changeset': 'changeset'}
946 types = {'header': '', 'changeset': 'changeset'}
947 for mode, postfix in tmplmodes:
947 for mode, postfix in tmplmodes:
948 for type in types:
948 for type in types:
949 cur = postfix and ('%s_%s' % (type, postfix)) or type
949 cur = postfix and ('%s_%s' % (type, postfix)) or type
950 if mode and cur in self.t:
950 if mode and cur in self.t:
951 types[type] = cur
951 types[type] = cur
952
952
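# Illustration, not part of the changeset: with --verbose, a map defining
# both 'changeset' and 'changeset_verbose' picks the latter; --quiet and
# --debug pick 'changeset_quiet' and 'changeset_debug' the same way,
# falling back to plain 'changeset' when the suffixed key is missing.
# The 'header' key is resolved with the same suffix rules.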
953 try:
953 try:
954
954
955 # write header
955 # write header
956 if types['header']:
956 if types['header']:
957 h = templater.stringify(self.t(types['header'], **props))
957 h = templater.stringify(self.t(types['header'], **props))
958 if self.buffered:
958 if self.buffered:
959 self.header[ctx.rev()] = h
959 self.header[ctx.rev()] = h
960 else:
960 else:
961 self.ui.write(h)
961 self.ui.write(h)
962
962
963 # write changeset metadata, then patch if requested
963 # write changeset metadata, then patch if requested
964 key = types['changeset']
964 key = types['changeset']
965 self.ui.write(templater.stringify(self.t(key, **props)))
965 self.ui.write(templater.stringify(self.t(key, **props)))
966 self.showpatch(ctx.node())
966 self.showpatch(ctx.node())
967
967
968 except KeyError, inst:
968 except KeyError, inst:
969 msg = _("%s: no key named '%s'")
969 msg = _("%s: no key named '%s'")
970 raise util.Abort(msg % (self.t.mapfile, inst.args[0]))
970 raise util.Abort(msg % (self.t.mapfile, inst.args[0]))
971 except SyntaxError, inst:
971 except SyntaxError, inst:
972 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
972 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
973
973
974 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
974 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
975 """show one changeset using template or regular display.
975 """show one changeset using template or regular display.
976
976
977 Display format will be the first non-empty hit of:
977 Display format will be the first non-empty hit of:
978 1. option 'template'
978 1. option 'template'
979 2. option 'style'
979 2. option 'style'
980 3. [ui] setting 'logtemplate'
980 3. [ui] setting 'logtemplate'
981 4. [ui] setting 'style'
981 4. [ui] setting 'style'
982 If all of these values are either unset or the empty string,
982 If all of these values are either unset or the empty string,
983 regular display via changeset_printer() is done.
983 regular display via changeset_printer() is done.
984 """
984 """
985 # options
985 # options
986 patch = False
986 patch = False
987 if opts.get('patch'):
987 if opts.get('patch'):
988 patch = matchfn or matchall(repo)
988 patch = matchfn or matchall(repo)
989
989
990 tmpl = opts.get('template')
990 tmpl = opts.get('template')
991 style = None
991 style = None
992 if tmpl:
992 if tmpl:
993 tmpl = templater.parsestring(tmpl, quoted=False)
993 tmpl = templater.parsestring(tmpl, quoted=False)
994 else:
994 else:
995 style = opts.get('style')
995 style = opts.get('style')
996
996
997 # ui settings
997 # ui settings
998 if not (tmpl or style):
998 if not (tmpl or style):
999 tmpl = ui.config('ui', 'logtemplate')
999 tmpl = ui.config('ui', 'logtemplate')
1000 if tmpl:
1000 if tmpl:
1001 tmpl = templater.parsestring(tmpl)
1001 tmpl = templater.parsestring(tmpl)
1002 else:
1002 else:
1003 style = ui.config('ui', 'style')
1003 style = ui.config('ui', 'style')
1004
1004
1005 if not (tmpl or style):
1005 if not (tmpl or style):
1006 return changeset_printer(ui, repo, patch, opts, buffered)
1006 return changeset_printer(ui, repo, patch, opts, buffered)
1007
1007
1008 mapfile = None
1008 mapfile = None
1009 if style and not tmpl:
1009 if style and not tmpl:
1010 mapfile = style
1010 mapfile = style
1011 if not os.path.split(mapfile)[0]:
1011 if not os.path.split(mapfile)[0]:
1012 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1012 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1013 or templater.templatepath(mapfile))
1013 or templater.templatepath(mapfile))
1014 if mapname: mapfile = mapname
1014 if mapname: mapfile = mapname
1015
1015
1016 try:
1016 try:
1017 t = changeset_templater(ui, repo, patch, opts, mapfile, buffered)
1017 t = changeset_templater(ui, repo, patch, opts, mapfile, buffered)
1018 except SyntaxError, inst:
1018 except SyntaxError, inst:
1019 raise util.Abort(inst.args[0])
1019 raise util.Abort(inst.args[0])
1020 if tmpl: t.use_template(tmpl)
1020 if tmpl: t.use_template(tmpl)
1021 return t
1021 return t
1022
1022
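# Illustration, not part of the changeset: the precedence above means a
# command-line --template or --style always wins over hgrc. With only
#
#   [ui]
#   logtemplate = "{rev}:{node|short} {desc|firstline}\n"
#
# configured, plain 'hg log' uses that template, while
# 'hg log --style compact' overrides it for that one invocation.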
1023 def finddate(ui, repo, date):
1023 def finddate(ui, repo, date):
1024 """Find the tipmost changeset that matches the given date spec"""
1024 """Find the tipmost changeset that matches the given date spec"""
1025 df = util.matchdate(date)
1025 df = util.matchdate(date)
1026 m = matchall(repo)
1026 m = matchall(repo)
1027 results = {}
1027 results = {}
1028
1028
1029 def prep(ctx, fns):
1029 def prep(ctx, fns):
1030 d = ctx.date()
1030 d = ctx.date()
1031 if df(d[0]):
1031 if df(d[0]):
1032 results[rev] = d
1032 results[rev] = d
1033
1033
1034 for ctx in walkchangerevs(ui, repo, m, {'rev':None}, prep):
1034 for ctx in walkchangerevs(repo, m, {'rev':None}, prep):
1035 rev = ctx.rev()
1035 rev = ctx.rev()
1036 if rev in results:
1036 if rev in results:
1037 ui.status(_("Found revision %s from %s\n") %
1037 ui.status(_("Found revision %s from %s\n") %
1038 (rev, util.datestr(results[rev])))
1038 (rev, util.datestr(results[rev])))
1039 return str(rev)
1039 return str(rev)
1040
1040
1041 raise util.Abort(_("revision matching date not found"))
1041 raise util.Abort(_("revision matching date not found"))
1042
1042
1043 def walkchangerevs(ui, repo, match, opts, prepare):
1043 def walkchangerevs(repo, match, opts, prepare):
1044 '''Iterate over files and the revs in which they changed.
1044 '''Iterate over files and the revs in which they changed.
1045
1045
1046 Callers most commonly need to iterate backwards over the history
1046 Callers most commonly need to iterate backwards over the history
1047 in which they are interested. Doing so has awful (quadratic-looking)
1047 in which they are interested. Doing so has awful (quadratic-looking)
1048 performance, so we use iterators in a "windowed" way.
1048 performance, so we use iterators in a "windowed" way.
1049
1049
1050 We walk a window of revisions in the desired order. Within the
1050 We walk a window of revisions in the desired order. Within the
1051 window, we first walk forwards to gather data, then in the desired
1051 window, we first walk forwards to gather data, then in the desired
1052 order (usually backwards) to display it.
1052 order (usually backwards) to display it.
1053
1053
1054 This function returns an iterator yielding contexts. Before
1054 This function returns an iterator yielding contexts. Before
1055 yielding each context, the iterator will first call the prepare
1055 yielding each context, the iterator will first call the prepare
1056 function on each context in the window in forward order.'''
1056 function on each context in the window in forward order.'''
1057
1057
1058 def increasing_windows(start, end, windowsize=8, sizelimit=512):
1058 def increasing_windows(start, end, windowsize=8, sizelimit=512):
1059 if start < end:
1059 if start < end:
1060 while start < end:
1060 while start < end:
1061 yield start, min(windowsize, end-start)
1061 yield start, min(windowsize, end-start)
1062 start += windowsize
1062 start += windowsize
1063 if windowsize < sizelimit:
1063 if windowsize < sizelimit:
1064 windowsize *= 2
1064 windowsize *= 2
1065 else:
1065 else:
1066 while start > end:
1066 while start > end:
1067 yield start, min(windowsize, start-end-1)
1067 yield start, min(windowsize, start-end-1)
1068 start -= windowsize
1068 start -= windowsize
1069 if windowsize < sizelimit:
1069 if windowsize < sizelimit:
1070 windowsize *= 2
1070 windowsize *= 2
1071
1071
1072 follow = opts.get('follow') or opts.get('follow_first')
1072 follow = opts.get('follow') or opts.get('follow_first')
1073
1073
1074 if not len(repo):
1074 if not len(repo):
1075 return []
1075 return []
1076
1076
1077 if follow:
1077 if follow:
1078 defrange = '%s:0' % repo['.'].rev()
1078 defrange = '%s:0' % repo['.'].rev()
1079 else:
1079 else:
1080 defrange = '-1:0'
1080 defrange = '-1:0'
1081 revs = revrange(repo, opts['rev'] or [defrange])
1081 revs = revrange(repo, opts['rev'] or [defrange])
1082 wanted = set()
1082 wanted = set()
1083 slowpath = match.anypats() or (match.files() and opts.get('removed'))
1083 slowpath = match.anypats() or (match.files() and opts.get('removed'))
1084 fncache = {}
1084 fncache = {}
1085 change = util.cachefunc(repo.changectx)
1085 change = util.cachefunc(repo.changectx)
1086
1086
1087 if not slowpath and not match.files():
1087 if not slowpath and not match.files():
1088 # No files, no patterns. Display all revs.
1088 # No files, no patterns. Display all revs.
1089 wanted = set(revs)
1089 wanted = set(revs)
1090 copies = []
1090 copies = []
1091
1091 if not slowpath:
1092 if not slowpath:
1092 # Only files, no patterns. Check the history of each file.
1093 # Only files, no patterns. Check the history of each file.
1093 # Only files, no patterns. Check the history of each file.
1093 def filerevgen(filelog, node):
1094 def filerevgen(filelog, node):
1094 cl_count = len(repo)
1095 cl_count = len(repo)
1095 if node is None:
1096 if node is None:
1096 last = len(filelog) - 1
1097 last = len(filelog) - 1
1097 else:
1098 else:
1098 last = filelog.rev(node)
1099 last = filelog.rev(node)
1099 for i, window in increasing_windows(last, nullrev):
1100 for i, window in increasing_windows(last, nullrev):
1100 revs = []
1101 revs = []
1101 for j in xrange(i - window, i + 1):
1102 for j in xrange(i - window, i + 1):
1102 n = filelog.node(j)
1103 n = filelog.node(j)
1103 revs.append((filelog.linkrev(j),
1104 revs.append((filelog.linkrev(j),
1104 follow and filelog.renamed(n)))
1105 follow and filelog.renamed(n)))
1105 for rev in reversed(revs):
1106 for rev in reversed(revs):
1106 # only yield rev for which we have the changelog, it can
1107 # only yield rev for which we have the changelog, it can
1107 # happen while doing "hg log" during a pull or commit
1108 # happen while doing "hg log" during a pull or commit
1108 if rev[0] < cl_count:
1109 if rev[0] < cl_count:
1109 yield rev
1110 yield rev
1110 def iterfiles():
1111 def iterfiles():
1111 for filename in match.files():
1112 for filename in match.files():
1112 yield filename, None
1113 yield filename, None
1113 for filename_node in copies:
1114 for filename_node in copies:
1114 yield filename_node
1115 yield filename_node
1115 minrev, maxrev = min(revs), max(revs)
1116 minrev, maxrev = min(revs), max(revs)
1116 for file_, node in iterfiles():
1117 for file_, node in iterfiles():
1117 filelog = repo.file(file_)
1118 filelog = repo.file(file_)
1118 if not len(filelog):
1119 if not len(filelog):
1119 if node is None:
1120 if node is None:
1120 # A zero count may be a directory or deleted file, so
1121 # A zero count may be a directory or deleted file, so
1121 # try to find matching entries on the slow path.
1122 # try to find matching entries on the slow path.
1122 if follow:
1123 if follow:
1123 raise util.Abort(_('cannot follow nonexistent file: "%s"') % file_)
1124 raise util.Abort(_('cannot follow nonexistent file: "%s"') % file_)
1124 slowpath = True
1125 slowpath = True
1125 break
1126 break
1126 else:
1127 else:
1127 ui.warn(_('%s:%s copy source revision cannot be found!\n')
1128 % (file_, short(node)))
1129 continue
1128 continue
1130 for rev, copied in filerevgen(filelog, node):
1129 for rev, copied in filerevgen(filelog, node):
1131 if rev <= maxrev:
1130 if rev <= maxrev:
1132 if rev < minrev:
1131 if rev < minrev:
1133 break
1132 break
1134 fncache.setdefault(rev, [])
1133 fncache.setdefault(rev, [])
1135 fncache[rev].append(file_)
1134 fncache[rev].append(file_)
1136 wanted.add(rev)
1135 wanted.add(rev)
1137 if follow and copied:
1136 if follow and copied:
1138 copies.append(copied)
1137 copies.append(copied)
1139 if slowpath:
1138 if slowpath:
1140 if follow:
1139 if follow:
1141 raise util.Abort(_('can only follow copies/renames for explicit '
1140 raise util.Abort(_('can only follow copies/renames for explicit '
1142 'filenames'))
1141 'filenames'))
1143
1142
1144 # The slow path checks files modified in every changeset.
1143 # The slow path checks files modified in every changeset.
1145 def changerevgen():
1144 def changerevgen():
1146 for i, window in increasing_windows(len(repo) - 1, nullrev):
1145 for i, window in increasing_windows(len(repo) - 1, nullrev):
1147 for j in xrange(i - window, i + 1):
1146 for j in xrange(i - window, i + 1):
1148 yield change(j)
1147 yield change(j)
1149
1148
1150 for ctx in changerevgen():
1149 for ctx in changerevgen():
1151 matches = filter(match, ctx.files())
1150 matches = filter(match, ctx.files())
1152 if matches:
1151 if matches:
1153 fncache[ctx.rev()] = matches
1152 fncache[ctx.rev()] = matches
1154 wanted.add(ctx.rev())
1153 wanted.add(ctx.rev())
1155
1154
1156 class followfilter(object):
1155 class followfilter(object):
1157 def __init__(self, onlyfirst=False):
1156 def __init__(self, onlyfirst=False):
1158 self.startrev = nullrev
1157 self.startrev = nullrev
1159 self.roots = []
1158 self.roots = []
1160 self.onlyfirst = onlyfirst
1159 self.onlyfirst = onlyfirst
1161
1160
1162 def match(self, rev):
1161 def match(self, rev):
1163 def realparents(rev):
1162 def realparents(rev):
1164 if self.onlyfirst:
1163 if self.onlyfirst:
1165 return repo.changelog.parentrevs(rev)[0:1]
1164 return repo.changelog.parentrevs(rev)[0:1]
1166 else:
1165 else:
1167 return filter(lambda x: x != nullrev,
1166 return filter(lambda x: x != nullrev,
1168 repo.changelog.parentrevs(rev))
1167 repo.changelog.parentrevs(rev))
1169
1168
1170 if self.startrev == nullrev:
1169 if self.startrev == nullrev:
1171 self.startrev = rev
1170 self.startrev = rev
1172 return True
1171 return True
1173
1172
1174 if rev > self.startrev:
1173 if rev > self.startrev:
1175 # forward: all descendants
1174 # forward: all descendants
1176 if not self.roots:
1175 if not self.roots:
1177 self.roots.append(self.startrev)
1176 self.roots.append(self.startrev)
1178 for parent in realparents(rev):
1177 for parent in realparents(rev):
1179 if parent in self.roots:
1178 if parent in self.roots:
1180 self.roots.append(rev)
1179 self.roots.append(rev)
1181 return True
1180 return True
1182 else:
1181 else:
1183 # backwards: all parents
1182 # backwards: all parents
1184 if not self.roots:
1183 if not self.roots:
1185 self.roots.extend(realparents(self.startrev))
1184 self.roots.extend(realparents(self.startrev))
1186 if rev in self.roots:
1185 if rev in self.roots:
1187 self.roots.remove(rev)
1186 self.roots.remove(rev)
1188 self.roots.extend(realparents(rev))
1187 self.roots.extend(realparents(rev))
1189 return True
1188 return True
1190
1189
1191 return False
1190 return False
1192
1191
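# Illustration, not part of the changeset: followfilter expects one
# monotonic sweep away from the first revision it sees. Walking forward it
# admits descendants of that start rev (a rev matches when one of its real
# parents has already matched); walking backwards it admits ancestors (a
# rev matches while it is a pending parent, and its own parents become
# pending in turn).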
1193 # it might be worthwhile to do this in the iterator if the rev range
1192 # it might be worthwhile to do this in the iterator if the rev range
1194 # is descending and the prune args are all within that range
1193 # is descending and the prune args are all within that range
1195 for rev in opts.get('prune', ()):
1194 for rev in opts.get('prune', ()):
1196 rev = repo.changelog.rev(repo.lookup(rev))
1195 rev = repo.changelog.rev(repo.lookup(rev))
1197 ff = followfilter()
1196 ff = followfilter()
1198 stop = min(revs[0], revs[-1])
1197 stop = min(revs[0], revs[-1])
1199 for x in xrange(rev, stop-1, -1):
1198 for x in xrange(rev, stop-1, -1):
1200 if ff.match(x):
1199 if ff.match(x):
1201 wanted.discard(x)
1200 wanted.discard(x)
1202
1201
1203 def iterate():
1202 def iterate():
1204 if follow and not match.files():
1203 if follow and not match.files():
1205 ff = followfilter(onlyfirst=opts.get('follow_first'))
1204 ff = followfilter(onlyfirst=opts.get('follow_first'))
1206 def want(rev):
1205 def want(rev):
1207 return ff.match(rev) and rev in wanted
1206 return ff.match(rev) and rev in wanted
1208 else:
1207 else:
1209 def want(rev):
1208 def want(rev):
1210 return rev in wanted
1209 return rev in wanted
1211
1210
1212 for i, window in increasing_windows(0, len(revs)):
1211 for i, window in increasing_windows(0, len(revs)):
1213 change = util.cachefunc(repo.changectx)
1212 change = util.cachefunc(repo.changectx)
1214 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
1213 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
1215 for rev in sorted(nrevs):
1214 for rev in sorted(nrevs):
1216 fns = fncache.get(rev)
1215 fns = fncache.get(rev)
1217 ctx = change(rev)
1216 ctx = change(rev)
1218 if not fns:
1217 if not fns:
1219 def fns_generator():
1218 def fns_generator():
1220 for f in ctx.files():
1219 for f in ctx.files():
1221 if match(f):
1220 if match(f):
1222 yield f
1221 yield f
1223 fns = fns_generator()
1222 fns = fns_generator()
1224 prepare(ctx, fns)
1223 prepare(ctx, fns)
1225 for rev in nrevs:
1224 for rev in nrevs:
1226 yield change(rev)
1225 yield change(rev)
1227 return iterate()
1226 return iterate()
1228
1227
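# Usage sketch, not part of the changeset: with the ui argument gone,
# callers now pass only repo, match, opts and prepare (compare the
# finddate() call above); the ui.warn for a missing copy source was
# dropped along with it. A toy caller, assuming `match` comes from
# something like matchall(repo):
def countmerges(repo, match):
    '''count merge changesets among the revisions walked'''
    counter = [0]
    def prep(ctx, fns):
        if len(ctx.parents()) > 1:
            counter[0] += 1
    for ctx in walkchangerevs(repo, match, {'rev': None}, prep):
        pass
    return counter[0]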
1229 def commit(ui, repo, commitfunc, pats, opts):
1228 def commit(ui, repo, commitfunc, pats, opts):
1230 '''commit the specified files or all outstanding changes'''
1229 '''commit the specified files or all outstanding changes'''
1231 date = opts.get('date')
1230 date = opts.get('date')
1232 if date:
1231 if date:
1233 opts['date'] = util.parsedate(date)
1232 opts['date'] = util.parsedate(date)
1234 message = logmessage(opts)
1233 message = logmessage(opts)
1235
1234
1236 # extract addremove carefully -- this function can be called from a command
1235 # extract addremove carefully -- this function can be called from a command
1237 # that doesn't support addremove
1236 # that doesn't support addremove
1238 if opts.get('addremove'):
1237 if opts.get('addremove'):
1239 addremove(repo, pats, opts)
1238 addremove(repo, pats, opts)
1240
1239
1241 return commitfunc(ui, repo, message, match(repo, pats, opts), opts)
1240 return commitfunc(ui, repo, message, match(repo, pats, opts), opts)
1242
1241
1243 def commiteditor(repo, ctx, subs):
1242 def commiteditor(repo, ctx, subs):
1244 if ctx.description():
1243 if ctx.description():
1245 return ctx.description()
1244 return ctx.description()
1246 return commitforceeditor(repo, ctx, subs)
1245 return commitforceeditor(repo, ctx, subs)
1247
1246
1248 def commitforceeditor(repo, ctx, subs):
1247 def commitforceeditor(repo, ctx, subs):
1249 edittext = []
1248 edittext = []
1250 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
1249 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
1251 if ctx.description():
1250 if ctx.description():
1252 edittext.append(ctx.description())
1251 edittext.append(ctx.description())
1253 edittext.append("")
1252 edittext.append("")
1254 edittext.append("") # Empty line between message and comments.
1253 edittext.append("") # Empty line between message and comments.
1255 edittext.append(_("HG: Enter commit message."
1254 edittext.append(_("HG: Enter commit message."
1256 " Lines beginning with 'HG:' are removed."))
1255 " Lines beginning with 'HG:' are removed."))
1257 edittext.append(_("HG: Leave message empty to abort commit."))
1256 edittext.append(_("HG: Leave message empty to abort commit."))
1258 edittext.append("HG: --")
1257 edittext.append("HG: --")
1259 edittext.append(_("HG: user: %s") % ctx.user())
1258 edittext.append(_("HG: user: %s") % ctx.user())
1260 if ctx.p2():
1259 if ctx.p2():
1261 edittext.append(_("HG: branch merge"))
1260 edittext.append(_("HG: branch merge"))
1262 if ctx.branch():
1261 if ctx.branch():
1263 edittext.append(_("HG: branch '%s'")
1262 edittext.append(_("HG: branch '%s'")
1264 % encoding.tolocal(ctx.branch()))
1263 % encoding.tolocal(ctx.branch()))
1265 edittext.extend([_("HG: subrepo %s") % s for s in subs])
1264 edittext.extend([_("HG: subrepo %s") % s for s in subs])
1266 edittext.extend([_("HG: added %s") % f for f in added])
1265 edittext.extend([_("HG: added %s") % f for f in added])
1267 edittext.extend([_("HG: changed %s") % f for f in modified])
1266 edittext.extend([_("HG: changed %s") % f for f in modified])
1268 edittext.extend([_("HG: removed %s") % f for f in removed])
1267 edittext.extend([_("HG: removed %s") % f for f in removed])
1269 if not added and not modified and not removed:
1268 if not added and not modified and not removed:
1270 edittext.append(_("HG: no files changed"))
1269 edittext.append(_("HG: no files changed"))
1271 edittext.append("")
1270 edittext.append("")
1272 # run editor in the repository root
1271 # run editor in the repository root
1273 olddir = os.getcwd()
1272 olddir = os.getcwd()
1274 os.chdir(repo.root)
1273 os.chdir(repo.root)
1275 text = repo.ui.edit("\n".join(edittext), ctx.user())
1274 text = repo.ui.edit("\n".join(edittext), ctx.user())
1276 text = re.sub("(?m)^HG:.*\n", "", text)
1275 text = re.sub("(?m)^HG:.*\n", "", text)
1277 os.chdir(olddir)
1276 os.chdir(olddir)
1278
1277
1279 if not text.strip():
1278 if not text.strip():
1280 raise util.Abort(_("empty commit message"))
1279 raise util.Abort(_("empty commit message"))
1281
1280
1282 return text
1281 return text
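# Illustration, not part of the changeset: the buffer handed to the editor
# looks roughly like this (user and file names invented); every line
# starting with 'HG:' is stripped again by the re.sub above.
#
#   <existing description, if any>
#
#
#   HG: Enter commit message. Lines beginning with 'HG:' are removed.
#   HG: Leave message empty to abort commit.
#   HG: --
#   HG: user: alice@example.org
#   HG: branch 'default'
#   HG: changed mercurial/cmdutil.py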