Prefer i in d over d.has_key(i)

Christian Ebert
r5915:d0576d06 default
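
For reference, the idiom this changeset standardizes on, as a minimal sketch (the dictionary is invented for illustration): dict.has_key() is dictionary-specific and was later removed in Python 3, while the in operator is shorter, works for any container, and skips the attribute lookup and method call.

    d = {'spam': 1}

    # old, dict-only spelling
    if d.has_key('spam'):
        pass

    # preferred: also works for lists, sets, strings, ...
    if 'spam' in d:
        pass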
@@ -1,204 +1,204 @@
# churn.py - create a graph showing who changed the most lines
#
# Copyright 2006 Josef "Jeff" Sipek <jeffpc@josefsipek.net>
#
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.
#
#
# The aliases map file format is simple: one alias per line, in the
# following format:
#
# <alias email> <actual email>

from mercurial.i18n import gettext as _
from mercurial import hg, mdiff, cmdutil, ui, util, templater, node
import os, sys

def get_tty_width():
    if 'COLUMNS' in os.environ:
        try:
            return int(os.environ['COLUMNS'])
        except ValueError:
            pass
    try:
        import termios, array, fcntl
        for dev in (sys.stdout, sys.stdin):
            try:
                fd = dev.fileno()
                if not os.isatty(fd):
                    continue
                arri = fcntl.ioctl(fd, termios.TIOCGWINSZ, '\0' * 8)
                return array.array('h', arri)[1]
            except ValueError:
                pass
    except ImportError:
        pass
    return 80

def __gather(ui, repo, node1, node2):
    def dirtywork(f, mmap1, mmap2):
        lines = 0

        to = mmap1 and repo.file(f).read(mmap1[f]) or None
        tn = mmap2 and repo.file(f).read(mmap2[f]) or None

        diff = mdiff.unidiff(to, "", tn, "", f, f).split("\n")

        for line in diff:
            if not line:
                continue # skip EOF
            if line.startswith(" "):
                continue # context line
            if line.startswith("--- ") or line.startswith("+++ "):
                continue # beginning of diff
            if line.startswith("@@ "):
                continue # info line

            # changed lines
            lines += 1

        return lines

    ##

    lines = 0

    changes = repo.status(node1, node2, None, util.always)[:5]

    modified, added, removed, deleted, unknown = changes

    who = repo.changelog.read(node2)[1]
    who = templater.email(who) # get the email of the person

    mmap1 = repo.manifest.read(repo.changelog.read(node1)[0])
    mmap2 = repo.manifest.read(repo.changelog.read(node2)[0])
    for f in modified:
        lines += dirtywork(f, mmap1, mmap2)

    for f in added:
        lines += dirtywork(f, None, mmap2)

    for f in removed:
        lines += dirtywork(f, mmap1, None)

    for f in deleted:
        lines += dirtywork(f, mmap1, mmap2)

    for f in unknown:
        lines += dirtywork(f, mmap1, mmap2)

    return (who, lines)

def gather_stats(ui, repo, amap, revs=None, progress=False):
    stats = {}

    cl = repo.changelog

    if not revs:
        revs = range(0, cl.count())

    nr_revs = len(revs)
    cur_rev = 0

    for rev in revs:
        cur_rev += 1 # next revision

        node2 = cl.node(rev)
        node1 = cl.parents(node2)[0]

        if cl.parents(node2)[1] != node.nullid:
            ui.note(_('Revision %d is a merge, ignoring...\n') % (rev,))
            continue

        who, lines = __gather(ui, repo, node1, node2)

        # remap the owner if possible
-        if amap.has_key(who):
+        if who in amap:
            ui.note("using '%s' alias for '%s'\n" % (amap[who], who))
            who = amap[who]

-        if not stats.has_key(who):
+        if not who in stats:
            stats[who] = 0
        stats[who] += lines

        ui.note("rev %d: %d lines by %s\n" % (rev, lines, who))

        if progress:
            nr_revs = max(nr_revs, 1)
            if int(100.0*(cur_rev - 1)/nr_revs) < int(100.0*cur_rev/nr_revs):
                ui.write("%d%%.." % (int(100.0*cur_rev/nr_revs),))
                sys.stdout.flush()

    if progress:
        ui.write("done\n")
        sys.stdout.flush()

    return stats

def churn(ui, repo, **opts):
    "Graphs the number of lines changed"

    def pad(s, l):
        if len(s) < l:
            return s + " " * (l-len(s))
        return s[0:l]

    def graph(n, maximum, width, char):
        maximum = max(1, maximum)
        n = int(n * width / float(maximum))

        return char * (n)

    def get_aliases(f):
        aliases = {}

        for l in f.readlines():
            l = l.strip()
            alias, actual = l.split(" ")
            aliases[alias] = actual

        return aliases

    amap = {}
    aliases = opts.get('aliases')
    if aliases:
        try:
            f = open(aliases, "r")
        except IOError, e:
            # open() raises IOError; print the exception, not string-concat it
            print "Error:", e
            return

        amap = get_aliases(f)
        f.close()

    revs = [int(r) for r in cmdutil.revrange(repo, opts['rev'])]
    revs.sort()
    stats = gather_stats(ui, repo, amap, revs, opts.get('progress'))

    # make a list of tuples (name, lines) and sort it in descending order
    ordered = stats.items()
    ordered.sort(lambda x, y: cmp(y[1], x[1]))

    if not ordered:
        return
    maximum = ordered[0][1]

    width = get_tty_width()
    ui.note(_("assuming %i character terminal\n") % width)
    width -= 1

    for i in ordered:
        person = i[0]
        lines = i[1]
        print "%s %6d %s" % (pad(person, 20), lines,
                             graph(lines, maximum, width - 20 - 1 - 6 - 2 - 2, '*'))

cmdtable = {
    "churn":
    (churn,
     [('r', 'rev', [], _('limit statistics to the specified revisions')),
      ('', 'aliases', '', _('file with email aliases')),
      ('', 'progress', None, _('show progress'))],
     'hg churn [-r revision range] [-a file] [--progress]'),
}
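
A note on the --aliases file parsed by get_aliases() above: one mapping per line, and each line must contain exactly one space, since l.split(" ") is unpacked into exactly two names (a second space would raise ValueError). A sketch of the parsing with a hypothetical aliases file (both addresses invented):

    from StringIO import StringIO  # stand-in for the opened aliases file

    sample = ("jeff@old.example.org jeffpc@josefsipek.net\n"
              "ce@old.example.org blacktrash@gmx.net\n")

    aliases = {}
    for l in StringIO(sample).readlines():
        l = l.strip()
        alias, actual = l.split(" ")  # exactly one space per line
        aliases[alias] = actual

    assert aliases['jeff@old.example.org'] == 'jeffpc@josefsipek.net'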
@@ -1,521 +1,521 @@
# keyword.py - $Keyword$ expansion for Mercurial
#
# Copyright 2007, 2008 Christian Ebert <blacktrash@gmx.net>
#
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.
#
# $Id$
#
# Keyword expansion hack against the grain of a DSCM
#
# There are many good reasons why this is not needed in a distributed
# SCM, still it may be useful in very small projects based on single
# files (like LaTeX packages), that are mostly addressed to an audience
# not running a version control system.
#
# For in-depth discussion refer to
# <http://www.selenic.com/mercurial/wiki/index.cgi/KeywordPlan>.
#
# Keyword expansion is based on Mercurial's changeset template mappings.
#
# Binary files are not touched.
#
# Setup in hgrc:
#
# [extensions]
# # enable extension
# hgext.keyword =
#
# Files to act upon/ignore are specified in the [keyword] section.
# Customized keyword template mappings go in the [keywordmaps] section.
#
# Run "hg help keyword" and "hg kwdemo" to get info on configuration.

'''keyword expansion in local repositories

This extension expands RCS/CVS-like or self-customized $Keywords$
in tracked text files selected by your configuration.

Keywords are only expanded in local repositories and not stored in
the change history. The mechanism can be regarded as a convenience
for the current user or for archive distribution.

Configuration is done in the [keyword] and [keywordmaps] sections
of hgrc files.

Example:

    [keyword]
    # expand keywords in every python file except those matching "x*"
    **.py =
    x* = ignore

Note: the more specific you are in your filename patterns,
the less speed you lose in huge repos.

For [keywordmaps] template mapping and expansion demonstration and
control, run "hg kwdemo".

An additional date template filter {date|utcdate} is provided.

The default template mappings (view with "hg kwdemo -d") can be replaced
with customized keywords and templates.
Again, run "hg kwdemo" to control the results of your config changes.

Before changing/disabling active keywords, run "hg kwshrink" to avoid
the risk of inadvertently storing expanded keywords in the change history.

To force expansion after enabling it, or after a configuration change, run
"hg kwexpand".

Also, when committing with the record extension or using mq's qrecord, be aware
that keywords cannot be updated. Again, run "hg kwexpand" on the files in
question to update keyword expansions after all changes have been checked in.

Expansions spanning more than one line and incremental expansions,
like CVS' $Log$, are not supported. A keyword template map
"Log = {desc}" expands to the first line of the changeset description.
'''

from mercurial import commands, cmdutil, context, dispatch, filelog
from mercurial import patch, localrepo, revlog, templater, util
from mercurial.node import *
from mercurial.i18n import _
import re, shutil, sys, tempfile, time

commands.optionalrepo += ' kwdemo'

def utcdate(date):
    '''Returns hgdate in cvs-like UTC format.'''
    return time.strftime('%Y/%m/%d %H:%M:%S', time.gmtime(date[0]))

def _kwrestrict(cmd):
    '''Returns True if cmd should trigger restricted expansion.
    Keywords will only be expanded when writing to the working dir.
    Crucial for mq as expanded keywords should not make it into patches.'''
    return cmd in ('diff1',
                   'qimport', 'qnew', 'qpush', 'qrefresh', 'record', 'qrecord')


_kwtemplater = None

class kwtemplater(object):
    '''
    Sets up keyword templates, corresponding keyword regex, and
    provides keyword substitution functions.
    '''
    templates = {
        'Revision': '{node|short}',
        'Author': '{author|user}',
        'Date': '{date|utcdate}',
        'RCSFile': '{file|basename},v',
        'Source': '{root}/{file},v',
        'Id': '{file|basename},v {node|short} {date|utcdate} {author|user}',
        'Header': '{root}/{file},v {node|short} {date|utcdate} {author|user}',
    }

    def __init__(self, ui, repo, inc, exc, hgcmd):
        self.ui = ui
        self.repo = repo
        self.matcher = util.matcher(repo.root, inc=inc, exc=exc)[1]
        self.hgcmd = hgcmd
        self.commitnode = None
        self.path = ''

        kwmaps = self.ui.configitems('keywordmaps')
        if kwmaps: # override default templates
            kwmaps = [(k, templater.parsestring(v, quoted=False))
                      for (k, v) in kwmaps]
            self.templates = dict(kwmaps)
        escaped = map(re.escape, self.templates.keys())
        kwpat = r'\$(%s)(: [^$\n\r]*? )??\$' % '|'.join(escaped)
        self.re_kw = re.compile(kwpat)

        templater.common_filters['utcdate'] = utcdate
        self.ct = cmdutil.changeset_templater(self.ui, self.repo,
                                              False, '', False)

    def substitute(self, node, data, subfunc):
        '''Obtains file's changenode if commit node not given,
        and calls given substitution function.'''
        if self.commitnode:
            fnode = self.commitnode
        else:
            c = context.filectx(self.repo, self.path, fileid=node)
            fnode = c.node()

        def kwsub(mobj):
            '''Substitutes keyword using corresponding template.'''
            kw = mobj.group(1)
            self.ct.use_template(self.templates[kw])
            self.ui.pushbuffer()
            self.ct.show(changenode=fnode, root=self.repo.root, file=self.path)
            return '$%s: %s $' % (kw, templater.firstline(self.ui.popbuffer()))

        return subfunc(kwsub, data)

    def expand(self, node, data):
        '''Returns data with keywords expanded.'''
        if util.binary(data) or _kwrestrict(self.hgcmd):
            return data
        return self.substitute(node, data, self.re_kw.sub)

    def process(self, node, data, expand):
        '''Returns a tuple: data, count.
        Count is the number of keywords/keyword substitutions,
        telling the caller whether to act on the file containing data.'''
        if util.binary(data):
            return data, None
        if expand:
            return self.substitute(node, data, self.re_kw.subn)
        return data, self.re_kw.search(data)

    def shrink(self, text):
        '''Returns text with all keyword substitutions removed.'''
        if util.binary(text):
            return text
        return self.re_kw.sub(r'$\1$', text)

class kwfilelog(filelog.filelog):
    '''
    Subclass of filelog to hook into its read, add, cmp methods.
    Keywords are "stored" unexpanded, and processed on reading.
    '''
    def __init__(self, opener, path):
        super(kwfilelog, self).__init__(opener, path)
        _kwtemplater.path = path

    def kwctread(self, node, expand):
        '''Reads expanding and counting keywords, called from _overwrite.'''
        data = super(kwfilelog, self).read(node)
        return _kwtemplater.process(node, data, expand)

    def read(self, node):
        '''Expands keywords when reading filelog.'''
        data = super(kwfilelog, self).read(node)
        return _kwtemplater.expand(node, data)

    def add(self, text, meta, tr, link, p1=None, p2=None):
        '''Removes keyword substitutions when adding to filelog.'''
        text = _kwtemplater.shrink(text)
        return super(kwfilelog, self).add(text, meta, tr, link, p1=p1, p2=p2)

    def cmp(self, node, text):
        '''Removes keyword substitutions for comparison.'''
        text = _kwtemplater.shrink(text)
        if self.renamed(node):
            t2 = super(kwfilelog, self).read(node)
            return t2 != text
        return revlog.revlog.cmp(self, node, text)


# store original patch.patchfile.__init__
_patchfile_init = patch.patchfile.__init__

def _kwpatchfile_init(self, ui, fname, missing=False):
    '''Monkeypatch/wrap patch.patchfile.__init__ to avoid
    rejects or conflicts due to expanded keywords in working dir.'''
    _patchfile_init(self, ui, fname, missing=missing)

    if _kwtemplater.matcher(self.fname):
        # shrink keywords read from working dir
        kwshrunk = _kwtemplater.shrink(''.join(self.lines))
        self.lines = kwshrunk.splitlines(True)


def _iskwfile(f, link):
    return not link(f) and _kwtemplater.matcher(f)

def _status(ui, repo, *pats, **opts):
    '''Bails out if [keyword] configuration is not active.
    Returns status of working directory.'''
    if _kwtemplater:
        files, match, anypats = cmdutil.matchpats(repo, pats, opts)
        return repo.status(files=files, match=match, list_clean=True)
    if ui.configitems('keyword'):
        raise util.Abort(_('[keyword] patterns cannot match'))
    raise util.Abort(_('no [keyword] patterns configured'))

def _overwrite(ui, repo, node=None, expand=True, files=None):
    '''Overwrites selected files expanding/shrinking keywords.'''
    ctx = repo.changectx(node)
    mf = ctx.manifest()
    if node is not None: # commit
        _kwtemplater.commitnode = node
-        files = [f for f in ctx.files() if mf.has_key(f)]
+        files = [f for f in ctx.files() if f in mf]
        notify = ui.debug
    else: # kwexpand/kwshrink
        notify = ui.note
    candidates = [f for f in files if _iskwfile(f, mf.linkf)]
    if candidates:
        candidates.sort()
        action = expand and 'expanding' or 'shrinking'
        for f in candidates:
            fp = repo.file(f, kwmatch=True)
            data, kwfound = fp.kwctread(mf[f], expand)
            if kwfound:
                notify(_('overwriting %s %s keywords\n') % (f, action))
                repo.wwrite(f, data, mf.flags(f))
                repo.dirstate.normal(f)

def _kwfwrite(ui, repo, expand, *pats, **opts):
    '''Selects files and passes them to _overwrite.'''
    status = _status(ui, repo, *pats, **opts)
    modified, added, removed, deleted, unknown, ignored, clean = status
    if modified or added or removed or deleted:
        raise util.Abort(_('outstanding uncommitted changes in given files'))
    wlock = lock = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        _overwrite(ui, repo, expand=expand, files=clean)
    finally:
        del wlock, lock


def demo(ui, repo, *args, **opts):
    '''print [keywordmaps] configuration and an expansion example

    Show current, custom, or default keyword template maps
    and their expansion.

    Extend the current configuration by specifying maps as arguments
    and optionally by reading from an additional hgrc file.

    Override current keyword template maps with the "default" option.
    '''
    def demostatus(stat):
        ui.status(_('\n\t%s\n') % stat)

    def demoitems(section, items):
        ui.write('[%s]\n' % section)
        for k, v in items:
            ui.write('%s = %s\n' % (k, v))

    msg = 'hg keyword config and expansion example'
    kwstatus = 'current'
    fn = 'demo.txt'
    branchname = 'demobranch'
    tmpdir = tempfile.mkdtemp('', 'kwdemo.')
    ui.note(_('creating temporary repo at %s\n') % tmpdir)
    repo = localrepo.localrepository(ui, path=tmpdir, create=True)
    ui.setconfig('keyword', fn, '')
    if args or opts.get('rcfile'):
        kwstatus = 'custom'
    if opts.get('rcfile'):
        ui.readconfig(opts.get('rcfile'))
    if opts.get('default'):
        kwstatus = 'default'
        kwmaps = kwtemplater.templates
        if ui.configitems('keywordmaps'):
            # override maps from optional rcfile
            for k, v in kwmaps.items():
                ui.setconfig('keywordmaps', k, v)
    elif args:
        # simulate hgrc parsing
        rcmaps = ['[keywordmaps]\n'] + [a + '\n' for a in args]
        fp = repo.opener('hgrc', 'w')
        fp.writelines(rcmaps)
        fp.close()
        ui.readconfig(repo.join('hgrc'))
    if not opts.get('default'):
        kwmaps = dict(ui.configitems('keywordmaps')) or kwtemplater.templates
    reposetup(ui, repo)
    for k, v in ui.configitems('extensions'):
        if k.endswith('keyword'):
            extension = '%s = %s' % (k, v)
            break
    demostatus('config using %s keyword template maps' % kwstatus)
    ui.write('[extensions]\n%s\n' % extension)
    demoitems('keyword', ui.configitems('keyword'))
    demoitems('keywordmaps', kwmaps.items())
    keywords = '$' + '$\n$'.join(kwmaps.keys()) + '$\n'
    repo.wopener(fn, 'w').write(keywords)
    repo.add([fn])
    path = repo.wjoin(fn)
    ui.note(_('\n%s keywords written to %s:\n') % (kwstatus, path))
    ui.note(keywords)
    ui.note('\nhg -R "%s" branch "%s"\n' % (tmpdir, branchname))
    # silence branch command if not verbose
    quiet = ui.quiet
    ui.quiet = not ui.verbose
    commands.branch(ui, repo, branchname)
    ui.quiet = quiet
    for name, cmd in ui.configitems('hooks'):
        if name.split('.', 1)[0].find('commit') > -1:
            repo.ui.setconfig('hooks', name, '')
    ui.note(_('unhooked all commit hooks\n'))
    ui.note('hg -R "%s" ci -m "%s"\n' % (tmpdir, msg))
    repo.commit(text=msg)
    format = ui.verbose and ' in %s' % path or ''
    demostatus('%s keywords expanded%s' % (kwstatus, format))
    ui.write(repo.wread(fn))
    ui.debug(_('\nremoving temporary repo %s\n') % tmpdir)
    shutil.rmtree(tmpdir, ignore_errors=True)

def expand(ui, repo, *pats, **opts):
    '''expand keywords in working directory

    Run after (re)enabling keyword expansion.

    kwexpand refuses to run if given files contain local changes.
    '''
    # 3rd argument sets expansion to True
    _kwfwrite(ui, repo, True, *pats, **opts)

def files(ui, repo, *pats, **opts):
    '''print files currently configured for keyword expansion

    Crosscheck which files in the working directory are potential targets
    for keyword expansion.
    That is, files matched by [keyword] config patterns but not symlinks.
    '''
    status = _status(ui, repo, *pats, **opts)
    modified, added, removed, deleted, unknown, ignored, clean = status
    files = modified + added + clean
    if opts.get('untracked'):
        files += unknown
    files.sort()
    kwfiles = [f for f in files if _iskwfile(f, repo._link)]
    cwd = pats and repo.getcwd() or ''
    kwfstats = not opts.get('ignore') and (('K', kwfiles),) or ()
    if opts.get('all') or opts.get('ignore'):
        kwfstats += (('I', [f for f in files if f not in kwfiles]),)
    for char, filenames in kwfstats:
        format = (opts.get('all') or ui.verbose) and '%s %%s\n' % char or '%s\n'
        for f in filenames:
            ui.write(format % repo.pathto(f, cwd))

def shrink(ui, repo, *pats, **opts):
    '''revert expanded keywords in working directory

    Run before changing/disabling active keywords
    or if you experience problems with "hg import" or "hg merge".

    kwshrink refuses to run if given files contain local changes.
    '''
    # 3rd argument sets expansion to False
    _kwfwrite(ui, repo, False, *pats, **opts)


def reposetup(ui, repo):
    '''Sets up repo as kwrepo for keyword substitution.
    Overrides file method to return kwfilelog instead of filelog
    if file matches user configuration.
    Wraps commit to overwrite configured files with updated
    keyword substitutions.
    This is done for local repos only, and only if there are
    files configured at all for keyword substitution.'''

    if not repo.local():
        return

    nokwcommands = ('add', 'addremove', 'bundle', 'clone', 'copy',
                    'export', 'grep', 'identify', 'incoming', 'init',
                    'log', 'outgoing', 'push', 'remove', 'rename',
                    'rollback', 'tip',
                    'convert')
    hgcmd, func, args, opts, cmdopts = dispatch._parse(ui, sys.argv[1:])
    if hgcmd in nokwcommands:
        return

    if hgcmd == 'diff':
        # only expand if comparing against working dir
        node1, node2 = cmdutil.revpair(repo, cmdopts.get('rev'))
        if node2 is not None:
            return
        # shrink if rev is not current node
        if node1 is not None and node1 != repo.changectx().node():
            hgcmd = 'diff1'

    inc, exc = [], ['.hgtags']
    for pat, opt in ui.configitems('keyword'):
        if opt != 'ignore':
            inc.append(pat)
        else:
            exc.append(pat)
    if not inc:
        return

    global _kwtemplater
    _kwtemplater = kwtemplater(ui, repo, inc, exc, hgcmd)

    class kwrepo(repo.__class__):
        def file(self, f, kwmatch=False):
            if f[0] == '/':
                f = f[1:]
            if kwmatch or _kwtemplater.matcher(f):
                return kwfilelog(self.sopener, f)
            return filelog.filelog(self.sopener, f)

        def wread(self, filename):
            data = super(kwrepo, self).wread(filename)
            if _kwrestrict(hgcmd) and _kwtemplater.matcher(filename):
                return _kwtemplater.shrink(data)
            return data

        def commit(self, files=None, text='', user=None, date=None,
                   match=util.always, force=False, force_editor=False,
                   p1=None, p2=None, extra={}):
            wlock = lock = None
            _p1 = _p2 = None
            try:
                wlock = self.wlock()
                lock = self.lock()
                # store and postpone commit hooks
                commithooks = []
                for name, cmd in ui.configitems('hooks'):
                    if name.split('.', 1)[0] == 'commit':
                        commithooks.append((name, cmd))
                        ui.setconfig('hooks', name, None)
                if commithooks:
                    # store parents for commit hook environment
                    if p1 is None:
                        _p1, _p2 = repo.dirstate.parents()
                    else:
                        _p1, _p2 = p1, p2 or nullid
                    _p1 = hex(_p1)
                    if _p2 == nullid:
                        _p2 = ''
                    else:
                        _p2 = hex(_p2)

                node = super(kwrepo,
                             self).commit(files=files, text=text, user=user,
                                          date=date, match=match, force=force,
                                          force_editor=force_editor,
                                          p1=p1, p2=p2, extra=extra)

                # restore commit hooks
                for name, cmd in commithooks:
                    ui.setconfig('hooks', name, cmd)
                if node is not None:
                    _overwrite(ui, self, node=node)
                    repo.hook('commit', node=node, parent1=_p1, parent2=_p2)
                return node
            finally:
                del wlock, lock

    repo.__class__ = kwrepo
    patch.patchfile.__init__ = _kwpatchfile_init


cmdtable = {
    'kwdemo':
        (demo,
         [('d', 'default', None, _('show default keyword template maps')),
          ('f', 'rcfile', [], _('read maps from rcfile'))],
         _('hg kwdemo [-d] [-f RCFILE] [TEMPLATEMAP]...')),
    'kwexpand': (expand, commands.walkopts,
                 _('hg kwexpand [OPTION]... [FILE]...')),
    'kwfiles':
        (files,
         [('a', 'all', None, _('show keyword status flags of all files')),
          ('i', 'ignore', None, _('show files excluded from expansion')),
          ('u', 'untracked', None, _('additionally show untracked files')),
         ] + commands.walkopts,
         _('hg kwfiles [OPTION]... [FILE]...')),
    'kwshrink': (shrink, commands.walkopts,
                 _('hg kwshrink [OPTION]... [FILE]...')),
}
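
As a standalone sketch of the regex kwtemplater compiles above (two of the default keyword names; the expanded sample value is invented), the pattern matches both the bare $Id$ form and the expanded $Id: ... $ form, which is what lets shrink() reduce an expanded file back to its stored, unexpanded state:

    import re

    escaped = map(re.escape, ['Id', 'Header'])
    kwpat = r'\$(%s)(: [^$\n\r]*? )??\$' % '|'.join(escaped)
    re_kw = re.compile(kwpat)

    expanded = '$Id: demo.txt,v fb57bd75a0dc 2008/01/01 00:00:00 ce $'
    assert re_kw.sub(r'$\1$', expanded) == '$Id$'  # the shrink round-trip
    assert re_kw.search('$Header$') is not None    # bare keywords match too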
@@ -1,467 +1,466 b''
1 # Command for sending a collection of Mercurial changesets as a series
1 # Command for sending a collection of Mercurial changesets as a series
2 # of patch emails.
2 # of patch emails.
3 #
3 #
4 # The series is started off with a "[PATCH 0 of N]" introduction,
4 # The series is started off with a "[PATCH 0 of N]" introduction,
5 # which describes the series as a whole.
5 # which describes the series as a whole.
6 #
6 #
7 # Each patch email has a Subject line of "[PATCH M of N] ...", using
7 # Each patch email has a Subject line of "[PATCH M of N] ...", using
8 # the first line of the changeset description as the subject text.
8 # the first line of the changeset description as the subject text.
9 # The message contains two or three body parts:
9 # The message contains two or three body parts:
10 #
10 #
11 # The remainder of the changeset description.
11 # The remainder of the changeset description.
12 #
12 #
13 # [Optional] If the diffstat program is installed, the result of
13 # [Optional] If the diffstat program is installed, the result of
14 # running diffstat on the patch.
14 # running diffstat on the patch.
15 #
15 #
16 # The patch itself, as generated by "hg export".
16 # The patch itself, as generated by "hg export".
17 #
17 #
18 # Each message refers to all of its predecessors using the In-Reply-To
18 # Each message refers to all of its predecessors using the In-Reply-To
19 # and References headers, so they will show up as a sequence in
19 # and References headers, so they will show up as a sequence in
20 # threaded mail and news readers, and in mail archives.
20 # threaded mail and news readers, and in mail archives.
21 #
21 #
22 # For each changeset, you will be prompted with a diffstat summary and
22 # For each changeset, you will be prompted with a diffstat summary and
23 # the changeset summary, so you can be sure you are sending the right
23 # the changeset summary, so you can be sure you are sending the right
24 # changes.
24 # changes.
25 #
25 #
26 # To enable this extension:
26 # To enable this extension:
27 #
27 #
28 # [extensions]
28 # [extensions]
29 # hgext.patchbomb =
29 # hgext.patchbomb =
30 #
30 #
31 # To configure other defaults, add a section like this to your hgrc
31 # To configure other defaults, add a section like this to your hgrc
32 # file:
32 # file:
33 #
33 #
34 # [email]
34 # [email]
35 # from = My Name <my@email>
35 # from = My Name <my@email>
36 # to = recipient1, recipient2, ...
36 # to = recipient1, recipient2, ...
37 # cc = cc1, cc2, ...
37 # cc = cc1, cc2, ...
38 # bcc = bcc1, bcc2, ...
38 # bcc = bcc1, bcc2, ...
39 #
39 #
40 # Then you can use the "hg email" command to mail a series of changesets
40 # Then you can use the "hg email" command to mail a series of changesets
41 # as a patchbomb.
41 # as a patchbomb.
42 #
42 #
43 # To avoid sending patches prematurely, it is a good idea to first run
43 # To avoid sending patches prematurely, it is a good idea to first run
44 # the "email" command with the "-n" option (test only). You will be
44 # the "email" command with the "-n" option (test only). You will be
45 # prompted for an email recipient address, a subject an an introductory
45 # prompted for an email recipient address, a subject an an introductory
46 # message describing the patches of your patchbomb. Then when all is
46 # message describing the patches of your patchbomb. Then when all is
47 # done, patchbomb messages are displayed. If PAGER environment variable
47 # done, patchbomb messages are displayed. If PAGER environment variable
48 # is set, your pager will be fired up once for each patchbomb message, so
48 # is set, your pager will be fired up once for each patchbomb message, so
49 # you can verify everything is alright.
49 # you can verify everything is alright.
50 #
50 #
51 # The "-m" (mbox) option is also very useful. Instead of previewing
51 # The "-m" (mbox) option is also very useful. Instead of previewing
52 # each patchbomb message in a pager or sending the messages directly,
52 # each patchbomb message in a pager or sending the messages directly,
53 # it will create a UNIX mailbox file with the patch emails. This
53 # it will create a UNIX mailbox file with the patch emails. This
54 # mailbox file can be previewed with any mail user agent which supports
54 # mailbox file can be previewed with any mail user agent which supports
55 # UNIX mbox files, i.e. with mutt:
55 # UNIX mbox files, i.e. with mutt:
56 #
56 #
57 # % mutt -R -f mbox
57 # % mutt -R -f mbox
58 #
58 #
59 # When you are previewing the patchbomb messages, you can use `formail'
59 # When you are previewing the patchbomb messages, you can use `formail'
60 # (a utility that is commonly installed as part of the procmail package),
60 # (a utility that is commonly installed as part of the procmail package),
61 # to send each message out:
61 # to send each message out:
62 #
62 #
63 # % formail -s sendmail -bm -t < mbox
63 # % formail -s sendmail -bm -t < mbox
64 #
64 #
65 # That should be all. Now your patchbomb is on its way out.
65 # That should be all. Now your patchbomb is on its way out.
66
66
67 import os, errno, socket, tempfile
67 import os, errno, socket, tempfile
68 import email.MIMEMultipart, email.MIMEText, email.MIMEBase
68 import email.MIMEMultipart, email.MIMEText, email.MIMEBase
69 import email.Utils, email.Encoders
69 import email.Utils, email.Encoders
70 from mercurial import cmdutil, commands, hg, mail, ui, patch, util
70 from mercurial import cmdutil, commands, hg, mail, ui, patch, util
71 from mercurial.i18n import _
71 from mercurial.i18n import _
72 from mercurial.node import *
72 from mercurial.node import *
73
73
74 def patchbomb(ui, repo, *revs, **opts):
74 def patchbomb(ui, repo, *revs, **opts):
75 '''send changesets by email
75 '''send changesets by email
76
76
77 By default, diffs are sent in the format generated by hg export,
77 By default, diffs are sent in the format generated by hg export,
78 one per message. The series starts with a "[PATCH 0 of N]"
78 one per message. The series starts with a "[PATCH 0 of N]"
79 introduction, which describes the series as a whole.
79 introduction, which describes the series as a whole.
80
80
81 Each patch email has a Subject line of "[PATCH M of N] ...", using
81 Each patch email has a Subject line of "[PATCH M of N] ...", using
82 the first line of the changeset description as the subject text.
82 the first line of the changeset description as the subject text.
83 The message contains two or three body parts. First, the rest of
83 The message contains two or three body parts. First, the rest of
84 the changeset description. Next, (optionally) if the diffstat
84 the changeset description. Next, (optionally) if the diffstat
85 program is installed, the result of running diffstat on the patch.
85 program is installed, the result of running diffstat on the patch.
86 Finally, the patch itself, as generated by "hg export".
86 Finally, the patch itself, as generated by "hg export".
87
87
88 With --outgoing, emails will be generated for patches not
88 With --outgoing, emails will be generated for patches not
89 found in the destination repository (or only those which are
89 found in the destination repository (or only those which are
90 ancestors of the specified revisions if any are provided)
90 ancestors of the specified revisions if any are provided)
91
91
92 With --bundle, changesets are selected as for --outgoing,
92 With --bundle, changesets are selected as for --outgoing,
93 but a single email containing a binary Mercurial bundle as an
93 but a single email containing a binary Mercurial bundle as an
94 attachment will be sent.
94 attachment will be sent.
95
95
96 Examples:
96 Examples:
97
97
98 hg email -r 3000 # send patch 3000 only
98 hg email -r 3000 # send patch 3000 only
99 hg email -r 3000 -r 3001 # send patches 3000 and 3001
99 hg email -r 3000 -r 3001 # send patches 3000 and 3001
100 hg email -r 3000:3005 # send patches 3000 through 3005
100 hg email -r 3000:3005 # send patches 3000 through 3005
101 hg email 3000 # send patch 3000 (deprecated)
101 hg email 3000 # send patch 3000 (deprecated)
102
102
103 hg email -o # send all patches not in default
103 hg email -o # send all patches not in default
104 hg email -o DEST # send all patches not in DEST
104 hg email -o DEST # send all patches not in DEST
105 hg email -o -r 3000 # send all ancestors of 3000 not in default
105 hg email -o -r 3000 # send all ancestors of 3000 not in default
106 hg email -o -r 3000 DEST # send all ancestors of 3000 not in DEST
106 hg email -o -r 3000 DEST # send all ancestors of 3000 not in DEST
107
107
108 hg email -b # send bundle of all patches not in default
108 hg email -b # send bundle of all patches not in default
109 hg email -b DEST # send bundle of all patches not in DEST
109 hg email -b DEST # send bundle of all patches not in DEST
110 hg email -b -r 3000 # bundle of all ancestors of 3000 not in default
110 hg email -b -r 3000 # bundle of all ancestors of 3000 not in default
111 hg email -b -r 3000 DEST # bundle of all ancestors of 3000 not in DEST
111 hg email -b -r 3000 DEST # bundle of all ancestors of 3000 not in DEST
112
112
113 Before using this command, you will need to enable email in your hgrc.
113 Before using this command, you will need to enable email in your hgrc.
114 See the [email] section in hgrc(5) for details.
114 See the [email] section in hgrc(5) for details.
115 '''
115 '''
116
116
117 def prompt(prompt, default = None, rest = ': ', empty_ok = False):
117 def prompt(prompt, default = None, rest = ': ', empty_ok = False):
118 if not ui.interactive:
118 if not ui.interactive:
119 return default
119 return default
120 if default:
120 if default:
121 prompt += ' [%s]' % default
121 prompt += ' [%s]' % default
122 prompt += rest
122 prompt += rest
123 while True:
123 while True:
124 r = ui.prompt(prompt, default=default)
124 r = ui.prompt(prompt, default=default)
125 if r:
125 if r:
126 return r
126 return r
127 if default is not None:
127 if default is not None:
128 return default
128 return default
129 if empty_ok:
129 if empty_ok:
130 return r
130 return r
131 ui.warn(_('Please enter a valid value.\n'))
131 ui.warn(_('Please enter a valid value.\n'))
132
132
133 def confirm(s, denial):
133 def confirm(s, denial):
134 if not prompt(s, default = 'y', rest = '? ').lower().startswith('y'):
134 if not prompt(s, default = 'y', rest = '? ').lower().startswith('y'):
135 raise util.Abort(denial)
135 raise util.Abort(denial)
136
136
137 def cdiffstat(summary, patchlines):
137 def cdiffstat(summary, patchlines):
138 s = patch.diffstat(patchlines)
138 s = patch.diffstat(patchlines)
139 if s:
139 if s:
140 if summary:
140 if summary:
141 ui.write(summary, '\n')
141 ui.write(summary, '\n')
142 ui.write(s, '\n')
142 ui.write(s, '\n')
143 confirm(_('Does the diffstat above look okay'),
143 confirm(_('Does the diffstat above look okay'),
144 _('diffstat rejected'))
144 _('diffstat rejected'))
145 elif s is None:
145 elif s is None:
146 ui.warn(_('No diffstat information available.\n'))
146 ui.warn(_('No diffstat information available.\n'))
147 s = ''
147 s = ''
148 return s
148 return s
149
149
150 def makepatch(patch, idx, total):
150 def makepatch(patch, idx, total):
151 desc = []
151 desc = []
152 node = None
152 node = None
153 body = ''
153 body = ''
154 for line in patch:
154 for line in patch:
155 if line.startswith('#'):
155 if line.startswith('#'):
156 if line.startswith('# Node ID'):
156 if line.startswith('# Node ID'):
157 node = line.split()[-1]
157 node = line.split()[-1]
158 continue
158 continue
159 if line.startswith('diff -r') or line.startswith('diff --git'):
159 if line.startswith('diff -r') or line.startswith('diff --git'):
160 break
160 break
161 desc.append(line)
161 desc.append(line)
162 if not node:
162 if not node:
163 raise ValueError
163 raise ValueError
164
164
165 if opts['attach']:
165 if opts['attach']:
166 body = ('\n'.join(desc[1:]).strip() or
166 body = ('\n'.join(desc[1:]).strip() or
167 'Patch subject is complete summary.')
167 'Patch subject is complete summary.')
168 body += '\n\n\n'
168 body += '\n\n\n'
169
169
170 if opts.get('plain'):
170 if opts.get('plain'):
171 while patch and patch[0].startswith('# '):
171 while patch and patch[0].startswith('# '):
172 patch.pop(0)
172 patch.pop(0)
173 if patch:
173 if patch:
174 patch.pop(0)
174 patch.pop(0)
175 while patch and not patch[0].strip():
175 while patch and not patch[0].strip():
176 patch.pop(0)
176 patch.pop(0)
177 if opts.get('diffstat'):
177 if opts.get('diffstat'):
178 body += cdiffstat('\n'.join(desc), patch) + '\n\n'
178 body += cdiffstat('\n'.join(desc), patch) + '\n\n'
179 if opts.get('attach') or opts.get('inline'):
179 if opts.get('attach') or opts.get('inline'):
180 msg = email.MIMEMultipart.MIMEMultipart()
180 msg = email.MIMEMultipart.MIMEMultipart()
181 if body:
181 if body:
182 msg.attach(email.MIMEText.MIMEText(body, 'plain'))
182 msg.attach(email.MIMEText.MIMEText(body, 'plain'))
183 p = email.MIMEText.MIMEText('\n'.join(patch), 'x-patch')
183 p = email.MIMEText.MIMEText('\n'.join(patch), 'x-patch')
184 binnode = bin(node)
184 binnode = bin(node)
185 # if node is an mq patch, it will have the patch file name as a tag
185 # if node is an mq patch, it will have the patch file name as a tag
186 patchname = [t for t in repo.nodetags(binnode)
186 patchname = [t for t in repo.nodetags(binnode)
187 if t.endswith('.patch') or t.endswith('.diff')]
187 if t.endswith('.patch') or t.endswith('.diff')]
188 if patchname:
188 if patchname:
189 patchname = patchname[0]
189 patchname = patchname[0]
190 elif total > 1:
190 elif total > 1:
191 patchname = cmdutil.make_filename(repo, '%b-%n.patch',
191 patchname = cmdutil.make_filename(repo, '%b-%n.patch',
192 binnode, idx, total)
192 binnode, idx, total)
193 else:
193 else:
194 patchname = cmdutil.make_filename(repo, '%b.patch', binnode)
194 patchname = cmdutil.make_filename(repo, '%b.patch', binnode)
195 disposition = 'inline'
195 disposition = 'inline'
196 if opts['attach']:
196 if opts['attach']:
197 disposition = 'attachment'
197 disposition = 'attachment'
198 p['Content-Disposition'] = disposition + '; filename=' + patchname
198 p['Content-Disposition'] = disposition + '; filename=' + patchname
199 msg.attach(p)
199 msg.attach(p)
200 else:
200 else:
201 body += '\n'.join(patch)
201 body += '\n'.join(patch)
202 msg = email.MIMEText.MIMEText(body)
202 msg = email.MIMEText.MIMEText(body)
203
203
204 subj = desc[0].strip().rstrip('. ')
204 subj = desc[0].strip().rstrip('. ')
205 if total == 1:
205 if total == 1:
206 subj = '[PATCH] ' + (opts.get('subject') or subj)
206 subj = '[PATCH] ' + (opts.get('subject') or subj)
207 else:
207 else:
208 tlen = len(str(total))
208 tlen = len(str(total))
209 subj = '[PATCH %0*d of %d] %s' % (tlen, idx, total, subj)
209 subj = '[PATCH %0*d of %d] %s' % (tlen, idx, total, subj)
210 msg['Subject'] = subj
210 msg['Subject'] = subj
211 msg['X-Mercurial-Node'] = node
211 msg['X-Mercurial-Node'] = node
212 return msg
212 return msg
213
213
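The zero-padded subject numbering above is easy to check in isolation; for patch 3 of a 12-patch series, with an invented subject line:

    tlen = len(str(12))                               # width of the series count: 2
    print('[PATCH %0*d of %d] %s' % (tlen, 3, 12, 'fix foo'))
    # prints: [PATCH 03 of 12] fix foo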
214 def outgoing(dest, revs):
214 def outgoing(dest, revs):
215 '''Return the revisions present locally but not in dest'''
215 '''Return the revisions present locally but not in dest'''
216 dest = ui.expandpath(dest or 'default-push', dest or 'default')
216 dest = ui.expandpath(dest or 'default-push', dest or 'default')
217 revs = [repo.lookup(rev) for rev in revs]
217 revs = [repo.lookup(rev) for rev in revs]
218 other = hg.repository(ui, dest)
218 other = hg.repository(ui, dest)
219 ui.status(_('comparing with %s\n') % dest)
219 ui.status(_('comparing with %s\n') % dest)
220 o = repo.findoutgoing(other)
220 o = repo.findoutgoing(other)
221 if not o:
221 if not o:
222 ui.status(_("no changes found\n"))
222 ui.status(_("no changes found\n"))
223 return []
223 return []
224 o = repo.changelog.nodesbetween(o, revs or None)[0]
224 o = repo.changelog.nodesbetween(o, revs or None)[0]
225 return [str(repo.changelog.rev(r)) for r in o]
225 return [str(repo.changelog.rev(r)) for r in o]
226
226
227 def getbundle(dest):
227 def getbundle(dest):
228 tmpdir = tempfile.mkdtemp(prefix='hg-email-bundle-')
228 tmpdir = tempfile.mkdtemp(prefix='hg-email-bundle-')
229 tmpfn = os.path.join(tmpdir, 'bundle')
229 tmpfn = os.path.join(tmpdir, 'bundle')
230 try:
230 try:
231 commands.bundle(ui, repo, tmpfn, dest, **opts)
231 commands.bundle(ui, repo, tmpfn, dest, **opts)
232 return open(tmpfn, 'rb').read()
232 return open(tmpfn, 'rb').read()
233 finally:
233 finally:
234 try:
234 try:
235 os.unlink(tmpfn)
235 os.unlink(tmpfn)
236 except:
236 except:
237 pass
237 pass
238 os.rmdir(tmpdir)
238 os.rmdir(tmpdir)
239
239
240 if not (opts.get('test') or opts.get('mbox')):
240 if not (opts.get('test') or opts.get('mbox')):
241 # really sending
241 # really sending
242 mail.validateconfig(ui)
242 mail.validateconfig(ui)
243
243
244 if not (revs or opts.get('rev')
244 if not (revs or opts.get('rev')
245 or opts.get('outgoing') or opts.get('bundle')):
245 or opts.get('outgoing') or opts.get('bundle')):
246 raise util.Abort(_('specify at least one changeset with -r or -o'))
246 raise util.Abort(_('specify at least one changeset with -r or -o'))
247
247
248 cmdutil.setremoteconfig(ui, opts)
248 cmdutil.setremoteconfig(ui, opts)
249 if opts.get('outgoing') and opts.get('bundle'):
249 if opts.get('outgoing') and opts.get('bundle'):
250 raise util.Abort(_("--outgoing mode always on with --bundle;"
250 raise util.Abort(_("--outgoing mode always on with --bundle;"
251 " do not re-specify --outgoing"))
251 " do not re-specify --outgoing"))
252
252
253 if opts.get('outgoing') or opts.get('bundle'):
253 if opts.get('outgoing') or opts.get('bundle'):
254 if len(revs) > 1:
254 if len(revs) > 1:
255 raise util.Abort(_("too many destinations"))
255 raise util.Abort(_("too many destinations"))
256 dest = revs and revs[0] or None
256 dest = revs and revs[0] or None
257 revs = []
257 revs = []
258
258
259 if opts.get('rev'):
259 if opts.get('rev'):
260 if revs:
260 if revs:
261 raise util.Abort(_('use only one form to specify the revision'))
261 raise util.Abort(_('use only one form to specify the revision'))
262 revs = opts.get('rev')
262 revs = opts.get('rev')
263
263
264 if opts.get('outgoing'):
264 if opts.get('outgoing'):
265 revs = outgoing(dest, opts.get('rev'))
265 revs = outgoing(dest, opts.get('rev'))
266 if opts.get('bundle'):
266 if opts.get('bundle'):
267 opts['revs'] = revs
267 opts['revs'] = revs
268
268
269 # start
269 # start
270 if opts.get('date'):
270 if opts.get('date'):
271 start_time = util.parsedate(opts.get('date'))
271 start_time = util.parsedate(opts.get('date'))
272 else:
272 else:
273 start_time = util.makedate()
273 start_time = util.makedate()
274
274
275 def genmsgid(id):
275 def genmsgid(id):
276 return '<%s.%s@%s>' % (id[:20], int(start_time[0]), socket.getfqdn())
276 return '<%s.%s@%s>' % (id[:20], int(start_time[0]), socket.getfqdn())
277
277
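genmsgid builds the Message-Id from a 20-character node prefix, the send timestamp, and the local FQDN; with an invented node and hostname, the shape is:

    node = 'a1b2c3d4e5f60718293a4b5c6d7e8f9012345678'
    print('<%s.%s@%s>' % (node[:20], 1194000000, 'example.com'))
    # prints: <a1b2c3d4e5f60718293a.1194000000@example.com>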
278 def getdescription(body, sender):
278 def getdescription(body, sender):
279 if opts.get('desc'):
279 if opts.get('desc'):
280 body = open(opts.get('desc')).read()
280 body = open(opts.get('desc')).read()
281 else:
281 else:
282 ui.write(_('\nWrite the introductory message for the '
282 ui.write(_('\nWrite the introductory message for the '
283 'patch series.\n\n'))
283 'patch series.\n\n'))
284 body = ui.edit(body, sender)
284 body = ui.edit(body, sender)
285 return body
285 return body
286
286
287 def getexportmsgs():
287 def getexportmsgs():
288 patches = []
288 patches = []
289
289
290 class exportee:
290 class exportee:
291 def __init__(self, container):
291 def __init__(self, container):
292 self.lines = []
292 self.lines = []
293 self.container = container
293 self.container = container
294 self.name = 'email'
294 self.name = 'email'
295
295
296 def write(self, data):
296 def write(self, data):
297 self.lines.append(data)
297 self.lines.append(data)
298
298
299 def close(self):
299 def close(self):
300 self.container.append(''.join(self.lines).split('\n'))
300 self.container.append(''.join(self.lines).split('\n'))
301 self.lines = []
301 self.lines = []
302
302
303 commands.export(ui, repo, *revs, **{'output': exportee(patches),
303 commands.export(ui, repo, *revs, **{'output': exportee(patches),
304 'switch_parent': False,
304 'switch_parent': False,
305 'text': None,
305 'text': None,
306 'git': opts.get('git')})
306 'git': opts.get('git')})
307
307
308 jumbo = []
308 jumbo = []
309 msgs = []
309 msgs = []
310
310
311 ui.write(_('This patch series consists of %d patches.\n\n')
311 ui.write(_('This patch series consists of %d patches.\n\n')
312 % len(patches))
312 % len(patches))
313
313
314 for p, i in zip(patches, xrange(len(patches))):
314 for p, i in zip(patches, xrange(len(patches))):
315 jumbo.extend(p)
315 jumbo.extend(p)
316 msgs.append(makepatch(p, i + 1, len(patches)))
316 msgs.append(makepatch(p, i + 1, len(patches)))
317
317
318 if len(patches) > 1:
318 if len(patches) > 1:
319 tlen = len(str(len(patches)))
319 tlen = len(str(len(patches)))
320
320
321 subj = '[PATCH %0*d of %d] %s' % (
321 subj = '[PATCH %0*d of %d] %s' % (
322 tlen, 0, len(patches),
322 tlen, 0, len(patches),
323 opts.get('subject') or
323 opts.get('subject') or
324 prompt('Subject:',
324 prompt('Subject:',
325 rest=' [PATCH %0*d of %d] ' % (tlen, 0, len(patches))))
325 rest=' [PATCH %0*d of %d] ' % (tlen, 0, len(patches))))
326
326
327 body = ''
327 body = ''
328 if opts.get('diffstat'):
328 if opts.get('diffstat'):
329 d = cdiffstat(_('Final summary:\n'), jumbo)
329 d = cdiffstat(_('Final summary:\n'), jumbo)
330 if d:
330 if d:
331 body = '\n' + d
331 body = '\n' + d
332
332
333 body = getdescription(body, sender)
333 body = getdescription(body, sender)
334 msg = email.MIMEText.MIMEText(body)
334 msg = email.MIMEText.MIMEText(body)
335 msg['Subject'] = subj
335 msg['Subject'] = subj
336
336
337 msgs.insert(0, msg)
337 msgs.insert(0, msg)
338 return msgs
338 return msgs
339
339
340 def getbundlemsgs(bundle):
340 def getbundlemsgs(bundle):
341 subj = (opts.get('subject')
341 subj = (opts.get('subject')
342 or prompt('Subject:', default='A bundle for your repository'))
342 or prompt('Subject:', default='A bundle for your repository'))
343
343
344 body = getdescription('', sender)
344 body = getdescription('', sender)
345 msg = email.MIMEMultipart.MIMEMultipart()
345 msg = email.MIMEMultipart.MIMEMultipart()
346 if body:
346 if body:
347 msg.attach(email.MIMEText.MIMEText(body, 'plain'))
347 msg.attach(email.MIMEText.MIMEText(body, 'plain'))
348 datapart = email.MIMEBase.MIMEBase('application', 'x-mercurial-bundle')
348 datapart = email.MIMEBase.MIMEBase('application', 'x-mercurial-bundle')
349 datapart.set_payload(bundle)
349 datapart.set_payload(bundle)
350 datapart.add_header('Content-Disposition', 'attachment',
350 datapart.add_header('Content-Disposition', 'attachment',
351 filename='bundle.hg')
351 filename='bundle.hg')
352 email.Encoders.encode_base64(datapart)
352 email.Encoders.encode_base64(datapart)
353 msg.attach(datapart)
353 msg.attach(datapart)
354 msg['Subject'] = subj
354 msg['Subject'] = subj
355 return [msg]
355 return [msg]
356
356
357 sender = (opts.get('from') or ui.config('email', 'from') or
357 sender = (opts.get('from') or ui.config('email', 'from') or
358 ui.config('patchbomb', 'from') or
358 ui.config('patchbomb', 'from') or
359 prompt('From', ui.username()))
359 prompt('From', ui.username()))
360
360
361 if opts.get('bundle'):
361 if opts.get('bundle'):
362 msgs = getbundlemsgs(getbundle(dest))
362 msgs = getbundlemsgs(getbundle(dest))
363 else:
363 else:
364 msgs = getexportmsgs()
364 msgs = getexportmsgs()
365
365
366 def getaddrs(opt, prpt, default = None):
366 def getaddrs(opt, prpt, default = None):
367 addrs = opts.get(opt) or (ui.config('email', opt) or
367 addrs = opts.get(opt) or (ui.config('email', opt) or
368 ui.config('patchbomb', opt) or
368 ui.config('patchbomb', opt) or
369 prompt(prpt, default = default)).split(',')
369 prompt(prpt, default = default)).split(',')
370 return [a.strip() for a in addrs if a.strip()]
370 return [a.strip() for a in addrs if a.strip()]
371
371
372 to = getaddrs('to', 'To')
372 to = getaddrs('to', 'To')
373 cc = getaddrs('cc', 'Cc', '')
373 cc = getaddrs('cc', 'Cc', '')
374
374
375 bcc = opts.get('bcc') or (ui.config('email', 'bcc') or
375 bcc = opts.get('bcc') or (ui.config('email', 'bcc') or
376 ui.config('patchbomb', 'bcc') or '').split(',')
376 ui.config('patchbomb', 'bcc') or '').split(',')
377 bcc = [a.strip() for a in bcc if a.strip()]
377 bcc = [a.strip() for a in bcc if a.strip()]
378
378
379 ui.write('\n')
379 ui.write('\n')
380
380
381 parent = None
381 parent = None
382
382
383 sender_addr = email.Utils.parseaddr(sender)[1]
383 sender_addr = email.Utils.parseaddr(sender)[1]
384 sendmail = None
384 sendmail = None
385 for m in msgs:
385 for m in msgs:
386 try:
386 try:
387 m['Message-Id'] = genmsgid(m['X-Mercurial-Node'])
387 m['Message-Id'] = genmsgid(m['X-Mercurial-Node'])
388 except TypeError:
388 except TypeError:
389 m['Message-Id'] = genmsgid('patchbomb')
389 m['Message-Id'] = genmsgid('patchbomb')
390 if parent:
390 if parent:
391 m['In-Reply-To'] = parent
391 m['In-Reply-To'] = parent
392 else:
392 else:
393 parent = m['Message-Id']
393 parent = m['Message-Id']
394 m['Date'] = util.datestr(date=start_time,
394 m['Date'] = util.datestr(date=start_time,
395 format="%a, %d %b %Y %H:%M:%S", timezone=True)
395 format="%a, %d %b %Y %H:%M:%S", timezone=True)
396
396
397 start_time = (start_time[0] + 1, start_time[1])
397 start_time = (start_time[0] + 1, start_time[1])
398 m['From'] = sender
398 m['From'] = sender
399 m['To'] = ', '.join(to)
399 m['To'] = ', '.join(to)
400 if cc:
400 if cc:
401 m['Cc'] = ', '.join(cc)
401 m['Cc'] = ', '.join(cc)
402 if bcc:
402 if bcc:
403 m['Bcc'] = ', '.join(bcc)
403 m['Bcc'] = ', '.join(bcc)
404 if opts.get('test'):
404 if opts.get('test'):
405 ui.status('Displaying ', m['Subject'], ' ...\n')
405 ui.status('Displaying ', m['Subject'], ' ...\n')
406 ui.flush()
406 ui.flush()
407 if 'PAGER' in os.environ:
407 if 'PAGER' in os.environ:
408 fp = os.popen(os.environ['PAGER'], 'w')
408 fp = os.popen(os.environ['PAGER'], 'w')
409 else:
409 else:
410 fp = ui
410 fp = ui
411 try:
411 try:
412 fp.write(m.as_string(0))
412 fp.write(m.as_string(0))
413 fp.write('\n')
413 fp.write('\n')
414 except IOError, inst:
414 except IOError, inst:
415 if inst.errno != errno.EPIPE:
415 if inst.errno != errno.EPIPE:
416 raise
416 raise
417 if fp is not ui:
417 if fp is not ui:
418 fp.close()
418 fp.close()
419 elif opts.get('mbox'):
419 elif opts.get('mbox'):
420 ui.status('Writing ', m['Subject'], ' ...\n')
420 ui.status('Writing ', m['Subject'], ' ...\n')
421 fp = open(opts.get('mbox'),
421 fp = open(opts.get('mbox'), 'In-Reply-To' in m and 'ab+' or 'wb+')
422 m.has_key('In-Reply-To') and 'ab+' or 'wb+')
423 date = util.datestr(date=start_time,
422 date = util.datestr(date=start_time,
424 format='%a %b %d %H:%M:%S %Y', timezone=False)
423 format='%a %b %d %H:%M:%S %Y', timezone=False)
425 fp.write('From %s %s\n' % (sender_addr, date))
424 fp.write('From %s %s\n' % (sender_addr, date))
426 fp.write(m.as_string(0))
425 fp.write(m.as_string(0))
427 fp.write('\n\n')
426 fp.write('\n\n')
428 fp.close()
427 fp.close()
429 else:
428 else:
430 if not sendmail:
429 if not sendmail:
431 sendmail = mail.connect(ui)
430 sendmail = mail.connect(ui)
432 ui.status('Sending ', m['Subject'], ' ...\n')
431 ui.status('Sending ', m['Subject'], ' ...\n')
433 # Exim does not remove the Bcc field
432 # Exim does not remove the Bcc field
434 del m['Bcc']
433 del m['Bcc']
435 sendmail(ui, sender, to + bcc + cc, m.as_string(0))
434 sendmail(ui, sender, to + bcc + cc, m.as_string(0))
436
435
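The mbox hunk above shows the idiom this changeset standardizes on: membership tests written with the in operator instead of dict.has_key(). Both forms are equivalent here, but only the first survives has_key()'s eventual removal:

    m = {'In-Reply-To': '<some-parent-id>'}
    print(m.has_key('In-Reply-To'))   # True -- old spelling, removed in Python 3
    print('In-Reply-To' in m)         # True -- the preferred spelling used above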
437 cmdtable = {
436 cmdtable = {
438 "email":
437 "email":
439 (patchbomb,
438 (patchbomb,
440 [('a', 'attach', None, _('send patches as attachments')),
439 [('a', 'attach', None, _('send patches as attachments')),
441 ('i', 'inline', None, _('send patches as inline attachments')),
440 ('i', 'inline', None, _('send patches as inline attachments')),
442 ('', 'bcc', [], _('email addresses of blind copy recipients')),
441 ('', 'bcc', [], _('email addresses of blind copy recipients')),
443 ('c', 'cc', [], _('email addresses of copy recipients')),
442 ('c', 'cc', [], _('email addresses of copy recipients')),
444 ('d', 'diffstat', None, _('add diffstat output to messages')),
443 ('d', 'diffstat', None, _('add diffstat output to messages')),
445 ('', 'date', '', _('use the given date as the sending date')),
444 ('', 'date', '', _('use the given date as the sending date')),
446 ('', 'desc', '', _('use the given file as the series description')),
445 ('', 'desc', '', _('use the given file as the series description')),
447 ('g', 'git', None, _('use git extended diff format')),
446 ('g', 'git', None, _('use git extended diff format')),
448 ('f', 'from', '', _('email address of sender')),
447 ('f', 'from', '', _('email address of sender')),
449 ('', 'plain', None, _('omit hg patch header')),
448 ('', 'plain', None, _('omit hg patch header')),
450 ('n', 'test', None, _('print messages that would be sent')),
449 ('n', 'test', None, _('print messages that would be sent')),
451 ('m', 'mbox', '',
450 ('m', 'mbox', '',
452 _('write messages to mbox file instead of sending them')),
451 _('write messages to mbox file instead of sending them')),
453 ('o', 'outgoing', None,
452 ('o', 'outgoing', None,
454 _('send changes not found in the target repository')),
453 _('send changes not found in the target repository')),
455 ('b', 'bundle', None,
454 ('b', 'bundle', None,
456 _('send changes not in target as a binary bundle')),
455 _('send changes not in target as a binary bundle')),
457 ('r', 'rev', [], _('a revision to send')),
456 ('r', 'rev', [], _('a revision to send')),
458 ('s', 'subject', '',
457 ('s', 'subject', '',
459 _('subject of first message (intro or single patch)')),
458 _('subject of first message (intro or single patch)')),
460 ('t', 'to', [], _('email addresses of recipients')),
459 ('t', 'to', [], _('email addresses of recipients')),
461 ('', 'force', None,
460 ('', 'force', None,
462 _('run even when remote repository is unrelated (with -b)')),
461 _('run even when remote repository is unrelated (with -b)')),
463 ('', 'base', [],
462 ('', 'base', [],
464 _('a base changeset to specify instead of a destination (with -b)')),
463 _('a base changeset to specify instead of a destination (with -b)')),
465 ] + commands.remoteopts,
464 ] + commands.remoteopts,
466 _('hg email [OPTION]... [DEST]...'))
465 _('hg email [OPTION]... [DEST]...'))
467 }
466 }
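The entry above follows the standard cmdtable convention: each command name maps to a (function, options, synopsis) triple, where every option is a (short, long, default, help) tuple. A minimal sketch with an invented command name:

    from mercurial.i18n import _

    def hello(ui, repo, **opts):
        '''print the repository root (illustration only)'''
        ui.write('hello from %s\n' % repo.root)

    cmdtable = {
        'hello': (hello, [], _('hg hello')),
    }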
@@ -1,1159 +1,1159 b''
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import _
9 from i18n import _
10 import os, sys, bisect, stat
10 import os, sys, bisect, stat
11 import mdiff, bdiff, util, templater, patch, errno
11 import mdiff, bdiff, util, templater, patch, errno
12
12
13 revrangesep = ':'
13 revrangesep = ':'
14
14
15 class UnknownCommand(Exception):
15 class UnknownCommand(Exception):
16 """Exception raised if command is not in the command table."""
16 """Exception raised if command is not in the command table."""
17 class AmbiguousCommand(Exception):
17 class AmbiguousCommand(Exception):
18 """Exception raised if command shortcut matches more than one command."""
18 """Exception raised if command shortcut matches more than one command."""
19
19
20 def findpossible(ui, cmd, table):
20 def findpossible(ui, cmd, table):
21 """
21 """
22 Return cmd -> (aliases, command table entry)
22 Return cmd -> (aliases, command table entry)
23 for each matching command.
23 for each matching command.
24 Return debug commands (or their aliases) only if no normal command matches.
24 Return debug commands (or their aliases) only if no normal command matches.
25 """
25 """
26 choice = {}
26 choice = {}
27 debugchoice = {}
27 debugchoice = {}
28 for e in table.keys():
28 for e in table.keys():
29 aliases = e.lstrip("^").split("|")
29 aliases = e.lstrip("^").split("|")
30 found = None
30 found = None
31 if cmd in aliases:
31 if cmd in aliases:
32 found = cmd
32 found = cmd
33 elif not ui.config("ui", "strict"):
33 elif not ui.config("ui", "strict"):
34 for a in aliases:
34 for a in aliases:
35 if a.startswith(cmd):
35 if a.startswith(cmd):
36 found = a
36 found = a
37 break
37 break
38 if found is not None:
38 if found is not None:
39 if aliases[0].startswith("debug") or found.startswith("debug"):
39 if aliases[0].startswith("debug") or found.startswith("debug"):
40 debugchoice[found] = (aliases, table[e])
40 debugchoice[found] = (aliases, table[e])
41 else:
41 else:
42 choice[found] = (aliases, table[e])
42 choice[found] = (aliases, table[e])
43
43
44 if not choice and debugchoice:
44 if not choice and debugchoice:
45 choice = debugchoice
45 choice = debugchoice
46
46
47 return choice
47 return choice
48
48
49 def findcmd(ui, cmd, table):
49 def findcmd(ui, cmd, table):
50 """Return (aliases, command table entry) for command string."""
50 """Return (aliases, command table entry) for command string."""
51 choice = findpossible(ui, cmd, table)
51 choice = findpossible(ui, cmd, table)
52
52
53 if choice.has_key(cmd):
53 if cmd in choice:
54 return choice[cmd]
54 return choice[cmd]
55
55
56 if len(choice) > 1:
56 if len(choice) > 1:
57 clist = choice.keys()
57 clist = choice.keys()
58 clist.sort()
58 clist.sort()
59 raise AmbiguousCommand(cmd, clist)
59 raise AmbiguousCommand(cmd, clist)
60
60
61 if choice:
61 if choice:
62 return choice.values()[0]
62 return choice.values()[0]
63
63
64 raise UnknownCommand(cmd)
64 raise UnknownCommand(cmd)
65
65
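The prefix matching in findpossible can be exercised on its own; with invented table keys, an abbreviation that matches several aliases is exactly what makes findcmd raise AmbiguousCommand:

    table = {'^commit|ci': None, 'copy': None, 'config': None}
    cmd = 'co'
    found = [a for e in table for a in e.lstrip('^').split('|')
             if a == cmd or a.startswith(cmd)]
    print(sorted(found))   # ['commit', 'config', 'copy'] -- ambiguous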
66 def bail_if_changed(repo):
66 def bail_if_changed(repo):
67 if repo.dirstate.parents()[1] != nullid:
67 if repo.dirstate.parents()[1] != nullid:
68 raise util.Abort(_('outstanding uncommitted merge'))
68 raise util.Abort(_('outstanding uncommitted merge'))
69 modified, added, removed, deleted = repo.status()[:4]
69 modified, added, removed, deleted = repo.status()[:4]
70 if modified or added or removed or deleted:
70 if modified or added or removed or deleted:
71 raise util.Abort(_("outstanding uncommitted changes"))
71 raise util.Abort(_("outstanding uncommitted changes"))
72
72
73 def logmessage(opts):
73 def logmessage(opts):
74 """ get the log message according to -m and -l option """
74 """ get the log message according to -m and -l option """
75 message = opts['message']
75 message = opts['message']
76 logfile = opts['logfile']
76 logfile = opts['logfile']
77
77
78 if message and logfile:
78 if message and logfile:
79 raise util.Abort(_('options --message and --logfile are mutually '
79 raise util.Abort(_('options --message and --logfile are mutually '
80 'exclusive'))
80 'exclusive'))
81 if not message and logfile:
81 if not message and logfile:
82 try:
82 try:
83 if logfile == '-':
83 if logfile == '-':
84 message = sys.stdin.read()
84 message = sys.stdin.read()
85 else:
85 else:
86 message = open(logfile).read()
86 message = open(logfile).read()
87 except IOError, inst:
87 except IOError, inst:
88 raise util.Abort(_("can't read commit message '%s': %s") %
88 raise util.Abort(_("can't read commit message '%s': %s") %
89 (logfile, inst.strerror))
89 (logfile, inst.strerror))
90 return message
90 return message
91
91
92 def setremoteconfig(ui, opts):
92 def setremoteconfig(ui, opts):
93 "copy remote options to ui tree"
93 "copy remote options to ui tree"
94 if opts.get('ssh'):
94 if opts.get('ssh'):
95 ui.setconfig("ui", "ssh", opts['ssh'])
95 ui.setconfig("ui", "ssh", opts['ssh'])
96 if opts.get('remotecmd'):
96 if opts.get('remotecmd'):
97 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
97 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
98
98
99 def revpair(repo, revs):
99 def revpair(repo, revs):
100 '''return pair of nodes, given list of revisions. second item can
100 '''return pair of nodes, given list of revisions. second item can
101 be None, meaning use working dir.'''
101 be None, meaning use working dir.'''
102
102
103 def revfix(repo, val, defval):
103 def revfix(repo, val, defval):
104 if not val and val != 0 and defval is not None:
104 if not val and val != 0 and defval is not None:
105 val = defval
105 val = defval
106 return repo.lookup(val)
106 return repo.lookup(val)
107
107
108 if not revs:
108 if not revs:
109 return repo.dirstate.parents()[0], None
109 return repo.dirstate.parents()[0], None
110 end = None
110 end = None
111 if len(revs) == 1:
111 if len(revs) == 1:
112 if revrangesep in revs[0]:
112 if revrangesep in revs[0]:
113 start, end = revs[0].split(revrangesep, 1)
113 start, end = revs[0].split(revrangesep, 1)
114 start = revfix(repo, start, 0)
114 start = revfix(repo, start, 0)
115 end = revfix(repo, end, repo.changelog.count() - 1)
115 end = revfix(repo, end, repo.changelog.count() - 1)
116 else:
116 else:
117 start = revfix(repo, revs[0], None)
117 start = revfix(repo, revs[0], None)
118 elif len(revs) == 2:
118 elif len(revs) == 2:
119 if revrangesep in revs[0] or revrangesep in revs[1]:
119 if revrangesep in revs[0] or revrangesep in revs[1]:
120 raise util.Abort(_('too many revisions specified'))
120 raise util.Abort(_('too many revisions specified'))
121 start = revfix(repo, revs[0], None)
121 start = revfix(repo, revs[0], None)
122 end = revfix(repo, revs[1], None)
122 end = revfix(repo, revs[1], None)
123 else:
123 else:
124 raise util.Abort(_('too many revisions specified'))
124 raise util.Abort(_('too many revisions specified'))
125 return start, end
125 return start, end
126
126
127 def revrange(repo, revs):
127 def revrange(repo, revs):
128 """Yield revision as strings from a list of revision specifications."""
128 """Yield revision as strings from a list of revision specifications."""
129
129
130 def revfix(repo, val, defval):
130 def revfix(repo, val, defval):
131 if not val and val != 0 and defval is not None:
131 if not val and val != 0 and defval is not None:
132 return defval
132 return defval
133 return repo.changelog.rev(repo.lookup(val))
133 return repo.changelog.rev(repo.lookup(val))
134
134
135 seen, l = {}, []
135 seen, l = {}, []
136 for spec in revs:
136 for spec in revs:
137 if revrangesep in spec:
137 if revrangesep in spec:
138 start, end = spec.split(revrangesep, 1)
138 start, end = spec.split(revrangesep, 1)
139 start = revfix(repo, start, 0)
139 start = revfix(repo, start, 0)
140 end = revfix(repo, end, repo.changelog.count() - 1)
140 end = revfix(repo, end, repo.changelog.count() - 1)
141 step = start > end and -1 or 1
141 step = start > end and -1 or 1
142 for rev in xrange(start, end+step, step):
142 for rev in xrange(start, end+step, step):
143 if rev in seen:
143 if rev in seen:
144 continue
144 continue
145 seen[rev] = 1
145 seen[rev] = 1
146 l.append(rev)
146 l.append(rev)
147 else:
147 else:
148 rev = revfix(repo, spec, None)
148 rev = revfix(repo, spec, None)
149 if rev in seen:
149 if rev in seen:
150 continue
150 continue
151 seen[rev] = 1
151 seen[rev] = 1
152 l.append(rev)
152 l.append(rev)
153
153
154 return l
154 return l
155
155
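The step computation above is what lets a range such as 5:2 run backwards; stripped of the repository lookups, the expansion reduces to:

    def expand(start, end):
        step = start > end and -1 or 1
        return range(start, end + step, step)

    print(list(expand(2, 5)))   # [2, 3, 4, 5]
    print(list(expand(5, 2)))   # [5, 4, 3, 2]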
156 def make_filename(repo, pat, node,
156 def make_filename(repo, pat, node,
157 total=None, seqno=None, revwidth=None, pathname=None):
157 total=None, seqno=None, revwidth=None, pathname=None):
158 node_expander = {
158 node_expander = {
159 'H': lambda: hex(node),
159 'H': lambda: hex(node),
160 'R': lambda: str(repo.changelog.rev(node)),
160 'R': lambda: str(repo.changelog.rev(node)),
161 'h': lambda: short(node),
161 'h': lambda: short(node),
162 }
162 }
163 expander = {
163 expander = {
164 '%': lambda: '%',
164 '%': lambda: '%',
165 'b': lambda: os.path.basename(repo.root),
165 'b': lambda: os.path.basename(repo.root),
166 }
166 }
167
167
168 try:
168 try:
169 if node:
169 if node:
170 expander.update(node_expander)
170 expander.update(node_expander)
171 if node:
171 if node:
172 expander['r'] = (lambda:
172 expander['r'] = (lambda:
173 str(repo.changelog.rev(node)).zfill(revwidth or 0))
173 str(repo.changelog.rev(node)).zfill(revwidth or 0))
174 if total is not None:
174 if total is not None:
175 expander['N'] = lambda: str(total)
175 expander['N'] = lambda: str(total)
176 if seqno is not None:
176 if seqno is not None:
177 expander['n'] = lambda: str(seqno)
177 expander['n'] = lambda: str(seqno)
178 if total is not None and seqno is not None:
178 if total is not None and seqno is not None:
179 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
179 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
180 if pathname is not None:
180 if pathname is not None:
181 expander['s'] = lambda: os.path.basename(pathname)
181 expander['s'] = lambda: os.path.basename(pathname)
182 expander['d'] = lambda: os.path.dirname(pathname) or '.'
182 expander['d'] = lambda: os.path.dirname(pathname) or '.'
183 expander['p'] = lambda: pathname
183 expander['p'] = lambda: pathname
184
184
185 newname = []
185 newname = []
186 patlen = len(pat)
186 patlen = len(pat)
187 i = 0
187 i = 0
188 while i < patlen:
188 while i < patlen:
189 c = pat[i]
189 c = pat[i]
190 if c == '%':
190 if c == '%':
191 i += 1
191 i += 1
192 c = pat[i]
192 c = pat[i]
193 c = expander[c]()
193 c = expander[c]()
194 newname.append(c)
194 newname.append(c)
195 i += 1
195 i += 1
196 return ''.join(newname)
196 return ''.join(newname)
197 except KeyError, inst:
197 except KeyError, inst:
198 raise util.Abort(_("invalid format spec '%%%s' in output file name") %
198 raise util.Abort(_("invalid format spec '%%%s' in output file name") %
199 inst.args[0])
199 inst.args[0])
200
200
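A worked example of the expansion rules above, using the '%b-%n.patch' pattern that patchbomb passes in, an assumed repository root of /home/user/hg, and patch 3 of a 12-patch series:

    import os
    root, seqno, total = '/home/user/hg', 3, 12
    b = os.path.basename(root)                # 'b' expands to 'hg'
    n = str(seqno).zfill(len(str(total)))     # 'n' expands to '03'
    print('%s-%s.patch' % (b, n))             # hg-03.patch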
201 def make_file(repo, pat, node=None,
201 def make_file(repo, pat, node=None,
202 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
202 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
203 if not pat or pat == '-':
203 if not pat or pat == '-':
204 return 'w' in mode and sys.stdout or sys.stdin
204 return 'w' in mode and sys.stdout or sys.stdin
205 if hasattr(pat, 'write') and 'w' in mode:
205 if hasattr(pat, 'write') and 'w' in mode:
206 return pat
206 return pat
207 if hasattr(pat, 'read') and 'r' in mode:
207 if hasattr(pat, 'read') and 'r' in mode:
208 return pat
208 return pat
209 return open(make_filename(repo, pat, node, total, seqno, revwidth,
209 return open(make_filename(repo, pat, node, total, seqno, revwidth,
210 pathname),
210 pathname),
211 mode)
211 mode)
212
212
213 def matchpats(repo, pats=[], opts={}, globbed=False, default=None):
213 def matchpats(repo, pats=[], opts={}, globbed=False, default=None):
214 cwd = repo.getcwd()
214 cwd = repo.getcwd()
215 return util.cmdmatcher(repo.root, cwd, pats or [], opts.get('include'),
215 return util.cmdmatcher(repo.root, cwd, pats or [], opts.get('include'),
216 opts.get('exclude'), globbed=globbed,
216 opts.get('exclude'), globbed=globbed,
217 default=default)
217 default=default)
218
218
219 def walk(repo, pats=[], opts={}, node=None, badmatch=None, globbed=False,
219 def walk(repo, pats=[], opts={}, node=None, badmatch=None, globbed=False,
220 default=None):
220 default=None):
221 files, matchfn, anypats = matchpats(repo, pats, opts, globbed=globbed,
221 files, matchfn, anypats = matchpats(repo, pats, opts, globbed=globbed,
222 default=default)
222 default=default)
223 exact = dict.fromkeys(files)
223 exact = dict.fromkeys(files)
224 cwd = repo.getcwd()
224 cwd = repo.getcwd()
225 for src, fn in repo.walk(node=node, files=files, match=matchfn,
225 for src, fn in repo.walk(node=node, files=files, match=matchfn,
226 badmatch=badmatch):
226 badmatch=badmatch):
227 yield src, fn, repo.pathto(fn, cwd), fn in exact
227 yield src, fn, repo.pathto(fn, cwd), fn in exact
228
228
229 def findrenames(repo, added=None, removed=None, threshold=0.5):
229 def findrenames(repo, added=None, removed=None, threshold=0.5):
230 '''find renamed files -- yields (before, after, score) tuples'''
230 '''find renamed files -- yields (before, after, score) tuples'''
231 if added is None or removed is None:
231 if added is None or removed is None:
232 added, removed = repo.status()[1:3]
232 added, removed = repo.status()[1:3]
233 ctx = repo.changectx()
233 ctx = repo.changectx()
234 for a in added:
234 for a in added:
235 aa = repo.wread(a)
235 aa = repo.wread(a)
236 bestname, bestscore = None, threshold
236 bestname, bestscore = None, threshold
237 for r in removed:
237 for r in removed:
238 rr = ctx.filectx(r).data()
238 rr = ctx.filectx(r).data()
239
239
240 # bdiff.blocks() returns blocks of matching lines
240 # bdiff.blocks() returns blocks of matching lines
241 # count the number of bytes in each
241 # count the number of bytes in each
242 equal = 0
242 equal = 0
243 alines = mdiff.splitnewlines(aa)
243 alines = mdiff.splitnewlines(aa)
244 matches = bdiff.blocks(aa, rr)
244 matches = bdiff.blocks(aa, rr)
245 for x1,x2,y1,y2 in matches:
245 for x1,x2,y1,y2 in matches:
246 for line in alines[x1:x2]:
246 for line in alines[x1:x2]:
247 equal += len(line)
247 equal += len(line)
248
248
249 lengths = len(aa) + len(rr)
249 lengths = len(aa) + len(rr)
250 if lengths:
250 if lengths:
251 myscore = equal*2.0 / lengths
251 myscore = equal*2.0 / lengths
252 if myscore >= bestscore:
252 if myscore >= bestscore:
253 bestname, bestscore = r, myscore
253 bestname, bestscore = r, myscore
254 if bestname:
254 if bestname:
255 yield bestname, a, bestscore
255 yield bestname, a, bestscore
256
256
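The score above is twice the matched byte count divided by the combined size of both files, so identical files score 1.0. For example, 60 matching bytes between a 100-byte added file and a 60-byte removed one:

    equal, len_a, len_r = 60, 100, 60
    print(equal * 2.0 / (len_a + len_r))   # 0.75 -- above the default 0.5 threshold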
257 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
257 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
258 if dry_run is None:
258 if dry_run is None:
259 dry_run = opts.get('dry_run')
259 dry_run = opts.get('dry_run')
260 if similarity is None:
260 if similarity is None:
261 similarity = float(opts.get('similarity') or 0)
261 similarity = float(opts.get('similarity') or 0)
262 add, remove = [], []
262 add, remove = [], []
263 mapping = {}
263 mapping = {}
264 for src, abs, rel, exact in walk(repo, pats, opts):
264 for src, abs, rel, exact in walk(repo, pats, opts):
265 target = repo.wjoin(abs)
265 target = repo.wjoin(abs)
266 if src == 'f' and abs not in repo.dirstate:
266 if src == 'f' and abs not in repo.dirstate:
267 add.append(abs)
267 add.append(abs)
268 mapping[abs] = rel, exact
268 mapping[abs] = rel, exact
269 if repo.ui.verbose or not exact:
269 if repo.ui.verbose or not exact:
270 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
270 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
271 if repo.dirstate[abs] != 'r' and (not util.lexists(target)
271 if repo.dirstate[abs] != 'r' and (not util.lexists(target)
272 or (os.path.isdir(target) and not os.path.islink(target))):
272 or (os.path.isdir(target) and not os.path.islink(target))):
273 remove.append(abs)
273 remove.append(abs)
274 mapping[abs] = rel, exact
274 mapping[abs] = rel, exact
275 if repo.ui.verbose or not exact:
275 if repo.ui.verbose or not exact:
276 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
276 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
277 if not dry_run:
277 if not dry_run:
278 repo.remove(remove)
278 repo.remove(remove)
279 repo.add(add)
279 repo.add(add)
280 if similarity > 0:
280 if similarity > 0:
281 for old, new, score in findrenames(repo, add, remove, similarity):
281 for old, new, score in findrenames(repo, add, remove, similarity):
282 oldrel, oldexact = mapping[old]
282 oldrel, oldexact = mapping[old]
283 newrel, newexact = mapping[new]
283 newrel, newexact = mapping[new]
284 if repo.ui.verbose or not oldexact or not newexact:
284 if repo.ui.verbose or not oldexact or not newexact:
285 repo.ui.status(_('recording removal of %s as rename to %s '
285 repo.ui.status(_('recording removal of %s as rename to %s '
286 '(%d%% similar)\n') %
286 '(%d%% similar)\n') %
287 (oldrel, newrel, score * 100))
287 (oldrel, newrel, score * 100))
288 if not dry_run:
288 if not dry_run:
289 repo.copy(old, new)
289 repo.copy(old, new)
290
290
291 def copy(ui, repo, pats, opts, rename=False):
291 def copy(ui, repo, pats, opts, rename=False):
292 # called with the repo lock held
292 # called with the repo lock held
293 #
293 #
294 # hgsep => pathname that uses "/" to separate directories
294 # hgsep => pathname that uses "/" to separate directories
295 # ossep => pathname that uses os.sep to separate directories
295 # ossep => pathname that uses os.sep to separate directories
296 cwd = repo.getcwd()
296 cwd = repo.getcwd()
297 targets = {}
297 targets = {}
298 after = opts.get("after")
298 after = opts.get("after")
299 dryrun = opts.get("dry_run")
299 dryrun = opts.get("dry_run")
300
300
301 def walkpat(pat):
301 def walkpat(pat):
302 srcs = []
302 srcs = []
303 for tag, abs, rel, exact in walk(repo, [pat], opts, globbed=True):
303 for tag, abs, rel, exact in walk(repo, [pat], opts, globbed=True):
304 state = repo.dirstate[abs]
304 state = repo.dirstate[abs]
305 if state in '?r':
305 if state in '?r':
306 if exact and state == '?':
306 if exact and state == '?':
307 ui.warn(_('%s: not copying - file is not managed\n') % rel)
307 ui.warn(_('%s: not copying - file is not managed\n') % rel)
308 if exact and state == 'r':
308 if exact and state == 'r':
309 ui.warn(_('%s: not copying - file has been marked for'
309 ui.warn(_('%s: not copying - file has been marked for'
310 ' remove\n') % rel)
310 ' remove\n') % rel)
311 continue
311 continue
312 # abs: hgsep
312 # abs: hgsep
313 # rel: ossep
313 # rel: ossep
314 srcs.append((abs, rel, exact))
314 srcs.append((abs, rel, exact))
315 return srcs
315 return srcs
316
316
317 # abssrc: hgsep
317 # abssrc: hgsep
318 # relsrc: ossep
318 # relsrc: ossep
319 # otarget: ossep
319 # otarget: ossep
320 def copyfile(abssrc, relsrc, otarget, exact):
320 def copyfile(abssrc, relsrc, otarget, exact):
321 abstarget = util.canonpath(repo.root, cwd, otarget)
321 abstarget = util.canonpath(repo.root, cwd, otarget)
322 reltarget = repo.pathto(abstarget, cwd)
322 reltarget = repo.pathto(abstarget, cwd)
323 target = repo.wjoin(abstarget)
323 target = repo.wjoin(abstarget)
324 src = repo.wjoin(abssrc)
324 src = repo.wjoin(abssrc)
325 state = repo.dirstate[abstarget]
325 state = repo.dirstate[abstarget]
326
326
327 # check for collisions
327 # check for collisions
328 prevsrc = targets.get(abstarget)
328 prevsrc = targets.get(abstarget)
329 if prevsrc is not None:
329 if prevsrc is not None:
330 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
330 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
331 (reltarget, repo.pathto(abssrc, cwd),
331 (reltarget, repo.pathto(abssrc, cwd),
332 repo.pathto(prevsrc, cwd)))
332 repo.pathto(prevsrc, cwd)))
333 return
333 return
334
334
335 # check for overwrites
335 # check for overwrites
336 exists = os.path.exists(target)
336 exists = os.path.exists(target)
337 if (not after and exists or after and state in 'mn'):
337 if (not after and exists or after and state in 'mn'):
338 if not opts['force']:
338 if not opts['force']:
339 ui.warn(_('%s: not overwriting - file exists\n') %
339 ui.warn(_('%s: not overwriting - file exists\n') %
340 reltarget)
340 reltarget)
341 return
341 return
342
342
343 if after:
343 if after:
344 if not exists:
344 if not exists:
345 return
345 return
346 elif not dryrun:
346 elif not dryrun:
347 try:
347 try:
348 if exists:
348 if exists:
349 os.unlink(target)
349 os.unlink(target)
350 targetdir = os.path.dirname(target) or '.'
350 targetdir = os.path.dirname(target) or '.'
351 if not os.path.isdir(targetdir):
351 if not os.path.isdir(targetdir):
352 os.makedirs(targetdir)
352 os.makedirs(targetdir)
353 util.copyfile(src, target)
353 util.copyfile(src, target)
354 except IOError, inst:
354 except IOError, inst:
355 if inst.errno == errno.ENOENT:
355 if inst.errno == errno.ENOENT:
356 ui.warn(_('%s: deleted in working copy\n') % relsrc)
356 ui.warn(_('%s: deleted in working copy\n') % relsrc)
357 else:
357 else:
358 ui.warn(_('%s: cannot copy - %s\n') %
358 ui.warn(_('%s: cannot copy - %s\n') %
359 (relsrc, inst.strerror))
359 (relsrc, inst.strerror))
360 return True # report a failure
360 return True # report a failure
361
361
362 if ui.verbose or not exact:
362 if ui.verbose or not exact:
363 action = rename and "moving" or "copying"
363 action = rename and "moving" or "copying"
364 ui.status(_('%s %s to %s\n') % (action, relsrc, reltarget))
364 ui.status(_('%s %s to %s\n') % (action, relsrc, reltarget))
365
365
366 targets[abstarget] = abssrc
366 targets[abstarget] = abssrc
367
367
368 # fix up dirstate
368 # fix up dirstate
369 origsrc = repo.dirstate.copied(abssrc) or abssrc
369 origsrc = repo.dirstate.copied(abssrc) or abssrc
370 if abstarget == origsrc: # copying back a copy?
370 if abstarget == origsrc: # copying back a copy?
371 if state not in 'mn' and not dryrun:
371 if state not in 'mn' and not dryrun:
372 repo.dirstate.normallookup(abstarget)
372 repo.dirstate.normallookup(abstarget)
373 else:
373 else:
374 if repo.dirstate[origsrc] == 'a':
374 if repo.dirstate[origsrc] == 'a':
375 if not ui.quiet:
375 if not ui.quiet:
376 ui.warn(_("%s has not been committed yet, so no copy "
376 ui.warn(_("%s has not been committed yet, so no copy "
377 "data will be stored for %s.\n")
377 "data will be stored for %s.\n")
378 % (repo.pathto(origsrc, cwd), reltarget))
378 % (repo.pathto(origsrc, cwd), reltarget))
379 if abstarget not in repo.dirstate and not dryrun:
379 if abstarget not in repo.dirstate and not dryrun:
380 repo.add([abstarget])
380 repo.add([abstarget])
381 elif not dryrun:
381 elif not dryrun:
382 repo.copy(origsrc, abstarget)
382 repo.copy(origsrc, abstarget)
383
383
384 if rename and not dryrun:
384 if rename and not dryrun:
385 repo.remove([abssrc], True)
385 repo.remove([abssrc], True)
386
386
387 # pat: ossep
387 # pat: ossep
388 # dest: ossep
388 # dest: ossep
389 # srcs: list of (hgsep, hgsep, ossep, bool)
389 # srcs: list of (hgsep, hgsep, ossep, bool)
390 # return: function that takes hgsep and returns ossep
390 # return: function that takes hgsep and returns ossep
391 def targetpathfn(pat, dest, srcs):
391 def targetpathfn(pat, dest, srcs):
392 if os.path.isdir(pat):
392 if os.path.isdir(pat):
393 abspfx = util.canonpath(repo.root, cwd, pat)
393 abspfx = util.canonpath(repo.root, cwd, pat)
394 abspfx = util.localpath(abspfx)
394 abspfx = util.localpath(abspfx)
395 if destdirexists:
395 if destdirexists:
396 striplen = len(os.path.split(abspfx)[0])
396 striplen = len(os.path.split(abspfx)[0])
397 else:
397 else:
398 striplen = len(abspfx)
398 striplen = len(abspfx)
399 if striplen:
399 if striplen:
400 striplen += len(os.sep)
400 striplen += len(os.sep)
401 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
401 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
402 elif destdirexists:
402 elif destdirexists:
403 res = lambda p: os.path.join(dest,
403 res = lambda p: os.path.join(dest,
404 os.path.basename(util.localpath(p)))
404 os.path.basename(util.localpath(p)))
405 else:
405 else:
406 res = lambda p: dest
406 res = lambda p: dest
407 return res
407 return res
408
408
409 # pat: ossep
409 # pat: ossep
410 # dest: ossep
410 # dest: ossep
411 # srcs: list of (hgsep, hgsep, ossep, bool)
411 # srcs: list of (hgsep, hgsep, ossep, bool)
412 # return: function that takes hgsep and returns ossep
412 # return: function that takes hgsep and returns ossep
413 def targetpathafterfn(pat, dest, srcs):
413 def targetpathafterfn(pat, dest, srcs):
414 if util.patkind(pat, None)[0]:
414 if util.patkind(pat, None)[0]:
415 # a mercurial pattern
415 # a mercurial pattern
416 res = lambda p: os.path.join(dest,
416 res = lambda p: os.path.join(dest,
417 os.path.basename(util.localpath(p)))
417 os.path.basename(util.localpath(p)))
418 else:
418 else:
419 abspfx = util.canonpath(repo.root, cwd, pat)
419 abspfx = util.canonpath(repo.root, cwd, pat)
420 if len(abspfx) < len(srcs[0][0]):
420 if len(abspfx) < len(srcs[0][0]):
421 # A directory. Either the target path contains the last
421 # A directory. Either the target path contains the last
422 # component of the source path or it does not.
422 # component of the source path or it does not.
423 def evalpath(striplen):
423 def evalpath(striplen):
424 score = 0
424 score = 0
425 for s in srcs:
425 for s in srcs:
426 t = os.path.join(dest, util.localpath(s[0])[striplen:])
426 t = os.path.join(dest, util.localpath(s[0])[striplen:])
427 if os.path.exists(t):
427 if os.path.exists(t):
428 score += 1
428 score += 1
429 return score
429 return score
430
430
431 abspfx = util.localpath(abspfx)
431 abspfx = util.localpath(abspfx)
432 striplen = len(abspfx)
432 striplen = len(abspfx)
433 if striplen:
433 if striplen:
434 striplen += len(os.sep)
434 striplen += len(os.sep)
435 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
435 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
436 score = evalpath(striplen)
436 score = evalpath(striplen)
437 striplen1 = len(os.path.split(abspfx)[0])
437 striplen1 = len(os.path.split(abspfx)[0])
438 if striplen1:
438 if striplen1:
439 striplen1 += len(os.sep)
439 striplen1 += len(os.sep)
440 if evalpath(striplen1) > score:
440 if evalpath(striplen1) > score:
441 striplen = striplen1
441 striplen = striplen1
442 res = lambda p: os.path.join(dest,
442 res = lambda p: os.path.join(dest,
443 util.localpath(p)[striplen:])
443 util.localpath(p)[striplen:])
444 else:
444 else:
445 # a file
445 # a file
446 if destdirexists:
446 if destdirexists:
447 res = lambda p: os.path.join(dest,
447 res = lambda p: os.path.join(dest,
448 os.path.basename(util.localpath(p)))
448 os.path.basename(util.localpath(p)))
449 else:
449 else:
450 res = lambda p: dest
450 res = lambda p: dest
451 return res
451 return res
452
452
453
453
454 pats = util.expand_glob(pats)
454 pats = util.expand_glob(pats)
455 if not pats:
455 if not pats:
456 raise util.Abort(_('no source or destination specified'))
456 raise util.Abort(_('no source or destination specified'))
457 if len(pats) == 1:
457 if len(pats) == 1:
458 raise util.Abort(_('no destination specified'))
458 raise util.Abort(_('no destination specified'))
459 dest = pats.pop()
459 dest = pats.pop()
460 destdirexists = os.path.isdir(dest)
460 destdirexists = os.path.isdir(dest)
461 if not destdirexists:
461 if not destdirexists:
462 if len(pats) > 1 or util.patkind(pats[0], None)[0]:
462 if len(pats) > 1 or util.patkind(pats[0], None)[0]:
463 raise util.Abort(_('with multiple sources, destination must be an '
463 raise util.Abort(_('with multiple sources, destination must be an '
464 'existing directory'))
464 'existing directory'))
465 if util.endswithsep(dest):
465 if util.endswithsep(dest):
466 raise util.Abort(_('destination %s is not a directory') % dest)
466 raise util.Abort(_('destination %s is not a directory') % dest)
467
467
468 tfn = targetpathfn
468 tfn = targetpathfn
469 if after:
469 if after:
470 tfn = targetpathafterfn
470 tfn = targetpathafterfn
471 copylist = []
471 copylist = []
472 for pat in pats:
472 for pat in pats:
473 srcs = walkpat(pat)
473 srcs = walkpat(pat)
474 if not srcs:
474 if not srcs:
475 continue
475 continue
476 copylist.append((tfn(pat, dest, srcs), srcs))
476 copylist.append((tfn(pat, dest, srcs), srcs))
477 if not copylist:
477 if not copylist:
478 raise util.Abort(_('no files to copy'))
478 raise util.Abort(_('no files to copy'))
479
479
480 errors = 0
480 errors = 0
481 for targetpath, srcs in copylist:
481 for targetpath, srcs in copylist:
482 for abssrc, relsrc, exact in srcs:
482 for abssrc, relsrc, exact in srcs:
483 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
483 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
484 errors += 1
484 errors += 1
485
485
486 if errors:
486 if errors:
487 ui.warn(_('(consider using --after)\n'))
487 ui.warn(_('(consider using --after)\n'))
488
488
489 return errors
489 return errors
490
490
491 def service(opts, parentfn=None, initfn=None, runfn=None):
491 def service(opts, parentfn=None, initfn=None, runfn=None):
492 '''Run a command as a service.'''
492 '''Run a command as a service.'''
493
493
494 if opts['daemon'] and not opts['daemon_pipefds']:
494 if opts['daemon'] and not opts['daemon_pipefds']:
495 rfd, wfd = os.pipe()
495 rfd, wfd = os.pipe()
496 args = sys.argv[:]
496 args = sys.argv[:]
497 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
497 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
498 # Don't pass --cwd to the child process, because we've already
498 # Don't pass --cwd to the child process, because we've already
499 # changed directory.
499 # changed directory.
500 for i in xrange(1,len(args)):
500 for i in xrange(1,len(args)):
501 if args[i].startswith('--cwd='):
501 if args[i].startswith('--cwd='):
502 del args[i]
502 del args[i]
503 break
503 break
504 elif args[i].startswith('--cwd'):
504 elif args[i].startswith('--cwd'):
505 del args[i:i+2]
505 del args[i:i+2]
506 break
506 break
507 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
507 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
508 args[0], args)
508 args[0], args)
509 os.close(wfd)
509 os.close(wfd)
510 os.read(rfd, 1)
510 os.read(rfd, 1)
511 if parentfn:
511 if parentfn:
512 return parentfn(pid)
512 return parentfn(pid)
513 else:
513 else:
514 os._exit(0)
514 os._exit(0)
515
515
516 if initfn:
516 if initfn:
517 initfn()
517 initfn()
518
518
519 if opts['pid_file']:
519 if opts['pid_file']:
520 fp = open(opts['pid_file'], 'w')
520 fp = open(opts['pid_file'], 'w')
521 fp.write(str(os.getpid()) + '\n')
521 fp.write(str(os.getpid()) + '\n')
522 fp.close()
522 fp.close()
523
523
524 if opts['daemon_pipefds']:
524 if opts['daemon_pipefds']:
525 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
525 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
526 os.close(rfd)
526 os.close(rfd)
527 try:
527 try:
528 os.setsid()
528 os.setsid()
529 except AttributeError:
529 except AttributeError:
530 pass
530 pass
531 os.write(wfd, 'y')
531 os.write(wfd, 'y')
532 os.close(wfd)
532 os.close(wfd)
533 sys.stdout.flush()
533 sys.stdout.flush()
534 sys.stderr.flush()
534 sys.stderr.flush()
535 fd = os.open(util.nulldev, os.O_RDWR)
535 fd = os.open(util.nulldev, os.O_RDWR)
536 if fd != 0: os.dup2(fd, 0)
536 if fd != 0: os.dup2(fd, 0)
537 if fd != 1: os.dup2(fd, 1)
537 if fd != 1: os.dup2(fd, 1)
538 if fd != 2: os.dup2(fd, 2)
538 if fd != 2: os.dup2(fd, 2)
539 if fd not in (0, 1, 2): os.close(fd)
539 if fd not in (0, 1, 2): os.close(fd)
540
540
541 if runfn:
541 if runfn:
542 return runfn()
542 return runfn()
543
543
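The daemon path above uses a one-byte pipe handshake: the parent re-execs itself with --daemon-pipefds, then blocks on the read end until the child signals readiness. A sketch of the same idea with a plain fork:

    import os
    rfd, wfd = os.pipe()
    if os.fork():                # parent
        os.close(wfd)
        os.read(rfd, 1)          # blocks until the child writes its byte
        print('service is up')
    else:                        # child
        os.close(rfd)
        # ... bind sockets, write the pid file ...
        os.write(wfd, 'y')       # release the parent, as service() does
        os.close(wfd)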
544 class changeset_printer(object):
544 class changeset_printer(object):
545 '''show changeset information when templating not requested.'''
545 '''show changeset information when templating not requested.'''
546
546
547 def __init__(self, ui, repo, patch, buffered):
547 def __init__(self, ui, repo, patch, buffered):
548 self.ui = ui
548 self.ui = ui
549 self.repo = repo
549 self.repo = repo
550 self.buffered = buffered
550 self.buffered = buffered
551 self.patch = patch
551 self.patch = patch
552 self.header = {}
552 self.header = {}
553 self.hunk = {}
553 self.hunk = {}
554 self.lastheader = None
554 self.lastheader = None
555
555
556 def flush(self, rev):
556 def flush(self, rev):
557 if rev in self.header:
557 if rev in self.header:
558 h = self.header[rev]
558 h = self.header[rev]
559 if h != self.lastheader:
559 if h != self.lastheader:
560 self.lastheader = h
560 self.lastheader = h
561 self.ui.write(h)
561 self.ui.write(h)
562 del self.header[rev]
562 del self.header[rev]
563 if rev in self.hunk:
563 if rev in self.hunk:
564 self.ui.write(self.hunk[rev])
564 self.ui.write(self.hunk[rev])
565 del self.hunk[rev]
565 del self.hunk[rev]
566 return 1
566 return 1
567 return 0
567 return 0
568
568
569 def show(self, rev=0, changenode=None, copies=(), **props):
569 def show(self, rev=0, changenode=None, copies=(), **props):
570 if self.buffered:
570 if self.buffered:
571 self.ui.pushbuffer()
571 self.ui.pushbuffer()
572 self._show(rev, changenode, copies, props)
572 self._show(rev, changenode, copies, props)
573 self.hunk[rev] = self.ui.popbuffer()
573 self.hunk[rev] = self.ui.popbuffer()
574 else:
574 else:
575 self._show(rev, changenode, copies, props)
575 self._show(rev, changenode, copies, props)
576
576
    def _show(self, rev, changenode, copies, props):
        '''show a single changeset or file revision'''
        log = self.repo.changelog
        if changenode is None:
            changenode = log.node(rev)
        elif not rev:
            rev = log.rev(changenode)

        if self.ui.quiet:
            self.ui.write("%d:%s\n" % (rev, short(changenode)))
            return

        changes = log.read(changenode)
        date = util.datestr(changes[2])
        extra = changes[5]
        branch = extra.get("branch")

        hexfunc = self.ui.debugflag and hex or short

        parents = [(p, hexfunc(log.node(p)))
                   for p in self._meaningful_parentrevs(log, rev)]

        self.ui.write(_("changeset:   %d:%s\n") % (rev, hexfunc(changenode)))

        # don't show the default branch name
        if branch != 'default':
            branch = util.tolocal(branch)
            self.ui.write(_("branch:      %s\n") % branch)
        for tag in self.repo.nodetags(changenode):
            self.ui.write(_("tag:         %s\n") % tag)
        for parent in parents:
            self.ui.write(_("parent:      %d:%s\n") % parent)

        if self.ui.debugflag:
            self.ui.write(_("manifest:    %d:%s\n") %
                          (self.repo.manifest.rev(changes[0]), hex(changes[0])))
        self.ui.write(_("user:        %s\n") % changes[1])
        self.ui.write(_("date:        %s\n") % date)

        if self.ui.debugflag:
            files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
            for key, value in zip([_("files:"), _("files+:"), _("files-:")],
                                  files):
                if value:
                    self.ui.write("%-12s %s\n" % (key, " ".join(value)))
        elif changes[3] and self.ui.verbose:
            self.ui.write(_("files:       %s\n") % " ".join(changes[3]))
        if copies and self.ui.verbose:
            copies = ['%s (%s)' % c for c in copies]
            self.ui.write(_("copies:      %s\n") % ' '.join(copies))

        if extra and self.ui.debugflag:
            extraitems = extra.items()
            extraitems.sort()
            for key, value in extraitems:
                self.ui.write(_("extra:       %s=%s\n")
                              % (key, value.encode('string_escape')))

        description = changes[4].strip()
        if description:
            if self.ui.verbose:
                self.ui.write(_("description:\n"))
                self.ui.write(description)
                self.ui.write("\n\n")
            else:
                self.ui.write(_("summary:     %s\n") %
                              description.splitlines()[0])
        self.ui.write("\n")

        self.showpatch(changenode)

    def showpatch(self, node):
        if self.patch:
            prev = self.repo.changelog.parents(node)[0]
            patch.diff(self.repo, prev, node, match=self.patch, fp=self.ui,
                       opts=patch.diffopts(self.ui))
            self.ui.write("\n")

    def _meaningful_parentrevs(self, log, rev):
        """Return list of meaningful (or all if debug) parentrevs for rev.

        For merges (two non-nullrev revisions) both parents are meaningful.
        Otherwise the first parent revision is considered meaningful if it
        is not the preceding revision.
        """
        parents = log.parentrevs(rev)
        if not self.ui.debugflag and parents[1] == nullrev:
            if parents[0] >= rev - 1:
                parents = []
            else:
                parents = [parents[0]]
        return parents


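# Illustration: in a linear history where rev 7 has parents (6, nullrev),
# _meaningful_parentrevs returns [], since the parent is just the preceding
# revision; for a merge with parents (3, 5) it returns both; --debug always
# reports every parent.
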
class changeset_templater(changeset_printer):
    '''format changeset information.'''

    def __init__(self, ui, repo, patch, mapfile, buffered):
        changeset_printer.__init__(self, ui, repo, patch, buffered)
        filters = templater.common_filters.copy()
        filters['formatnode'] = (ui.debugflag and (lambda x: x)
                                 or (lambda x: x[:12]))
        self.t = templater.templater(mapfile, filters,
                                     cache={
                                         'parent': '{rev}:{node|formatnode} ',
                                         'manifest': '{rev}:{node|formatnode}',
                                         'filecopy': '{name} ({source})'})

    def use_template(self, t):
        '''set template string to use'''
        self.t.cache['changeset'] = t

    def _show(self, rev, changenode, copies, props):
        '''show a single changeset or file revision'''
        log = self.repo.changelog
        if changenode is None:
            changenode = log.node(rev)
        elif not rev:
            rev = log.rev(changenode)

        changes = log.read(changenode)

        def showlist(name, values, plural=None, **args):
            '''expand set of values.
            name is name of key in template map.
            values is list of strings or dicts.
            plural is plural of name, if not simply name + 's'.

            expansion works like this, given name 'foo'.

            if values is empty, expand 'no_foos'.

            if 'foo' not in template map, return values as a string,
            joined by space.

            expand 'start_foos'.

            for each value, expand 'foo'. if 'last_foo' in template
            map, expand it instead of 'foo' for last key.

            expand 'end_foos'.
            '''
            if plural: names = plural
            else: names = name + 's'
            if not values:
                noname = 'no_' + names
                if noname in self.t:
                    yield self.t(noname, **args)
                return
            if name not in self.t:
                if isinstance(values[0], str):
                    yield ' '.join(values)
                else:
                    for v in values:
                        yield dict(v, **args)
                return
            startname = 'start_' + names
            if startname in self.t:
                yield self.t(startname, **args)
            vargs = args.copy()
            def one(v, tag=name):
                try:
                    vargs.update(v)
                except (AttributeError, ValueError):
                    try:
                        for a, b in v:
                            vargs[a] = b
                    except ValueError:
                        vargs[name] = v
                return self.t(tag, **vargs)
            lastname = 'last_' + name
            if lastname in self.t:
                last = values.pop()
            else:
                last = None
            for v in values:
                yield one(v)
            if last is not None:
                yield one(last, tag=lastname)
            endname = 'end_' + names
            if endname in self.t:
                yield self.t(endname, **args)

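        # Illustration of the expansion protocol above, assuming a map
        # file with these hypothetical entries:
        #
        #   start_tags = 'tags: '
        #   tag = '{tag}, '
        #   last_tag = '{tag}\n'
        #
        # showlist('tag', ['tip', 'stable']) then yields the expansions
        # 'tags: ', 'tip, ' and 'stable\n' in turn, while an empty list
        # expands 'no_tags' instead when the map defines it.
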
        def showbranches(**args):
            branch = changes[5].get("branch")
            if branch != 'default':
                branch = util.tolocal(branch)
                return showlist('branch', [branch], plural='branches', **args)

        def showparents(**args):
            parents = [[('rev', p), ('node', hex(log.node(p)))]
                       for p in self._meaningful_parentrevs(log, rev)]
            return showlist('parent', parents, **args)

        def showtags(**args):
            return showlist('tag', self.repo.nodetags(changenode), **args)

        def showextras(**args):
            extras = changes[5].items()
            extras.sort()
            for key, value in extras:
                args = args.copy()
                args.update(dict(key=key, value=value))
                yield self.t('extra', **args)

        def showcopies(**args):
            c = [{'name': x[0], 'source': x[1]} for x in copies]
            return showlist('file_copy', c, plural='file_copies', **args)

        files = []
        def getfiles():
            if not files:
                files[:] = self.repo.status(
                    log.parents(changenode)[0], changenode)[:3]
            return files
        def showfiles(**args):
            return showlist('file', changes[3], **args)
        def showmods(**args):
            return showlist('file_mod', getfiles()[0], **args)
        def showadds(**args):
            return showlist('file_add', getfiles()[1], **args)
        def showdels(**args):
            return showlist('file_del', getfiles()[2], **args)
        def showmanifest(**args):
            args = args.copy()
            args.update(dict(rev=self.repo.manifest.rev(changes[0]),
                             node=hex(changes[0])))
            return self.t('manifest', **args)

        defprops = {
            'author': changes[1],
            'branches': showbranches,
            'date': changes[2],
            'desc': changes[4].strip(),
            'file_adds': showadds,
            'file_dels': showdels,
            'file_mods': showmods,
            'files': showfiles,
            'file_copies': showcopies,
            'manifest': showmanifest,
            'node': hex(changenode),
            'parents': showparents,
            'rev': rev,
            'tags': showtags,
            'extras': showextras,
            }
        props = props.copy()
        props.update(defprops)

        try:
            if self.ui.debugflag and 'header_debug' in self.t:
                key = 'header_debug'
            elif self.ui.quiet and 'header_quiet' in self.t:
                key = 'header_quiet'
            elif self.ui.verbose and 'header_verbose' in self.t:
                key = 'header_verbose'
            elif 'header' in self.t:
                key = 'header'
            else:
                key = ''
            if key:
                h = templater.stringify(self.t(key, **props))
                if self.buffered:
                    self.header[rev] = h
                else:
                    self.ui.write(h)
            if self.ui.debugflag and 'changeset_debug' in self.t:
                key = 'changeset_debug'
            elif self.ui.quiet and 'changeset_quiet' in self.t:
                key = 'changeset_quiet'
            elif self.ui.verbose and 'changeset_verbose' in self.t:
                key = 'changeset_verbose'
            else:
                key = 'changeset'
            self.ui.write(templater.stringify(self.t(key, **props)))
            self.showpatch(changenode)
        except KeyError, inst:
            raise util.Abort(_("%s: no key named '%s'") % (self.t.mapfile,
                                                           inst.args[0]))
        except SyntaxError, inst:
            raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))

def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
    """show one changeset using template or regular display.

    Display format will be the first non-empty hit of:
    1. option 'template'
    2. option 'style'
    3. [ui] setting 'logtemplate'
    4. [ui] setting 'style'
    If all of these values are either unset or the empty string,
    regular display via changeset_printer() is done.
    """
    # options
    patch = False
    if opts.get('patch'):
        patch = matchfn or util.always

    tmpl = opts.get('template')
    mapfile = None
    if tmpl:
        tmpl = templater.parsestring(tmpl, quoted=False)
    else:
        mapfile = opts.get('style')
        # ui settings
        if not mapfile:
            tmpl = ui.config('ui', 'logtemplate')
            if tmpl:
                tmpl = templater.parsestring(tmpl)
            else:
                mapfile = ui.config('ui', 'style')

    if tmpl or mapfile:
        if mapfile:
            if not os.path.split(mapfile)[0]:
                mapname = (templater.templatepath('map-cmdline.' + mapfile)
                           or templater.templatepath(mapfile))
                if mapname: mapfile = mapname
        try:
            t = changeset_templater(ui, repo, patch, mapfile, buffered)
        except SyntaxError, inst:
            raise util.Abort(inst.args[0])
        if tmpl: t.use_template(tmpl)
        return t
    return changeset_printer(ui, repo, patch, buffered)

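# Illustration of the resolution order documented above (the command lines
# are examples, assuming a stock setup): 'hg log --template "{rev}\n"' wins
# over everything; failing that, 'hg log --style compact' is used; then the
# ui.logtemplate and ui.style hgrc settings; with none of them set, plain
# changeset_printer output is produced.
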
def finddate(ui, repo, date):
    """Find the tipmost changeset that matches the given date spec"""
    df = util.matchdate(date)
    get = util.cachefunc(lambda r: repo.changectx(r).changeset())
    changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None})
    results = {}
    for st, rev, fns in changeiter:
        if st == 'add':
            d = get(rev)[2]
            if df(d[0]):
                results[rev] = d
        elif st == 'iter':
            if rev in results:
                ui.status("Found revision %s from %s\n" %
                          (rev, util.datestr(results[rev])))
                return str(rev)

    raise util.Abort(_("revision matching date not found"))

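# Illustration (the date is hypothetical): given '<2007-01-01',
# util.matchdate builds a predicate over timestamps; finddate records every
# matching revision during the 'add' pass and returns on the first 'iter'
# hit, which by the walk order is the tipmost match.
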
def walkchangerevs(ui, repo, pats, change, opts):
    '''Iterate over files and the revs they changed in.

    Callers most commonly need to iterate backwards over the history
    they are interested in. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order. Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an (iterator, matchfn) tuple. The iterator
    yields 3-tuples. They will be of one of the following forms:

    "window", incrementing, lastrev: stepping through a window,
    positive if walking forwards through revs, last rev in the
    sequence iterated over - use to reset state for the current window

    "add", rev, fns: out-of-order traversal of the given file names
    fns, which changed during revision rev - use to gather data for
    possible display

    "iter", rev, None: in-order traversal of the revs earlier iterated
    over with "add" - use to display data'''

    def increasing_windows(start, end, windowsize=8, sizelimit=512):
        if start < end:
            while start < end:
                yield start, min(windowsize, end-start)
                start += windowsize
                if windowsize < sizelimit:
                    windowsize *= 2
        else:
            while start > end:
                yield start, min(windowsize, start-end-1)
                start -= windowsize
                if windowsize < sizelimit:
                    windowsize *= 2

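    # Worked example: increasing_windows(0, 100) yields (0, 8), (8, 16),
    # (24, 32), (56, 44) - the window doubles each step (up to sizelimit),
    # so the recent revisions a caller usually wants come in small, cheap
    # windows while deep walks still need comparatively few iterations.
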
    files, matchfn, anypats = matchpats(repo, pats, opts)
    follow = opts.get('follow') or opts.get('follow_first')

    if repo.changelog.count() == 0:
        return [], matchfn

    if follow:
        defrange = '%s:0' % repo.changectx().rev()
    else:
        defrange = 'tip:0'
    revs = revrange(repo, opts['rev'] or [defrange])
    wanted = {}
    slowpath = anypats or opts.get('removed')
    fncache = {}

    if not slowpath and not files:
        # No files, no patterns. Display all revs.
        wanted = dict.fromkeys(revs)
    copies = []
    if not slowpath:
        # Only files, no patterns. Check the history of each file.
        def filerevgen(filelog, node):
            cl_count = repo.changelog.count()
            if node is None:
                last = filelog.count() - 1
            else:
                last = filelog.rev(node)
            for i, window in increasing_windows(last, nullrev):
                revs = []
                for j in xrange(i - window, i + 1):
                    n = filelog.node(j)
                    revs.append((filelog.linkrev(n),
                                 follow and filelog.renamed(n)))
                revs.reverse()
                for rev in revs:
                    # only yield revs for which we have the changelog; a
                    # linkrev pointing past it can happen while doing
                    # "hg log" during a pull or commit
                    if rev[0] < cl_count:
                        yield rev
        def iterfiles():
            for filename in files:
                yield filename, None
            for filename_node in copies:
                yield filename_node
        minrev, maxrev = min(revs), max(revs)
        for file_, node in iterfiles():
            filelog = repo.file(file_)
            # A zero count may be a directory or deleted file, so
            # try to find matching entries on the slow path.
            if filelog.count() == 0:
                slowpath = True
                break
            for rev, copied in filerevgen(filelog, node):
                if rev <= maxrev:
                    if rev < minrev:
                        break
                    fncache.setdefault(rev, [])
                    fncache[rev].append(file_)
                    wanted[rev] = 1
                    if follow and copied:
                        copies.append(copied)
    if slowpath:
        if follow:
            raise util.Abort(_('can only follow copies/renames for explicit '
                               'file names'))

        # The slow path checks files modified in every changeset.
        def changerevgen():
            for i, window in increasing_windows(repo.changelog.count()-1,
                                                nullrev):
                for j in xrange(i - window, i + 1):
                    yield j, change(j)[3]

        for rev, changefiles in changerevgen():
            matches = filter(matchfn, changefiles)
            if matches:
                fncache[rev] = matches
                wanted[rev] = 1

    class followfilter:
        def __init__(self, onlyfirst=False):
            self.startrev = nullrev
            self.roots = []
            self.onlyfirst = onlyfirst

        def match(self, rev):
            def realparents(rev):
                if self.onlyfirst:
                    return repo.changelog.parentrevs(rev)[0:1]
                else:
                    return filter(lambda x: x != nullrev,
                                  repo.changelog.parentrevs(rev))

            if self.startrev == nullrev:
                self.startrev = rev
                return True

            if rev > self.startrev:
                # forward: all descendants
                if not self.roots:
                    self.roots.append(self.startrev)
                for parent in realparents(rev):
                    if parent in self.roots:
                        self.roots.append(rev)
                        return True
            else:
                # backwards: all parents
                if not self.roots:
                    self.roots.extend(realparents(self.startrev))
                if rev in self.roots:
                    self.roots.remove(rev)
                    self.roots.extend(realparents(rev))
                    return True

            return False

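    # Illustration: when walking backwards from a start rev of 10,
    # followfilter seeds self.roots with 10's parents and, each time a rev
    # matches, swaps it for its own parents - so only ancestors of the
    # start rev survive, and unrelated side branches are filtered out.
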
    # it might be worthwhile to do this in the iterator if the rev range
    # is descending and the prune args are all within that range
    for rev in opts.get('prune', ()):
        rev = repo.changelog.rev(repo.lookup(rev))
        ff = followfilter()
        stop = min(revs[0], revs[-1])
        for x in xrange(rev, stop-1, -1):
            if ff.match(x) and x in wanted:
                del wanted[x]

    def iterate():
        if follow and not files:
            ff = followfilter(onlyfirst=opts.get('follow_first'))
            def want(rev):
                if ff.match(rev) and rev in wanted:
                    return True
                return False
        else:
            def want(rev):
                return rev in wanted

        for i, window in increasing_windows(0, len(revs)):
            yield 'window', revs[0] < revs[-1], revs[-1]
            nrevs = [rev for rev in revs[i:i+window] if want(rev)]
            srevs = list(nrevs)
            srevs.sort()
            for rev in srevs:
                fns = fncache.get(rev)
                if not fns:
                    def fns_generator():
                        for f in change(rev)[3]:
                            if matchfn(f):
                                yield f
                    fns = fns_generator()
                yield 'add', rev, fns
            for rev in nrevs:
                yield 'iter', rev, None
    return iterate(), matchfn

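# Sketch of the consuming side of the protocol above (finddate earlier in
# this file is a real caller): gather on 'add', display on 'iter'.
#
#   changeiter, matchfn = walkchangerevs(ui, repo, pats, change, opts)
#   for st, rev, fns in changeiter:
#       if st == 'add':
#           pass              # collect data for rev, possibly out of order
#       elif st == 'iter':
#           pass              # display rev, in the desired order
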
def commit(ui, repo, commitfunc, pats, opts):
    '''commit the specified files or all outstanding changes'''
    message = logmessage(opts)

    # extract addremove carefully -- this function can be called from a command
    # that doesn't support addremove
    if opts.get('addremove'):
        addremove(repo, pats, opts)

    fns, match, anypats = matchpats(repo, pats, opts)
    if pats:
        status = repo.status(files=fns, match=match)
        modified, added, removed, deleted, unknown = status[:5]
        files = modified + added + removed
        slist = None
        for f in fns:
            if f == '.':
                continue
            if f not in files:
                rf = repo.wjoin(f)
                try:
                    mode = os.lstat(rf)[stat.ST_MODE]
                except OSError:
                    raise util.Abort(_("file %s not found!") % rf)
                if stat.S_ISDIR(mode):
                    name = f + '/'
                    if slist is None:
                        slist = list(files)
                        slist.sort()
                    i = bisect.bisect(slist, name)
                    if i >= len(slist) or not slist[i].startswith(name):
                        raise util.Abort(_("no match under directory %s!")
                                         % rf)
                elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)):
                    raise util.Abort(_("can't commit %s: "
                                       "unsupported file type!") % rf)
                elif f not in repo.dirstate:
                    raise util.Abort(_("file %s not tracked!") % rf)
    else:
        files = []
    try:
        return commitfunc(ui, repo, files, message, match, opts)
    except ValueError, inst:
        raise util.Abort(str(inst))
@@ -1,582 +1,582 @@
1 """
1 """
2 dirstate.py - working directory tracking for mercurial
2 dirstate.py - working directory tracking for mercurial
3
3
4 Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5
5
6 This software may be used and distributed according to the terms
6 This software may be used and distributed according to the terms
7 of the GNU General Public License, incorporated herein by reference.
7 of the GNU General Public License, incorporated herein by reference.
8 """
8 """
9
9
10 from node import *
10 from node import *
11 from i18n import _
11 from i18n import _
12 import struct, os, time, bisect, stat, strutil, util, re, errno, ignore
12 import struct, os, time, bisect, stat, strutil, util, re, errno, ignore
13 import cStringIO, osutil
13 import cStringIO, osutil
14
14
15 _unknown = ('?', 0, 0, 0)
15 _unknown = ('?', 0, 0, 0)
16 _format = ">cllll"
16 _format = ">cllll"
17
17
18 class dirstate(object):
18 class dirstate(object):
19
19
20 def __init__(self, opener, ui, root):
20 def __init__(self, opener, ui, root):
21 self._opener = opener
21 self._opener = opener
22 self._root = root
22 self._root = root
23 self._dirty = False
23 self._dirty = False
24 self._dirtypl = False
24 self._dirtypl = False
25 self._ui = ui
25 self._ui = ui
26
26
27 def __getattr__(self, name):
27 def __getattr__(self, name):
28 if name == '_map':
28 if name == '_map':
29 self._read()
29 self._read()
30 return self._map
30 return self._map
31 elif name == '_copymap':
31 elif name == '_copymap':
32 self._read()
32 self._read()
33 return self._copymap
33 return self._copymap
34 elif name == '_branch':
34 elif name == '_branch':
35 try:
35 try:
36 self._branch = (self._opener("branch").read().strip()
36 self._branch = (self._opener("branch").read().strip()
37 or "default")
37 or "default")
38 except IOError:
38 except IOError:
39 self._branch = "default"
39 self._branch = "default"
40 return self._branch
40 return self._branch
41 elif name == '_pl':
41 elif name == '_pl':
42 self._pl = [nullid, nullid]
42 self._pl = [nullid, nullid]
43 try:
43 try:
44 st = self._opener("dirstate").read(40)
44 st = self._opener("dirstate").read(40)
45 if len(st) == 40:
45 if len(st) == 40:
46 self._pl = st[:20], st[20:40]
46 self._pl = st[:20], st[20:40]
47 except IOError, err:
47 except IOError, err:
48 if err.errno != errno.ENOENT: raise
48 if err.errno != errno.ENOENT: raise
49 return self._pl
49 return self._pl
50 elif name == '_dirs':
50 elif name == '_dirs':
51 self._dirs = {}
51 self._dirs = {}
52 for f in self._map:
52 for f in self._map:
53 if self[f] != 'r':
53 if self[f] != 'r':
54 self._incpath(f)
54 self._incpath(f)
55 return self._dirs
55 return self._dirs
56 elif name == '_ignore':
56 elif name == '_ignore':
57 files = [self._join('.hgignore')]
57 files = [self._join('.hgignore')]
58 for name, path in self._ui.configitems("ui"):
58 for name, path in self._ui.configitems("ui"):
59 if name == 'ignore' or name.startswith('ignore.'):
59 if name == 'ignore' or name.startswith('ignore.'):
60 files.append(os.path.expanduser(path))
60 files.append(os.path.expanduser(path))
61 self._ignore = ignore.ignore(self._root, files, self._ui.warn)
61 self._ignore = ignore.ignore(self._root, files, self._ui.warn)
62 return self._ignore
62 return self._ignore
63 elif name == '_slash':
63 elif name == '_slash':
64 self._slash = self._ui.configbool('ui', 'slash') and os.sep != '/'
64 self._slash = self._ui.configbool('ui', 'slash') and os.sep != '/'
65 return self._slash
65 return self._slash
66 else:
66 else:
67 raise AttributeError, name
67 raise AttributeError, name
68
68
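    # __getattr__ above gives the expensive state (the dirstate map, copy
    # map, branch, parents, directory table and ignore matcher) lazy,
    # load-on-first-use semantics: each branch computes the attribute and
    # stores it in the instance __dict__, so __getattr__ never fires again
    # for that name on this object.
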
    def _join(self, f):
        return os.path.join(self._root, f)

    def getcwd(self):
        cwd = os.getcwd()
        if cwd == self._root: return ''
        # self._root ends with a path separator if self._root is '/' or 'C:\'
        rootsep = self._root
        if not util.endswithsep(rootsep):
            rootsep += os.sep
        if cwd.startswith(rootsep):
            return cwd[len(rootsep):]
        else:
            # we're outside the repo. return an absolute path.
            return cwd

    def pathto(self, f, cwd=None):
        if cwd is None:
            cwd = self.getcwd()
        path = util.pathto(self._root, cwd, f)
        if self._slash:
            return util.normpath(path)
        return path

    def __getitem__(self, key):
        ''' current states:
        n  normal
        m  needs merging
        r  marked for removal
        a  marked for addition
        ?  not tracked'''
        return self._map.get(key, ("?",))[0]

    def __contains__(self, key):
        return key in self._map

    def __iter__(self):
        a = self._map.keys()
        a.sort()
        for x in a:
            yield x

    def parents(self):
        return self._pl

    def branch(self):
        return self._branch

    def setparents(self, p1, p2=nullid):
        self._dirty = self._dirtypl = True
        self._pl = p1, p2

    def setbranch(self, branch):
        self._branch = branch
        self._opener("branch", "w").write(branch + '\n')

    def _read(self):
        self._map = {}
        self._copymap = {}
        if not self._dirtypl:
            self._pl = [nullid, nullid]
        try:
            st = self._opener("dirstate").read()
        except IOError, err:
            if err.errno != errno.ENOENT: raise
            return
        if not st:
            return

        if not self._dirtypl:
            self._pl = [st[:20], st[20: 40]]

        # deref fields so they will be local in loop
        dmap = self._map
        copymap = self._copymap
        unpack = struct.unpack
        e_size = struct.calcsize(_format)
        pos1 = 40
        l = len(st)

        # the inner loop
        while pos1 < l:
            pos2 = pos1 + e_size
            e = unpack(">cllll", st[pos1:pos2]) # a literal here is faster
            pos1 = pos2 + e[4]
            f = st[pos2:pos1]
            if '\0' in f:
                f, c = f.split('\0')
                copymap[f] = c
            dmap[f] = e # we hold onto e[4] because making a subtuple is slow

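    # Record layout, for reference: _format '>cllll' packs the state
    # character plus mode, size, mtime and filename length into
    # struct.calcsize('>cllll') == 17 bytes; the filename (with an
    # optional '\0'-separated copy source appended) follows directly
    # after each fixed-size header.
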
    def invalidate(self):
        for a in "_map _copymap _branch _pl _dirs _ignore".split():
            if a in self.__dict__:
                delattr(self, a)
        self._dirty = False

    def copy(self, source, dest):
        self._dirty = True
        self._copymap[dest] = source

    def copied(self, file):
        return self._copymap.get(file, None)

    def copies(self):
        return self._copymap

    def _incpath(self, path):
        c = path.rfind('/')
        if c >= 0:
            dirs = self._dirs
            base = path[:c]
            if base not in dirs:
                self._incpath(base)
                dirs[base] = 1
            else:
                dirs[base] += 1

    def _decpath(self, path):
        c = path.rfind('/')
        if c >= 0:
            base = path[:c]
            dirs = self._dirs
            if dirs[base] == 1:
                del dirs[base]
                self._decpath(base)
            else:
                dirs[base] -= 1

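    # Illustration: tracking 'a/b/c' bumps the counts for 'a' and 'a/b'
    # in self._dirs, and dropping it decrements them again, so the
    # shadowing checks in _incpathcheck below can ask "is this path a
    # directory of tracked files?" with a single dict lookup.
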
    def _incpathcheck(self, f):
        if '\r' in f or '\n' in f:
            raise util.Abort(_("'\\n' and '\\r' disallowed in filenames"))
        # shadows
        if f in self._dirs:
            raise util.Abort(_('directory %r already in dirstate') % f)
        for c in strutil.rfindall(f, '/'):
            d = f[:c]
            if d in self._dirs:
                break
            if d in self._map and self[d] != 'r':
                raise util.Abort(_('file %r in dirstate clashes with %r') %
                                 (d, f))
        self._incpath(f)

    def _changepath(self, f, newstate, relaxed=False):
        # handle upcoming path changes
        oldstate = self[f]
        if oldstate not in "?r" and newstate in "?r":
            if "_dirs" in self.__dict__:
                self._decpath(f)
            return
        if oldstate in "?r" and newstate not in "?r":
            if relaxed and oldstate == '?':
                # XXX
                # in relaxed mode we assume the caller knows
                # what it is doing, workaround for updating
                # dir-to-file revisions
                if "_dirs" in self.__dict__:
                    self._incpath(f)
                return
            self._incpathcheck(f)
            return

    def normal(self, f):
        'mark a file normal and clean'
        self._dirty = True
        self._changepath(f, 'n', True)
        s = os.lstat(self._join(f))
        self._map[f] = ('n', s.st_mode, s.st_size, s.st_mtime, 0)
        if f in self._copymap:  # was: self._copymap.has_key(f)
            del self._copymap[f]

    def normallookup(self, f):
        'mark a file normal, but possibly dirty'
        self._dirty = True
        self._changepath(f, 'n', True)
        self._map[f] = ('n', 0, -1, -1, 0)
        if f in self._copymap:
            del self._copymap[f]

    def normaldirty(self, f):
        'mark a file normal, but dirty'
        self._dirty = True
        self._changepath(f, 'n', True)
        self._map[f] = ('n', 0, -2, -1, 0)
        if f in self._copymap:
            del self._copymap[f]

    def add(self, f):
        'mark a file added'
        self._dirty = True
        self._changepath(f, 'a')
        self._map[f] = ('a', 0, -1, -1, 0)
        if f in self._copymap:
            del self._copymap[f]

    def remove(self, f):
        'mark a file removed'
        self._dirty = True
        self._changepath(f, 'r')
        self._map[f] = ('r', 0, 0, 0, 0)
        if f in self._copymap:
            del self._copymap[f]

    def merge(self, f):
        'mark a file merged'
        self._dirty = True
        s = os.lstat(self._join(f))
        self._changepath(f, 'm', True)
        self._map[f] = ('m', s.st_mode, s.st_size, s.st_mtime, 0)
        if f in self._copymap:
            del self._copymap[f]

    def forget(self, f):
        'forget a file'
        self._dirty = True
        try:
            self._changepath(f, '?')
            del self._map[f]
        except KeyError:
            self._ui.warn(_("not in dirstate: %s!\n") % f)

    def clear(self):
        self._map = {}
        if "_dirs" in self.__dict__:
            delattr(self, "_dirs")
        self._copymap = {}
        self._pl = [nullid, nullid]
        self._dirty = True

    def rebuild(self, parent, files):
        self.clear()
        for f in files:
            if files.execf(f):
                self._map[f] = ('n', 0777, -1, 0, 0)
            else:
                self._map[f] = ('n', 0666, -1, 0, 0)
        self._pl = (parent, nullid)
        self._dirty = True

    def write(self):
        if not self._dirty:
            return
        cs = cStringIO.StringIO()
        copymap = self._copymap
        pack = struct.pack
        write = cs.write
        write("".join(self._pl))
        for f, e in self._map.iteritems():
            if f in copymap:
                f = "%s\0%s" % (f, copymap[f])
            e = pack(_format, e[0], e[1], e[2], e[3], len(f))
            write(e)
            write(f)
        st = self._opener("dirstate", "w", atomictemp=True)
        st.write(cs.getvalue())
        st.rename()
        self._dirty = self._dirtypl = False

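    # The atomictemp/rename pair in write() makes persisting crash-safe:
    # the serialized map is staged in a temporary file and renamed over
    # .hg/dirstate only once it is complete, so a reader never sees a
    # half-written file.
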
    def _filter(self, files):
        ret = {}
        unknown = []

        for x in files:
            if x == '.':
                return self._map.copy()
            if x not in self._map:
                unknown.append(x)
            else:
                ret[x] = self._map[x]

        if not unknown:
            return ret

        b = self._map.keys()
        b.sort()
        blen = len(b)

        for x in unknown:
            bs = bisect.bisect(b, "%s%s" % (x, '/'))
            while bs < blen:
                s = b[bs]
                if len(s) > len(x) and s.startswith(x):
                    ret[s] = self._map[s]
                else:
                    break
                bs += 1
        return ret

    def _supported(self, f, mode, verbose=False):
        if stat.S_ISREG(mode) or stat.S_ISLNK(mode):
            return True
        if verbose:
            kind = 'unknown'
            if stat.S_ISCHR(mode): kind = _('character device')
            elif stat.S_ISBLK(mode): kind = _('block device')
            elif stat.S_ISFIFO(mode): kind = _('fifo')
            elif stat.S_ISSOCK(mode): kind = _('socket')
            elif stat.S_ISDIR(mode): kind = _('directory')
            self._ui.warn(_('%s: unsupported file type (type is %s)\n')
                          % (self.pathto(f), kind))
        return False

    def walk(self, files=None, match=util.always, badmatch=None):
        # filter out the stat
        for src, f, st in self.statwalk(files, match, badmatch=badmatch):
            yield src, f

    def statwalk(self, files=None, match=util.always, ignored=False,
                 badmatch=None, directories=False):
        '''
        walk recursively through the directory tree, finding all files
        matched by the match function

        results are yielded in a tuple (src, filename, st), where src
        is one of:
        'f' the file was found in the directory tree
        'd' the file is a directory of the tree
        'm' the file was only in the dirstate and not in the tree
        'b' file was not found and matched badmatch

        and st is the stat result if the file was found in the directory.
        '''

        # walk all files by default
        if not files:
            files = ['.']
            dc = self._map.copy()
        else:
            files = util.unique(files)
            dc = self._filter(files)

        def imatch(file_):
            if file_ not in dc and self._ignore(file_):
                return False
            return match(file_)

        ignore = self._ignore
        if ignored:
            imatch = match
            ignore = util.never

        # self._root may end with a path separator when self._root == '/'
        common_prefix_len = len(self._root)
        if not util.endswithsep(self._root):
            common_prefix_len += 1

        normpath = util.normpath
        listdir = osutil.listdir
        lstat = os.lstat
        bisect_left = bisect.bisect_left
        isdir = os.path.isdir
        pconvert = util.pconvert
        join = os.path.join
        s_isdir = stat.S_ISDIR
        supported = self._supported
        _join = self._join
        known = {'.hg': 1}

        # recursion free walker, faster than os.walk.
        def findfiles(s):
            work = [s]
            wadd = work.append
            found = []
            add = found.append
            if directories:
                add((normpath(s[common_prefix_len:]), 'd', lstat(s)))
            while work:
                top = work.pop()
                entries = listdir(top, stat=True)
                # nd is the top of the repository dir tree
                nd = normpath(top[common_prefix_len:])
                if nd == '.':
                    nd = ''
                else:
                    # do not recurse into a repo contained in this
                    # one. use bisect to find the .hg directory so
                    # speed stays good on big directories.
                    names = [e[0] for e in entries]
                    hg = bisect_left(names, '.hg')
                    if hg < len(names) and names[hg] == '.hg':
                        if isdir(join(top, '.hg')):
                            continue
                for f, kind, st in entries:
                    np = pconvert(join(nd, f))
                    if np in known:
                        continue
                    known[np] = 1
                    p = join(top, f)
                    # don't trip over symlinks
                    if kind == stat.S_IFDIR:
                        if not ignore(np):
                            wadd(p)
                            if directories:
                                add((np, 'd', st))
                        if np in dc and match(np):
                            add((np, 'm', st))
                    elif imatch(np):
                        if supported(np, st.st_mode):
                            add((np, 'f', st))
                    elif np in dc:
                        add((np, 'm', st))
            found.sort()
            return found

474 # step one, find all files that match our criteria
474 # step one, find all files that match our criteria
475 files.sort()
475 files.sort()
476 for ff in files:
476 for ff in files:
477 nf = normpath(ff)
477 nf = normpath(ff)
478 f = _join(ff)
478 f = _join(ff)
479 try:
479 try:
480 st = lstat(f)
480 st = lstat(f)
481 except OSError, inst:
481 except OSError, inst:
482 found = False
482 found = False
483 for fn in dc:
483 for fn in dc:
484 if nf == fn or (fn.startswith(nf) and fn[len(nf)] == '/'):
484 if nf == fn or (fn.startswith(nf) and fn[len(nf)] == '/'):
485 found = True
485 found = True
486 break
486 break
487 if not found:
487 if not found:
488 if inst.errno != errno.ENOENT or not badmatch:
488 if inst.errno != errno.ENOENT or not badmatch:
489 self._ui.warn('%s: %s\n' %
489 self._ui.warn('%s: %s\n' %
490 (self.pathto(ff), inst.strerror))
490 (self.pathto(ff), inst.strerror))
491 elif badmatch and badmatch(ff) and imatch(nf):
491 elif badmatch and badmatch(ff) and imatch(nf):
492 yield 'b', ff, None
492 yield 'b', ff, None
493 continue
493 continue
494 if s_isdir(st.st_mode):
494 if s_isdir(st.st_mode):
495 for f, src, st in findfiles(f):
495 for f, src, st in findfiles(f):
496 yield src, f, st
496 yield src, f, st
497 else:
497 else:
498 if nf in known:
498 if nf in known:
499 continue
499 continue
500 known[nf] = 1
500 known[nf] = 1
501 if match(nf):
501 if match(nf):
502 if supported(ff, st.st_mode, verbose=True):
502 if supported(ff, st.st_mode, verbose=True):
503 yield 'f', nf, st
503 yield 'f', nf, st
504 elif ff in dc:
504 elif ff in dc:
505 yield 'm', nf, st
505 yield 'm', nf, st
506
506
        # step two: run through anything left in the dc hash and yield
        # it if we haven't already seen it
        ks = dc.keys()
        ks.sort()
        for k in ks:
            if k in known:
                continue
            known[k] = 1
            if imatch(k):
                yield 'm', k, None

    def status(self, files, match, list_ignored, list_clean):
        lookup, modified, added, unknown, ignored = [], [], [], [], []
        removed, deleted, clean = [], [], []

        _join = self._join
        lstat = os.lstat
        cmap = self._copymap
        dmap = self._map
        ladd = lookup.append
        madd = modified.append
        aadd = added.append
        uadd = unknown.append
        iadd = ignored.append
        radd = removed.append
        dadd = deleted.append
        cadd = clean.append

        for src, fn, st in self.statwalk(files, match, ignored=list_ignored):
            if fn in dmap:
                type_, mode, size, time, foo = dmap[fn]
            else:
                if list_ignored and self._ignore(fn):
                    iadd(fn)
                else:
                    uadd(fn)
                continue
            if src == 'm':
                nonexistent = True
                if not st:
                    try:
                        st = lstat(_join(fn))
                    except OSError, inst:
                        if inst.errno not in (errno.ENOENT, errno.ENOTDIR):
                            raise
                        st = None
                    # We need to re-check that it is a valid file
                    if st and self._supported(fn, st.st_mode):
                        nonexistent = False
                # XXX: what to do with files no longer present in the fs
                # which are not removed in the dirstate?
                if nonexistent and type_ in "nm":
                    dadd(fn)
                    continue
            # check the common case first
            if type_ == 'n':
                if not st:
                    st = lstat(_join(fn))
                if (size >= 0 and (size != st.st_size
                                   or (mode ^ st.st_mode) & 0100)
                    or size == -2
                    or fn in self._copymap):
                    madd(fn)
                elif time != int(st.st_mtime):
                    ladd(fn)
                elif list_clean:
                    cadd(fn)
            elif type_ == 'm':
                madd(fn)
            elif type_ == 'a':
                aadd(fn)
            elif type_ == 'r':
                radd(fn)

        return (lookup, modified, added, removed, deleted, unknown, ignored,
                clean)
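
The dict-membership idiom this changeset converts to is already visible above ('np in known', 'fn in dmap', 'k in known'). A minimal illustration of the two spellings, outside any Mercurial code, of why 'i in d' is preferred over 'd.has_key(i)':

    d = {'copy': 'old-name'}
    if d.has_key('copy'):   # legacy spelling; dict-only, removed in Python 3
        print d['copy']
    if 'copy' in d:         # preferred: shorter and container-agnostic
        print d['copy']

For dicts the two forms answer the same question, so the rewrite is purely a readability change.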
@@ -1,83 +1,83 @@
# filelog.py - file history class for mercurial
#
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.

from revlog import *
import os

class filelog(revlog):
    def __init__(self, opener, path):
        revlog.__init__(self, opener,
                        "/".join(("data", self.encodedir(path + ".i"))))

    # This avoids a collision between a file named foo and a dir named
    # foo.i or foo.d
    def encodedir(self, path):
        return (path
                .replace(".hg/", ".hg.hg/")
                .replace(".i/", ".i.hg/")
                .replace(".d/", ".d.hg/"))

    def decodedir(self, path):
        return (path
                .replace(".d.hg/", ".d/")
                .replace(".i.hg/", ".i/")
                .replace(".hg.hg/", ".hg/"))

    def read(self, node):
        t = self.revision(node)
        if not t.startswith('\1\n'):
            return t
        s = t.index('\1\n', 2)
        return t[s+2:]

    def _readmeta(self, node):
        t = self.revision(node)
        if not t.startswith('\1\n'):
            return {}
        s = t.index('\1\n', 2)
        mt = t[2:s]
        m = {}
        for l in mt.splitlines():
            k, v = l.split(": ", 1)
            m[k] = v
        return m

    def add(self, text, meta, transaction, link, p1=None, p2=None):
        if meta or text.startswith('\1\n'):
            mt = ""
            if meta:
                mt = [ "%s: %s\n" % (k, v) for k,v in meta.items() ]
            text = "\1\n%s\1\n%s" % ("".join(mt), text)
        return self.addrevision(text, transaction, link, p1, p2)

    def renamed(self, node):
        if self.parents(node)[0] != nullid:
            return False
        m = self._readmeta(node)
-       if m and m.has_key("copy"):
+       if m and "copy" in m:
            return (m["copy"], bin(m["copyrev"]))
        return False

    def size(self, rev):
        """return the size of a given revision"""

        # for revisions with renames, we have to go the slow way
        node = self.node(rev)
        if self.renamed(node):
            return len(self.read(node))

        return revlog.size(self, rev)

    def cmp(self, node, text):
        """compare text with a given file revision"""

        # for renames, we have to go the slow way
        if self.renamed(node):
            t2 = self.read(node)
            return t2 != text

        return revlog.cmp(self, node, text)
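
add() and _readmeta() above frame rename metadata between two '\1\n' markers in front of the revision text. A self-contained sketch of that round-trip, using hypothetical helper names that mirror the logic shown:

    def packmeta(meta, text):
        # as in filelog.add(): "key: value" lines between \1\n markers
        mt = ["%s: %s\n" % (k, v) for k, v in meta.items()]
        return "\1\n%s\1\n%s" % ("".join(mt), text)

    def unpackmeta(t):
        # as in filelog._readmeta() and read(): strip the markers again
        if not t.startswith('\1\n'):
            return {}, t
        s = t.index('\1\n', 2)
        meta = {}
        for l in t[2:s].splitlines():
            k, v = l.split(": ", 1)
            meta[k] = v
        return meta, t[s + 2:]

    meta, text = unpackmeta(packmeta({'copy': 'a'}, 'data'))
    assert meta == {'copy': 'a'} and text == 'data'

This framing is why read() has to skip past the second marker, and why add() also wraps plain text that happens to start with '\1\n'.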
@@ -1,908 +1,908 @@
# hgweb/hgweb_mod.py - Web interface for a repository.
#
# Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.

import os, mimetypes, re, mimetools, cStringIO
from mercurial.node import *
from mercurial import mdiff, ui, hg, util, archival, patch, hook
from mercurial import revlog, templater
from common import ErrorResponse, get_mtime, style_map, paritygen, get_contact
from request import wsgirequest
import webcommands, protocol

shortcuts = {
    'cl': [('cmd', ['changelog']), ('rev', None)],
    'sl': [('cmd', ['shortlog']), ('rev', None)],
    'cs': [('cmd', ['changeset']), ('node', None)],
    'f': [('cmd', ['file']), ('filenode', None)],
    'fl': [('cmd', ['filelog']), ('filenode', None)],
    'fd': [('cmd', ['filediff']), ('node', None)],
    'fa': [('cmd', ['annotate']), ('filenode', None)],
    'mf': [('cmd', ['manifest']), ('manifest', None)],
    'ca': [('cmd', ['archive']), ('node', None)],
    'tags': [('cmd', ['tags'])],
    'tip': [('cmd', ['changeset']), ('node', ['tip'])],
    'static': [('cmd', ['static']), ('file', None)]
}

def _up(p):
    if p[0] != "/":
        p = "/" + p
    if p[-1] == "/":
        p = p[:-1]
    up = os.path.dirname(p)
    if up == "/":
        return "/"
    return up + "/"

def revnavgen(pos, pagelen, limit, nodefunc):
    def seq(factor, limit=None):
        if limit:
            yield limit
            if limit >= 20 and limit <= 40:
                yield 50
        else:
            yield 1 * factor
            yield 3 * factor
        for f in seq(factor * 10):
            yield f

    def nav(**map):
        l = []
        last = 0
        for f in seq(1, pagelen):
            if f < pagelen or f <= last:
                continue
            if f > limit:
                break
            last = f
            if pos + f < limit:
                l.append(("+%d" % f, hex(nodefunc(pos + f).node())))
            if pos - f >= 0:
                l.insert(0, ("-%d" % f, hex(nodefunc(pos - f).node())))

        try:
            yield {"label": "(0)", "node": hex(nodefunc('0').node())}

            for label, node in l:
                yield {"label": label, "node": node}

            yield {"label": "tip", "node": "tip"}
        except hg.RepoError:
            pass

    return nav

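The nested seq() generator above is what gives the revision navigation its 1, 3, 10, 30, 100, ... steps. A standalone sketch of just that sequence, without the pagelen special case (illustrative only):

    import itertools

    def navseq(factor=1):
        # yield 1 and 3 at the current scale, then recurse one decade up
        yield 1 * factor
        yield 3 * factor
        for f in navseq(factor * 10):
            yield f

    print list(itertools.islice(navseq(), 6))   # [1, 3, 10, 30, 100, 300]

nav() then keeps only the offsets that stay within [0, limit), so the links thin out logarithmically around the current position.
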
class hgweb(object):
    def __init__(self, repo, name=None):
        if isinstance(repo, str):
            parentui = ui.ui(report_untrusted=False, interactive=False)
            self.repo = hg.repository(parentui, repo)
        else:
            self.repo = repo

        hook.redirect(True)
        self.mtime = -1
        self.reponame = name
        self.archives = 'zip', 'gz', 'bz2'
        self.stripecount = 1
        # a repo owner may set web.templates in .hg/hgrc to get any file
        # readable by the user running the CGI script
        self.templatepath = self.config("web", "templates",
                                        templater.templatepath(),
                                        untrusted=False)

    # The CGI scripts are often run by a user different from the repo owner.
    # Trust the settings from the .hg/hgrc files by default.
    def config(self, section, name, default=None, untrusted=True):
        return self.repo.ui.config(section, name, default,
                                   untrusted=untrusted)

    def configbool(self, section, name, default=False, untrusted=True):
        return self.repo.ui.configbool(section, name, default,
                                       untrusted=untrusted)

    def configlist(self, section, name, default=None, untrusted=True):
        return self.repo.ui.configlist(section, name, default,
                                       untrusted=untrusted)

    def refresh(self):
        mtime = get_mtime(self.repo.root)
        if mtime != self.mtime:
            self.mtime = mtime
            self.repo = hg.repository(self.repo.ui, self.repo.root)
            self.maxchanges = int(self.config("web", "maxchanges", 10))
            self.stripecount = int(self.config("web", "stripes", 1))
            self.maxshortchanges = int(self.config("web", "maxshortchanges", 60))
            self.maxfiles = int(self.config("web", "maxfiles", 10))
            self.allowpull = self.configbool("web", "allowpull", True)
            self.encoding = self.config("web", "encoding", util._encoding)

    def run(self):
        if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
            raise RuntimeError("This function is only intended to be called while running as a CGI script.")
        import mercurial.hgweb.wsgicgi as wsgicgi
        wsgicgi.launch(self)

    def __call__(self, env, respond):
        req = wsgirequest(env, respond)
        self.run_wsgi(req)
        return req

    def run_wsgi(self, req):

        self.refresh()

        # expand form shortcuts

        for k in shortcuts.iterkeys():
            if k in req.form:
                for name, value in shortcuts[k]:
                    if value is None:
                        value = req.form[k]
                    req.form[name] = value
                del req.form[k]

        # work with CGI variables to create coherent structure
        # use SCRIPT_NAME, PATH_INFO and QUERY_STRING as well as our REPO_NAME

        req.url = req.env['SCRIPT_NAME']
        if not req.url.endswith('/'):
            req.url += '/'
-       if req.env.has_key('REPO_NAME'):
+       if 'REPO_NAME' in req.env:
            req.url += req.env['REPO_NAME'] + '/'

        if req.env.get('PATH_INFO'):
            parts = req.env.get('PATH_INFO').strip('/').split('/')
            repo_parts = req.env.get('REPO_NAME', '').split('/')
            if parts[:len(repo_parts)] == repo_parts:
                parts = parts[len(repo_parts):]
            query = '/'.join(parts)
        else:
            query = req.env['QUERY_STRING'].split('&', 1)[0]
            query = query.split(';', 1)[0]

        # translate user-visible url structure to internal structure

        args = query.split('/', 2)
        if 'cmd' not in req.form and args and args[0]:

            cmd = args.pop(0)
            style = cmd.rfind('-')
            if style != -1:
                req.form['style'] = [cmd[:style]]
                cmd = cmd[style+1:]

            # avoid accepting e.g. style parameter as command
            if hasattr(webcommands, cmd) or hasattr(protocol, cmd):
                req.form['cmd'] = [cmd]

            if args and args[0]:
                node = args.pop(0)
                req.form['node'] = [node]
            if args:
                req.form['file'] = args

            if cmd == 'static':
                req.form['file'] = req.form['node']
            elif cmd == 'archive':
                fn = req.form['node'][0]
                for type_, spec in self.archive_specs.iteritems():
                    ext = spec[2]
                    if fn.endswith(ext):
                        req.form['node'] = [fn[:-len(ext)]]
                        req.form['type'] = [type_]

        # actually process the request

        try:

            cmd = req.form.get('cmd', [''])[0]
            if hasattr(protocol, cmd):
                method = getattr(protocol, cmd)
                method(self, req)
            else:

                tmpl = self.templater(req)
                if cmd == '':
                    req.form['cmd'] = [tmpl.cache['default']]
                    cmd = req.form['cmd'][0]

                if cmd == 'file' and 'raw' in req.form['style']:
                    webcommands.rawfile(self, req, tmpl)
                else:
                    getattr(webcommands, cmd)(self, req, tmpl)

                del tmpl

        except revlog.LookupError, err:
            req.respond(404, tmpl(
                'error', error='revision not found: %s' % err.name))
        except (hg.RepoError, revlog.RevlogError), inst:
            req.respond('500 Internal Server Error',
                        tmpl('error', error=str(inst)))
        except ErrorResponse, inst:
            req.respond(inst.code, tmpl('error', error=inst.message))
        except AttributeError:
            req.respond(400, tmpl('error', error='No such method: ' + cmd))

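run_wsgi() starts by expanding the shortcut table defined at the top of the file. A standalone sketch of that expansion on a sample form, using a plain dict in place of the wsgirequest (assumed input '?cs=abc123'):

    shortcuts = {'cs': [('cmd', ['changeset']), ('node', None)]}
    form = {'cs': ['abc123']}

    for k in shortcuts.keys():
        if k in form:
            for name, value in shortcuts[k]:
                if value is None:          # None means: reuse the shortcut's value
                    value = form[k]
                form[name] = value
            del form[k]

    print form   # {'cmd': ['changeset'], 'node': ['abc123']}
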
    def templater(self, req):

        # determine scheme, port and server name
        # this is needed to create absolute urls

        proto = req.env.get('wsgi.url_scheme')
        if proto == 'https':
            proto = 'https'
            default_port = "443"
        else:
            proto = 'http'
            default_port = "80"

        port = req.env["SERVER_PORT"]
        port = port != default_port and (":" + port) or ""
        urlbase = '%s://%s%s' % (proto, req.env['SERVER_NAME'], port)
        staticurl = self.config("web", "staticurl") or req.url + 'static/'
        if not staticurl.endswith('/'):
            staticurl += '/'

        # some functions for the templater

        def header(**map):
            header_file = cStringIO.StringIO(
                ''.join(tmpl("header", encoding=self.encoding, **map)))
            msg = mimetools.Message(header_file, 0)
            req.header(msg.items())
            yield header_file.read()

        def footer(**map):
            yield tmpl("footer", **map)

        def motd(**map):
            yield self.config("web", "motd", "")

        def sessionvars(**map):
            fields = []
-           if req.form.has_key('style'):
+           if 'style' in req.form:
                style = req.form['style'][0]
                if style != self.config('web', 'style', ''):
                    fields.append(('style', style))

            separator = req.url[-1] == '?' and ';' or '?'
            for name, value in fields:
                yield dict(name=name, value=value, separator=separator)
                separator = ';'

        # figure out which style to use

        style = self.config("web", "style", "")
-       if req.form.has_key('style'):
+       if 'style' in req.form:
            style = req.form['style'][0]
        mapfile = style_map(self.templatepath, style)

        if not self.reponame:
            self.reponame = (self.config("web", "name")
                             or req.env.get('REPO_NAME')
                             or req.url.strip('/') or self.repo.root)

        # create the templater

        tmpl = templater.templater(mapfile, templater.common_filters,
                                   defaults={"url": req.url,
                                             "staticurl": staticurl,
                                             "urlbase": urlbase,
                                             "repo": self.reponame,
                                             "header": header,
                                             "footer": footer,
                                             "motd": motd,
                                             "sessionvars": sessionvars
                                             })
        return tmpl

    def archivelist(self, nodeid):
        allowed = self.configlist("web", "allow_archive")
        for i, spec in self.archive_specs.iteritems():
            if i in allowed or self.configbool("web", "allow" + i):
                yield {"type" : i, "extension" : spec[2], "node" : nodeid}

    def listfilediffs(self, tmpl, files, changeset):
        for f in files[:self.maxfiles]:
            yield tmpl("filedifflink", node=hex(changeset), file=f)
        if len(files) > self.maxfiles:
            yield tmpl("fileellipses")

    def siblings(self, siblings=[], hiderev=None, **args):
        siblings = [s for s in siblings if s.node() != nullid]
        if len(siblings) == 1 and siblings[0].rev() == hiderev:
            return
        for s in siblings:
            d = {'node': hex(s.node()), 'rev': s.rev()}
            if hasattr(s, 'path'):
                d['file'] = s.path()
            d.update(args)
            yield d

    def renamelink(self, fl, node):
        r = fl.renamed(node)
        if r:
            return [dict(file=r[0], node=hex(r[1]))]
        return []

    def nodetagsdict(self, node):
        return [{"name": i} for i in self.repo.nodetags(node)]

    def nodebranchdict(self, ctx):
        branches = []
        branch = ctx.branch()
        # If this is an empty repo, ctx.node() == nullid,
        # ctx.branch() == 'default', but branchtags() is
        # an empty dict. Using dict.get avoids a traceback.
        if self.repo.branchtags().get(branch) == ctx.node():
            branches.append({"name": branch})
        return branches

    def showtag(self, tmpl, t1, node=nullid, **args):
        for t in self.repo.nodetags(node):
            yield tmpl(t1, tag=t, **args)

    def diff(self, tmpl, node1, node2, files):
        def filterfiles(filters, files):
            l = [x for x in files if x in filters]

            for t in filters:
                if t and t[-1] != os.sep:
                    t += os.sep
                l += [x for x in files if x.startswith(t)]
            return l

        parity = paritygen(self.stripecount)
        def diffblock(diff, f, fn):
            yield tmpl("diffblock",
                       lines=prettyprintlines(diff),
                       parity=parity.next(),
                       file=f,
                       filenode=hex(fn or nullid))

        def prettyprintlines(diff):
            for l in diff.splitlines(1):
                if l.startswith('+'):
                    yield tmpl("difflineplus", line=l)
                elif l.startswith('-'):
                    yield tmpl("difflineminus", line=l)
                elif l.startswith('@'):
                    yield tmpl("difflineat", line=l)
                else:
                    yield tmpl("diffline", line=l)

        r = self.repo
        c1 = r.changectx(node1)
        c2 = r.changectx(node2)
        date1 = util.datestr(c1.date())
        date2 = util.datestr(c2.date())

        modified, added, removed, deleted, unknown = r.status(node1, node2)[:5]
        if files:
            modified, added, removed = map(lambda x: filterfiles(files, x),
                                           (modified, added, removed))

        diffopts = patch.diffopts(self.repo.ui, untrusted=True)
        for f in modified:
            to = c1.filectx(f).data()
            tn = c2.filectx(f).data()
            yield diffblock(mdiff.unidiff(to, date1, tn, date2, f, f,
                                          opts=diffopts), f, tn)
        for f in added:
            to = None
            tn = c2.filectx(f).data()
            yield diffblock(mdiff.unidiff(to, date1, tn, date2, f, f,
                                          opts=diffopts), f, tn)
        for f in removed:
            to = c1.filectx(f).data()
            tn = None
            yield diffblock(mdiff.unidiff(to, date1, tn, date2, f, f,
                                          opts=diffopts), f, tn)

    def changelog(self, tmpl, ctx, shortlog=False):
        def changelist(limit=0,**map):
            cl = self.repo.changelog
            l = [] # build a list in forward order for efficiency
            for i in xrange(start, end):
                ctx = self.repo.changectx(i)
                n = ctx.node()

                l.insert(0, {"parity": parity.next(),
                             "author": ctx.user(),
                             "parent": self.siblings(ctx.parents(), i - 1),
                             "child": self.siblings(ctx.children(), i + 1),
                             "changelogtag": self.showtag("changelogtag",n),
                             "desc": ctx.description(),
                             "date": ctx.date(),
                             "files": self.listfilediffs(tmpl, ctx.files(), n),
                             "rev": i,
                             "node": hex(n),
                             "tags": self.nodetagsdict(n),
                             "branches": self.nodebranchdict(ctx)})

            if limit > 0:
                l = l[:limit]

            for e in l:
                yield e

        maxchanges = shortlog and self.maxshortchanges or self.maxchanges
        cl = self.repo.changelog
        count = cl.count()
        pos = ctx.rev()
        start = max(0, pos - maxchanges + 1)
        end = min(count, start + maxchanges)
        pos = end - 1
        parity = paritygen(self.stripecount, offset=start-end)

        changenav = revnavgen(pos, maxchanges, count, self.repo.changectx)

        return tmpl(shortlog and 'shortlog' or 'changelog',
                    changenav=changenav,
                    node=hex(cl.tip()),
                    rev=pos, changesets=count,
                    entries=lambda **x: changelist(limit=0,**x),
                    latestentry=lambda **x: changelist(limit=1,**x),
                    archives=self.archivelist("tip"))

    def search(self, tmpl, query):

        def changelist(**map):
            cl = self.repo.changelog
            count = 0
            qw = query.lower().split()

            def revgen():
                for i in xrange(cl.count() - 1, 0, -100):
                    l = []
                    for j in xrange(max(0, i - 100), i):
                        ctx = self.repo.changectx(j)
                        l.append(ctx)
                    l.reverse()
                    for e in l:
                        yield e

            for ctx in revgen():
                miss = 0
                for q in qw:
                    if not (q in ctx.user().lower() or
                            q in ctx.description().lower() or
                            q in " ".join(ctx.files()).lower()):
                        miss = 1
                        break
                if miss:
                    continue

                count += 1
                n = ctx.node()

                yield tmpl('searchentry',
                           parity=parity.next(),
                           author=ctx.user(),
                           parent=self.siblings(ctx.parents()),
                           child=self.siblings(ctx.children()),
                           changelogtag=self.showtag("changelogtag",n),
                           desc=ctx.description(),
                           date=ctx.date(),
                           files=self.listfilediffs(tmpl, ctx.files(), n),
                           rev=ctx.rev(),
                           node=hex(n),
                           tags=self.nodetagsdict(n),
                           branches=self.nodebranchdict(ctx))

                if count >= self.maxchanges:
                    break

        cl = self.repo.changelog
        parity = paritygen(self.stripecount)

        return tmpl('search',
                    query=query,
                    node=hex(cl.tip()),
                    entries=changelist,
                    archives=self.archivelist("tip"))

    def changeset(self, tmpl, ctx):
        n = ctx.node()
        parents = ctx.parents()
        p1 = parents[0].node()

        files = []
        parity = paritygen(self.stripecount)
        for f in ctx.files():
            files.append(tmpl("filenodelink",
                              node=hex(n), file=f,
                              parity=parity.next()))

        def diff(**map):
            yield self.diff(tmpl, p1, n, None)

        return tmpl('changeset',
                    diff=diff,
                    rev=ctx.rev(),
                    node=hex(n),
                    parent=self.siblings(parents),
                    child=self.siblings(ctx.children()),
                    changesettag=self.showtag("changesettag",n),
                    author=ctx.user(),
                    desc=ctx.description(),
                    date=ctx.date(),
                    files=files,
                    archives=self.archivelist(hex(n)),
                    tags=self.nodetagsdict(n),
                    branches=self.nodebranchdict(ctx))

    def filelog(self, tmpl, fctx):
        f = fctx.path()
        fl = fctx.filelog()
        count = fl.count()
        pagelen = self.maxshortchanges
        pos = fctx.filerev()
        start = max(0, pos - pagelen + 1)
        end = min(count, start + pagelen)
        pos = end - 1
        parity = paritygen(self.stripecount, offset=start-end)

        def entries(limit=0, **map):
            l = []

            for i in xrange(start, end):
                ctx = fctx.filectx(i)
                n = fl.node(i)

                l.insert(0, {"parity": parity.next(),
                             "filerev": i,
                             "file": f,
                             "node": hex(ctx.node()),
                             "author": ctx.user(),
                             "date": ctx.date(),
                             "rename": self.renamelink(fl, n),
                             "parent": self.siblings(fctx.parents()),
                             "child": self.siblings(fctx.children()),
                             "desc": ctx.description()})

            if limit > 0:
                l = l[:limit]

            for e in l:
                yield e

        nodefunc = lambda x: fctx.filectx(fileid=x)
        nav = revnavgen(pos, pagelen, count, nodefunc)
        return tmpl("filelog", file=f, node=hex(fctx.node()), nav=nav,
                    entries=lambda **x: entries(limit=0, **x),
                    latestentry=lambda **x: entries(limit=1, **x))

    def filerevision(self, tmpl, fctx):
        f = fctx.path()
        text = fctx.data()
        fl = fctx.filelog()
        n = fctx.filenode()
        parity = paritygen(self.stripecount)

        mt = mimetypes.guess_type(f)[0]
        rawtext = text
        if util.binary(text):
            mt = mt or 'application/octet-stream'
            text = "(binary:%s)" % mt
        mt = mt or 'text/plain'

        def lines():
            for l, t in enumerate(text.splitlines(1)):
                yield {"line": t,
                       "linenumber": "% 6d" % (l + 1),
                       "parity": parity.next()}

        return tmpl("filerevision",
                    file=f,
                    path=_up(f),
                    text=lines(),
                    raw=rawtext,
                    mimetype=mt,
                    rev=fctx.rev(),
                    node=hex(fctx.node()),
                    author=fctx.user(),
                    date=fctx.date(),
                    desc=fctx.description(),
                    parent=self.siblings(fctx.parents()),
                    child=self.siblings(fctx.children()),
                    rename=self.renamelink(fl, n),
                    permissions=fctx.manifest().flags(f))

    def fileannotate(self, tmpl, fctx):
        f = fctx.path()
        n = fctx.filenode()
        fl = fctx.filelog()
        parity = paritygen(self.stripecount)

        def annotate(**map):
            last = None
            for f, l in fctx.annotate(follow=True):
                fnode = f.filenode()
                name = self.repo.ui.shortuser(f.user())

                if last != fnode:
                    last = fnode

                yield {"parity": parity.next(),
                       "node": hex(f.node()),
                       "rev": f.rev(),
                       "author": name,
                       "file": f.path(),
                       "line": l}

        return tmpl("fileannotate",
                    file=f,
                    annotate=annotate,
                    path=_up(f),
                    rev=fctx.rev(),
                    node=hex(fctx.node()),
                    author=fctx.user(),
                    date=fctx.date(),
                    desc=fctx.description(),
                    rename=self.renamelink(fl, n),
                    parent=self.siblings(fctx.parents()),
                    child=self.siblings(fctx.children()),
                    permissions=fctx.manifest().flags(f))

    def manifest(self, tmpl, ctx, path):
        mf = ctx.manifest()
        node = ctx.node()

        files = {}
        parity = paritygen(self.stripecount)

        if path and path[-1] != "/":
            path += "/"
        l = len(path)
        abspath = "/" + path

        for f, n in mf.items():
            if f[:l] != path:
                continue
            remain = f[l:]
            if "/" in remain:
                short = remain[:remain.index("/") + 1] # bleah
                files[short] = (f, None)
            else:
                short = os.path.basename(remain)
                files[short] = (f, n)

        if not files:
            raise ErrorResponse(404, 'Path not found: ' + path)

        def filelist(**map):
            fl = files.keys()
            fl.sort()
            for f in fl:
                full, fnode = files[f]
                if not fnode:
                    continue

                fctx = ctx.filectx(full)
                yield {"file": full,
                       "parity": parity.next(),
                       "basename": f,
                       "date": fctx.changectx().date(),
                       "size": fctx.size(),
                       "permissions": mf.flags(full)}

        def dirlist(**map):
            fl = files.keys()
            fl.sort()
            for f in fl:
                full, fnode = files[f]
                if fnode:
                    continue

                yield {"parity": parity.next(),
                       "path": "%s%s" % (abspath, f),
                       "basename": f[:-1]}

        return tmpl("manifest",
                    rev=ctx.rev(),
                    node=hex(node),
                    path=abspath,
                    up=_up(abspath),
                    upparity=parity.next(),
                    fentries=filelist,
                    dentries=dirlist,
                    archives=self.archivelist(hex(node)),
                    tags=self.nodetagsdict(node),
                    branches=self.nodebranchdict(ctx))

    def tags(self, tmpl):
        i = self.repo.tagslist()
        i.reverse()
        parity = paritygen(self.stripecount)

        def entries(notip=False,limit=0, **map):
            count = 0
            for k, n in i:
                if notip and k == "tip":
                    continue
                if limit > 0 and count >= limit:
                    continue
                count = count + 1
                yield {"parity": parity.next(),
                       "tag": k,
                       "date": self.repo.changectx(n).date(),
                       "node": hex(n)}

        return tmpl("tags",
                    node=hex(self.repo.changelog.tip()),
                    entries=lambda **x: entries(False,0, **x),
                    entriesnotip=lambda **x: entries(True,0, **x),
                    latestentry=lambda **x: entries(True,1, **x))

    def summary(self, tmpl):
        i = self.repo.tagslist()
        i.reverse()

        def tagentries(**map):
            parity = paritygen(self.stripecount)
            count = 0
            for k, n in i:
                if k == "tip": # skip tip
                    continue

                count += 1
                if count > 10: # limit to 10 tags
                    break

                yield tmpl("tagentry",
                           parity=parity.next(),
                           tag=k,
                           node=hex(n),
                           date=self.repo.changectx(n).date())


        def branches(**map):
            parity = paritygen(self.stripecount)

            b = self.repo.branchtags()
            l = [(-self.repo.changelog.rev(n), n, t) for t, n in b.items()]
            l.sort()

            for r,n,t in l:
                ctx = self.repo.changectx(n)

                yield {'parity': parity.next(),
                       'branch': t,
                       'node': hex(n),
                       'date': ctx.date()}

        def changelist(**map):
            parity = paritygen(self.stripecount, offset=start-end)
            l = [] # build a list in forward order for efficiency
            for i in xrange(start, end):
                ctx = self.repo.changectx(i)
                n = ctx.node()
                hn = hex(n)

                l.insert(0, tmpl(
                    'shortlogentry',
                    parity=parity.next(),
                    author=ctx.user(),
                    desc=ctx.description(),
                    date=ctx.date(),
                    rev=i,
                    node=hn,
                    tags=self.nodetagsdict(n),
                    branches=self.nodebranchdict(ctx)))

            yield l

        cl = self.repo.changelog
        count = cl.count()
        start = max(0, count - self.maxchanges)
        end = min(count, start + self.maxchanges)

        return tmpl("summary",
                    desc=self.config("web", "description", "unknown"),
                    owner=get_contact(self.config) or "unknown",
                    lastchange=cl.read(cl.tip())[2],
                    tags=tagentries,
                    branches=branches,
                    shortlog=changelist,
                    node=hex(cl.tip()),
                    archives=self.archivelist("tip"))

    def filediff(self, tmpl, fctx):
        n = fctx.node()
        path = fctx.path()
        parents = fctx.parents()
        p1 = parents and parents[0].node() or nullid

        def diff(**map):
            yield self.diff(tmpl, p1, n, [path])

        return tmpl("filediff",
                    file=path,
                    node=hex(n),
                    rev=fctx.rev(),
                    parent=self.siblings(parents),
                    child=self.siblings(fctx.children()),
                    diff=diff)

835 archive_specs = {
835 archive_specs = {
836 'bz2': ('application/x-tar', 'tbz2', '.tar.bz2', None),
836 'bz2': ('application/x-tar', 'tbz2', '.tar.bz2', None),
837 'gz': ('application/x-tar', 'tgz', '.tar.gz', None),
837 'gz': ('application/x-tar', 'tgz', '.tar.gz', None),
838 'zip': ('application/zip', 'zip', '.zip', None),
838 'zip': ('application/zip', 'zip', '.zip', None),
839 }
839 }
840
840
841 def archive(self, tmpl, req, key, type_):
841 def archive(self, tmpl, req, key, type_):
842 reponame = re.sub(r"\W+", "-", os.path.basename(self.reponame))
842 reponame = re.sub(r"\W+", "-", os.path.basename(self.reponame))
843 cnode = self.repo.lookup(key)
843 cnode = self.repo.lookup(key)
844 arch_version = key
844 arch_version = key
845 if cnode == key or key == 'tip':
845 if cnode == key or key == 'tip':
846 arch_version = short(cnode)
846 arch_version = short(cnode)
847 name = "%s-%s" % (reponame, arch_version)
847 name = "%s-%s" % (reponame, arch_version)
848 mimetype, artype, extension, encoding = self.archive_specs[type_]
848 mimetype, artype, extension, encoding = self.archive_specs[type_]
849 headers = [('Content-type', mimetype),
849 headers = [('Content-type', mimetype),
850 ('Content-disposition', 'attachment; filename=%s%s' %
850 ('Content-disposition', 'attachment; filename=%s%s' %
851 (name, extension))]
851 (name, extension))]
852 if encoding:
852 if encoding:
853 headers.append(('Content-encoding', encoding))
853 headers.append(('Content-encoding', encoding))
854 req.header(headers)
854 req.header(headers)
855 archival.archive(self.repo, req, cnode, artype, prefix=name)
855 archival.archive(self.repo, req, cnode, artype, prefix=name)
856
856
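To make the header assembly concrete: a request for type_='gz' looks up archive_specs['gz'], and for a repository named, say, hg-crew at tip the response headers come out roughly as below (name and node are illustrative):

# archive_specs['gz'] -> ('application/x-tar', 'tgz', '.tar.gz', None)
mimetype, artype, extension, encoding = \
    ('application/x-tar', 'tgz', '.tar.gz', None)
name = 'hg-crew-0123456789ab'          # "<reponame>-<short changeset>"
headers = [('Content-type', mimetype),
           ('Content-disposition',
            'attachment; filename=%s%s' % (name, extension))]
# encoding is None for all three entries, so no Content-encoding
# header is ever appended for these types
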
857 # add tags to things
858 # tags -> list of changesets corresponding to tags
859 # find tag, changeset, file
860
861 def cleanpath(self, path):
862 path = path.lstrip('/')
863 return util.canonpath(self.repo.root, '', path)
864
865 def changectx(self, req):
866 - if req.form.has_key('node'):
866 + if 'node' in req.form:
867 changeid = req.form['node'][0]
868 - elif req.form.has_key('manifest'):
868 + elif 'manifest' in req.form:
869 changeid = req.form['manifest'][0]
870 else:
871 changeid = self.repo.changelog.count() - 1
872
873 try:
874 ctx = self.repo.changectx(changeid)
875 except hg.RepoError:
876 man = self.repo.manifest
877 mn = man.lookup(changeid)
878 ctx = self.repo.changectx(man.linkrev(mn))
879
880 return ctx
881
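This hunk shows the pattern the whole changeset applies: 'node' in req.form replaces req.form.has_key('node'). For dictionaries the two are equivalent, but the in operator avoids a method lookup, reads better, and is the only spelling that survives into Python 3, where dict.has_key() was removed:

d = {'node': ['tip']}
d.has_key('node')     # old spelling, dict-only, Python 2 only
'node' in d           # preferred: same result, works on any container
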
882 def filectx(self, req):
883 path = self.cleanpath(req.form['file'][0])
884 - if req.form.has_key('node'):
884 + if 'node' in req.form:
885 changeid = req.form['node'][0]
886 else:
887 changeid = req.form['filenode'][0]
888 try:
889 ctx = self.repo.changectx(changeid)
890 fctx = ctx.filectx(path)
891 except hg.RepoError:
892 fctx = self.repo.filectx(path, fileid=changeid)
893
894 return fctx
895
896 def check_perm(self, req, op, default):
897 '''check permission for operation based on user auth.
898 return true if op allowed, else false.
899 default is policy to use if no config given.'''
900
901 user = req.env.get('REMOTE_USER')
902
903 deny = self.configlist('web', 'deny_' + op)
904 if deny and (not user or deny == ['*'] or user in deny):
905 return False
906
907 allow = self.configlist('web', 'allow_' + op)
908 return (allow and (allow == ['*'] or user in allow)) or default
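
Deny lists therefore take precedence over allow lists, '*' acts as a wildcard in either, and default only matters when no allow list is configured. For a hypothetical [web] section with allow_push = alice, bob and an empty deny_push, check_perm(req, 'push', False) behaves as follows:

# REMOTE_USER   result
# 'alice'       True   (listed in allow_push)
# 'carol'       False  (allow list set, user not in it)
# unset         False  (allow list set, nobody authenticated)
# with allow_push = * any request is allowed, authenticated or not
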
@@ -1,276 +1,276 b''
1 # hgweb/hgwebdir_mod.py - Web interface for a directory of repositories.
2 #
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
5 #
6 # This software may be used and distributed according to the terms
7 # of the GNU General Public License, incorporated herein by reference.
8
9 import os, mimetools, cStringIO
10 from mercurial.i18n import gettext as _
11 from mercurial import ui, hg, util, templater
12 from common import ErrorResponse, get_mtime, staticfile, style_map, paritygen, \
13 get_contact
14 from hgweb_mod import hgweb
15 from request import wsgirequest
16
17 # This is a stopgap
18 class hgwebdir(object):
19 def __init__(self, config, parentui=None):
20 def cleannames(items):
21 return [(util.pconvert(name).strip('/'), path)
22 for name, path in items]
23
24 self.parentui = parentui or ui.ui(report_untrusted=False,
25 interactive = False)
26 self.motd = None
27 self.style = None
28 self.stripecount = None
29 self.repos_sorted = ('name', False)
30 if isinstance(config, (list, tuple)):
31 self.repos = cleannames(config)
32 self.repos_sorted = ('', False)
33 elif isinstance(config, dict):
34 self.repos = cleannames(config.items())
35 self.repos.sort()
36 else:
37 if isinstance(config, util.configparser):
38 cp = config
39 else:
40 cp = util.configparser()
41 cp.read(config)
42 self.repos = []
43 if cp.has_section('web'):
44 if cp.has_option('web', 'motd'):
45 self.motd = cp.get('web', 'motd')
46 if cp.has_option('web', 'style'):
47 self.style = cp.get('web', 'style')
48 if cp.has_option('web', 'stripes'):
49 self.stripecount = int(cp.get('web', 'stripes'))
50 if cp.has_section('paths'):
51 self.repos.extend(cleannames(cp.items('paths')))
52 if cp.has_section('collections'):
53 for prefix, root in cp.items('collections'):
54 for path in util.walkrepos(root):
55 repo = os.path.normpath(path)
56 name = repo
57 if name.startswith(prefix):
58 name = name[len(prefix):]
59 self.repos.append((name.lstrip(os.sep), repo))
60 self.repos.sort()
61
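As the branches above show, hgwebdir accepts three configuration shapes. A quick sketch (paths illustrative):

# 1. a list of (virtual name, path) pairs, served in the given order
hgwebdir([('proj', '/srv/hg/proj'), ('lib/util', '/srv/hg/util')])

# 2. a dict, sorted by virtual name
hgwebdir({'proj': '/srv/hg/proj'})

# 3. an ini file (or util.configparser) with optional [web], [paths]
#    and [collections] sections; each [collections] root is walked
#    for repositories and the prefix is stripped from their names
hgwebdir('/etc/hgweb.config')
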
62 def run(self):
63 if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
64 raise RuntimeError("This function is only intended to be called while running as a CGI script.")
65 import mercurial.hgweb.wsgicgi as wsgicgi
66 wsgicgi.launch(self)
67
68 def __call__(self, env, respond):
69 req = wsgirequest(env, respond)
70 self.run_wsgi(req)
71 return req
72
73 def run_wsgi(self, req):
74
75 try:
76 try:
77
78 virtual = req.env.get("PATH_INFO", "").strip('/')
79
80 # a static file
81 if virtual.startswith('static/') or 'static' in req.form:
82 static = os.path.join(templater.templatepath(), 'static')
83 if virtual.startswith('static/'):
84 fname = virtual[7:]
85 else:
86 fname = req.form['static'][0]
87 req.write(staticfile(static, fname, req))
88 return
89
90 # top-level index
91 elif not virtual:
92 tmpl = self.templater(req)
93 self.makeindex(req, tmpl)
94 return
95
96 # nested indexes and hgwebs
97 repos = dict(self.repos)
98 while virtual:
99 real = repos.get(virtual)
100 if real:
101 req.env['REPO_NAME'] = virtual
102 try:
103 repo = hg.repository(self.parentui, real)
104 hgweb(repo).run_wsgi(req)
105 return
106 except IOError, inst:
107 raise ErrorResponse(500, inst.strerror)
108 except hg.RepoError, inst:
109 raise ErrorResponse(500, str(inst))
110
111 # browse subdirectories
112 subdir = virtual + '/'
113 if [r for r in repos if r.startswith(subdir)]:
114 tmpl = self.templater(req)
115 self.makeindex(req, tmpl, subdir)
116 return
117
118 up = virtual.rfind('/')
119 if up < 0:
120 break
121 virtual = virtual[:up]
122
123 # prefixes not found
124 tmpl = self.templater(req)
125 req.respond(404, tmpl("notfound", repo=virtual))
126
127 except ErrorResponse, err:
128 tmpl = self.templater(req)
129 req.respond(err.code, tmpl('error', error=err.message or ''))
130 finally:
131 tmpl = None
132
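The while loop above is a longest-prefix match: PATH_INFO is tried whole, then with the last '/'-separated component chopped off, until a configured repository name matches or nothing is left. In isolation, a sketch of just the resolution logic:

def resolve(virtual, repos):
    # 'proj/doc/ch1' tries 'proj/doc/ch1', then 'proj/doc', then 'proj'
    while virtual:
        if virtual in repos:
            return virtual, repos[virtual]
        up = virtual.rfind('/')
        if up < 0:
            break
        virtual = virtual[:up]
    return None, None

resolve('proj/doc/ch1', {'proj': '/srv/hg/proj'})
# -> ('proj', '/srv/hg/proj')
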
133 def makeindex(self, req, tmpl, subdir=""):
134
135 def archivelist(ui, nodeid, url):
136 allowed = ui.configlist("web", "allow_archive", untrusted=True)
137 for i in [('zip', '.zip'), ('gz', '.tar.gz'), ('bz2', '.tar.bz2')]:
138 if i[0] in allowed or ui.configbool("web", "allow" + i[0],
139 untrusted=True):
140 yield {"type" : i[0], "extension": i[1],
141 "node": nodeid, "url": url}
142
143 def entries(sortcolumn="", descending=False, subdir="", **map):
144 def sessionvars(**map):
145 fields = []
146 - if req.form.has_key('style'):
146 + if 'style' in req.form:
147 style = req.form['style'][0]
148 if style != get('web', 'style', ''):
149 fields.append(('style', style))
150
151 separator = url[-1] == '?' and ';' or '?'
152 for name, value in fields:
153 yield dict(name=name, value=value, separator=separator)
154 separator = ';'
155
156 rows = []
157 parity = paritygen(self.stripecount)
158 for name, path in self.repos:
159 if not name.startswith(subdir):
160 continue
161 name = name[len(subdir):]
162
163 u = ui.ui(parentui=self.parentui)
164 try:
165 u.readconfig(os.path.join(path, '.hg', 'hgrc'))
166 except Exception, e:
167 u.warn(_('error reading %s/.hg/hgrc: %s\n' % (path, e)))
168 continue
169 def get(section, name, default=None):
170 return u.config(section, name, default, untrusted=True)
171
172 if u.configbool("web", "hidden", untrusted=True):
173 continue
174
175 parts = [req.env['PATH_INFO'], name]
176 if req.env['SCRIPT_NAME']:
177 parts.insert(0, req.env['SCRIPT_NAME'])
178 url = ('/'.join(parts).replace("//", "/")) + '/'
179
180 # update time with local timezone
181 try:
182 d = (get_mtime(path), util.makedate()[1])
183 except OSError:
184 continue
185
186 contact = get_contact(get)
187 description = get("web", "description", "")
188 name = get("web", "name", name)
189 row = dict(contact=contact or "unknown",
190 contact_sort=contact.upper() or "unknown",
191 name=name,
192 name_sort=name,
193 url=url,
194 description=description or "unknown",
195 description_sort=description.upper() or "unknown",
196 lastchange=d,
197 lastchange_sort=d[1]-d[0],
198 sessionvars=sessionvars,
199 archives=archivelist(u, "tip", url))
200 if (not sortcolumn
201 or (sortcolumn, descending) == self.repos_sorted):
202 # fast path for unsorted output
203 row['parity'] = parity.next()
204 yield row
205 else:
206 rows.append((row["%s_sort" % sortcolumn], row))
207 if rows:
208 rows.sort()
209 if descending:
210 rows.reverse()
211 for key, row in rows:
212 row['parity'] = parity.next()
213 yield row
214
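The sorted path is a classic decorate-sort-undecorate: every row is queued as (row['<column>_sort'], row), sorted on that key, optionally reversed, and only then assigned its parity. Reduced to its essentials:

rows = [('ZLIB', {'name': 'zlib'}), ('ABC', {'name': 'abc'})]
rows.sort()                       # orders on the decorated key
rows.reverse()                    # only if descending was requested
ordered = [row for key, row in rows]
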
215 sortable = ["name", "description", "contact", "lastchange"]
216 sortcolumn, descending = self.repos_sorted
217 - if req.form.has_key('sort'):
217 + if 'sort' in req.form:
218 sortcolumn = req.form['sort'][0]
219 descending = sortcolumn.startswith('-')
220 if descending:
221 sortcolumn = sortcolumn[1:]
222 if sortcolumn not in sortable:
223 sortcolumn = ""
224
225 sort = [("sort_%s" % column,
226 "%s%s" % ((not descending and column == sortcolumn)
227 and "-" or "", column))
228 for column in sortable]
229 req.write(tmpl("index", entries=entries, subdir=subdir,
230 sortcolumn=sortcolumn, descending=descending,
231 **dict(sort)))
232
233 def templater(self, req):
234
235 def header(**map):
236 header_file = cStringIO.StringIO(
237 ''.join(tmpl("header", encoding=util._encoding, **map)))
238 msg = mimetools.Message(header_file, 0)
239 req.header(msg.items())
240 yield header_file.read()
241
242 def footer(**map):
243 yield tmpl("footer", **map)
244
245 def motd(**map):
246 if self.motd is not None:
247 yield self.motd
248 else:
249 yield config('web', 'motd', '')
250
251 def config(section, name, default=None, untrusted=True):
252 return self.parentui.config(section, name, default, untrusted)
253
254 url = req.env.get('SCRIPT_NAME', '')
255 if not url.endswith('/'):
256 url += '/'
257
258 staticurl = config('web', 'staticurl') or url + 'static/'
259 if not staticurl.endswith('/'):
260 staticurl += '/'
261
262 style = self.style
263 if style is None:
264 style = config('web', 'style', '')
265 - if req.form.has_key('style'):
265 + if 'style' in req.form:
266 style = req.form['style'][0]
267 if self.stripecount is None:
268 self.stripecount = int(config('web', 'stripes', 1))
269 mapfile = style_map(templater.templatepath(), style)
270 tmpl = templater.templater(mapfile, templater.common_filters,
271 defaults={"header": header,
272 "footer": footer,
273 "motd": motd,
274 "url": url,
275 "staticurl": staticurl})
276 return tmpl
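
Note the precedence that falls out of the lines above: a 'style' query parameter beats the hgwebdir-level setting, which in turn beats the [web] style config value. Condensed, with illustrative names:

def resolve_style(explicit, configured, form):
    # the 'style' query parameter wins over both the hgwebdir
    # setting and the [web] style config value
    style = explicit
    if style is None:
        style = configured
    if 'style' in form:
        style = form['style'][0]
    return style
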
@@ -1,237 +1,237 b''
1 #
2 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
7
8 import cStringIO, zlib, bz2, tempfile, errno, os, sys
9 from mercurial import util, streamclone
10 from mercurial.i18n import gettext as _
11 from mercurial.node import *
12
13 def lookup(web, req):
14 try:
15 r = hex(web.repo.lookup(req.form['key'][0]))
16 success = 1
17 except Exception,inst:
18 r = str(inst)
19 success = 0
20 resp = "%s %s\n" % (success, r)
21 req.httphdr("application/mercurial-0.1", length=len(resp))
22 req.write(resp)
23
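On the wire, lookup answers with a single line: '1 <40-digit hex node>\n' on success or '0 <error message>\n' on failure. A client-side sketch of parsing that reply:

def parse_lookup(resp):
    # "1 <hexnode>\n" on success, "0 <message>\n" on failure
    success, data = resp.split(' ', 1)
    if success == '1':
        return data.rstrip('\n')
    raise ValueError('remote lookup failed: %s' % data.strip())
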
24 def heads(web, req):
25 resp = " ".join(map(hex, web.repo.heads())) + "\n"
26 req.httphdr("application/mercurial-0.1", length=len(resp))
27 req.write(resp)
28
29 def branches(web, req):
30 nodes = []
31 - if req.form.has_key('nodes'):
31 + if 'nodes' in req.form:
32 nodes = map(bin, req.form['nodes'][0].split(" "))
33 resp = cStringIO.StringIO()
34 for b in web.repo.branches(nodes):
35 resp.write(" ".join(map(hex, b)) + "\n")
36 resp = resp.getvalue()
37 req.httphdr("application/mercurial-0.1", length=len(resp))
38 req.write(resp)
39
40 def between(web, req):
41 - if req.form.has_key('pairs'):
41 + if 'pairs' in req.form:
42 pairs = [map(bin, p.split("-"))
43 for p in req.form['pairs'][0].split(" ")]
44 resp = cStringIO.StringIO()
45 for b in web.repo.between(pairs):
46 resp.write(" ".join(map(hex, b)) + "\n")
47 resp = resp.getvalue()
48 req.httphdr("application/mercurial-0.1", length=len(resp))
49 req.write(resp)
50
51 def changegroup(web, req):
52 req.httphdr("application/mercurial-0.1")
53 nodes = []
54 if not web.allowpull:
55 return
56
57 - if req.form.has_key('roots'):
57 + if 'roots' in req.form:
58 nodes = map(bin, req.form['roots'][0].split(" "))
59
60 z = zlib.compressobj()
61 f = web.repo.changegroup(nodes, 'serve')
62 while 1:
63 chunk = f.read(4096)
64 if not chunk:
65 break
66 req.write(z.compress(chunk))
67
68 req.write(z.flush())
69
70 def changegroupsubset(web, req):
71 req.httphdr("application/mercurial-0.1")
72 bases = []
73 heads = []
74 if not web.allowpull:
75 return
76
77 - if req.form.has_key('bases'):
77 + if 'bases' in req.form:
78 bases = [bin(x) for x in req.form['bases'][0].split(' ')]
79 - if req.form.has_key('heads'):
79 + if 'heads' in req.form:
80 heads = [bin(x) for x in req.form['heads'][0].split(' ')]
81
82 z = zlib.compressobj()
83 f = web.repo.changegroupsubset(bases, heads, 'serve')
84 while 1:
85 chunk = f.read(4096)
86 if not chunk:
87 break
88 req.write(z.compress(chunk))
89
90 req.write(z.flush())
91
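Both functions stream their output through the same zlib pattern: feed 4k chunks through a compressobj and finish with flush(), which emits whatever the compressor still holds buffered. The pattern in isolation:

import zlib

def compress_stream(src, write, bufsize=4096):
    z = zlib.compressobj()
    while True:
        chunk = src.read(bufsize)
        if not chunk:
            break
        write(z.compress(chunk))
    write(z.flush())    # emit the compressor's buffered tail
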
92 def capabilities(web, req):
93 caps = ['lookup', 'changegroupsubset']
94 if web.configbool('server', 'uncompressed'):
95 caps.append('stream=%d' % web.repo.changelog.version)
96 # XXX: make configurable and/or share code with do_unbundle:
97 unbundleversions = ['HG10GZ', 'HG10BZ', 'HG10UN']
98 if unbundleversions:
99 caps.append('unbundle=%s' % ','.join(unbundleversions))
100 resp = ' '.join(caps)
101 req.httphdr("application/mercurial-0.1", length=len(resp))
102 req.write(resp)
103
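The reply is one space-separated capability list. With the hard-coded unbundle versions above and uncompressed streaming enabled, it would read something like the following (the stream value is the changelog's revlog version number; 1 is illustrative):

    lookup changegroupsubset stream=1 unbundle=HG10GZ,HG10BZ,HG10UN
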
104 def unbundle(web, req):
105 def bail(response, headers={}):
106 length = int(req.env['CONTENT_LENGTH'])
107 for s in util.filechunkiter(req, limit=length):
108 # drain incoming bundle, else client will not see
109 # response when run outside cgi script
110 pass
111 req.httphdr("application/mercurial-0.1", headers=headers)
112 req.write('0\n')
113 req.write(response)
114
115 # require ssl by default, auth info cannot be sniffed and
116 # replayed
117 ssl_req = web.configbool('web', 'push_ssl', True)
118 if ssl_req:
119 if req.env.get('wsgi.url_scheme') != 'https':
120 bail(_('ssl required\n'))
121 return
122 proto = 'https'
123 else:
124 proto = 'http'
125
126 # do not allow push unless explicitly allowed
127 if not web.check_perm(req, 'push', False):
128 bail(_('push not authorized\n'),
129 headers={'status': '401 Unauthorized'})
130 return
131
132 their_heads = req.form['heads'][0].split(' ')
133
134 def check_heads():
135 heads = map(hex, web.repo.heads())
136 return their_heads == [hex('force')] or their_heads == heads
137
138 # fail early if possible
139 if not check_heads():
140 bail(_('unsynced changes\n'))
141 return
142
143 req.httphdr("application/mercurial-0.1")
144
145 # do not lock repo until all changegroup data is
146 # streamed. save to temporary file.
147
148 fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-')
149 fp = os.fdopen(fd, 'wb+')
150 try:
151 length = int(req.env['CONTENT_LENGTH'])
152 for s in util.filechunkiter(req, limit=length):
153 fp.write(s)
154
155 try:
156 lock = web.repo.lock()
157 try:
158 if not check_heads():
159 req.write('0\n')
160 req.write(_('unsynced changes\n'))
161 return
162
163 fp.seek(0)
164 header = fp.read(6)
165 if not header.startswith("HG"):
166 # old client with uncompressed bundle
167 def generator(f):
168 yield header
169 for chunk in f:
170 yield chunk
171 elif not header.startswith("HG10"):
172 req.write("0\n")
173 req.write(_("unknown bundle version\n"))
174 return
175 elif header == "HG10GZ":
176 def generator(f):
177 zd = zlib.decompressobj()
178 for chunk in f:
179 yield zd.decompress(chunk)
180 elif header == "HG10BZ":
181 def generator(f):
182 zd = bz2.BZ2Decompressor()
183 zd.decompress("BZ")
184 for chunk in f:
185 yield zd.decompress(chunk)
186 elif header == "HG10UN":
187 def generator(f):
188 for chunk in f:
189 yield chunk
190 else:
191 req.write("0\n")
192 req.write(_("unknown bundle compression type\n"))
193 return
194 gen = generator(util.filechunkiter(fp, 4096))
195
196 # send addchangegroup output to client
197
198 old_stdout = sys.stdout
199 sys.stdout = cStringIO.StringIO()
200
201 try:
202 url = 'remote:%s:%s' % (proto,
203 req.env.get('REMOTE_HOST', ''))
204 try:
205 ret = web.repo.addchangegroup(
206 util.chunkbuffer(gen), 'serve', url)
207 except util.Abort, inst:
208 sys.stdout.write("abort: %s\n" % inst)
209 ret = 0
210 finally:
211 val = sys.stdout.getvalue()
212 sys.stdout = old_stdout
213 req.write('%d\n' % ret)
214 req.write(val)
215 finally:
216 del lock
217 except (OSError, IOError), inst:
218 req.write('0\n')
219 filename = getattr(inst, 'filename', '')
220 # Don't send our filesystem layout to the client
221 if filename.startswith(web.repo.root):
222 filename = filename[len(web.repo.root)+1:]
223 else:
224 filename = ''
225 error = getattr(inst, 'strerror', 'Unknown error')
226 if inst.errno == errno.ENOENT:
227 code = 404
228 else:
229 code = 500
230 req.respond(code, '%s: %s\n' % (error, filename))
231 finally:
232 fp.close()
233 os.unlink(tempname)
234
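The 6-byte header sniffing above boils down to a small dispatch table; extracted as a sketch, mirroring the branches above, including priming the bz2 decompressor with the 'BZ' magic the bundle format strips:

import bz2, zlib

def bundle_decompressor(header):
    if header == "HG10GZ":
        zd = zlib.decompressobj()
        return zd.decompress
    if header == "HG10BZ":
        zd = bz2.BZ2Decompressor()
        zd.decompress("BZ")         # re-add the stripped magic bytes
        return zd.decompress
    if header == "HG10UN":
        return lambda chunk: chunk  # payload is uncompressed
    raise ValueError("unknown bundle header: %r" % header)
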
235 def stream_out(web, req):
236 req.httphdr("application/mercurial-0.1")
237 streamclone.stream_out(web.repo, req, untrusted=True)
@@ -1,113 +1,113 b''
1 #
2 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
7
8 import os, mimetypes
9 from mercurial import revlog, util
10 from common import staticfile
11
12 def log(web, req, tmpl):
13 - if req.form.has_key('file') and req.form['file'][0]:
13 + if 'file' in req.form and req.form['file'][0]:
14 filelog(web, req, tmpl)
15 else:
16 changelog(web, req, tmpl)
17
18 def rawfile(web, req, tmpl):
19 path = web.cleanpath(req.form.get('file', [''])[0])
20 if not path:
21 req.write(web.manifest(tmpl, web.changectx(req), path))
22 return
23
24 try:
25 fctx = web.filectx(req)
26 except revlog.LookupError:
27 req.write(web.manifest(tmpl, web.changectx(req), path))
28 return
29
30 path = fctx.path()
31 text = fctx.data()
32 mt = mimetypes.guess_type(path)[0]
33 if util.binary(text):
34 mt = mt or 'application/octet-stream'
35
36 req.httphdr(mt, path, len(text))
37 req.write(text)
38
39 def file(web, req, tmpl):
40 path = web.cleanpath(req.form.get('file', [''])[0])
41 if path:
42 try:
43 req.write(web.filerevision(tmpl, web.filectx(req)))
44 return
45 except revlog.LookupError:
46 pass
47
48 req.write(web.manifest(tmpl, web.changectx(req), path))
49
50 def changelog(web, req, tmpl, shortlog = False):
51 - if req.form.has_key('node'):
51 + if 'node' in req.form:
52 ctx = web.changectx(req)
53 else:
54 - if req.form.has_key('rev'):
54 + if 'rev' in req.form:
55 hi = req.form['rev'][0]
56 else:
57 hi = web.repo.changelog.count() - 1
58 try:
59 ctx = web.repo.changectx(hi)
60 except hg.RepoError:
61 req.write(web.search(tmpl, hi)) # XXX redirect to 404 page?
62 return
63
64 req.write(web.changelog(tmpl, ctx, shortlog = shortlog))
65
66 def shortlog(web, req, tmpl):
67 changelog(web, req, tmpl, shortlog = True)
68
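rawfile() guesses the content type from the file name and only falls back to application/octet-stream when the data actually looks binary, so unrecognized text files keep a None type for the request layer to default. The logic, with a crude NUL-byte check standing in for util.binary():

import mimetypes

def guess_mimetype(path, text):
    mt = mimetypes.guess_type(path)[0]
    if '\0' in text:                    # stand-in for util.binary()
        mt = mt or 'application/octet-stream'
    return mt
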
69 def changeset(web, req, tmpl):
70 req.write(web.changeset(tmpl, web.changectx(req)))
71
72 rev = changeset
73
74 def manifest(web, req, tmpl):
75 req.write(web.manifest(tmpl, web.changectx(req),
76 web.cleanpath(req.form['path'][0])))
77
78 def tags(web, req, tmpl):
79 req.write(web.tags(tmpl))
80
81 def summary(web, req, tmpl):
82 req.write(web.summary(tmpl))
83
84 def filediff(web, req, tmpl):
85 req.write(web.filediff(tmpl, web.filectx(req)))
86
87 diff = filediff
88
89 def annotate(web, req, tmpl):
90 req.write(web.fileannotate(tmpl, web.filectx(req)))
91
92 def filelog(web, req, tmpl):
93 req.write(web.filelog(tmpl, web.filectx(req)))
94
95 def archive(web, req, tmpl):
96 type_ = req.form['type'][0]
97 allowed = web.configlist("web", "allow_archive")
98 if (type_ in web.archives and (type_ in allowed or
99 web.configbool("web", "allow" + type_, False))):
100 web.archive(tmpl, req, req.form['node'][0], type_)
101 return
102
103 req.respond(400, tmpl('error',
104 error='Unsupported archive type: %s' % type_))
105
106 def static(web, req, tmpl):
107 fname = req.form['file'][0]
108 # a repo owner may set web.static in .hg/hgrc to get any file
109 # readable by the user running the CGI script
110 static = web.config("web", "static",
111 os.path.join(web.templatepath, "static"),
112 untrusted=False)
113 req.write(staticfile(static, fname, req))
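
The module doubles as a dispatch table: every public function is a web command, and the rev = changeset and diff = filediff assignments register URL aliases. A caller can then resolve commands with a plain getattr; a sketch (the module name and dispatcher here are assumed, not shown in this hunk):

import webcommands                       # hypothetical module name

def run_command(web, req, tmpl, cmd):
    fn = getattr(webcommands, cmd, None)
    if fn is None:
        raise ValueError('no such web command: %s' % cmd)
    return fn(web, req, tmpl)
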
@@ -1,462 +1,462 b''
1 # httprepo.py - HTTP repository proxy classes for mercurial
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 #
6 # This software may be used and distributed according to the terms
7 # of the GNU General Public License, incorporated herein by reference.
8
9 from node import *
10 from remoterepo import *
11 from i18n import _
12 import repo, os, urllib, urllib2, urlparse, zlib, util, httplib
13 import errno, keepalive, tempfile, socket, changegroup
14
15 class passwordmgr(urllib2.HTTPPasswordMgrWithDefaultRealm):
16 def __init__(self, ui):
17 urllib2.HTTPPasswordMgrWithDefaultRealm.__init__(self)
18 self.ui = ui
19
20 def find_user_password(self, realm, authuri):
21 authinfo = urllib2.HTTPPasswordMgrWithDefaultRealm.find_user_password(
22 self, realm, authuri)
23 user, passwd = authinfo
24 if user and passwd:
25 return (user, passwd)
26
27 if not self.ui.interactive:
28 raise util.Abort(_('http authorization required'))
29
30 self.ui.write(_("http authorization required\n"))
31 self.ui.status(_("realm: %s\n") % realm)
32 if user:
33 self.ui.status(_("user: %s\n") % user)
34 else:
35 user = self.ui.prompt(_("user:"), default=None)
36
37 if not passwd:
38 passwd = self.ui.getpass()
39
40 self.add_password(realm, authuri, user, passwd)
41 return (user, passwd)
42
43 def netlocsplit(netloc):
44 '''split [user[:passwd]@]host[:port] into 4-tuple.'''
45
46 a = netloc.find('@')
47 if a == -1:
48 user, passwd = None, None
49 else:
50 userpass, netloc = netloc[:a], netloc[a+1:]
51 c = userpass.find(':')
52 if c == -1:
53 user, passwd = urllib.unquote(userpass), None
54 else:
55 user = urllib.unquote(userpass[:c])
56 passwd = urllib.unquote(userpass[c+1:])
57 c = netloc.find(':')
58 if c == -1:
59 host, port = netloc, None
60 else:
61 host, port = netloc[:c], netloc[c+1:]
62 return host, port, user, passwd
63
64 def netlocunsplit(host, port, user=None, passwd=None):
65 '''turn host, port, user, passwd into [user[:passwd]@]host[:port].'''
66 if port:
67 hostport = host + ':' + port
68 else:
69 hostport = host
70 if user:
71 if passwd:
72 userpass = urllib.quote(user) + ':' + urllib.quote(passwd)
73 else:
74 userpass = urllib.quote(user)
75 return userpass + '@' + hostport
76 return hostport
77
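The two helpers are inverses, with percent-decoding on split and re-encoding on unsplit. A round trip:

host, port, user, passwd = netlocsplit('bob:secret@example.com:8000')
# -> ('example.com', '8000', 'bob', 'secret')
netlocunsplit(host, port, user, passwd)
# -> 'bob:secret@example.com:8000'
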
78 # work around a bug in Python < 2.4.2
79 # (it leaves a "\n" at the end of Proxy-authorization headers)
80 class request(urllib2.Request):
81 def add_header(self, key, val):
82 if key.lower() == 'proxy-authorization':
83 val = val.strip()
84 return urllib2.Request.add_header(self, key, val)
85
86 class httpsendfile(file):
87 def __len__(self):
88 return os.fstat(self.fileno()).st_size
89
90 def _gen_sendfile(connection):
91 def _sendfile(self, data):
92 # send a file
93 if isinstance(data, httpsendfile):
94 # if auth required, some data sent twice, so rewind here
95 data.seek(0)
96 for chunk in util.filechunkiter(data):
97 connection.send(self, chunk)
98 else:
99 connection.send(self, data)
100 return _sendfile
101
102 class httpconnection(keepalive.HTTPConnection):
103 # must be able to send big bundle as stream.
104 send = _gen_sendfile(keepalive.HTTPConnection)
105
106 class basehttphandler(keepalive.HTTPHandler):
107 def http_open(self, req):
108 return self.do_open(httpconnection, req)
109
110 has_https = hasattr(urllib2, 'HTTPSHandler')
111 if has_https:
112 class httpsconnection(httplib.HTTPSConnection):
113 response_class = keepalive.HTTPResponse
114 # must be able to send big bundle as stream.
115 send = _gen_sendfile(httplib.HTTPSConnection)
116
117 class httphandler(basehttphandler, urllib2.HTTPSHandler):
118 def https_open(self, req):
119 return self.do_open(httpsconnection, req)
120 else:
121 class httphandler(basehttphandler):
122 pass
123
124 # In python < 2.5 AbstractDigestAuthHandler raises a ValueError if
125 # it doesn't know about the auth type requested. This can happen if
126 # somebody is using BasicAuth and types a bad password.
127 class httpdigestauthhandler(urllib2.HTTPDigestAuthHandler):
128 def http_error_auth_reqed(self, auth_header, host, req, headers):
129 try:
130 return urllib2.HTTPDigestAuthHandler.http_error_auth_reqed(
131 self, auth_header, host, req, headers)
132 except ValueError, inst:
133 arg = inst.args[0]
134 if arg.startswith("AbstractDigestAuthHandler doesn't know "):
135 return
136 raise
137
138 def zgenerator(f):
139 zd = zlib.decompressobj()
140 try:
141 for chunk in util.filechunkiter(f):
142 yield zd.decompress(chunk)
143 except httplib.HTTPException, inst:
144 raise IOError(None, _('connection ended unexpectedly'))
145 yield zd.flush()
146
147 _safe = ('abcdefghijklmnopqrstuvwxyz'
148 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
149 '0123456789' '_.-/')
150 _safeset = None
151 _hex = None
152 def quotepath(path):
153 '''quote the path part of a URL
154
155 This is similar to urllib.quote, but it also tries to avoid
156 quoting things twice (inspired by wget):
157
158 >>> quotepath('abc def')
159 'abc%20def'
160 >>> quotepath('abc%20def')
161 'abc%20def'
162 >>> quotepath('abc%20 def')
163 'abc%20%20def'
164 >>> quotepath('abc def%20')
165 'abc%20def%20'
166 >>> quotepath('abc def%2')
167 'abc%20def%252'
168 >>> quotepath('abc def%')
169 'abc%20def%25'
170 '''
171 global _safeset, _hex
172 if _safeset is None:
173 _safeset = util.set(_safe)
174 _hex = util.set('abcdefABCDEF0123456789')
175 l = list(path)
176 for i in xrange(len(l)):
177 c = l[i]
178 if c == '%' and i + 2 < len(l) and (l[i+1] in _hex and l[i+2] in _hex):
179 pass
180 elif c not in _safeset:
181 l[i] = '%%%02X' % ord(c)
182 return ''.join(l)
183
184 class httprepository(remoterepository):
184 class httprepository(remoterepository):
185 def __init__(self, ui, path):
185 def __init__(self, ui, path):
186 self.path = path
186 self.path = path
187 self.caps = None
187 self.caps = None
188 self.handler = None
188 self.handler = None
189 scheme, netloc, urlpath, query, frag = urlparse.urlsplit(path)
189 scheme, netloc, urlpath, query, frag = urlparse.urlsplit(path)
190 if query or frag:
190 if query or frag:
191 raise util.Abort(_('unsupported URL component: "%s"') %
191 raise util.Abort(_('unsupported URL component: "%s"') %
192 (query or frag))
192 (query or frag))
193 if not urlpath:
193 if not urlpath:
194 urlpath = '/'
194 urlpath = '/'
195 urlpath = quotepath(urlpath)
195 urlpath = quotepath(urlpath)
196 host, port, user, passwd = netlocsplit(netloc)
196 host, port, user, passwd = netlocsplit(netloc)
197
197
198 # urllib cannot handle URLs with embedded user or passwd
198 # urllib cannot handle URLs with embedded user or passwd
199 self._url = urlparse.urlunsplit((scheme, netlocunsplit(host, port),
199 self._url = urlparse.urlunsplit((scheme, netlocunsplit(host, port),
200 urlpath, '', ''))
200 urlpath, '', ''))
201 self.ui = ui
201 self.ui = ui
202 self.ui.debug(_('using %s\n') % self._url)
202 self.ui.debug(_('using %s\n') % self._url)
203
203
204 proxyurl = ui.config("http_proxy", "host") or os.getenv('http_proxy')
204 proxyurl = ui.config("http_proxy", "host") or os.getenv('http_proxy')
205 # XXX proxyauthinfo = None
205 # XXX proxyauthinfo = None
206 self.handler = httphandler()
206 self.handler = httphandler()
207 handlers = [self.handler]
207 handlers = [self.handler]
208
208
209 if proxyurl:
209 if proxyurl:
210 # proxy can be proper url or host[:port]
210 # proxy can be proper url or host[:port]
211 if not (proxyurl.startswith('http:') or
211 if not (proxyurl.startswith('http:') or
212 proxyurl.startswith('https:')):
212 proxyurl.startswith('https:')):
213 proxyurl = 'http://' + proxyurl + '/'
213 proxyurl = 'http://' + proxyurl + '/'
214 snpqf = urlparse.urlsplit(proxyurl)
214 snpqf = urlparse.urlsplit(proxyurl)
215 proxyscheme, proxynetloc, proxypath, proxyquery, proxyfrag = snpqf
215 proxyscheme, proxynetloc, proxypath, proxyquery, proxyfrag = snpqf
216 hpup = netlocsplit(proxynetloc)
216 hpup = netlocsplit(proxynetloc)
217
217
218 proxyhost, proxyport, proxyuser, proxypasswd = hpup
218 proxyhost, proxyport, proxyuser, proxypasswd = hpup
219 if not proxyuser:
219 if not proxyuser:
220 proxyuser = ui.config("http_proxy", "user")
220 proxyuser = ui.config("http_proxy", "user")
221 proxypasswd = ui.config("http_proxy", "passwd")
221 proxypasswd = ui.config("http_proxy", "passwd")
222
222
223 # see if we should use a proxy for this url
223 # see if we should use a proxy for this url
224 no_list = [ "localhost", "127.0.0.1" ]
224 no_list = [ "localhost", "127.0.0.1" ]
225 no_list.extend([p.lower() for
225 no_list.extend([p.lower() for
226 p in ui.configlist("http_proxy", "no")])
226 p in ui.configlist("http_proxy", "no")])
227 no_list.extend([p.strip().lower() for
227 no_list.extend([p.strip().lower() for
228 p in os.getenv("no_proxy", '').split(',')
228 p in os.getenv("no_proxy", '').split(',')
229 if p.strip()])
229 if p.strip()])
230 # "http_proxy.always" config is for running tests on localhost
230 # "http_proxy.always" config is for running tests on localhost
231 if (not ui.configbool("http_proxy", "always") and
231 if (not ui.configbool("http_proxy", "always") and
232 host.lower() in no_list):
232 host.lower() in no_list):
233 # avoid auto-detection of proxy settings by appending
233 # avoid auto-detection of proxy settings by appending
234 # a ProxyHandler with no proxies defined.
234 # a ProxyHandler with no proxies defined.
235 handlers.append(urllib2.ProxyHandler({}))
235 handlers.append(urllib2.ProxyHandler({}))
236 ui.debug(_('disabling proxy for %s\n') % host)
236 ui.debug(_('disabling proxy for %s\n') % host)
237 else:
237 else:
238 proxyurl = urlparse.urlunsplit((
238 proxyurl = urlparse.urlunsplit((
239 proxyscheme, netlocunsplit(proxyhost, proxyport,
239 proxyscheme, netlocunsplit(proxyhost, proxyport,
240 proxyuser, proxypasswd or ''),
240 proxyuser, proxypasswd or ''),
241 proxypath, proxyquery, proxyfrag))
241 proxypath, proxyquery, proxyfrag))
242 handlers.append(urllib2.ProxyHandler({scheme: proxyurl}))
242 handlers.append(urllib2.ProxyHandler({scheme: proxyurl}))
243 ui.debug(_('proxying through http://%s:%s\n') %
243 ui.debug(_('proxying through http://%s:%s\n') %
244 (proxyhost, proxyport))
244 (proxyhost, proxyport))
245
245
246 # urllib2 takes proxy values from the environment and those
246 # urllib2 takes proxy values from the environment and those
247 # will take precedence if found, so drop them
247 # will take precedence if found, so drop them
248 for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
248 for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
249 try:
249 try:
250 if os.environ.has_key(env):
250 if env in os.environ:
251 del os.environ[env]
251 del os.environ[env]
252 except OSError:
252 except OSError:
253 pass
253 pass
254
254
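# Editor's note: a minimal standalone sketch (not part of this changeset)
# of the idiom this series standardizes on -- membership tests with `in`
# rather than dict.has_key(), which is noisier and removed in Python 3.
# `env_example` is hypothetical data used only for illustration.
env_example = {'http_proxy': 'http://proxy.example.com:3128/'}
assert env_example.has_key('http_proxy')   # old spelling
assert 'http_proxy' in env_example         # preferred spelling, same result
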
255 passmgr = passwordmgr(ui)
255 passmgr = passwordmgr(ui)
256 if user:
256 if user:
257 ui.debug(_('http auth: user %s, password %s\n') %
257 ui.debug(_('http auth: user %s, password %s\n') %
258 (user, passwd and '*' * len(passwd) or 'not set'))
258 (user, passwd and '*' * len(passwd) or 'not set'))
259 netloc = host
259 netloc = host
260 if port:
260 if port:
261 netloc += ':' + port
261 netloc += ':' + port
262 # Python < 2.4.3 uses only the netloc to search for a password
262 # Python < 2.4.3 uses only the netloc to search for a password
263 passmgr.add_password(None, (self._url, netloc), user, passwd or '')
263 passmgr.add_password(None, (self._url, netloc), user, passwd or '')
264
264
265 handlers.extend((urllib2.HTTPBasicAuthHandler(passmgr),
265 handlers.extend((urllib2.HTTPBasicAuthHandler(passmgr),
266 httpdigestauthhandler(passmgr)))
266 httpdigestauthhandler(passmgr)))
267 opener = urllib2.build_opener(*handlers)
267 opener = urllib2.build_opener(*handlers)
268
268
269 # 1.0 here is the _protocol_ version
269 # 1.0 here is the _protocol_ version
270 opener.addheaders = [('User-agent', 'mercurial/proto-1.0')]
270 opener.addheaders = [('User-agent', 'mercurial/proto-1.0')]
271 urllib2.install_opener(opener)
271 urllib2.install_opener(opener)
272
272
273 def __del__(self):
273 def __del__(self):
274 if self.handler:
274 if self.handler:
275 self.handler.close_all()
275 self.handler.close_all()
276 self.handler = None
276 self.handler = None
277
277
278 def url(self):
278 def url(self):
279 return self.path
279 return self.path
280
280
281 # look up capabilities only when needed
281 # look up capabilities only when needed
282
282
283 def get_caps(self):
283 def get_caps(self):
284 if self.caps is None:
284 if self.caps is None:
285 try:
285 try:
286 self.caps = util.set(self.do_read('capabilities').split())
286 self.caps = util.set(self.do_read('capabilities').split())
287 except repo.RepoError:
287 except repo.RepoError:
288 self.caps = util.set()
288 self.caps = util.set()
289 self.ui.debug(_('capabilities: %s\n') %
289 self.ui.debug(_('capabilities: %s\n') %
290 (' '.join(self.caps or ['none'])))
290 (' '.join(self.caps or ['none'])))
291 return self.caps
291 return self.caps
292
292
293 capabilities = property(get_caps)
293 capabilities = property(get_caps)
294
294
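# Editor's note: the pattern above -- compute on first access, cache, then
# serve the cached value -- can be sketched in isolation like this (an
# illustration only; `fetch_caps` is a hypothetical stand-in for the real
# do_read('capabilities') round trip):
class capcache(object):
    def __init__(self, fetch_caps):
        self._fetch = fetch_caps
        self.caps = None
    def get_caps(self):
        if self.caps is None:
            self.caps = self._fetch()   # hit the server only once
        return self.caps
    capabilities = property(get_caps)
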
295 def lock(self):
295 def lock(self):
296 raise util.Abort(_('operation not supported over http'))
296 raise util.Abort(_('operation not supported over http'))
297
297
298 def do_cmd(self, cmd, **args):
298 def do_cmd(self, cmd, **args):
299 data = args.pop('data', None)
299 data = args.pop('data', None)
300 headers = args.pop('headers', {})
300 headers = args.pop('headers', {})
301 self.ui.debug(_("sending %s command\n") % cmd)
301 self.ui.debug(_("sending %s command\n") % cmd)
302 q = {"cmd": cmd}
302 q = {"cmd": cmd}
303 q.update(args)
303 q.update(args)
304 qs = '?%s' % urllib.urlencode(q)
304 qs = '?%s' % urllib.urlencode(q)
305 cu = "%s%s" % (self._url, qs)
305 cu = "%s%s" % (self._url, qs)
306 try:
306 try:
307 if data:
307 if data:
308 self.ui.debug(_("sending %s bytes\n") % len(data))
308 self.ui.debug(_("sending %s bytes\n") % len(data))
309 resp = urllib2.urlopen(request(cu, data, headers))
309 resp = urllib2.urlopen(request(cu, data, headers))
310 except urllib2.HTTPError, inst:
310 except urllib2.HTTPError, inst:
311 if inst.code == 401:
311 if inst.code == 401:
312 raise util.Abort(_('authorization failed'))
312 raise util.Abort(_('authorization failed'))
313 raise
313 raise
314 except httplib.HTTPException, inst:
314 except httplib.HTTPException, inst:
315 self.ui.debug(_('http error while sending %s command\n') % cmd)
315 self.ui.debug(_('http error while sending %s command\n') % cmd)
316 self.ui.print_exc()
316 self.ui.print_exc()
317 raise IOError(None, inst)
317 raise IOError(None, inst)
318 except IndexError:
318 except IndexError:
319 # this only happens with Python 2.3, later versions raise URLError
319 # this only happens with Python 2.3, later versions raise URLError
320 raise util.Abort(_('http error, possibly caused by proxy setting'))
320 raise util.Abort(_('http error, possibly caused by proxy setting'))
321 # record the url we got redirected to
321 # record the url we got redirected to
322 resp_url = resp.geturl()
322 resp_url = resp.geturl()
323 if resp_url.endswith(qs):
323 if resp_url.endswith(qs):
324 resp_url = resp_url[:-len(qs)]
324 resp_url = resp_url[:-len(qs)]
325 if self._url != resp_url:
325 if self._url != resp_url:
326 self.ui.status(_('real URL is %s\n') % resp_url)
326 self.ui.status(_('real URL is %s\n') % resp_url)
327 self._url = resp_url
327 self._url = resp_url
328 try:
328 try:
329 proto = resp.getheader('content-type')
329 proto = resp.getheader('content-type')
330 except AttributeError:
330 except AttributeError:
331 proto = resp.headers['content-type']
331 proto = resp.headers['content-type']
332
332
333 # accept old "text/plain" and "application/hg-changegroup" for now
333 # accept old "text/plain" and "application/hg-changegroup" for now
334 if not (proto.startswith('application/mercurial-') or
334 if not (proto.startswith('application/mercurial-') or
335 proto.startswith('text/plain') or
335 proto.startswith('text/plain') or
336 proto.startswith('application/hg-changegroup')):
336 proto.startswith('application/hg-changegroup')):
337 self.ui.debug(_("Requested URL: '%s'\n") % cu)
337 self.ui.debug(_("Requested URL: '%s'\n") % cu)
338 raise repo.RepoError(_("'%s' does not appear to be an hg repository")
338 raise repo.RepoError(_("'%s' does not appear to be an hg repository")
339 % self._url)
339 % self._url)
340
340
341 if proto.startswith('application/mercurial-'):
341 if proto.startswith('application/mercurial-'):
342 try:
342 try:
343 version = proto.split('-', 1)[1]
343 version = proto.split('-', 1)[1]
344 version_info = tuple([int(n) for n in version.split('.')])
344 version_info = tuple([int(n) for n in version.split('.')])
345 except ValueError:
345 except ValueError:
346 raise repo.RepoError(_("'%s' sent a broken Content-type "
346 raise repo.RepoError(_("'%s' sent a broken Content-type "
347 "header (%s)") % (self._url, proto))
347 "header (%s)") % (self._url, proto))
348 if version_info > (0, 1):
348 if version_info > (0, 1):
349 raise repo.RepoError(_("'%s' uses newer protocol %s") %
349 raise repo.RepoError(_("'%s' uses newer protocol %s") %
350 (self._url, version))
350 (self._url, version))
351
351
352 return resp
352 return resp
353
353
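# Editor's note: the request URL built above is just the repository URL
# plus an encoded query string. A hedged sketch of the same construction
# (values are hypothetical; the real method also sets headers and handles
# errors and redirects):
import urllib
q_example = {'cmd': 'lookup', 'key': 'tip'}
qs_example = '?%s' % urllib.urlencode(q_example)
print 'http://example.com/repo' + qs_example   # e.g. .../repo?cmd=lookup&key=tip
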
354 def do_read(self, cmd, **args):
354 def do_read(self, cmd, **args):
355 fp = self.do_cmd(cmd, **args)
355 fp = self.do_cmd(cmd, **args)
356 try:
356 try:
357 return fp.read()
357 return fp.read()
358 finally:
358 finally:
359 # if using keepalive, allow connection to be reused
359 # if using keepalive, allow connection to be reused
360 fp.close()
360 fp.close()
361
361
362 def lookup(self, key):
362 def lookup(self, key):
363 self.requirecap('lookup', _('look up remote revision'))
363 self.requirecap('lookup', _('look up remote revision'))
364 d = self.do_cmd("lookup", key = key).read()
364 d = self.do_cmd("lookup", key = key).read()
365 success, data = d[:-1].split(' ', 1)
365 success, data = d[:-1].split(' ', 1)
366 if int(success):
366 if int(success):
367 return bin(data)
367 return bin(data)
368 raise repo.RepoError(data)
368 raise repo.RepoError(data)
369
369
370 def heads(self):
370 def heads(self):
371 d = self.do_read("heads")
371 d = self.do_read("heads")
372 try:
372 try:
373 return map(bin, d[:-1].split(" "))
373 return map(bin, d[:-1].split(" "))
374 except:
374 except:
375 raise util.UnexpectedOutput(_("unexpected response:"), d)
375 raise util.UnexpectedOutput(_("unexpected response:"), d)
376
376
377 def branches(self, nodes):
377 def branches(self, nodes):
378 n = " ".join(map(hex, nodes))
378 n = " ".join(map(hex, nodes))
379 d = self.do_read("branches", nodes=n)
379 d = self.do_read("branches", nodes=n)
380 try:
380 try:
381 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
381 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
382 return br
382 return br
383 except:
383 except:
384 raise util.UnexpectedOutput(_("unexpected response:"), d)
384 raise util.UnexpectedOutput(_("unexpected response:"), d)
385
385
386 def between(self, pairs):
386 def between(self, pairs):
387 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
387 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
388 d = self.do_read("between", pairs=n)
388 d = self.do_read("between", pairs=n)
389 try:
389 try:
390 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
390 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
391 return p
391 return p
392 except:
392 except:
393 raise util.UnexpectedOutput(_("unexpected response:"), d)
393 raise util.UnexpectedOutput(_("unexpected response:"), d)
394
394
395 def changegroup(self, nodes, kind):
395 def changegroup(self, nodes, kind):
396 n = " ".join(map(hex, nodes))
396 n = " ".join(map(hex, nodes))
397 f = self.do_cmd("changegroup", roots=n)
397 f = self.do_cmd("changegroup", roots=n)
398 return util.chunkbuffer(zgenerator(f))
398 return util.chunkbuffer(zgenerator(f))
399
399
400 def changegroupsubset(self, bases, heads, source):
400 def changegroupsubset(self, bases, heads, source):
401 self.requirecap('changegroupsubset', _('look up remote changes'))
401 self.requirecap('changegroupsubset', _('look up remote changes'))
402 baselst = " ".join([hex(n) for n in bases])
402 baselst = " ".join([hex(n) for n in bases])
403 headlst = " ".join([hex(n) for n in heads])
403 headlst = " ".join([hex(n) for n in heads])
404 f = self.do_cmd("changegroupsubset", bases=baselst, heads=headlst)
404 f = self.do_cmd("changegroupsubset", bases=baselst, heads=headlst)
405 return util.chunkbuffer(zgenerator(f))
405 return util.chunkbuffer(zgenerator(f))
406
406
407 def unbundle(self, cg, heads, source):
407 def unbundle(self, cg, heads, source):
408 # have to stream bundle to a temp file because we do not have
408 # have to stream bundle to a temp file because we do not have
409 # http 1.1 chunked transfer.
409 # http 1.1 chunked transfer.
410
410
411 type = ""
411 type = ""
412 types = self.capable('unbundle')
412 types = self.capable('unbundle')
413 # servers older than d1b16a746db6 will send 'unbundle' as a
413 # servers older than d1b16a746db6 will send 'unbundle' as a
414 # boolean capability
414 # boolean capability
415 try:
415 try:
416 types = types.split(',')
416 types = types.split(',')
417 except AttributeError:
417 except AttributeError:
418 types = [""]
418 types = [""]
419 if types:
419 if types:
420 for x in types:
420 for x in types:
421 if x in changegroup.bundletypes:
421 if x in changegroup.bundletypes:
422 type = x
422 type = x
423 break
423 break
424
424
425 tempname = changegroup.writebundle(cg, None, type)
425 tempname = changegroup.writebundle(cg, None, type)
426 fp = httpsendfile(tempname, "rb")
426 fp = httpsendfile(tempname, "rb")
427 try:
427 try:
428 try:
428 try:
429 rfp = self.do_cmd(
429 rfp = self.do_cmd(
430 'unbundle', data=fp,
430 'unbundle', data=fp,
431 headers={'content-type': 'application/octet-stream'},
431 headers={'content-type': 'application/octet-stream'},
432 heads=' '.join(map(hex, heads)))
432 heads=' '.join(map(hex, heads)))
433 try:
433 try:
434 ret = int(rfp.readline())
434 ret = int(rfp.readline())
435 self.ui.write(rfp.read())
435 self.ui.write(rfp.read())
436 return ret
436 return ret
437 finally:
437 finally:
438 rfp.close()
438 rfp.close()
439 except socket.error, err:
439 except socket.error, err:
440 if err[0] in (errno.ECONNRESET, errno.EPIPE):
440 if err[0] in (errno.ECONNRESET, errno.EPIPE):
441 raise util.Abort(_('push failed: %s') % err[1])
441 raise util.Abort(_('push failed: %s') % err[1])
442 raise util.Abort(err[1])
442 raise util.Abort(err[1])
443 finally:
443 finally:
444 fp.close()
444 fp.close()
445 os.unlink(tempname)
445 os.unlink(tempname)
446
446
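# Editor's note: the negotiation above tolerates both capability styles.
# A standalone sketch of the same selection logic (an illustration only;
# `bundletypes_example` stands in for changegroup.bundletypes):
bundletypes_example = ['HG10GZ', 'HG10BZ', 'HG10UN']
def pick_bundle_type(cap):
    try:
        types = cap.split(',')   # modern servers: 'HG10GZ,HG10BZ,HG10UN'
    except AttributeError:
        types = ['']             # old servers sent a boolean capability
    for x in types:
        if x in bundletypes_example:
            return x
    return ''
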
447 def stream_out(self):
447 def stream_out(self):
448 return self.do_cmd('stream_out')
448 return self.do_cmd('stream_out')
449
449
450 class httpsrepository(httprepository):
450 class httpsrepository(httprepository):
451 def __init__(self, ui, path):
451 def __init__(self, ui, path):
452 if not has_https:
452 if not has_https:
453 raise util.Abort(_('Python support for SSL and HTTPS '
453 raise util.Abort(_('Python support for SSL and HTTPS '
454 'is not installed'))
454 'is not installed'))
455 httprepository.__init__(self, ui, path)
455 httprepository.__init__(self, ui, path)
456
456
457 def instance(ui, path, create):
457 def instance(ui, path, create):
458 if create:
458 if create:
459 raise util.Abort(_('cannot create new http repository'))
459 raise util.Abort(_('cannot create new http repository'))
460 if path.startswith('https:'):
460 if path.startswith('https:'):
461 return httpsrepository(ui, path)
461 return httpsrepository(ui, path)
462 return httprepository(ui, path)
462 return httprepository(ui, path)
@@ -1,579 +1,579 b''
1 # This library is free software; you can redistribute it and/or
1 # This library is free software; you can redistribute it and/or
2 # modify it under the terms of the GNU Lesser General Public
2 # modify it under the terms of the GNU Lesser General Public
3 # License as published by the Free Software Foundation; either
3 # License as published by the Free Software Foundation; either
4 # version 2.1 of the License, or (at your option) any later version.
4 # version 2.1 of the License, or (at your option) any later version.
5 #
5 #
6 # This library is distributed in the hope that it will be useful,
6 # This library is distributed in the hope that it will be useful,
7 # but WITHOUT ANY WARRANTY; without even the implied warranty of
7 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
8 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
9 # Lesser General Public License for more details.
9 # Lesser General Public License for more details.
10 #
10 #
11 # You should have received a copy of the GNU Lesser General Public
11 # You should have received a copy of the GNU Lesser General Public
12 # License along with this library; if not, write to the
12 # License along with this library; if not, write to the
13 # Free Software Foundation, Inc.,
13 # Free Software Foundation, Inc.,
14 # 59 Temple Place, Suite 330,
14 # 59 Temple Place, Suite 330,
15 # Boston, MA 02111-1307 USA
15 # Boston, MA 02111-1307 USA
16
16
17 # This file is part of urlgrabber, a high-level cross-protocol url-grabber
17 # This file is part of urlgrabber, a high-level cross-protocol url-grabber
18 # Copyright 2002-2004 Michael D. Stenner, Ryan Tomayko
18 # Copyright 2002-2004 Michael D. Stenner, Ryan Tomayko
19
19
20 # Modified by Benoit Boissinot:
20 # Modified by Benoit Boissinot:
21 # - fix for digest auth (inspired from urllib2.py @ Python v2.4)
21 # - fix for digest auth (inspired from urllib2.py @ Python v2.4)
22
22
23 """An HTTP handler for urllib2 that supports HTTP 1.1 and keepalive.
23 """An HTTP handler for urllib2 that supports HTTP 1.1 and keepalive.
24
24
25 >>> import urllib2
25 >>> import urllib2
26 >>> from keepalive import HTTPHandler
26 >>> from keepalive import HTTPHandler
27 >>> keepalive_handler = HTTPHandler()
27 >>> keepalive_handler = HTTPHandler()
28 >>> opener = urllib2.build_opener(keepalive_handler)
28 >>> opener = urllib2.build_opener(keepalive_handler)
29 >>> urllib2.install_opener(opener)
29 >>> urllib2.install_opener(opener)
30 >>>
30 >>>
31 >>> fo = urllib2.urlopen('http://www.python.org')
31 >>> fo = urllib2.urlopen('http://www.python.org')
32
32
33 If a connection to a given host is requested, and all of the existing
33 If a connection to a given host is requested, and all of the existing
34 connections are still in use, another connection will be opened. If
34 connections are still in use, another connection will be opened. If
35 the handler tries to use an existing connection but it fails in some
35 the handler tries to use an existing connection but it fails in some
36 way, it will be closed and removed from the pool.
36 way, it will be closed and removed from the pool.
37
37
38 To remove the handler, simply re-run build_opener with no arguments, and
38 To remove the handler, simply re-run build_opener with no arguments, and
39 install that opener.
39 install that opener.
40
40
41 You can explicitly close connections by using the close_connection()
41 You can explicitly close connections by using the close_connection()
42 method of the returned file-like object (described below) or you can
42 method of the returned file-like object (described below) or you can
43 use the handler methods:
43 use the handler methods:
44
44
45 close_connection(host)
45 close_connection(host)
46 close_all()
46 close_all()
47 open_connections()
47 open_connections()
48
48
49 NOTE: using the close_connection and close_all methods of the handler
49 NOTE: using the close_connection and close_all methods of the handler
50 should be done with care when using multiple threads.
50 should be done with care when using multiple threads.
51 * there is nothing that prevents another thread from creating new
51 * there is nothing that prevents another thread from creating new
52 connections immediately after connections are closed
52 connections immediately after connections are closed
53 * no checks are done to prevent in-use connections from being closed
53 * no checks are done to prevent in-use connections from being closed
54
54
55 >>> keepalive_handler.close_all()
55 >>> keepalive_handler.close_all()
56
56
57 EXTRA ATTRIBUTES AND METHODS
57 EXTRA ATTRIBUTES AND METHODS
58
58
59 Upon a status of 200, the object returned has a few additional
59 Upon a status of 200, the object returned has a few additional
60 attributes and methods, which should not be used if you want to
60 attributes and methods, which should not be used if you want to
61 remain consistent with the normal urllib2-returned objects:
61 remain consistent with the normal urllib2-returned objects:
62
62
63 close_connection() - close the connection to the host
63 close_connection() - close the connection to the host
64 readlines() - you know, readlines()
64 readlines() - you know, readlines()
65 status - the return status (i.e. 404)
65 status - the return status (i.e. 404)
66 reason - English translation of status (i.e. 'File not found')
66 reason - English translation of status (i.e. 'File not found')
67
67
68 If you want the best of both worlds, use this inside an
68 If you want the best of both worlds, use this inside an
69 AttributeError-catching try:
69 AttributeError-catching try:
70
70
71 >>> try: status = fo.status
71 >>> try: status = fo.status
72 >>> except AttributeError: status = None
72 >>> except AttributeError: status = None
73
73
74 Unfortunately, these are ONLY there if status == 200, so it's not
74 Unfortunately, these are ONLY there if status == 200, so it's not
75 easy to deal with non-200 responses uniformly. The reason is that
75 easy to deal with non-200 responses uniformly. The reason is that
76 urllib2 tries to do clever things with error codes 301, 302, 401,
76 urllib2 tries to do clever things with error codes 301, 302, 401,
77 and 407, and it wraps the object upon return.
77 and 407, and it wraps the object upon return.
78
78
79 For python versions earlier than 2.4, you can avoid this fancy error
79 For python versions earlier than 2.4, you can avoid this fancy error
80 handling by setting the module-level global HANDLE_ERRORS to zero.
80 handling by setting the module-level global HANDLE_ERRORS to zero.
81 You see, prior to 2.4, it's the HTTP Handler's job to determine what
81 You see, prior to 2.4, it's the HTTP Handler's job to determine what
82 to handle specially, and what to just pass up. HANDLE_ERRORS == 0
82 to handle specially, and what to just pass up. HANDLE_ERRORS == 0
83 means "pass everything up". In python 2.4, however, this job no
83 means "pass everything up". In python 2.4, however, this job no
84 longer belongs to the HTTP Handler and is now done by a NEW handler,
84 longer belongs to the HTTP Handler and is now done by a NEW handler,
85 HTTPErrorProcessor. Here's the bottom line:
85 HTTPErrorProcessor. Here's the bottom line:
86
86
87 python version < 2.4
87 python version < 2.4
88 HANDLE_ERRORS == 1 (default) pass up 200, treat the rest as
88 HANDLE_ERRORS == 1 (default) pass up 200, treat the rest as
89 errors
89 errors
90 HANDLE_ERRORS == 0 pass everything up, error processing is
90 HANDLE_ERRORS == 0 pass everything up, error processing is
91 left to the calling code
91 left to the calling code
92 python version >= 2.4
92 python version >= 2.4
93 HANDLE_ERRORS == 1 pass up 200, treat the rest as errors
93 HANDLE_ERRORS == 1 pass up 200, treat the rest as errors
94 HANDLE_ERRORS == 0 (default) pass everything up, let the
94 HANDLE_ERRORS == 0 (default) pass everything up, let the
95 other handlers (specifically,
95 other handlers (specifically,
96 HTTPErrorProcessor) decide what to do
96 HTTPErrorProcessor) decide what to do
97
97
98 In practice, setting the variable either way makes little difference
98 In practice, setting the variable either way makes little difference
99 in python 2.4, so for the most consistent behavior across versions,
99 in python 2.4, so for the most consistent behavior across versions,
100 you probably just want to use the defaults, which will give you
100 you probably just want to use the defaults, which will give you
101 exceptions on errors.
101 exceptions on errors.
102
102
103 """
103 """
104
104
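# Editor's note: the docstring's try/except recipe, written out as plain
# code (a sketch only; `fo` is assumed to be an object returned by
# urllib2.urlopen with this keepalive handler installed):
def response_status(fo):
    try:
        return fo.status, fo.reason   # present only on 200 responses here
    except AttributeError:
        return None, None
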
105 # $Id: keepalive.py,v 1.14 2006/04/04 21:00:32 mstenner Exp $
105 # $Id: keepalive.py,v 1.14 2006/04/04 21:00:32 mstenner Exp $
106
106
107 import urllib2
107 import urllib2
108 import httplib
108 import httplib
109 import socket
109 import socket
110 import thread
110 import thread
111
111
112 DEBUG = None
112 DEBUG = None
113
113
114 import sys
114 import sys
115 if sys.version_info < (2, 4): HANDLE_ERRORS = 1
115 if sys.version_info < (2, 4): HANDLE_ERRORS = 1
116 else: HANDLE_ERRORS = 0
116 else: HANDLE_ERRORS = 0
117
117
118 class ConnectionManager:
118 class ConnectionManager:
119 """
119 """
120 The connection manager must be able to:
120 The connection manager must be able to:
121 * keep track of all existing connections
121 * keep track of all existing connections
122 """
122 """
123 def __init__(self):
123 def __init__(self):
124 self._lock = thread.allocate_lock()
124 self._lock = thread.allocate_lock()
125 self._hostmap = {} # map hosts to a list of connections
125 self._hostmap = {} # map hosts to a list of connections
126 self._connmap = {} # map connections to host
126 self._connmap = {} # map connections to host
127 self._readymap = {} # map connection to ready state
127 self._readymap = {} # map connection to ready state
128
128
129 def add(self, host, connection, ready):
129 def add(self, host, connection, ready):
130 self._lock.acquire()
130 self._lock.acquire()
131 try:
131 try:
132 if not self._hostmap.has_key(host): self._hostmap[host] = []
132 if not host in self._hostmap: self._hostmap[host] = []
133 self._hostmap[host].append(connection)
133 self._hostmap[host].append(connection)
134 self._connmap[connection] = host
134 self._connmap[connection] = host
135 self._readymap[connection] = ready
135 self._readymap[connection] = ready
136 finally:
136 finally:
137 self._lock.release()
137 self._lock.release()
138
138
139 def remove(self, connection):
139 def remove(self, connection):
140 self._lock.acquire()
140 self._lock.acquire()
141 try:
141 try:
142 try:
142 try:
143 host = self._connmap[connection]
143 host = self._connmap[connection]
144 except KeyError:
144 except KeyError:
145 pass
145 pass
146 else:
146 else:
147 del self._connmap[connection]
147 del self._connmap[connection]
148 del self._readymap[connection]
148 del self._readymap[connection]
149 self._hostmap[host].remove(connection)
149 self._hostmap[host].remove(connection)
150 if not self._hostmap[host]: del self._hostmap[host]
150 if not self._hostmap[host]: del self._hostmap[host]
151 finally:
151 finally:
152 self._lock.release()
152 self._lock.release()
153
153
154 def set_ready(self, connection, ready):
154 def set_ready(self, connection, ready):
155 try: self._readymap[connection] = ready
155 try: self._readymap[connection] = ready
156 except KeyError: pass
156 except KeyError: pass
157
157
158 def get_ready_conn(self, host):
158 def get_ready_conn(self, host):
159 conn = None
159 conn = None
160 self._lock.acquire()
160 self._lock.acquire()
161 try:
161 try:
162 if self._hostmap.has_key(host):
162 if host in self._hostmap:
163 for c in self._hostmap[host]:
163 for c in self._hostmap[host]:
164 if self._readymap[c]:
164 if self._readymap[c]:
165 self._readymap[c] = 0
165 self._readymap[c] = 0
166 conn = c
166 conn = c
167 break
167 break
168 finally:
168 finally:
169 self._lock.release()
169 self._lock.release()
170 return conn
170 return conn
171
171
172 def get_all(self, host=None):
172 def get_all(self, host=None):
173 if host:
173 if host:
174 return list(self._hostmap.get(host, []))
174 return list(self._hostmap.get(host, []))
175 else:
175 else:
176 return dict(self._hostmap)
176 return dict(self._hostmap)
177
177
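# Editor's note: a hedged usage sketch of the manager above (illustration
# only; any hashable object can stand in for a real connection):
cm_example = ConnectionManager()
conn_example = object()
cm_example.add('example.com:80', conn_example, ready=0)
cm_example.set_ready(conn_example, 1)
assert cm_example.get_ready_conn('example.com:80') is conn_example
assert cm_example.get_ready_conn('example.com:80') is None   # marked busy now
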
178 class HTTPHandler(urllib2.HTTPHandler):
178 class HTTPHandler(urllib2.HTTPHandler):
179 def __init__(self):
179 def __init__(self):
180 self._cm = ConnectionManager()
180 self._cm = ConnectionManager()
181
181
182 #### Connection Management
182 #### Connection Management
183 def open_connections(self):
183 def open_connections(self):
184 """return a list of connected hosts and the number of connections
184 """return a list of connected hosts and the number of connections
185 to each. [('foo.com:80', 2), ('bar.org', 1)]"""
185 to each. [('foo.com:80', 2), ('bar.org', 1)]"""
186 return [(host, len(li)) for (host, li) in self._cm.get_all().items()]
186 return [(host, len(li)) for (host, li) in self._cm.get_all().items()]
187
187
188 def close_connection(self, host):
188 def close_connection(self, host):
189 """close connection(s) to <host>
189 """close connection(s) to <host>
190 host is the host:port spec, as in 'www.cnn.com:8080' as passed in.
190 host is the host:port spec, as in 'www.cnn.com:8080' as passed in.
191 no error occurs if there is no connection to that host."""
191 no error occurs if there is no connection to that host."""
192 for h in self._cm.get_all(host):
192 for h in self._cm.get_all(host):
193 self._cm.remove(h)
193 self._cm.remove(h)
194 h.close()
194 h.close()
195
195
196 def close_all(self):
196 def close_all(self):
197 """close all open connections"""
197 """close all open connections"""
198 for host, conns in self._cm.get_all().items():
198 for host, conns in self._cm.get_all().items():
199 for h in conns:
199 for h in conns:
200 self._cm.remove(h)
200 self._cm.remove(h)
201 h.close()
201 h.close()
202
202
203 def _request_closed(self, request, host, connection):
203 def _request_closed(self, request, host, connection):
204 """tells us that this request is now closed and the the
204 """tells us that this request is now closed and the the
205 connection is ready for another request"""
205 connection is ready for another request"""
206 self._cm.set_ready(connection, 1)
206 self._cm.set_ready(connection, 1)
207
207
208 def _remove_connection(self, host, connection, close=0):
208 def _remove_connection(self, host, connection, close=0):
209 if close: connection.close()
209 if close: connection.close()
210 self._cm.remove(connection)
210 self._cm.remove(connection)
211
211
212 #### Transaction Execution
212 #### Transaction Execution
213 def http_open(self, req):
213 def http_open(self, req):
214 return self.do_open(HTTPConnection, req)
214 return self.do_open(HTTPConnection, req)
215
215
216 def do_open(self, http_class, req):
216 def do_open(self, http_class, req):
217 host = req.get_host()
217 host = req.get_host()
218 if not host:
218 if not host:
219 raise urllib2.URLError('no host given')
219 raise urllib2.URLError('no host given')
220
220
221 try:
221 try:
222 h = self._cm.get_ready_conn(host)
222 h = self._cm.get_ready_conn(host)
223 while h:
223 while h:
224 r = self._reuse_connection(h, req, host)
224 r = self._reuse_connection(h, req, host)
225
225
226 # if this response is non-None, then it worked and we're
226 # if this response is non-None, then it worked and we're
227 # done. Break out, skipping the else block.
227 # done. Break out, skipping the else block.
228 if r: break
228 if r: break
229
229
230 # connection is bad - possibly closed by server
230 # connection is bad - possibly closed by server
231 # discard it and ask for the next free connection
231 # discard it and ask for the next free connection
232 h.close()
232 h.close()
233 self._cm.remove(h)
233 self._cm.remove(h)
234 h = self._cm.get_ready_conn(host)
234 h = self._cm.get_ready_conn(host)
235 else:
235 else:
236 # no (working) free connections were found. Create a new one.
236 # no (working) free connections were found. Create a new one.
237 h = http_class(host)
237 h = http_class(host)
238 if DEBUG: DEBUG.info("creating new connection to %s (%d)",
238 if DEBUG: DEBUG.info("creating new connection to %s (%d)",
239 host, id(h))
239 host, id(h))
240 self._cm.add(host, h, 0)
240 self._cm.add(host, h, 0)
241 self._start_transaction(h, req)
241 self._start_transaction(h, req)
242 r = h.getresponse()
242 r = h.getresponse()
243 except (socket.error, httplib.HTTPException), err:
243 except (socket.error, httplib.HTTPException), err:
244 raise urllib2.URLError(err)
244 raise urllib2.URLError(err)
245
245
246 # if not a persistent connection, don't try to reuse it
246 # if not a persistent connection, don't try to reuse it
247 if r.will_close: self._cm.remove(h)
247 if r.will_close: self._cm.remove(h)
248
248
249 if DEBUG: DEBUG.info("STATUS: %s, %s", r.status, r.reason)
249 if DEBUG: DEBUG.info("STATUS: %s, %s", r.status, r.reason)
250 r._handler = self
250 r._handler = self
251 r._host = host
251 r._host = host
252 r._url = req.get_full_url()
252 r._url = req.get_full_url()
253 r._connection = h
253 r._connection = h
254 r.code = r.status
254 r.code = r.status
255 r.headers = r.msg
255 r.headers = r.msg
256 r.msg = r.reason
256 r.msg = r.reason
257
257
258 if r.status == 200 or not HANDLE_ERRORS:
258 if r.status == 200 or not HANDLE_ERRORS:
259 return r
259 return r
260 else:
260 else:
261 return self.parent.error('http', req, r,
261 return self.parent.error('http', req, r,
262 r.status, r.msg, r.headers)
262 r.status, r.msg, r.headers)
263
263
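# Editor's note: do_open() above relies on Python's while/else -- the else
# suite runs only when the loop exhausts its candidates without hitting
# break, i.e. when no reusable connection worked. A minimal sketch of that
# control flow with toy data (illustration only):
def first_even(candidates):
    while candidates:
        c = candidates.pop(0)
        if c % 2 == 0:    # stand-in for "reused connection responded"
            break         # skips the else suite
    else:
        c = 0             # stand-in for "open a brand-new connection"
    return c

assert first_even([3, 5, 4, 7]) == 4
assert first_even([3, 5]) == 0
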
264 def _reuse_connection(self, h, req, host):
264 def _reuse_connection(self, h, req, host):
265 """start the transaction with a re-used connection
265 """start the transaction with a re-used connection
266 return a response object (r) upon success or None on failure.
266 return a response object (r) upon success or None on failure.
267 This does NOT close or remove bad connections in cases where
267 This does NOT close or remove bad connections in cases where
268 it returns. However, if an unexpected exception occurs, it
268 it returns. However, if an unexpected exception occurs, it
269 will close and remove the connection before re-raising.
269 will close and remove the connection before re-raising.
270 """
270 """
271 try:
271 try:
272 self._start_transaction(h, req)
272 self._start_transaction(h, req)
273 r = h.getresponse()
273 r = h.getresponse()
274 # note: just because we got something back doesn't mean it
274 # note: just because we got something back doesn't mean it
275 # worked. We'll check the version below, too.
275 # worked. We'll check the version below, too.
276 except (socket.error, httplib.HTTPException):
276 except (socket.error, httplib.HTTPException):
277 r = None
277 r = None
278 except:
278 except:
279 # adding this block just in case we've missed
279 # adding this block just in case we've missed
280 # something. We will still raise the exception, but
280 # something. We will still raise the exception, but
281 # let's try to close the connection and remove it
281 # let's try to close the connection and remove it
282 # first. We previously got into a nasty loop
282 # first. We previously got into a nasty loop
283 # where an exception was uncaught, and so the
283 # where an exception was uncaught, and so the
284 # connection stayed open. On the next try, the
284 # connection stayed open. On the next try, the
285 # same exception was raised, etc. The tradeoff is
285 # same exception was raised, etc. The tradeoff is
286 # that it's now possible this call will raise
286 # that it's now possible this call will raise
287 # a DIFFERENT exception
287 # a DIFFERENT exception
288 if DEBUG: DEBUG.error("unexpected exception - closing " + \
288 if DEBUG: DEBUG.error("unexpected exception - closing " + \
289 "connection to %s (%d)", host, id(h))
289 "connection to %s (%d)", host, id(h))
290 self._cm.remove(h)
290 self._cm.remove(h)
291 h.close()
291 h.close()
292 raise
292 raise
293
293
294 if r is None or r.version == 9:
294 if r is None or r.version == 9:
295 # httplib falls back to assuming HTTP 0.9 if it gets a
295 # httplib falls back to assuming HTTP 0.9 if it gets a
296 # bad header back. This is most likely to happen if
296 # bad header back. This is most likely to happen if
297 # the socket has been closed by the server since we
297 # the socket has been closed by the server since we
298 # last used the connection.
298 # last used the connection.
299 if DEBUG: DEBUG.info("failed to re-use connection to %s (%d)",
299 if DEBUG: DEBUG.info("failed to re-use connection to %s (%d)",
300 host, id(h))
300 host, id(h))
301 r = None
301 r = None
302 else:
302 else:
303 if DEBUG: DEBUG.info("re-using connection to %s (%d)", host, id(h))
303 if DEBUG: DEBUG.info("re-using connection to %s (%d)", host, id(h))
304
304
305 return r
305 return r
306
306
307 def _start_transaction(self, h, req):
307 def _start_transaction(self, h, req):
308 headers = req.headers.copy()
308 headers = req.headers.copy()
309 body = req.data
309 body = req.data
310 if sys.version_info >= (2, 4):
310 if sys.version_info >= (2, 4):
311 headers.update(req.unredirected_hdrs)
311 headers.update(req.unredirected_hdrs)
312 try:
312 try:
313 h.request(req.get_method(), req.get_selector(), body, headers)
313 h.request(req.get_method(), req.get_selector(), body, headers)
314 except socket.error, err: # XXX what error?
314 except socket.error, err: # XXX what error?
315 raise urllib2.URLError(err)
315 raise urllib2.URLError(err)
316
316
317 class HTTPResponse(httplib.HTTPResponse):
317 class HTTPResponse(httplib.HTTPResponse):
318 # we need to subclass HTTPResponse in order to
318 # we need to subclass HTTPResponse in order to
319 # 1) add readline() and readlines() methods
319 # 1) add readline() and readlines() methods
320 # 2) add close_connection() methods
320 # 2) add close_connection() methods
321 # 3) add info() and geturl() methods
321 # 3) add info() and geturl() methods
322
322
323 # in order to add readline(), read must be modified to deal with a
323 # in order to add readline(), read must be modified to deal with a
324 # buffer. example: readline must read a buffer and then spit back
324 # buffer. example: readline must read a buffer and then spit back
325 # one line at a time. The only real alternative is to read one
325 # one line at a time. The only real alternative is to read one
326 # BYTE at a time (ick). Once something has been read, it can't be
326 # BYTE at a time (ick). Once something has been read, it can't be
327 # put back (ok, maybe it can, but that's even uglier than this),
327 # put back (ok, maybe it can, but that's even uglier than this),
328 # so if you THEN do a normal read, you must first take stuff from
328 # so if you THEN do a normal read, you must first take stuff from
329 # the buffer.
329 # the buffer.
330
330
331 # the read method wraps the original to accommodate buffering,
331 # the read method wraps the original to accommodate buffering,
332 # although read() never adds to the buffer.
332 # although read() never adds to the buffer.
333 # Both readline and readlines have been stolen with almost no
333 # Both readline and readlines have been stolen with almost no
334 # modification from socket.py
334 # modification from socket.py
335
335
336
336
337 def __init__(self, sock, debuglevel=0, strict=0, method=None):
337 def __init__(self, sock, debuglevel=0, strict=0, method=None):
338 if method: # the httplib in python 2.3 uses the method arg
338 if method: # the httplib in python 2.3 uses the method arg
339 httplib.HTTPResponse.__init__(self, sock, debuglevel, method)
339 httplib.HTTPResponse.__init__(self, sock, debuglevel, method)
340 else: # 2.2 doesn't
340 else: # 2.2 doesn't
341 httplib.HTTPResponse.__init__(self, sock, debuglevel)
341 httplib.HTTPResponse.__init__(self, sock, debuglevel)
342 self.fileno = sock.fileno
342 self.fileno = sock.fileno
343 self.code = None
343 self.code = None
344 self._rbuf = ''
344 self._rbuf = ''
345 self._rbufsize = 8096
345 self._rbufsize = 8096
346 self._handler = None # inserted by the handler later
346 self._handler = None # inserted by the handler later
347 self._host = None # (same)
347 self._host = None # (same)
348 self._url = None # (same)
348 self._url = None # (same)
349 self._connection = None # (same)
349 self._connection = None # (same)
350
350
351 _raw_read = httplib.HTTPResponse.read
351 _raw_read = httplib.HTTPResponse.read
352
352
353 def close(self):
353 def close(self):
354 if self.fp:
354 if self.fp:
355 self.fp.close()
355 self.fp.close()
356 self.fp = None
356 self.fp = None
357 if self._handler:
357 if self._handler:
358 self._handler._request_closed(self, self._host,
358 self._handler._request_closed(self, self._host,
359 self._connection)
359 self._connection)
360
360
361 def close_connection(self):
361 def close_connection(self):
362 self._handler._remove_connection(self._host, self._connection, close=1)
362 self._handler._remove_connection(self._host, self._connection, close=1)
363 self.close()
363 self.close()
364
364
365 def info(self):
365 def info(self):
366 return self.headers
366 return self.headers
367
367
368 def geturl(self):
368 def geturl(self):
369 return self._url
369 return self._url
370
370
371 def read(self, amt=None):
371 def read(self, amt=None):
372 # the _rbuf test is only in this first if for speed. It's not
372 # the _rbuf test is only in this first if for speed. It's not
373 # logically necessary
373 # logically necessary
374 if self._rbuf and not amt is None:
374 if self._rbuf and not amt is None:
375 L = len(self._rbuf)
375 L = len(self._rbuf)
376 if amt > L:
376 if amt > L:
377 amt -= L
377 amt -= L
378 else:
378 else:
379 s = self._rbuf[:amt]
379 s = self._rbuf[:amt]
380 self._rbuf = self._rbuf[amt:]
380 self._rbuf = self._rbuf[amt:]
381 return s
381 return s
382
382
383 s = self._rbuf + self._raw_read(amt)
383 s = self._rbuf + self._raw_read(amt)
384 self._rbuf = ''
384 self._rbuf = ''
385 return s
385 return s
386
386
387 def readline(self, limit=-1):
387 def readline(self, limit=-1):
388 data = ""
388 data = ""
389 i = self._rbuf.find('\n')
389 i = self._rbuf.find('\n')
390 while i < 0 and not (0 < limit <= len(self._rbuf)):
390 while i < 0 and not (0 < limit <= len(self._rbuf)):
391 new = self._raw_read(self._rbufsize)
391 new = self._raw_read(self._rbufsize)
392 if not new: break
392 if not new: break
393 i = new.find('\n')
393 i = new.find('\n')
394 if i >= 0: i = i + len(self._rbuf)
394 if i >= 0: i = i + len(self._rbuf)
395 self._rbuf = self._rbuf + new
395 self._rbuf = self._rbuf + new
396 if i < 0: i = len(self._rbuf)
396 if i < 0: i = len(self._rbuf)
397 else: i = i+1
397 else: i = i+1
398 if 0 <= limit < len(self._rbuf): i = limit
398 if 0 <= limit < len(self._rbuf): i = limit
399 data, self._rbuf = self._rbuf[:i], self._rbuf[i:]
399 data, self._rbuf = self._rbuf[:i], self._rbuf[i:]
400 return data
400 return data
401
401
402 def readlines(self, sizehint = 0):
402 def readlines(self, sizehint = 0):
403 total = 0
403 total = 0
404 list = []
404 list = []
405 while 1:
405 while 1:
406 line = self.readline()
406 line = self.readline()
407 if not line: break
407 if not line: break
408 list.append(line)
408 list.append(line)
409 total += len(line)
409 total += len(line)
410 if sizehint and total >= sizehint:
410 if sizehint and total >= sizehint:
411 break
411 break
412 return list
412 return list
413
413
414
414
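# Editor's note: readline() above implements the classic buffer-and-scan
# pattern: keep pulling fixed-size chunks until a newline appears, then
# split the buffer at it. A self-contained sketch over an in-memory source
# (illustration only; slicing `data` stands in for _raw_read):
def buffered_readline(data, bufsize=4):
    buf, pos = '', 0
    i = buf.find('\n')
    while i < 0 and pos < len(data):
        new = data[pos:pos + bufsize]
        pos += len(new)
        buf += new
        i = buf.find('\n')
    if i < 0:
        i = len(buf)
    else:
        i += 1
    return buf[:i], buf[i:]   # the line, plus leftover for the next call

assert buffered_readline('abc\ndef', bufsize=16) == ('abc\n', 'def')
assert buffered_readline('abc') == ('abc', '')
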
415 class HTTPConnection(httplib.HTTPConnection):
415 class HTTPConnection(httplib.HTTPConnection):
416 # use the modified response class
416 # use the modified response class
417 response_class = HTTPResponse
417 response_class = HTTPResponse
418
418
419 #########################################################################
419 #########################################################################
420 ##### TEST FUNCTIONS
420 ##### TEST FUNCTIONS
421 #########################################################################
421 #########################################################################
422
422
423 def error_handler(url):
423 def error_handler(url):
424 global HANDLE_ERRORS
424 global HANDLE_ERRORS
425 orig = HANDLE_ERRORS
425 orig = HANDLE_ERRORS
426 keepalive_handler = HTTPHandler()
426 keepalive_handler = HTTPHandler()
427 opener = urllib2.build_opener(keepalive_handler)
427 opener = urllib2.build_opener(keepalive_handler)
428 urllib2.install_opener(opener)
428 urllib2.install_opener(opener)
429 pos = {0: 'off', 1: 'on'}
429 pos = {0: 'off', 1: 'on'}
430 for i in (0, 1):
430 for i in (0, 1):
431 print " fancy error handling %s (HANDLE_ERRORS = %i)" % (pos[i], i)
431 print " fancy error handling %s (HANDLE_ERRORS = %i)" % (pos[i], i)
432 HANDLE_ERRORS = i
432 HANDLE_ERRORS = i
433 try:
433 try:
434 fo = urllib2.urlopen(url)
434 fo = urllib2.urlopen(url)
435 foo = fo.read()
435 foo = fo.read()
436 fo.close()
436 fo.close()
437 try: status, reason = fo.status, fo.reason
437 try: status, reason = fo.status, fo.reason
438 except AttributeError: status, reason = None, None
438 except AttributeError: status, reason = None, None
439 except IOError, e:
439 except IOError, e:
440 print " EXCEPTION: %s" % e
440 print " EXCEPTION: %s" % e
441 raise
441 raise
442 else:
442 else:
443 print " status = %s, reason = %s" % (status, reason)
443 print " status = %s, reason = %s" % (status, reason)
444 HANDLE_ERRORS = orig
444 HANDLE_ERRORS = orig
445 hosts = keepalive_handler.open_connections()
445 hosts = keepalive_handler.open_connections()
446 print "open connections:", hosts
446 print "open connections:", hosts
447 keepalive_handler.close_all()
447 keepalive_handler.close_all()
448
448
449 def continuity(url):
449 def continuity(url):
450 import md5
450 import md5
451 format = '%25s: %s'
451 format = '%25s: %s'
452
452
453 # first fetch the file with the normal http handler
453 # first fetch the file with the normal http handler
454 opener = urllib2.build_opener()
454 opener = urllib2.build_opener()
455 urllib2.install_opener(opener)
455 urllib2.install_opener(opener)
456 fo = urllib2.urlopen(url)
456 fo = urllib2.urlopen(url)
457 foo = fo.read()
457 foo = fo.read()
458 fo.close()
458 fo.close()
459 m = md5.new(foo)
459 m = md5.new(foo)
460 print format % ('normal urllib', m.hexdigest())
460 print format % ('normal urllib', m.hexdigest())
461
461
462 # now install the keepalive handler and try again
462 # now install the keepalive handler and try again
463 opener = urllib2.build_opener(HTTPHandler())
463 opener = urllib2.build_opener(HTTPHandler())
464 urllib2.install_opener(opener)
464 urllib2.install_opener(opener)
465
465
466 fo = urllib2.urlopen(url)
466 fo = urllib2.urlopen(url)
467 foo = fo.read()
467 foo = fo.read()
468 fo.close()
468 fo.close()
469 m = md5.new(foo)
469 m = md5.new(foo)
470 print format % ('keepalive read', m.hexdigest())
470 print format % ('keepalive read', m.hexdigest())
471
471
472 fo = urllib2.urlopen(url)
472 fo = urllib2.urlopen(url)
473 foo = ''
473 foo = ''
474 while 1:
474 while 1:
475 f = fo.readline()
475 f = fo.readline()
476 if f: foo = foo + f
476 if f: foo = foo + f
477 else: break
477 else: break
478 fo.close()
478 fo.close()
479 m = md5.new(foo)
479 m = md5.new(foo)
480 print format % ('keepalive readline', m.hexdigest())
480 print format % ('keepalive readline', m.hexdigest())
481
481
482 def comp(N, url):
482 def comp(N, url):
483 print ' making %i connections to:\n %s' % (N, url)
483 print ' making %i connections to:\n %s' % (N, url)
484
484
485 sys.stdout.write(' first using the normal urllib handlers')
485 sys.stdout.write(' first using the normal urllib handlers')
486 # first use normal opener
486 # first use normal opener
487 opener = urllib2.build_opener()
487 opener = urllib2.build_opener()
488 urllib2.install_opener(opener)
488 urllib2.install_opener(opener)
489 t1 = fetch(N, url)
489 t1 = fetch(N, url)
490 print ' TIME: %.3f s' % t1
490 print ' TIME: %.3f s' % t1
491
491
492 sys.stdout.write(' now using the keepalive handler ')
492 sys.stdout.write(' now using the keepalive handler ')
493 # now install the keepalive handler and try again
493 # now install the keepalive handler and try again
494 opener = urllib2.build_opener(HTTPHandler())
494 opener = urllib2.build_opener(HTTPHandler())
495 urllib2.install_opener(opener)
495 urllib2.install_opener(opener)
496 t2 = fetch(N, url)
496 t2 = fetch(N, url)
497 print ' TIME: %.3f s' % t2
497 print ' TIME: %.3f s' % t2
498 print ' improvement factor: %.2f' % (t1/t2, )
498 print ' improvement factor: %.2f' % (t1/t2, )
499
499
500 def fetch(N, url, delay=0):
500 def fetch(N, url, delay=0):
501 import time
501 import time
502 lens = []
502 lens = []
503 starttime = time.time()
503 starttime = time.time()
504 for i in range(N):
504 for i in range(N):
505 if delay and i > 0: time.sleep(delay)
505 if delay and i > 0: time.sleep(delay)
506 fo = urllib2.urlopen(url)
506 fo = urllib2.urlopen(url)
507 foo = fo.read()
507 foo = fo.read()
508 fo.close()
508 fo.close()
509 lens.append(len(foo))
509 lens.append(len(foo))
510 diff = time.time() - starttime
510 diff = time.time() - starttime
511
511
512 j = 0
512 j = 0
513 for i in lens[1:]:
513 for i in lens[1:]:
514 j = j + 1
514 j = j + 1
515 if not i == lens[0]:
515 if not i == lens[0]:
516 print "WARNING: inconsistent length on read %i: %i" % (j, i)
516 print "WARNING: inconsistent length on read %i: %i" % (j, i)
517
517
518 return diff
518 return diff
519
519
520 def test_timeout(url):
520 def test_timeout(url):
521 global DEBUG
521 global DEBUG
522 dbbackup = DEBUG
522 dbbackup = DEBUG
523 class FakeLogger:
523 class FakeLogger:
524 def debug(self, msg, *args): print msg % args
524 def debug(self, msg, *args): print msg % args
525 info = warning = error = debug
525 info = warning = error = debug
526 DEBUG = FakeLogger()
526 DEBUG = FakeLogger()
527 print " fetching the file to establish a connection"
527 print " fetching the file to establish a connection"
528 fo = urllib2.urlopen(url)
528 fo = urllib2.urlopen(url)
529 data1 = fo.read()
529 data1 = fo.read()
530 fo.close()
530 fo.close()
531
531
532 i = 20
532 i = 20
533 print " waiting %i seconds for the server to close the connection" % i
533 print " waiting %i seconds for the server to close the connection" % i
534 while i > 0:
534 while i > 0:
535 sys.stdout.write('\r %2i' % i)
535 sys.stdout.write('\r %2i' % i)
536 sys.stdout.flush()
536 sys.stdout.flush()
537 time.sleep(1)
537 time.sleep(1)
538 i -= 1
538 i -= 1
539 sys.stderr.write('\r')
539 sys.stderr.write('\r')
540
540
541 print " fetching the file a second time"
541 print " fetching the file a second time"
542 fo = urllib2.urlopen(url)
542 fo = urllib2.urlopen(url)
543 data2 = fo.read()
543 data2 = fo.read()
544 fo.close()
544 fo.close()
545
545
546 if data1 == data2:
546 if data1 == data2:
547 print ' data are identical'
547 print ' data are identical'
548 else:
548 else:
549 print ' ERROR: DATA DIFFER'
549 print ' ERROR: DATA DIFFER'
550
550
551 DEBUG = dbbackup
551 DEBUG = dbbackup
552
552
553
553
554 def test(url, N=10):
554 def test(url, N=10):
555 print "checking error hander (do this on a non-200)"
555 print "checking error hander (do this on a non-200)"
556 try: error_handler(url)
556 try: error_handler(url)
557 except IOError, e:
557 except IOError, e:
558 print "exiting - exception will prevent further tests"
558 print "exiting - exception will prevent further tests"
559 sys.exit()
559 sys.exit()
560 print
560 print
561 print "performing continuity test (making sure stuff isn't corrupted)"
561 print "performing continuity test (making sure stuff isn't corrupted)"
562 continuity(url)
562 continuity(url)
563 print
563 print
564 print "performing speed comparison"
564 print "performing speed comparison"
565 comp(N, url)
565 comp(N, url)
566 print
566 print
567 print "performing dropped-connection check"
567 print "performing dropped-connection check"
568 test_timeout(url)
568 test_timeout(url)
569
569
570 if __name__ == '__main__':
570 if __name__ == '__main__':
571 import time
571 import time
572 import sys
572 import sys
573 try:
573 try:
574 N = int(sys.argv[1])
574 N = int(sys.argv[1])
575 url = sys.argv[2]
575 url = sys.argv[2]
576 except:
576 except:
577 print "%s <integer> <url>" % sys.argv[0]
577 print "%s <integer> <url>" % sys.argv[0]
578 else:
578 else:
579 test(url, N)
579 test(url, N)
@@ -1,2061 +1,2061 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import _
9 from i18n import _
10 import repo, changegroup
10 import repo, changegroup
11 import changelog, dirstate, filelog, manifest, context, weakref
11 import changelog, dirstate, filelog, manifest, context, weakref
12 import re, lock, transaction, tempfile, stat, errno, ui
12 import re, lock, transaction, tempfile, stat, errno, ui
13 import os, revlog, time, util, extensions, hook
13 import os, revlog, time, util, extensions, hook
14
14
15 class localrepository(repo.repository):
15 class localrepository(repo.repository):
16 capabilities = util.set(('lookup', 'changegroupsubset'))
16 capabilities = util.set(('lookup', 'changegroupsubset'))
17 supported = ('revlogv1', 'store')
17 supported = ('revlogv1', 'store')
18
18
19 def __init__(self, parentui, path=None, create=0):
19 def __init__(self, parentui, path=None, create=0):
20 repo.repository.__init__(self)
20 repo.repository.__init__(self)
21 self.root = os.path.realpath(path)
21 self.root = os.path.realpath(path)
22 self.path = os.path.join(self.root, ".hg")
22 self.path = os.path.join(self.root, ".hg")
23 self.origroot = path
23 self.origroot = path
24 self.opener = util.opener(self.path)
24 self.opener = util.opener(self.path)
25 self.wopener = util.opener(self.root)
25 self.wopener = util.opener(self.root)
26
26
27 if not os.path.isdir(self.path):
27 if not os.path.isdir(self.path):
28 if create:
28 if create:
29 if not os.path.exists(path):
29 if not os.path.exists(path):
30 os.mkdir(path)
30 os.mkdir(path)
31 os.mkdir(self.path)
31 os.mkdir(self.path)
32 requirements = ["revlogv1"]
32 requirements = ["revlogv1"]
33 if parentui.configbool('format', 'usestore', True):
33 if parentui.configbool('format', 'usestore', True):
34 os.mkdir(os.path.join(self.path, "store"))
34 os.mkdir(os.path.join(self.path, "store"))
35 requirements.append("store")
35 requirements.append("store")
36 # create an invalid changelog
36 # create an invalid changelog
37 self.opener("00changelog.i", "a").write(
37 self.opener("00changelog.i", "a").write(
38 '\0\0\0\2' # represents revlogv2
38 '\0\0\0\2' # represents revlogv2
39 ' dummy changelog to prevent using the old repo layout'
39 ' dummy changelog to prevent using the old repo layout'
40 )
40 )
41 reqfile = self.opener("requires", "w")
41 reqfile = self.opener("requires", "w")
42 for r in requirements:
42 for r in requirements:
43 reqfile.write("%s\n" % r)
43 reqfile.write("%s\n" % r)
44 reqfile.close()
44 reqfile.close()
45 else:
45 else:
46 raise repo.RepoError(_("repository %s not found") % path)
46 raise repo.RepoError(_("repository %s not found") % path)
47 elif create:
47 elif create:
48 raise repo.RepoError(_("repository %s already exists") % path)
48 raise repo.RepoError(_("repository %s already exists") % path)
49 else:
49 else:
50 # find requirements
50 # find requirements
51 try:
51 try:
52 requirements = self.opener("requires").read().splitlines()
52 requirements = self.opener("requires").read().splitlines()
53 except IOError, inst:
53 except IOError, inst:
54 if inst.errno != errno.ENOENT:
54 if inst.errno != errno.ENOENT:
55 raise
55 raise
56 requirements = []
56 requirements = []
57 # check them
57 # check them
58 for r in requirements:
58 for r in requirements:
59 if r not in self.supported:
59 if r not in self.supported:
60 raise repo.RepoError(_("requirement '%s' not supported") % r)
60 raise repo.RepoError(_("requirement '%s' not supported") % r)
61
61
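# Editor's note: the requirements check above is the forward-compatibility
# gate -- a repo listing a feature this code does not know about is refused
# rather than misread. A hedged standalone sketch (illustration only; the
# 'requires' file contents are hypothetical):
supported_example = ('revlogv1', 'store')
def check_requirements(text):
    for r in text.splitlines():
        if r not in supported_example:
            raise ValueError("requirement '%s' not supported" % r)

check_requirements("revlogv1\nstore\n")           # fine
# check_requirements("revlogv1\nfancyfuture\n")  # would raise ValueError
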
62 # setup store
62 # setup store
63 if "store" in requirements:
63 if "store" in requirements:
64 self.encodefn = util.encodefilename
64 self.encodefn = util.encodefilename
65 self.decodefn = util.decodefilename
65 self.decodefn = util.decodefilename
66 self.spath = os.path.join(self.path, "store")
66 self.spath = os.path.join(self.path, "store")
67 else:
67 else:
68 self.encodefn = lambda x: x
68 self.encodefn = lambda x: x
69 self.decodefn = lambda x: x
69 self.decodefn = lambda x: x
70 self.spath = self.path
70 self.spath = self.path
71 self.sopener = util.encodedopener(util.opener(self.spath),
71 self.sopener = util.encodedopener(util.opener(self.spath),
72 self.encodefn)
72 self.encodefn)
73
73
74 self.ui = ui.ui(parentui=parentui)
74 self.ui = ui.ui(parentui=parentui)
75 try:
75 try:
76 self.ui.readconfig(self.join("hgrc"), self.root)
76 self.ui.readconfig(self.join("hgrc"), self.root)
77 extensions.loadall(self.ui)
77 extensions.loadall(self.ui)
78 except IOError:
78 except IOError:
79 pass
79 pass
80
80
81 self.tagscache = None
81 self.tagscache = None
82 self._tagstypecache = None
82 self._tagstypecache = None
83 self.branchcache = None
83 self.branchcache = None
84 self.nodetagscache = None
84 self.nodetagscache = None
85 self.filterpats = {}
85 self.filterpats = {}
86 self._transref = self._lockref = self._wlockref = None
86 self._transref = self._lockref = self._wlockref = None
87
87
88 def __getattr__(self, name):
88 def __getattr__(self, name):
89 if name == 'changelog':
89 if name == 'changelog':
90 self.changelog = changelog.changelog(self.sopener)
90 self.changelog = changelog.changelog(self.sopener)
91 self.sopener.defversion = self.changelog.version
91 self.sopener.defversion = self.changelog.version
92 return self.changelog
92 return self.changelog
93 if name == 'manifest':
93 if name == 'manifest':
94 self.changelog
94 self.changelog
95 self.manifest = manifest.manifest(self.sopener)
95 self.manifest = manifest.manifest(self.sopener)
96 return self.manifest
96 return self.manifest
97 if name == 'dirstate':
97 if name == 'dirstate':
98 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
98 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
99 return self.dirstate
99 return self.dirstate
100 else:
100 else:
101 raise AttributeError, name
101 raise AttributeError, name
102
102
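# Editor's note: __getattr__ above fires only when normal lookup fails, so
# assigning self.changelog on first use means later accesses never re-enter
# it -- per-attribute lazy initialization. A minimal sketch (illustration
# only):
class lazyattrs(object):
    def __getattr__(self, name):
        if name == 'expensive':
            print 'computing once'
            self.expensive = 42   # cached: future lookups skip __getattr__
            return self.expensive
        raise AttributeError, name

o = lazyattrs()
assert o.expensive == 42   # prints 'computing once'
assert o.expensive == 42   # silent: attribute now set on the instance
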
    def url(self):
        return 'file:' + self.root

    def hook(self, name, throw=False, **args):
        return hook.hook(self.ui, self, name, throw, **args)

    tag_disallowed = ':\r\n'

    def _tag(self, name, node, message, local, user, date, parent=None,
             extra={}):
        use_dirstate = parent is None

        for c in self.tag_disallowed:
            if c in name:
                raise util.Abort(_('%r cannot be used in a tag name') % c)

        self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)

        def writetag(fp, name, munge, prevtags):
            if prevtags and prevtags[-1] != '\n':
                fp.write('\n')
            fp.write('%s %s\n' % (hex(node), munge and munge(name) or name))
            fp.close()

        prevtags = ''
        if local:
            try:
                fp = self.opener('localtags', 'r+')
            except IOError, err:
                fp = self.opener('localtags', 'a')
            else:
                prevtags = fp.read()

            # local tags are stored in the current charset
            writetag(fp, name, None, prevtags)
            self.hook('tag', node=hex(node), tag=name, local=local)
            return

        if use_dirstate:
            try:
                fp = self.wfile('.hgtags', 'rb+')
            except IOError, err:
                fp = self.wfile('.hgtags', 'ab')
            else:
                prevtags = fp.read()
        else:
            try:
                prevtags = self.filectx('.hgtags', parent).data()
            except revlog.LookupError:
                pass
            fp = self.wfile('.hgtags', 'wb')
            if prevtags:
                fp.write(prevtags)

        # committed tags are stored in UTF-8
        writetag(fp, name, util.fromlocal, prevtags)

        if use_dirstate and '.hgtags' not in self.dirstate:
            self.add(['.hgtags'])

        tagnode = self.commit(['.hgtags'], message, user, date, p1=parent,
                              extra=extra)

        self.hook('tag', node=hex(node), tag=name, local=local)

        return tagnode

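    # For reference (editorial note): both localtags and .hgtags, as written
    # by writetag() above, hold one "<hex node> <tag name>" pair per line,
    # for example
    #
    #   0123456789abcdef0123456789abcdef01234567 v1.0
    #
    # the differences being the character encoding (local charset vs. UTF-8)
    # and whether the file is version-controlled.
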
    def tag(self, name, node, message, local, user, date):
        '''tag a revision with a symbolic name.

        if local is True, the tag is stored in a per-repository file.
        otherwise, it is stored in the .hgtags file, and a new
        changeset is committed with the change.

        keyword arguments:

        local: whether to store tag in non-version-controlled file
        (default False)

        message: commit message to use if committing

        user: name of user to use if committing

        date: date tuple to use if committing'''

        for x in self.status()[:5]:
            if '.hgtags' in x:
                raise util.Abort(_('working copy of .hgtags is changed '
                                   '(please commit .hgtags manually)'))

        self._tag(name, node, message, local, user, date)

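    # Illustrative usage (editorial sketch, not from the original source):
    # create a global tag for the current tip; the commit itself happens
    # inside _tag() because local is False.
    #
    #   repo.tag('v1.0', repo.changelog.tip(), 'Added tag v1.0',
    #            local=False, user=None, date=None)
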
    def tags(self):
        '''return a mapping of tag to node'''
        if self.tagscache:
            return self.tagscache

        globaltags = {}
        tagtypes = {}

        def readtags(lines, fn, tagtype):
            filetags = {}
            count = 0

            def warn(msg):
                self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))

            for l in lines:
                count += 1
                if not l:
                    continue
                s = l.split(" ", 1)
                if len(s) != 2:
                    warn(_("cannot parse entry"))
                    continue
                node, key = s
                key = util.tolocal(key.strip()) # stored in UTF-8
                try:
                    bin_n = bin(node)
                except TypeError:
                    warn(_("node '%s' is not well formed") % node)
                    continue
                if bin_n not in self.changelog.nodemap:
                    warn(_("tag '%s' refers to unknown node") % key)
                    continue

                h = []
                if key in filetags:
                    n, h = filetags[key]
                    h.append(n)
                filetags[key] = (bin_n, h)

            for k, nh in filetags.items():
                if k not in globaltags:
                    globaltags[k] = nh
                    tagtypes[k] = tagtype
                    continue

                # we prefer the global tag if:
                #  it supersedes us OR
                #  mutual supersedes and it has a higher rank
                # otherwise we win because we're tip-most
                an, ah = nh
                bn, bh = globaltags[k]
                if (bn != an and an in bh and
                    (bn not in ah or len(bh) > len(ah))):
                    an = bn
                ah.extend([n for n in bh if n not in ah])
                globaltags[k] = an, ah
                tagtypes[k] = tagtype

        # read the tags file from each head, ending with the tip
        f = None
        for rev, node, fnode in self._hgtagsnodes():
            f = (f and f.filectx(fnode) or
                 self.filectx('.hgtags', fileid=fnode))
            readtags(f.data().splitlines(), f, "global")

        try:
            data = util.fromlocal(self.opener("localtags").read())
            # localtags are stored in the local character set
            # while the internal tag table is stored in UTF-8
            readtags(data.splitlines(), "localtags", "local")
        except IOError:
            pass

        self.tagscache = {}
        self._tagstypecache = {}
        for k, nh in globaltags.items():
            n = nh[0]
            if n != nullid:
                self.tagscache[k] = n
            self._tagstypecache[k] = tagtypes[k]
        self.tagscache['tip'] = self.changelog.tip()

        return self.tagscache

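    # Editorial example of the ranking rule above: if head A's .hgtags maps
    # "v1" to node X, and a later head B remaps "v1" to node Y while listing
    # X among its old values, B's entry supersedes A's; when neither entry
    # supersedes the other, the copy read last (closest to tip) wins.
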
    def tagtype(self, tagname):
        '''
        return the type of the given tag. result can be:

        'local'  : a local tag
        'global' : a global tag
        None     : tag does not exist
        '''

        self.tags()

        return self._tagstypecache.get(tagname)

    def _hgtagsnodes(self):
        heads = self.heads()
        heads.reverse()
        last = {}
        ret = []
        for node in heads:
            c = self.changectx(node)
            rev = c.rev()
            try:
                fnode = c.filenode('.hgtags')
            except revlog.LookupError:
                continue
            ret.append((rev, node, fnode))
            if fnode in last:
                ret[last[fnode]] = None
            last[fnode] = len(ret) - 1
        return [item for item in ret if item]

    def tagslist(self):
        '''return a list of tags ordered by revision'''
        l = []
        for t, n in self.tags().items():
            try:
                r = self.changelog.rev(n)
            except:
                r = -2 # sort to the beginning of the list if unknown
            l.append((r, t, n))
        l.sort()
        return [(t, n) for r, t, n in l]

    def nodetags(self, node):
        '''return the tags associated with a node'''
        if not self.nodetagscache:
            self.nodetagscache = {}
            for t, n in self.tags().items():
                self.nodetagscache.setdefault(n, []).append(t)
        return self.nodetagscache.get(node, [])

    def _branchtags(self):
        partial, last, lrev = self._readbranchcache()

        tiprev = self.changelog.count() - 1
        if lrev != tiprev:
            self._updatebranchcache(partial, lrev+1, tiprev+1)
            self._writebranchcache(partial, self.changelog.tip(), tiprev)

        return partial

    def branchtags(self):
        if self.branchcache is not None:
            return self.branchcache

        self.branchcache = {} # avoid recursion in changectx
        partial = self._branchtags()

        # the branch cache is stored on disk as UTF-8, but in the local
        # charset internally
        for k, v in partial.items():
            self.branchcache[util.tolocal(k)] = v
        return self.branchcache

    def _readbranchcache(self):
        partial = {}
        try:
            f = self.opener("branch.cache")
            lines = f.read().split('\n')
            f.close()
        except (IOError, OSError):
            return {}, nullid, nullrev

        try:
            last, lrev = lines.pop(0).split(" ", 1)
            last, lrev = bin(last), int(lrev)
            if not (lrev < self.changelog.count() and
                    self.changelog.node(lrev) == last): # sanity check
                # invalidate the cache
                raise ValueError('Invalid branch cache: unknown tip')
            for l in lines:
                if not l: continue
                node, label = l.split(" ", 1)
                partial[label.strip()] = bin(node)
        except (KeyboardInterrupt, util.SignalInterrupt):
            raise
        except Exception, inst:
            if self.ui.debugflag:
                self.ui.warn(str(inst), '\n')
            partial, last, lrev = {}, nullid, nullrev
        return partial, last, lrev

    def _writebranchcache(self, branches, tip, tiprev):
        try:
            f = self.opener("branch.cache", "w", atomictemp=True)
            f.write("%s %s\n" % (hex(tip), tiprev))
            for label, node in branches.iteritems():
                f.write("%s %s\n" % (hex(node), label))
            f.rename()
        except (IOError, OSError):
            pass

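    # For reference (editorial note, derived from the reader/writer above):
    # .hg/branch.cache starts with a "<tip hex> <tip rev>" validation line,
    # followed by one "<node hex> <branch name>" line per branch. With
    # obviously made-up hashes:
    #
    #   0123456789abcdef0123456789abcdef01234567 42
    #   0123456789abcdef0123456789abcdef01234567 default
    #   89abcdef0123456789abcdef0123456789abcdef stable
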
    def _updatebranchcache(self, partial, start, end):
        for r in xrange(start, end):
            c = self.changectx(r)
            b = c.branch()
            partial[b] = c.node()

    def lookup(self, key):
        if key == '.':
            key, second = self.dirstate.parents()
            if key == nullid:
                raise repo.RepoError(_("no revision checked out"))
            if second != nullid:
                self.ui.warn(_("warning: working directory has two parents, "
                               "tag '.' uses the first\n"))
        elif key == 'null':
            return nullid
        n = self.changelog._match(key)
        if n:
            return n
        if key in self.tags():
            return self.tags()[key]
        if key in self.branchtags():
            return self.branchtags()[key]
        n = self.changelog._partialmatch(key)
        if n:
            return n
        try:
            if len(key) == 20:
                key = hex(key)
        except:
            pass
        raise repo.RepoError(_("unknown revision '%s'") % key)

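    # Editorial summary of the resolution order implemented above: '.' and
    # 'null' are handled specially, then exact rev numbers or full nodes
    # (changelog._match), then tag names, then branch names, and finally
    # unambiguous hex prefixes (changelog._partialmatch). So, for instance,
    # repo.lookup('tip') resolves through the tag table.
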
    def dev(self):
        return os.lstat(self.path).st_dev

    def local(self):
        return True

    def join(self, f):
        return os.path.join(self.path, f)

    def sjoin(self, f):
        f = self.encodefn(f)
        return os.path.join(self.spath, f)

    def wjoin(self, f):
        return os.path.join(self.root, f)

    def file(self, f):
        if f[0] == '/':
            f = f[1:]
        return filelog.filelog(self.sopener, f)

    def changectx(self, changeid=None):
        return context.changectx(self, changeid)

    def workingctx(self):
        return context.workingctx(self)

    def parents(self, changeid=None):
        '''
        get list of changectxs for parents of changeid or working directory
        '''
        if changeid is None:
            pl = self.dirstate.parents()
        else:
            n = self.changelog.lookup(changeid)
            pl = self.changelog.parents(n)
        if pl[1] == nullid:
            return [self.changectx(pl[0])]
        return [self.changectx(pl[0]), self.changectx(pl[1])]

    def filectx(self, path, changeid=None, fileid=None):
        """changeid can be a changeset revision, node, or tag.
        fileid can be a file revision or node."""
        return context.filectx(self, path, changeid, fileid)

    def getcwd(self):
        return self.dirstate.getcwd()

    def pathto(self, f, cwd=None):
        return self.dirstate.pathto(f, cwd)

    def wfile(self, f, mode='r'):
        return self.wopener(f, mode)

    def _link(self, f):
        return os.path.islink(self.wjoin(f))

    def _filter(self, filter, filename, data):
        if filter not in self.filterpats:
            l = []
            for pat, cmd in self.ui.configitems(filter):
                mf = util.matcher(self.root, "", [pat], [], [])[1]
                l.append((mf, cmd))
            self.filterpats[filter] = l

        for mf, cmd in self.filterpats[filter]:
            if mf(filename):
                self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
                data = util.filter(data, cmd)
                break

        return data

    def wread(self, filename):
        if self._link(filename):
            data = os.readlink(self.wjoin(filename))
        else:
            data = self.wopener(filename, 'r').read()
        return self._filter("encode", filename, data)

    def wwrite(self, filename, data, flags):
        data = self._filter("decode", filename, data)
        try:
            os.unlink(self.wjoin(filename))
        except OSError:
            pass
        self.wopener(filename, 'w').write(data)
        util.set_flags(self.wjoin(filename), flags)

    def wwritedata(self, filename, data):
        return self._filter("decode", filename, data)

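    # Hypothetical hgrc snippet (editorial illustration of the filter lookup
    # above; the patterns and commands are made up): entries under [encode]
    # map a file pattern to a shell command applied when reading working
    # files into the repository (wread), and [decode] entries apply on the
    # way back out (wwrite), via util.filter().
    #
    #   [encode]
    #   **.txt = dos2unix
    #   [decode]
    #   **.txt = unix2dos
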
    def transaction(self):
        if self._transref and self._transref():
            return self._transref().nest()

        # abort here if the journal already exists
        if os.path.exists(self.sjoin("journal")):
            raise repo.RepoError(_("journal already exists - run hg recover"))

        # save dirstate for rollback
        try:
            ds = self.opener("dirstate").read()
        except IOError:
            ds = ""
        self.opener("journal.dirstate", "w").write(ds)
        self.opener("journal.branch", "w").write(self.dirstate.branch())

        renames = [(self.sjoin("journal"), self.sjoin("undo")),
                   (self.join("journal.dirstate"), self.join("undo.dirstate")),
                   (self.join("journal.branch"), self.join("undo.branch"))]
        tr = transaction.transaction(self.ui.warn, self.sopener,
                                     self.sjoin("journal"),
                                     aftertrans(renames))
        self._transref = weakref.ref(tr)
        return tr

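    # Editorial note on the rename list above: while a transaction is open,
    # pending state lives in the "journal*" files; aftertrans(renames) runs
    # when the transaction closes successfully, turning them into the
    # "undo*" files that rollback() below consumes. An interrupted
    # transaction leaves the journal behind for recover().
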
    def recover(self):
        l = self.lock()
        try:
            if os.path.exists(self.sjoin("journal")):
                self.ui.status(_("rolling back interrupted transaction\n"))
                transaction.rollback(self.sopener, self.sjoin("journal"))
                self.invalidate()
                return True
            else:
                self.ui.warn(_("no interrupted transaction available\n"))
                return False
        finally:
            del l

    def rollback(self):
        wlock = lock = None
        try:
            wlock = self.wlock()
            lock = self.lock()
            if os.path.exists(self.sjoin("undo")):
                self.ui.status(_("rolling back last transaction\n"))
                transaction.rollback(self.sopener, self.sjoin("undo"))
                util.rename(self.join("undo.dirstate"), self.join("dirstate"))
                branch = self.opener("undo.branch").read()
                self.dirstate.setbranch(branch)
                self.invalidate()
                self.dirstate.invalidate()
            else:
                self.ui.warn(_("no rollback information available\n"))
        finally:
            del lock, wlock

    def invalidate(self):
        for a in "changelog manifest".split():
            if hasattr(self, a):
                self.__delattr__(a)
        self.tagscache = None
        self._tagstypecache = None
        self.nodetagscache = None

    def _lock(self, lockname, wait, releasefn, acquirefn, desc):
        try:
            l = lock.lock(lockname, 0, releasefn, desc=desc)
        except lock.LockHeld, inst:
            if not wait:
                raise
            self.ui.warn(_("waiting for lock on %s held by %r\n") %
                         (desc, inst.locker))
            # default to 600 seconds timeout
            l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
                          releasefn, desc=desc)
        if acquirefn:
            acquirefn()
        return l

    def lock(self, wait=True):
        if self._lockref and self._lockref():
            return self._lockref()

        l = self._lock(self.sjoin("lock"), wait, None, self.invalidate,
                       _('repository %s') % self.origroot)
        self._lockref = weakref.ref(l)
        return l

    def wlock(self, wait=True):
        if self._wlockref and self._wlockref():
            return self._wlockref()

        l = self._lock(self.join("wlock"), wait, self.dirstate.write,
                       self.dirstate.invalidate, _('working directory of %s') %
                       self.origroot)
        self._wlockref = weakref.ref(l)
        return l

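    # Editorial note: both lock methods keep only a weak reference to the
    # lock object, so re-entrant callers share the live lock while someone
    # still holds it, and dropping the last strong reference (e.g. the
    # "del wlock" statements in the methods further down) releases it.
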
    def filecommit(self, fn, manifest1, manifest2, linkrev, tr, changelist):
        """
        commit an individual file as part of a larger transaction
        """

        t = self.wread(fn)
        fl = self.file(fn)
        fp1 = manifest1.get(fn, nullid)
        fp2 = manifest2.get(fn, nullid)

        meta = {}
        cp = self.dirstate.copied(fn)
        if cp:
            # Mark the new revision of this file as a copy of another
            # file.  This copy data will effectively act as a parent
            # of this new revision.  If this is a merge, the first
            # parent will be the nullid (meaning "look up the copy data")
            # and the second one will be the other parent.  For example:
            #
            # 0 --- 1 --- 3   rev1 changes file foo
            #   \       /     rev2 renames foo to bar and changes it
            #    \- 2 -/      rev3 should have bar with all changes and
            #                      should record that bar descends from
            #                      bar in rev2 and foo in rev1
            #
            # this allows this merge to succeed:
            #
            # 0 --- 1 --- 3   rev4 reverts the content change from rev2
            #   \       /     merging rev3 and rev4 should use bar@rev2
            #    \- 2 --- 4        as the merge base
            #
            meta["copy"] = cp
            if not manifest2: # not a branch merge
                meta["copyrev"] = hex(manifest1.get(cp, nullid))
                fp2 = nullid
            elif fp2 != nullid: # copied on remote side
                meta["copyrev"] = hex(manifest1.get(cp, nullid))
            elif fp1 != nullid: # copied on local side, reversed
                meta["copyrev"] = hex(manifest2.get(cp))
                fp2 = fp1
            elif cp in manifest2: # directory rename on local side
                meta["copyrev"] = hex(manifest2[cp])
            else: # directory rename on remote side
                meta["copyrev"] = hex(manifest1.get(cp, nullid))
            self.ui.debug(_(" %s: copy %s:%s\n") %
                          (fn, cp, meta["copyrev"]))
            fp1 = nullid
        elif fp2 != nullid:
            # is one parent an ancestor of the other?
            fpa = fl.ancestor(fp1, fp2)
            if fpa == fp1:
                fp1, fp2 = fp2, nullid
            elif fpa == fp2:
                fp2 = nullid

        # is the file unmodified from the parent? report existing entry
        if fp2 == nullid and not fl.cmp(fp1, t) and not meta:
            return fp1

        changelist.append(fn)
        return fl.add(t, meta, tr, linkrev, fp1, fp2)

    def rawcommit(self, files, text, user, date, p1=None, p2=None, extra={}):
        if p1 is None:
            p1, p2 = self.dirstate.parents()
        return self.commit(files=files, text=text, user=user, date=date,
                           p1=p1, p2=p2, extra=extra, empty_ok=True)

    def commit(self, files=None, text="", user=None, date=None,
               match=util.always, force=False, force_editor=False,
               p1=None, p2=None, extra={}, empty_ok=False):
        wlock = lock = tr = None
        valid = 0 # don't save the dirstate if this isn't set
        if files:
            files = util.unique(files)
        try:
            commit = []
            remove = []
            changed = []
            use_dirstate = (p1 is None) # not rawcommit
            extra = extra.copy()

            if use_dirstate:
                if files:
                    for f in files:
                        s = self.dirstate[f]
                        if s in 'nma':
                            commit.append(f)
                        elif s == 'r':
                            remove.append(f)
                        else:
                            self.ui.warn(_("%s not tracked!\n") % f)
                else:
                    changes = self.status(match=match)[:5]
                    modified, added, removed, deleted, unknown = changes
                    commit = modified + added
                    remove = removed
            else:
                commit = files

            if use_dirstate:
                p1, p2 = self.dirstate.parents()
                update_dirstate = True
            else:
                p1, p2 = p1, p2 or nullid
                update_dirstate = (self.dirstate.parents()[0] == p1)

            c1 = self.changelog.read(p1)
            c2 = self.changelog.read(p2)
            m1 = self.manifest.read(c1[0]).copy()
            m2 = self.manifest.read(c2[0])

            if use_dirstate:
                branchname = self.workingctx().branch()
                try:
                    branchname = branchname.decode('UTF-8').encode('UTF-8')
                except UnicodeDecodeError:
                    raise util.Abort(_('branch name not in UTF-8!'))
            else:
                branchname = ""

            if use_dirstate:
                oldname = c1[5].get("branch") # stored in UTF-8
                if (not commit and not remove and not force and p2 == nullid
                    and branchname == oldname):
                    self.ui.status(_("nothing changed\n"))
                    return None

            xp1 = hex(p1)
            if p2 == nullid: xp2 = ''
            else: xp2 = hex(p2)

            self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)

            wlock = self.wlock()
            lock = self.lock()
            tr = self.transaction()
            trp = weakref.proxy(tr)

            # check in files
            new = {}
            linkrev = self.changelog.count()
            commit.sort()
            is_exec = util.execfunc(self.root, m1.execf)
            is_link = util.linkfunc(self.root, m1.linkf)
            for f in commit:
                self.ui.note(f + "\n")
                try:
                    new[f] = self.filecommit(f, m1, m2, linkrev, trp, changed)
                    new_exec = is_exec(f)
                    new_link = is_link(f)
                    if ((not changed or changed[-1] != f) and
                        m2.get(f) != new[f]):
                        # mention the file in the changelog if some
                        # flag changed, even if there was no content
                        # change.
                        old_exec = m1.execf(f)
                        old_link = m1.linkf(f)
                        if old_exec != new_exec or old_link != new_link:
                            changed.append(f)
                    m1.set(f, new_exec, new_link)
                    if use_dirstate:
                        self.dirstate.normal(f)

                except (OSError, IOError):
                    if use_dirstate:
                        self.ui.warn(_("trouble committing %s!\n") % f)
                        raise
                    else:
                        remove.append(f)

            # update manifest
            m1.update(new)
            remove.sort()
            removed = []

            for f in remove:
                if f in m1:
                    del m1[f]
                    removed.append(f)
                elif f in m2:
                    removed.append(f)
            mn = self.manifest.add(m1, trp, linkrev, c1[0], c2[0],
                                   (new, removed))

            # add changeset
            new = new.keys()
            new.sort()

            user = user or self.ui.username()
            if (not empty_ok and not text) or force_editor:
                edittext = []
                if text:
                    edittext.append(text)
                edittext.append("")
                edittext.append(_("HG: Enter commit message."
                                  " Lines beginning with 'HG:' are removed."))
                edittext.append("HG: --")
                edittext.append("HG: user: %s" % user)
                if p2 != nullid:
                    edittext.append("HG: branch merge")
                if branchname:
                    edittext.append("HG: branch '%s'" % util.tolocal(branchname))
                edittext.extend(["HG: changed %s" % f for f in changed])
                edittext.extend(["HG: removed %s" % f for f in removed])
                if not changed and not remove:
                    edittext.append("HG: no files changed")
                edittext.append("")
                # run editor in the repository root
                olddir = os.getcwd()
                os.chdir(self.root)
                text = self.ui.edit("\n".join(edittext), user)
                os.chdir(olddir)

            if branchname:
                extra["branch"] = branchname

            if use_dirstate:
                lines = [line.rstrip() for line in text.rstrip().splitlines()]
                while lines and not lines[0]:
                    del lines[0]
                if not lines:
                    raise util.Abort(_("empty commit message"))
                text = '\n'.join(lines)

            n = self.changelog.add(mn, changed + removed, text, trp, p1, p2,
                                   user, date, extra)
            self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
                      parent2=xp2)
            tr.close()

            if self.branchcache and "branch" in extra:
                self.branchcache[util.tolocal(extra["branch"])] = n

            if use_dirstate or update_dirstate:
                self.dirstate.setparents(n)
                if use_dirstate:
                    for f in removed:
                        self.dirstate.forget(f)
            valid = 1 # our dirstate updates are complete

            self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
            return n
        finally:
            if not valid: # don't save our updated dirstate
                self.dirstate.invalidate()
            del tr, lock, wlock

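    # Editorial sketch of the flow above: a commit fires the "precommit"
    # hook, takes wlock/lock and opens a transaction, checks each file in
    # via filecommit(), adds a manifest revision, then a changelog revision,
    # fires "pretxncommit" (which may still abort), closes the transaction,
    # and finally updates the dirstate and fires the "commit" hook.
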
    def walk(self, node=None, files=[], match=util.always, badmatch=None):
        '''
        walk recursively through the directory tree or a given
        changeset, finding all files matched by the match
        function

        results are yielded in a tuple (src, filename), where src
        is one of:
        'f' the file was found in the directory tree
        'm' the file was only in the dirstate and not in the tree
        'b' file was not found and matched badmatch
        '''

        if node:
            fdict = dict.fromkeys(files)
            # for dirstate.walk, files=['.'] means "walk the whole tree".
            # follow that here, too
            fdict.pop('.', None)
            mdict = self.manifest.read(self.changelog.read(node)[0])
            mfiles = mdict.keys()
            mfiles.sort()
            for fn in mfiles:
                for ffn in fdict:
                    # match if the file is the exact name or a directory
                    if ffn == fn or fn.startswith("%s/" % ffn):
                        del fdict[ffn]
                        break
                if match(fn):
                    yield 'm', fn
            ffiles = fdict.keys()
            ffiles.sort()
            for fn in ffiles:
                if badmatch and badmatch(fn):
                    if match(fn):
                        yield 'b', fn
                else:
                    self.ui.warn(_('%s: No such file in rev %s\n')
                                 % (self.pathto(fn), short(node)))
        else:
            for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
                yield src, fn

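    # Illustrative use of the generator above (editorial sketch): walking
    # the working directory and printing each (src, filename) pair.
    #
    #   for src, fn in repo.walk():
    #       print src, fn   # e.g. "f setup.py" for a file found on disk
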
    def status(self, node1=None, node2=None, files=[], match=util.always,
               list_ignored=False, list_clean=False):
        """return status of files between two nodes or node and working directory

        If node1 is None, use the first dirstate parent instead.
        If node2 is None, compare node1 with working directory.
        """

        def fcmp(fn, getnode):
            t1 = self.wread(fn)
            return self.file(fn).cmp(getnode(fn), t1)

        def mfmatches(node):
            change = self.changelog.read(node)
            mf = self.manifest.read(change[0]).copy()
            for fn in mf.keys():
                if not match(fn):
                    del mf[fn]
            return mf

        modified, added, removed, deleted, unknown = [], [], [], [], []
        ignored, clean = [], []

        compareworking = False
        if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
            compareworking = True

        if not compareworking:
            # read the manifest from node1 before the manifest from node2,
            # so that we'll hit the manifest cache if we're going through
            # all the revisions in parent->child order.
            mf1 = mfmatches(node1)

        # are we comparing the working directory?
        if not node2:
            (lookup, modified, added, removed, deleted, unknown,
             ignored, clean) = self.dirstate.status(files, match,
                                                    list_ignored, list_clean)

            # are we comparing working dir against its parent?
            if compareworking:
                if lookup:
                    fixup = []
                    # do a full compare of any files that might have changed
                    ctx = self.changectx()
                    for f in lookup:
                        if f not in ctx or ctx[f].cmp(self.wread(f)):
                            modified.append(f)
                        else:
                            fixup.append(f)
                            if list_clean:
                                clean.append(f)

                    # update dirstate for files that are actually clean
                    if fixup:
                        wlock = None
                        try:
                            try:
                                wlock = self.wlock(False)
                            except lock.LockException:
                                pass
                            if wlock:
                                for f in fixup:
                                    self.dirstate.normal(f)
                        finally:
                            del wlock
            else:
                # we are comparing working dir against non-parent
                # generate a pseudo-manifest for the working dir
                # XXX: create it in dirstate.py ?
                mf2 = mfmatches(self.dirstate.parents()[0])
                is_exec = util.execfunc(self.root, mf2.execf)
                is_link = util.linkfunc(self.root, mf2.linkf)
                for f in lookup + modified + added:
                    mf2[f] = ""
                    mf2.set(f, is_exec(f), is_link(f))
                for f in removed:
                    if f in mf2:
                        del mf2[f]

        else:
            # we are comparing two revisions
            mf2 = mfmatches(node2)

        if not compareworking:
            # flush lists from dirstate before comparing manifests
            modified, added, clean = [], [], []

            # make sure to sort the files so we talk to the disk in a
            # reasonable order
            mf2keys = mf2.keys()
            mf2keys.sort()
            getnode = lambda fn: mf1.get(fn, nullid)
            for fn in mf2keys:
                if fn in mf1:
                    if (mf1.flags(fn) != mf2.flags(fn) or
                        (mf1[fn] != mf2[fn] and
                         (mf2[fn] != "" or fcmp(fn, getnode)))):
                        modified.append(fn)
                    elif list_clean:
                        clean.append(fn)
                    del mf1[fn]
                else:
                    added.append(fn)

            removed = mf1.keys()

        # sort and return results:
        for l in modified, added, removed, deleted, unknown, ignored, clean:
            l.sort()
        return (modified, added, removed, deleted, unknown, ignored, clean)

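    # Editorial usage sketch: the tuple order above matches what hg status
    # reports, so a caller comparing the working directory against its first
    # parent can unpack the result as
    #
    #   (modified, added, removed, deleted,
    #    unknown, ignored, clean) = repo.status(list_clean=True)
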
    def add(self, list):
        wlock = self.wlock()
        try:
            rejected = []
            for f in list:
                p = self.wjoin(f)
                try:
                    st = os.lstat(p)
                except:
                    self.ui.warn(_("%s does not exist!\n") % f)
                    rejected.append(f)
                    continue
                if st.st_size > 10000000:
                    self.ui.warn(_("%s: files over 10MB may cause memory and"
                                   " performance problems\n"
                                   "(use 'hg revert %s' to unadd the file)\n")
                                   % (f, f))
                if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
                    self.ui.warn(_("%s not added: only files and symlinks "
                                   "supported currently\n") % f)
                    # append the relative name, consistent with the other
                    # rejection branch above
                    rejected.append(f)
                elif self.dirstate[f] in 'amn':
                    self.ui.warn(_("%s already tracked!\n") % f)
                elif self.dirstate[f] == 'r':
                    self.dirstate.normallookup(f)
                else:
                    self.dirstate.add(f)
            return rejected
        finally:
            del wlock

    def forget(self, list):
        wlock = self.wlock()
        try:
            for f in list:
                if self.dirstate[f] != 'a':
                    self.ui.warn(_("%s not added!\n") % f)
                else:
                    self.dirstate.forget(f)
        finally:
            del wlock

    def remove(self, list, unlink=False):
        wlock = None
        try:
            if unlink:
                for f in list:
                    try:
                        util.unlink(self.wjoin(f))
                    except OSError, inst:
                        if inst.errno != errno.ENOENT:
                            raise
            wlock = self.wlock()
            for f in list:
                if unlink and os.path.exists(self.wjoin(f)):
                    self.ui.warn(_("%s still exists!\n") % f)
                elif self.dirstate[f] == 'a':
                    self.dirstate.forget(f)
                elif f not in self.dirstate:
                    self.ui.warn(_("%s not tracked!\n") % f)
                else:
                    self.dirstate.remove(f)
        finally:
            del wlock

    def undelete(self, list):
        wlock = None
        try:
            manifests = [self.manifest.read(self.changelog.read(p)[0])
                         for p in self.dirstate.parents() if p != nullid]
            wlock = self.wlock()
            for f in list:
                if self.dirstate[f] != 'r':
                    self.ui.warn(_("%s not removed!\n") % f)
                else:
                    m = f in manifests[0] and manifests[0] or manifests[1]
                    t = self.file(f).read(m[f])
                    self.wwrite(f, t, m.flags(f))
                    self.dirstate.normal(f)
        finally:
            del wlock

    def copy(self, source, dest):
        wlock = None
        try:
            p = self.wjoin(dest)
            if not (os.path.exists(p) or os.path.islink(p)):
                self.ui.warn(_("%s does not exist!\n") % dest)
            elif not (os.path.isfile(p) or os.path.islink(p)):
                self.ui.warn(_("copy failed: %s is not a file or a "
                               "symbolic link\n") % dest)
            else:
                wlock = self.wlock()
                if dest not in self.dirstate:
                    self.dirstate.add(dest)
                self.dirstate.copy(source, dest)
        finally:
            del wlock

    def heads(self, start=None):
        heads = self.changelog.heads(start)
        # sort the output in rev descending order
        heads = [(-self.changelog.rev(h), h) for h in heads]
        heads.sort()
        return [n for (r, n) in heads]

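The negate-sort-strip idiom above gets a descending order out of a plain ascending sort. A minimal standalone sketch of the same trick, on toy integer data (none of these names are Mercurial API):

    def sort_desc(revs):
        # decorate with the negated key, sort ascending, strip the key
        decorated = [(-r, r) for r in revs]
        decorated.sort()
        return [r for (neg, r) in decorated]

    assert sort_desc([3, 1, 2]) == [3, 2, 1]
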
    def branchheads(self, branch, start=None):
        branches = self.branchtags()
        if branch not in branches:
            return []
        # The basic algorithm is this:
        #
        # Start from the branch tip since there are no later revisions that can
        # possibly be in this branch, and the tip is a guaranteed head.
        #
        # Remember the tip's parents as the first ancestors, since these by
        # definition are not heads.
        #
        # Step backwards from the branch tip through all the revisions. We are
        # guaranteed by the rules of Mercurial that we will now be visiting the
        # nodes in reverse topological order (children before parents).
        #
        # If a revision is one of the ancestors of a head then we can toss it
        # out of the ancestors set (we've already found it and won't be
        # visiting it again) and put its parents in the ancestors set.
        #
        # Otherwise, if a revision is in the branch it's another head, since it
        # wasn't in the ancestor list of an existing head. So add it to the
        # head list, and add its parents to the ancestor list.
        #
        # If it is not in the branch, ignore it.
        #
        # Once we have a list of heads, use nodesbetween to filter out all the
        # heads that cannot be reached from startrev. There may be a more
        # efficient way to do this as part of the previous algorithm.

        set = util.set
        heads = [self.changelog.rev(branches[branch])]
        # Don't care if ancestors contains nullrev or not.
        ancestors = set(self.changelog.parentrevs(heads[0]))
        for rev in xrange(heads[0] - 1, nullrev, -1):
            if rev in ancestors:
                ancestors.update(self.changelog.parentrevs(rev))
                ancestors.remove(rev)
            elif self.changectx(rev).branch() == branch:
                heads.append(rev)
                ancestors.update(self.changelog.parentrevs(rev))
        heads = [self.changelog.node(rev) for rev in heads]
        if start is not None:
            heads = self.changelog.nodesbetween([start], heads)[2]
        return heads

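A toy model of the walk above, with integer revisions, a parent table, and a branch map standing in for the changelog (all names here are illustrative assumptions, not Mercurial API):

    def toy_branchheads(parents, branch_of, tip):
        heads = [tip]
        ancestors = set(parents[tip])
        for rev in range(tip - 1, -1, -1):
            if rev in ancestors:
                # known non-head: replace it with its parents
                ancestors.update(parents[rev])
                ancestors.discard(rev)
            elif branch_of[rev] == branch_of[tip]:
                # same branch, not an ancestor of a head: a new head
                heads.append(rev)
                ancestors.update(parents[rev])
        return heads

    # 2 and 3 are sibling children of 1, so both are heads
    parents = {0: (), 1: (0,), 2: (1,), 3: (1,)}
    branch_of = dict((r, 'default') for r in parents)
    assert toy_branchheads(parents, branch_of, 3) == [3, 2]
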
    def branches(self, nodes):
        if not nodes:
            nodes = [self.changelog.tip()]
        b = []
        for n in nodes:
            t = n
            while 1:
                p = self.changelog.parents(n)
                if p[1] != nullid or p[0] == nullid:
                    b.append((t, n, p[0], p[1]))
                    break
                n = p[0]
        return b

    def between(self, pairs):
        r = []

        for top, bottom in pairs:
            n, l, i = top, [], 0
            f = 1

            while n != bottom:
                p = self.changelog.parents(n)[0]
                if i == f:
                    l.append(n)
                    f = f * 2
                n = p
                i += 1

            r.append(l)

        return r

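between collects waypoints at power-of-two distances below each top (i = 1, 2, 4, 8, ...), so every pair contributes O(log n) nodes for the discovery search in findincoming. A standalone sketch over a simple linked chain (toy data, not the real node type):

    def toy_between(parent, top, bottom):
        n, l, i, f = top, [], 0, 1
        while n != bottom:
            if i == f:
                l.append(n)    # waypoint at distance 1, 2, 4, 8, ...
                f *= 2
            n = parent[n]
            i += 1
        return l

    # chain 10 -> 9 -> ... -> 0; waypoints land 1, 2, 4 and 8 steps below 10
    parent = dict((k, k - 1) for k in range(1, 11))
    assert toy_between(parent, 10, 0) == [9, 8, 6, 2]
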
    def findincoming(self, remote, base=None, heads=None, force=False):
        """Return list of roots of the subsets of missing nodes from remote

        If base dict is specified, assume that these nodes and their parents
        exist on the remote side and that no child of a node of base exists
        in both remote and self.
        Furthermore, base will be updated to include the nodes that exist
        in both self and remote but have no children that exist in both.
        If a list of heads is specified, return only nodes which are heads
        or ancestors of these heads.

        All the ancestors of base are in self and in remote.
        All the descendants of the list returned are missing in self.
        (and so we know that the rest of the nodes are missing in remote, see
        outgoing)
        """
        m = self.changelog.nodemap
        search = []
        fetch = {}
        seen = {}
        seenbranch = {}
        if base is None:
            base = {}

        if not heads:
            heads = remote.heads()

        if self.changelog.tip() == nullid:
            base[nullid] = 1
            if heads != [nullid]:
                return [nullid]
            return []

        # assume we're closer to the tip than the root
        # and start by examining the heads
        self.ui.status(_("searching for changes\n"))

        unknown = []
        for h in heads:
            if h not in m:
                unknown.append(h)
            else:
                base[h] = 1

        if not unknown:
            return []

        req = dict.fromkeys(unknown)
        reqcnt = 0

        # search through remote branches
        # a 'branch' here is a linear segment of history, with four parts:
        # head, root, first parent, second parent
        # (a branch always has two parents (or none) by definition)
        unknown = remote.branches(unknown)
        while unknown:
            r = []
            while unknown:
                n = unknown.pop(0)
                if n[0] in seen:
                    continue

                self.ui.debug(_("examining %s:%s\n")
                              % (short(n[0]), short(n[1])))
                if n[0] == nullid: # found the end of the branch
                    pass
                elif n in seenbranch:
                    self.ui.debug(_("branch already found\n"))
                    continue
                elif n[1] and n[1] in m: # do we know the base?
                    self.ui.debug(_("found incomplete branch %s:%s\n")
                                  % (short(n[0]), short(n[1])))
                    search.append(n) # schedule branch range for scanning
                    seenbranch[n] = 1
                else:
                    if n[1] not in seen and n[1] not in fetch:
                        if n[2] in m and n[3] in m:
                            self.ui.debug(_("found new changeset %s\n") %
                                          short(n[1]))
                            fetch[n[1]] = 1 # earliest unknown
                        for p in n[2:4]:
                            if p in m:
                                base[p] = 1 # latest known

                    for p in n[2:4]:
                        if p not in req and p not in m:
                            r.append(p)
                            req[p] = 1
                seen[n[0]] = 1

            if r:
                reqcnt += 1
                self.ui.debug(_("request %d: %s\n") %
                              (reqcnt, " ".join(map(short, r))))
                for p in xrange(0, len(r), 10):
                    for b in remote.branches(r[p:p+10]):
                        self.ui.debug(_("received %s:%s\n") %
                                      (short(b[0]), short(b[1])))
                        unknown.append(b)

        # do binary search on the branches we found
        while search:
            n = search.pop(0)
            reqcnt += 1
            l = remote.between([(n[0], n[1])])[0]
            l.append(n[1])
            p = n[0]
            f = 1
            for i in l:
                self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
                if i in m:
                    if f <= 2:
                        self.ui.debug(_("found new branch changeset %s\n") %
                                      short(p))
                        fetch[p] = 1
                        base[i] = 1
                    else:
                        self.ui.debug(_("narrowed branch search to %s:%s\n")
                                      % (short(p), short(i)))
                        search.append((p, i))
                    break
                p, f = i, f * 2

        # sanity check our fetch list
        for f in fetch.keys():
            if f in m:
                raise repo.RepoError(_("already have changeset ") + short(f[:4]))

        if base.keys() == [nullid]:
            if force:
                self.ui.warn(_("warning: repository is unrelated\n"))
            else:
                raise util.Abort(_("repository is unrelated"))

        self.ui.debug(_("found new changesets starting at ") +
                      " ".join([short(f) for f in fetch]) + "\n")

        self.ui.debug(_("%d total queries\n") % reqcnt)

        return fetch.keys()

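Parent queries above go out in batches of ten (the `r[p:p+10]` slices) to bound the number of round trips per discovery pass. A standalone sketch of that batching pattern (toy data, not Mercurial API):

    def batches(items, size=10):
        # yield fixed-size slices, mirroring the xrange(0, len(r), 10) loop
        for start in range(0, len(items), size):
            yield items[start:start + size]

    assert list(batches(list(range(25)))) == [
        list(range(0, 10)), list(range(10, 20)), list(range(20, 25))]
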
    def findoutgoing(self, remote, base=None, heads=None, force=False):
        """Return list of nodes that are roots of subsets not in remote

        If base dict is specified, assume that these nodes and their parents
        exist on the remote side.
        If a list of heads is specified, return only nodes which are heads
        or ancestors of these heads, and return a second element which
        contains all remote heads which get new children.
        """
        if base is None:
            base = {}
            self.findincoming(remote, base, heads, force=force)

        self.ui.debug(_("common changesets up to ")
                      + " ".join(map(short, base.keys())) + "\n")

        remain = dict.fromkeys(self.changelog.nodemap)

        # prune everything remote has from the tree
        del remain[nullid]
        remove = base.keys()
        while remove:
            n = remove.pop(0)
            if n in remain:
                del remain[n]
                for p in self.changelog.parents(n):
                    remove.append(p)

        # find every node whose parents have been pruned
        subset = []
        # find every remote head that will get new children
        updated_heads = {}
        for n in remain:
            p1, p2 = self.changelog.parents(n)
            if p1 not in remain and p2 not in remain:
                subset.append(n)
            if heads:
                if p1 in heads:
                    updated_heads[p1] = True
                if p2 in heads:
                    updated_heads[p2] = True

        # this is the set of all roots we have to push
        if heads:
            return subset, updated_heads.keys()
        else:
            return subset

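A toy version of the pruning pass above: remove everything reachable from the common nodes, then keep the remaining nodes whose parents were all pruned — those are the roots to push. All names here are illustrative:

    def toy_findoutgoing(parents, all_nodes, common):
        remain = set(all_nodes)
        work = list(common)
        while work:                       # prune common history
            n = work.pop()
            if n in remain:
                remain.discard(n)
                work.extend(parents[n])
        # roots: remaining nodes with no remaining parent
        return sorted(n for n in remain
                      if all(p not in remain for p in parents[n]))

    parents = {0: (), 1: (0,), 2: (1,), 3: (2,)}
    assert toy_findoutgoing(parents, [0, 1, 2, 3], [1]) == [2]
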
    def pull(self, remote, heads=None, force=False):
        lock = self.lock()
        try:
            fetch = self.findincoming(remote, heads=heads, force=force)
            if fetch == [nullid]:
                self.ui.status(_("requesting all changes\n"))

            if not fetch:
                self.ui.status(_("no changes found\n"))
                return 0

            if heads is None:
                cg = remote.changegroup(fetch, 'pull')
            else:
                if 'changegroupsubset' not in remote.capabilities:
                    raise util.Abort(_("Partial pull cannot be done because "
                                       "other repository doesn't support "
                                       "changegroupsubset."))
                cg = remote.changegroupsubset(fetch, heads, 'pull')
            return self.addchangegroup(cg, 'pull', remote.url())
        finally:
            del lock

    def push(self, remote, force=False, revs=None):
        # there are two ways to push to remote repo:
        #
        # addchangegroup assumes local user can lock remote
        # repo (local filesystem, old ssh servers).
        #
        # unbundle assumes local user cannot lock remote repo (new ssh
        # servers, http servers).

        if remote.capable('unbundle'):
            return self.push_unbundle(remote, force, revs)
        return self.push_addchangegroup(remote, force, revs)

    def prepush(self, remote, force, revs):
        base = {}
        remote_heads = remote.heads()
        inc = self.findincoming(remote, base, remote_heads, force=force)

        update, updated_heads = self.findoutgoing(remote, base, remote_heads)
        if revs is not None:
            msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
        else:
            bases, heads = update, self.changelog.heads()

        if not bases:
            self.ui.status(_("no changes found\n"))
            return None, 1
        elif not force:
            # check if we're creating new remote heads
            # to be a remote head after push, node must be either
            # - unknown locally
            # - a local outgoing head descended from update
            # - a remote head that's known locally and not
            #   ancestral to an outgoing head

            warn = 0

            if remote_heads == [nullid]:
                warn = 0
            elif not revs and len(heads) > len(remote_heads):
                warn = 1
            else:
                newheads = list(heads)
                for r in remote_heads:
                    if r in self.changelog.nodemap:
                        desc = self.changelog.heads(r, heads)
                        l = [h for h in heads if h in desc]
                        if not l:
                            newheads.append(r)
                    else:
                        newheads.append(r)
                if len(newheads) > len(remote_heads):
                    warn = 1

            if warn:
                self.ui.warn(_("abort: push creates new remote branches!\n"))
                self.ui.status(_("(did you forget to merge?"
                                 " use push -f to force)\n"))
                return None, 1
        elif inc:
            self.ui.warn(_("note: unsynced remote changes!\n"))

        if revs is None:
            cg = self.changegroup(update, 'push')
        else:
            cg = self.changegroupsubset(update, revs, 'push')
        return cg, remote_heads

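The new-head check above reduces to counting: every remote head that no outgoing local head extends survives, so the push is refused when the prospective head count exceeds the remote's current count. A minimal sketch with a toy descent predicate (hypothetical names, not Mercurial API):

    def toy_creates_new_heads(local_heads, remote_heads, descends):
        # descends(r, h): does local head h descend from remote head r?
        newheads = list(local_heads)
        for r in remote_heads:
            if not any(descends(r, h) for h in local_heads):
                newheads.append(r)   # this remote head is not being extended
        return len(newheads) > len(remote_heads)

    # one remote head 'a'; 'a2' extends it, but 'b' is an unrelated new head
    assert toy_creates_new_heads(['a2', 'b'], ['a'],
                                 lambda r, h: (r, h) == ('a', 'a2'))
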
    def push_addchangegroup(self, remote, force, revs):
        lock = remote.lock()
        try:
            ret = self.prepush(remote, force, revs)
            if ret[0] is not None:
                cg, remote_heads = ret
                return remote.addchangegroup(cg, 'push', self.url())
            return ret[1]
        finally:
            del lock

    def push_unbundle(self, remote, force, revs):
        # local repo finds heads on server, finds out what revs it
        # must push. once revs transferred, if server finds it has
        # different heads (someone else won commit/push race), server
        # aborts.

        ret = self.prepush(remote, force, revs)
        if ret[0] is not None:
            cg, remote_heads = ret
            if force:
                remote_heads = ['force']
            return remote.unbundle(cg, remote_heads, 'push')
        return ret[1]

    def changegroupinfo(self, nodes, source):
        if self.ui.verbose or source == 'bundle':
            self.ui.status(_("%d changesets found\n") % len(nodes))
        if self.ui.debugflag:
            self.ui.debug(_("List of changesets:\n"))
            for node in nodes:
                self.ui.debug("%s\n" % hex(node))

    def changegroupsubset(self, bases, heads, source, extranodes=None):
        """This function generates a changegroup consisting of all the nodes
        that are descendants of any of the bases, and ancestors of any of
        the heads.

        It is fairly complex as determining which filenodes and which
        manifest nodes need to be included for the changeset to be complete
        is non-trivial.

        Another wrinkle is doing the reverse, figuring out which changeset in
        the changegroup a particular filenode or manifestnode belongs to.

        The caller can specify some nodes that must be included in the
        changegroup using the extranodes argument. It should be a dict
        where the keys are the filenames (or 1 for the manifest), and the
        values are lists of (node, linknode) tuples, where node is a wanted
        node and linknode is the changelog node that should be transmitted as
        the linkrev.
        """

        self.hook('preoutgoing', throw=True, source=source)

        # Set up some initial variables
        # Make it easy to refer to self.changelog
        cl = self.changelog
        # msng is short for missing - compute the list of changesets in this
        # changegroup.
        msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
        self.changegroupinfo(msng_cl_lst, source)
        # Some bases may turn out to be superfluous, and some heads may be
        # too. nodesbetween will return the minimal set of bases and heads
        # necessary to re-create the changegroup.

        # Known heads are the list of heads that it is assumed the recipient
        # of this changegroup will know about.
        knownheads = {}
        # We assume that all parents of bases are known heads.
        for n in bases:
            for p in cl.parents(n):
                if p != nullid:
                    knownheads[p] = 1
        knownheads = knownheads.keys()
        if knownheads:
            # Now that we know what heads are known, we can compute which
            # changesets are known. The recipient must know about all
            # changesets required to reach the known heads from the null
            # changeset.
            has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
            junk = None
            # Transform the list into an ersatz set.
            has_cl_set = dict.fromkeys(has_cl_set)
        else:
            # If there were no known heads, the recipient cannot be assumed to
            # know about any changesets.
            has_cl_set = {}

        # Make it easy to refer to self.manifest
        mnfst = self.manifest
        # We don't know which manifests are missing yet
        msng_mnfst_set = {}
        # Nor do we know which filenodes are missing.
        msng_filenode_set = {}

        junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
        junk = None

        # A changeset always belongs to itself, so the changenode lookup
        # function for a changenode is identity.
        def identity(x):
            return x

        # A function generating function. Sets up an environment for the
        # inner function.
        def cmp_by_rev_func(revlog):
            # Compare two nodes by their revision number in the environment's
            # revision history. Since the revision number both represents the
            # most efficient order to read the nodes in, and represents a
            # topological sorting of the nodes, this function is often useful.
            def cmp_by_rev(a, b):
                return cmp(revlog.rev(a), revlog.rev(b))
            return cmp_by_rev

        # If we determine that a particular file or manifest node must be a
        # node that the recipient of the changegroup will already have, we can
        # also assume the recipient will have all the parents. This function
        # prunes them from the set of missing nodes.
        def prune_parents(revlog, hasset, msngset):
            haslst = hasset.keys()
            haslst.sort(cmp_by_rev_func(revlog))
            for node in haslst:
                parentlst = [p for p in revlog.parents(node) if p != nullid]
                while parentlst:
                    n = parentlst.pop()
                    if n not in hasset:
                        hasset[n] = 1
                        p = [p for p in revlog.parents(n) if p != nullid]
                        parentlst.extend(p)
            for n in hasset:
                msngset.pop(n, None)

        # This is a function generating function used to set up an environment
        # for the inner function to execute in.
        def manifest_and_file_collector(changedfileset):
            # This is an information gathering function that gathers
            # information from each changeset node that goes out as part of
            # the changegroup. The information gathered is a list of which
            # manifest nodes are potentially required (the recipient may
            # already have them) and a total list of all files which were
            # changed in any changeset in the changegroup.
            #
            # We also remember the first changenode we saw any manifest
            # referenced by so we can later determine which changenode 'owns'
            # the manifest.
            def collect_manifests_and_files(clnode):
                c = cl.read(clnode)
                for f in c[3]:
                    # This is to make sure we only have one instance of each
                    # filename string for each filename.
                    changedfileset.setdefault(f, f)
                msng_mnfst_set.setdefault(c[0], clnode)
            return collect_manifests_and_files

        # Figure out which manifest nodes (of the ones we think might be part
        # of the changegroup) the recipient must know about and remove them
        # from the changegroup.
        def prune_manifests():
            has_mnfst_set = {}
            for n in msng_mnfst_set:
                # If a 'missing' manifest thinks it belongs to a changenode
                # the recipient is assumed to have, obviously the recipient
                # must have that manifest.
                linknode = cl.node(mnfst.linkrev(n))
                if linknode in has_cl_set:
                    has_mnfst_set[n] = 1
            prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)

        # Use the information collected in collect_manifests_and_files to say
        # which changenode any manifestnode belongs to.
        def lookup_manifest_link(mnfstnode):
            return msng_mnfst_set[mnfstnode]

        # A function generating function that sets up the initial environment
        # for the inner function.
        def filenode_collector(changedfiles):
            next_rev = [0]
            # This gathers information from each manifestnode included in the
            # changegroup about which filenodes the manifest node references
            # so we can include those in the changegroup too.
            #
            # It also remembers which changenode each filenode belongs to. It
            # does this by assuming that a filenode belongs to the changenode
            # the first manifest that references it belongs to.
            def collect_msng_filenodes(mnfstnode):
                r = mnfst.rev(mnfstnode)
                if r == next_rev[0]:
                    # If the last rev we looked at was the one just previous,
                    # we only need to see a diff.
                    deltamf = mnfst.readdelta(mnfstnode)
                    # For each line in the delta
                    for f, fnode in deltamf.items():
                        f = changedfiles.get(f, None)
                        # And if the file is in the list of files we care
                        # about.
                        if f is not None:
                            # Get the changenode this manifest belongs to
                            clnode = msng_mnfst_set[mnfstnode]
                            # Create the set of filenodes for the file if
                            # there isn't one already.
                            ndset = msng_filenode_set.setdefault(f, {})
                            # And set the filenode's changelog node to the
                            # manifest's if it hasn't been set already.
                            ndset.setdefault(fnode, clnode)
                else:
                    # Otherwise we need a full manifest.
                    m = mnfst.read(mnfstnode)
                    # For every file we care about.
                    for f in changedfiles:
                        fnode = m.get(f, None)
                        # If it's in the manifest
                        if fnode is not None:
                            # See comments above.
                            clnode = msng_mnfst_set[mnfstnode]
                            ndset = msng_filenode_set.setdefault(f, {})
                            ndset.setdefault(fnode, clnode)
                # Remember the revision we hope to see next.
                next_rev[0] = r + 1
            return collect_msng_filenodes

        # We have a list of filenodes we think we need for a file, let's remove
        # all those we know the recipient must have.
        def prune_filenodes(f, filerevlog):
            msngset = msng_filenode_set[f]
            hasset = {}
            # If a 'missing' filenode thinks it belongs to a changenode we
            # assume the recipient must have, then the recipient must have
            # that filenode.
            for n in msngset:
                clnode = cl.node(filerevlog.linkrev(n))
                if clnode in has_cl_set:
                    hasset[n] = 1
            prune_parents(filerevlog, hasset, msngset)

        # A function generator function that sets up a context for the
        # inner function.
        def lookup_filenode_link_func(fname):
            msngset = msng_filenode_set[fname]
            # Lookup the changenode the filenode belongs to.
            def lookup_filenode_link(fnode):
                return msngset[fnode]
            return lookup_filenode_link

        # Add the nodes that were explicitly requested.
        def add_extra_nodes(name, nodes):
            if not extranodes or name not in extranodes:
                return

            for node, linknode in extranodes[name]:
                if node not in nodes:
                    nodes[node] = linknode

        # Now that we have all these utility functions to help out and
        # logically divide up the task, generate the group.
        def gengroup():
            # The set of changed files starts empty.
            changedfiles = {}
            # Create a changenode group generator that will call our functions
            # back to lookup the owning changenode and collect information.
            group = cl.group(msng_cl_lst, identity,
                             manifest_and_file_collector(changedfiles))
            for chnk in group:
                yield chnk

            # The list of manifests has been collected by the generator
            # calling our functions back.
            prune_manifests()
            add_extra_nodes(1, msng_mnfst_set)
            msng_mnfst_lst = msng_mnfst_set.keys()
            # Sort the manifestnodes by revision number.
            msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
            # Create a generator for the manifestnodes that calls our lookup
            # and data collection functions back.
            group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
                                filenode_collector(changedfiles))
            for chnk in group:
                yield chnk

            # These are no longer needed, dereference and toss the memory for
            # them.
            msng_mnfst_lst = None
            msng_mnfst_set.clear()

            if extranodes:
                for fname in extranodes:
                    if isinstance(fname, int):
                        continue
                    add_extra_nodes(fname,
                                    msng_filenode_set.setdefault(fname, {}))
                    changedfiles[fname] = 1
            changedfiles = changedfiles.keys()
            changedfiles.sort()
            # Go through all our files in order sorted by name.
            for fname in changedfiles:
                filerevlog = self.file(fname)
                if filerevlog.count() == 0:
                    raise util.Abort(_("empty or missing revlog for %s") % fname)
                # Toss out the filenodes that the recipient isn't really
                # missing.
                if fname in msng_filenode_set:
                    prune_filenodes(fname, filerevlog)
                    msng_filenode_lst = msng_filenode_set[fname].keys()
                else:
                    msng_filenode_lst = []
                # If any filenodes are left, generate the group for them,
                # otherwise don't bother.
                if len(msng_filenode_lst) > 0:
                    yield changegroup.chunkheader(len(fname))
                    yield fname
                    # Sort the filenodes by their revision #
                    msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
                    # Create a group generator and only pass in a changenode
                    # lookup function as we need to collect no information
                    # from filenodes.
                    group = filerevlog.group(msng_filenode_lst,
                                             lookup_filenode_link_func(fname))
                    for chnk in group:
                        yield chnk
                if fname in msng_filenode_set:
                    # Don't need this anymore, toss it to free memory.
                    del msng_filenode_set[fname]
            # Signal that no more groups are left.
            yield changegroup.closechunk()

        if msng_cl_lst:
            self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)

        return util.chunkbuffer(gengroup())

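For the extranodes argument documented above, the shape is a plain dict keyed by filename (or the integer 1 for the manifest). A sketch with placeholder 20-byte node ids (the values here are hypothetical):

    manifest_node = '\x11' * 20   # placeholder node ids; real values are
    linknode      = '\x22' * 20   # 20-byte binary hashes
    file_node     = '\x33' * 20
    extranodes = {
        1:         [(manifest_node, linknode)],   # manifest entries
        'foo.txt': [(file_node, linknode)],       # per-file entries
    }
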
    def changegroup(self, basenodes, source):
        """Generate a changegroup of all nodes that we have that a recipient
        doesn't.

        This is much easier than the previous function as we can assume that
        the recipient has any changenode we aren't sending them."""

        self.hook('preoutgoing', throw=True, source=source)

        cl = self.changelog
        nodes = cl.nodesbetween(basenodes, None)[0]
        revset = dict.fromkeys([cl.rev(n) for n in nodes])
        self.changegroupinfo(nodes, source)

        def identity(x):
            return x

        def gennodelst(revlog):
            for r in xrange(0, revlog.count()):
                n = revlog.node(r)
                if revlog.linkrev(n) in revset:
                    yield n

        def changed_file_collector(changedfileset):
            def collect_changed_files(clnode):
                c = cl.read(clnode)
                for fname in c[3]:
                    changedfileset[fname] = 1
            return collect_changed_files

        def lookuprevlink_func(revlog):
            def lookuprevlink(n):
                return cl.node(revlog.linkrev(n))
            return lookuprevlink

        def gengroup():
            # construct a list of all changed files
            changedfiles = {}

            for chnk in cl.group(nodes, identity,
                                 changed_file_collector(changedfiles)):
                yield chnk
            changedfiles = changedfiles.keys()
            changedfiles.sort()

            mnfst = self.manifest
            nodeiter = gennodelst(mnfst)
            for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
                yield chnk

            for fname in changedfiles:
                filerevlog = self.file(fname)
                if filerevlog.count() == 0:
                    raise util.Abort(_("empty or missing revlog for %s") % fname)
                nodeiter = gennodelst(filerevlog)
                nodeiter = list(nodeiter)
                if nodeiter:
                    yield changegroup.chunkheader(len(fname))
                    yield fname
                    lookup = lookuprevlink_func(filerevlog)
                    for chnk in filerevlog.group(nodeiter, lookup):
                        yield chnk

            yield changegroup.closechunk()

        if nodes:
            self.hook('outgoing', node=hex(nodes[0]), source=source)

        return util.chunkbuffer(gengroup())

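gennodelst above keeps only revlog entries whose linkrev points into the outgoing changeset set. A standalone sketch of that filter (toy data, not Mercurial API):

    def toy_gennodelst(entries, revset):
        # entries: (node, linkrev) pairs in revlog order
        for node, linkrev in entries:
            if linkrev in revset:
                yield node

    entries = [('n0', 0), ('n1', 3), ('n2', 5)]
    assert list(toy_gennodelst(entries, set([3, 5]))) == ['n1', 'n2']
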
1882 def addchangegroup(self, source, srctype, url, emptyok=False):
1882 def addchangegroup(self, source, srctype, url, emptyok=False):
1883 """add changegroup to repo.
1883 """add changegroup to repo.
1884
1884
1885 return values:
1885 return values:
1886 - nothing changed or no source: 0
1886 - nothing changed or no source: 0
1887 - more heads than before: 1+added heads (2..n)
1887 - more heads than before: 1+added heads (2..n)
1888 - less heads than before: -1-removed heads (-2..-n)
1888 - less heads than before: -1-removed heads (-2..-n)
1889 - number of heads stays the same: 1
1889 - number of heads stays the same: 1
1890 """
1890 """
1891 def csmap(x):
1891 def csmap(x):
1892 self.ui.debug(_("add changeset %s\n") % short(x))
1892 self.ui.debug(_("add changeset %s\n") % short(x))
1893 return cl.count()
1893 return cl.count()
1894
1894
1895 def revmap(x):
1895 def revmap(x):
1896 return cl.rev(x)
1896 return cl.rev(x)
1897
1897
1898 if not source:
1898 if not source:
1899 return 0
1899 return 0
1900
1900
1901 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1901 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1902
1902
1903 changesets = files = revisions = 0
1903 changesets = files = revisions = 0
1904
1904
1905 # write changelog data to temp files so concurrent readers will not see
1905 # write changelog data to temp files so concurrent readers will not see
1906 # inconsistent view
1906 # inconsistent view
1907 cl = self.changelog
1907 cl = self.changelog
1908 cl.delayupdate()
1908 cl.delayupdate()
1909 oldheads = len(cl.heads())
1909 oldheads = len(cl.heads())
1910
1910
1911 tr = self.transaction()
1911 tr = self.transaction()
1912 try:
1912 try:
1913 trp = weakref.proxy(tr)
1913 trp = weakref.proxy(tr)
1914 # pull off the changeset group
1914 # pull off the changeset group
1915 self.ui.status(_("adding changesets\n"))
1915 self.ui.status(_("adding changesets\n"))
1916 cor = cl.count() - 1
1916 cor = cl.count() - 1
1917 chunkiter = changegroup.chunkiter(source)
1917 chunkiter = changegroup.chunkiter(source)
1918 if cl.addgroup(chunkiter, csmap, trp, 1) is None and not emptyok:
1918 if cl.addgroup(chunkiter, csmap, trp, 1) is None and not emptyok:
1919 raise util.Abort(_("received changelog group is empty"))
1919 raise util.Abort(_("received changelog group is empty"))
1920 cnr = cl.count() - 1
1920 cnr = cl.count() - 1
1921 changesets = cnr - cor
1921 changesets = cnr - cor
1922
1922
1923 # pull off the manifest group
1923 # pull off the manifest group
1924 self.ui.status(_("adding manifests\n"))
1924 self.ui.status(_("adding manifests\n"))
1925 chunkiter = changegroup.chunkiter(source)
1925 chunkiter = changegroup.chunkiter(source)
1926 # no need to check for empty manifest group here:
1926 # no need to check for empty manifest group here:
1927 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1927 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1928 # no new manifest will be created and the manifest group will
1928 # no new manifest will be created and the manifest group will
1929 # be empty during the pull
1929 # be empty during the pull
1930 self.manifest.addgroup(chunkiter, revmap, trp)
1930 self.manifest.addgroup(chunkiter, revmap, trp)
1931
1931
1932 # process the files
1932 # process the files
1933 self.ui.status(_("adding file changes\n"))
1933 self.ui.status(_("adding file changes\n"))
1934 while 1:
1934 while 1:
1935 f = changegroup.getchunk(source)
1935 f = changegroup.getchunk(source)
1936 if not f:
1936 if not f:
1937 break
1937 break
1938 self.ui.debug(_("adding %s revisions\n") % f)
1938 self.ui.debug(_("adding %s revisions\n") % f)
1939 fl = self.file(f)
1939 fl = self.file(f)
1940 o = fl.count()
1940 o = fl.count()
1941 chunkiter = changegroup.chunkiter(source)
1941 chunkiter = changegroup.chunkiter(source)
1942 if fl.addgroup(chunkiter, revmap, trp) is None:
1942 if fl.addgroup(chunkiter, revmap, trp) is None:
1943 raise util.Abort(_("received file revlog group is empty"))
1943 raise util.Abort(_("received file revlog group is empty"))
1944 revisions += fl.count() - o
1944 revisions += fl.count() - o
1945 files += 1
1945 files += 1
1946
1946
1947 # make changelog see real files again
1947 # make changelog see real files again
1948 cl.finalize(trp)
1948 cl.finalize(trp)
1949
1949
1950 newheads = len(self.changelog.heads())
1950 newheads = len(self.changelog.heads())
1951 heads = ""
1951 heads = ""
1952 if oldheads and newheads != oldheads:
1952 if oldheads and newheads != oldheads:
1953 heads = _(" (%+d heads)") % (newheads - oldheads)
1953 heads = _(" (%+d heads)") % (newheads - oldheads)
1954
1954
1955 self.ui.status(_("added %d changesets"
1955 self.ui.status(_("added %d changesets"
1956 " with %d changes to %d files%s\n")
1956 " with %d changes to %d files%s\n")
1957 % (changesets, revisions, files, heads))
1957 % (changesets, revisions, files, heads))
1958
1958
1959 if changesets > 0:
1959 if changesets > 0:
1960 self.hook('pretxnchangegroup', throw=True,
1960 self.hook('pretxnchangegroup', throw=True,
1961 node=hex(self.changelog.node(cor+1)), source=srctype,
1961 node=hex(self.changelog.node(cor+1)), source=srctype,
1962 url=url)
1962 url=url)
1963
1963
1964 tr.close()
1964 tr.close()
1965 finally:
1965 finally:
1966 del tr
1966 del tr
1967
1967
1968 if changesets > 0:
1968 if changesets > 0:
1969 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1969 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1970 source=srctype, url=url)
1970 source=srctype, url=url)
1971
1971
1972 for i in xrange(cor + 1, cnr + 1):
1972 for i in xrange(cor + 1, cnr + 1):
1973 self.hook("incoming", node=hex(self.changelog.node(i)),
1973 self.hook("incoming", node=hex(self.changelog.node(i)),
1974 source=srctype, url=url)
1974 source=srctype, url=url)
1975
1975
1976 # never return 0 here:
1976 # never return 0 here:
1977 if newheads < oldheads:
1977 if newheads < oldheads:
1978 return newheads - oldheads - 1
1978 return newheads - oldheads - 1
1979 else:
1979 else:
1980 return newheads - oldheads + 1
1980 return newheads - oldheads + 1
1981
1981
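
A minimal sketch of how a caller might decode addchangegroup's return value,
following the docstring above (the summarize helper is hypothetical, not part
of this changeset):

    def summarize(ret):
        # 0: no source, or nothing changed
        if ret == 0:
            return "no changes"
        # positive: ret - 1 heads were added (ret == 1 means the head
        # count stayed the same)
        if ret > 0:
            return "%d heads added" % (ret - 1)
        # negative: -ret - 1 heads were removed
        return "%d heads removed" % (-ret - 1)
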
1982
1982
1983 def stream_in(self, remote):
1983 def stream_in(self, remote):
1984 fp = remote.stream_out()
1984 fp = remote.stream_out()
1985 l = fp.readline()
1985 l = fp.readline()
1986 try:
1986 try:
1987 resp = int(l)
1987 resp = int(l)
1988 except ValueError:
1988 except ValueError:
1989 raise util.UnexpectedOutput(
1989 raise util.UnexpectedOutput(
1990 _('Unexpected response from remote server:'), l)
1990 _('Unexpected response from remote server:'), l)
1991 if resp == 1:
1991 if resp == 1:
1992 raise util.Abort(_('operation forbidden by server'))
1992 raise util.Abort(_('operation forbidden by server'))
1993 elif resp == 2:
1993 elif resp == 2:
1994 raise util.Abort(_('locking the remote repository failed'))
1994 raise util.Abort(_('locking the remote repository failed'))
1995 elif resp != 0:
1995 elif resp != 0:
1996 raise util.Abort(_('the server sent an unknown error code'))
1996 raise util.Abort(_('the server sent an unknown error code'))
1997 self.ui.status(_('streaming all changes\n'))
1997 self.ui.status(_('streaming all changes\n'))
1998 l = fp.readline()
1998 l = fp.readline()
1999 try:
1999 try:
2000 total_files, total_bytes = map(int, l.split(' ', 1))
2000 total_files, total_bytes = map(int, l.split(' ', 1))
2001 except (ValueError, TypeError):
2001 except (ValueError, TypeError):
2002 raise util.UnexpectedOutput(
2002 raise util.UnexpectedOutput(
2003 _('Unexpected response from remote server:'), l)
2003 _('Unexpected response from remote server:'), l)
2004 self.ui.status(_('%d files to transfer, %s of data\n') %
2004 self.ui.status(_('%d files to transfer, %s of data\n') %
2005 (total_files, util.bytecount(total_bytes)))
2005 (total_files, util.bytecount(total_bytes)))
2006 start = time.time()
2006 start = time.time()
2007 for i in xrange(total_files):
2007 for i in xrange(total_files):
2008 # XXX doesn't support '\n' or '\r' in filenames
2008 # XXX doesn't support '\n' or '\r' in filenames
2009 l = fp.readline()
2009 l = fp.readline()
2010 try:
2010 try:
2011 name, size = l.split('\0', 1)
2011 name, size = l.split('\0', 1)
2012 size = int(size)
2012 size = int(size)
2013 except (ValueError, TypeError):
2013 except (ValueError, TypeError):
2014 raise util.UnexpectedOutput(
2014 raise util.UnexpectedOutput(
2015 _('Unexpected response from remote server:'), l)
2015 _('Unexpected response from remote server:'), l)
2016 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
2016 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
2017 ofp = self.sopener(name, 'w')
2017 ofp = self.sopener(name, 'w')
2018 for chunk in util.filechunkiter(fp, limit=size):
2018 for chunk in util.filechunkiter(fp, limit=size):
2019 ofp.write(chunk)
2019 ofp.write(chunk)
2020 ofp.close()
2020 ofp.close()
2021 elapsed = time.time() - start
2021 elapsed = time.time() - start
2022 if elapsed <= 0:
2022 if elapsed <= 0:
2023 elapsed = 0.001
2023 elapsed = 0.001
2024 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2024 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2025 (util.bytecount(total_bytes), elapsed,
2025 (util.bytecount(total_bytes), elapsed,
2026 util.bytecount(total_bytes / elapsed)))
2026 util.bytecount(total_bytes / elapsed)))
2027 self.invalidate()
2027 self.invalidate()
2028 return len(self.heads()) + 1
2028 return len(self.heads()) + 1
2029
2029
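
For reference, the uncompressed-stream layout that stream_in consumes, as
implied by the parsing code above (inferred from this function, not quoted
from a protocol specification):

    # line 1:   "<status>\n"    -- 0 ok, 1 forbidden, 2 remote lock failed
    # line 2:   "<total_files> <total_bytes>\n"
    # per file: "<store path>\0<size>\n", followed by exactly <size> raw bytes
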
2030 def clone(self, remote, heads=[], stream=False):
2030 def clone(self, remote, heads=[], stream=False):
2031 '''clone remote repository.
2031 '''clone remote repository.
2032
2032
2033 keyword arguments:
2033 keyword arguments:
2034 heads: list of revs to clone (forces use of pull)
2034 heads: list of revs to clone (forces use of pull)
2035 stream: use streaming clone if possible'''
2035 stream: use streaming clone if possible'''
2036
2036
2037 # now, all clients that can request uncompressed clones can
2037 # now, all clients that can request uncompressed clones can
2038 # read repo formats supported by all servers that can serve
2038 # read repo formats supported by all servers that can serve
2039 # them.
2039 # them.
2040
2040
2041 # if revlog format changes, client will have to check version
2041 # if revlog format changes, client will have to check version
2042 # and format flags on "stream" capability, and use
2042 # and format flags on "stream" capability, and use
2043 # uncompressed only if compatible.
2043 # uncompressed only if compatible.
2044
2044
2045 if stream and not heads and remote.capable('stream'):
2045 if stream and not heads and remote.capable('stream'):
2046 return self.stream_in(remote)
2046 return self.stream_in(remote)
2047 return self.pull(remote, heads)
2047 return self.pull(remote, heads)
2048
2048
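
A hedged usage sketch of the two code paths above; repo and remote stand in
for an existing localrepository and peer:

    # repo.clone(remote, stream=True)        # stream_in if server is 'stream' capable
    # repo.clone(remote, heads=[some_node])  # explicit heads always force a pull
    # repo.clone(remote)                     # plain pull of everything
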
2049 # used to avoid circular references so destructors work
2049 # used to avoid circular references so destructors work
2050 def aftertrans(files):
2050 def aftertrans(files):
2051 renamefiles = [tuple(t) for t in files]
2051 renamefiles = [tuple(t) for t in files]
2052 def a():
2052 def a():
2053 for src, dest in renamefiles:
2053 for src, dest in renamefiles:
2054 util.rename(src, dest)
2054 util.rename(src, dest)
2055 return a
2055 return a
2056
2056
2057 def instance(ui, path, create):
2057 def instance(ui, path, create):
2058 return localrepository(ui, util.drop_scheme('file', path), create)
2058 return localrepository(ui, util.drop_scheme('file', path), create)
2059
2059
2060 def islocal(path):
2060 def islocal(path):
2061 return True
2061 return True
@@ -1,298 +1,298 b''
1 # templater.py - template expansion for output
1 # templater.py - template expansion for output
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from i18n import _
8 from i18n import _
9 from node import *
9 from node import *
10 import cgi, re, sys, os, time, urllib, util, textwrap
10 import cgi, re, sys, os, time, urllib, util, textwrap
11
11
12 def parsestring(s, quoted=True):
12 def parsestring(s, quoted=True):
13 '''parse a string using simple c-like syntax.
13 '''parse a string using simple c-like syntax.
14 string must be in quotes if quoted is True.'''
14 string must be in quotes if quoted is True.'''
15 if quoted:
15 if quoted:
16 if len(s) < 2 or s[0] != s[-1]:
16 if len(s) < 2 or s[0] != s[-1]:
17 raise SyntaxError(_('unmatched quotes'))
17 raise SyntaxError(_('unmatched quotes'))
18 return s[1:-1].decode('string_escape')
18 return s[1:-1].decode('string_escape')
19
19
20 return s.decode('string_escape')
20 return s.decode('string_escape')
21
21
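
A few illustrative calls (Python 2 semantics, since 'string_escape' only
exists there):

    # parsestring('"a\\tb"')       -> 'a\tb'
    # parsestring('plain', False)  -> 'plain'
    # parsestring('"unbalanced')   -> raises SyntaxError('unmatched quotes')
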
22 class templater(object):
22 class templater(object):
23 '''template expansion engine.
23 '''template expansion engine.
24
24
25 template expansion works like this. a map file contains key=value
25 template expansion works like this. a map file contains key=value
26 pairs. if value is quoted, it is treated as string. otherwise, it
26 pairs. if value is quoted, it is treated as string. otherwise, it
27 is treated as name of template file.
27 is treated as name of template file.
28
28
29 templater is asked to expand a key in map. it looks up key, and
29 templater is asked to expand a key in map. it looks up key, and
30 looks for strings like this: {foo}. it expands {foo} by looking up
30 looks for strings like this: {foo}. it expands {foo} by looking up
31 foo in map, and substituting it. expansion is recursive: it stops
31 foo in map, and substituting it. expansion is recursive: it stops
32 when there is no more {foo} to replace.
32 when there is no more {foo} to replace.
33
33
34 expansion also allows formatting and filtering.
34 expansion also allows formatting and filtering.
35
35
36 format uses key to expand each item in list. syntax is
36 format uses key to expand each item in list. syntax is
37 {key%format}.
37 {key%format}.
38
38
39 filter uses function to transform value. syntax is
39 filter uses function to transform value. syntax is
40 {key|filter1|filter2|...}.'''
40 {key|filter1|filter2|...}.'''
41
41
42 template_re = re.compile(r"(?:(?:#(?=[\w\|%]+#))|(?:{(?=[\w\|%]+})))"
42 template_re = re.compile(r"(?:(?:#(?=[\w\|%]+#))|(?:{(?=[\w\|%]+})))"
43 r"(\w+)(?:(?:%(\w+))|((?:\|\w+)*))[#}]")
43 r"(\w+)(?:(?:%(\w+))|((?:\|\w+)*))[#}]")
44
44
45 def __init__(self, mapfile, filters={}, defaults={}, cache={}):
45 def __init__(self, mapfile, filters={}, defaults={}, cache={}):
46 '''set up template engine.
46 '''set up template engine.
47 mapfile is name of file to read map definitions from.
47 mapfile is name of file to read map definitions from.
48 filters is dict of functions. each transforms a value into another.
48 filters is dict of functions. each transforms a value into another.
49 defaults is dict of default map definitions.'''
49 defaults is dict of default map definitions.'''
50 self.mapfile = mapfile or 'template'
50 self.mapfile = mapfile or 'template'
51 self.cache = cache.copy()
51 self.cache = cache.copy()
52 self.map = {}
52 self.map = {}
53 self.base = (mapfile and os.path.dirname(mapfile)) or ''
53 self.base = (mapfile and os.path.dirname(mapfile)) or ''
54 self.filters = filters
54 self.filters = filters
55 self.defaults = defaults
55 self.defaults = defaults
56
56
57 if not mapfile:
57 if not mapfile:
58 return
58 return
59 i = 0
59 i = 0
60 for l in file(mapfile):
60 for l in file(mapfile):
61 l = l.strip()
61 l = l.strip()
62 i += 1
62 i += 1
63 if not l or l[0] in '#;': continue
63 if not l or l[0] in '#;': continue
64 m = re.match(r'([a-zA-Z_][a-zA-Z0-9_]*)\s*=\s*(.+)$', l)
64 m = re.match(r'([a-zA-Z_][a-zA-Z0-9_]*)\s*=\s*(.+)$', l)
65 if m:
65 if m:
66 key, val = m.groups()
66 key, val = m.groups()
67 if val[0] in "'\"":
67 if val[0] in "'\"":
68 try:
68 try:
69 self.cache[key] = parsestring(val)
69 self.cache[key] = parsestring(val)
70 except SyntaxError, inst:
70 except SyntaxError, inst:
71 raise SyntaxError('%s:%s: %s' %
71 raise SyntaxError('%s:%s: %s' %
72 (mapfile, i, inst.args[0]))
72 (mapfile, i, inst.args[0]))
73 else:
73 else:
74 self.map[key] = os.path.join(self.base, val)
74 self.map[key] = os.path.join(self.base, val)
75 else:
75 else:
76 raise SyntaxError(_("%s:%s: parse error") % (mapfile, i))
76 raise SyntaxError(_("%s:%s: parse error") % (mapfile, i))
77
77
78 def __contains__(self, key):
78 def __contains__(self, key):
79 return key in self.cache or key in self.map
79 return key in self.cache or key in self.map
80
80
81 def __call__(self, t, **map):
81 def __call__(self, t, **map):
82 '''perform expansion.
82 '''perform expansion.
83 t is name of map element to expand.
83 t is name of map element to expand.
84 map supplies extra elements to use during expansion.'''
84 map supplies extra elements to use during expansion.'''
85 if not self.cache.has_key(t):
85 if t not in self.cache:
86 try:
86 try:
87 self.cache[t] = file(self.map[t]).read()
87 self.cache[t] = file(self.map[t]).read()
88 except IOError, inst:
88 except IOError, inst:
89 raise IOError(inst.args[0], _('template file %s: %s') %
89 raise IOError(inst.args[0], _('template file %s: %s') %
90 (self.map[t], inst.args[1]))
90 (self.map[t], inst.args[1]))
91 tmpl = self.cache[t]
91 tmpl = self.cache[t]
92
92
93 while tmpl:
93 while tmpl:
94 m = self.template_re.search(tmpl)
94 m = self.template_re.search(tmpl)
95 if not m:
95 if not m:
96 yield tmpl
96 yield tmpl
97 break
97 break
98
98
99 start, end = m.span(0)
99 start, end = m.span(0)
100 key, format, fl = m.groups()
100 key, format, fl = m.groups()
101
101
102 if start:
102 if start:
103 yield tmpl[:start]
103 yield tmpl[:start]
104 tmpl = tmpl[end:]
104 tmpl = tmpl[end:]
105
105
106 if key in map:
106 if key in map:
107 v = map[key]
107 v = map[key]
108 else:
108 else:
109 v = self.defaults.get(key, "")
109 v = self.defaults.get(key, "")
110 if callable(v):
110 if callable(v):
111 v = v(**map)
111 v = v(**map)
112 if format:
112 if format:
113 if not hasattr(v, '__iter__'):
113 if not hasattr(v, '__iter__'):
114 raise SyntaxError(_("Error expanding '%s%s'")
114 raise SyntaxError(_("Error expanding '%s%s'")
115 % (key, format))
115 % (key, format))
116 lm = map.copy()
116 lm = map.copy()
117 for i in v:
117 for i in v:
118 lm.update(i)
118 lm.update(i)
119 yield self(format, **lm)
119 yield self(format, **lm)
120 else:
120 else:
121 if fl:
121 if fl:
122 for f in fl.split("|")[1:]:
122 for f in fl.split("|")[1:]:
123 v = self.filters[f](v)
123 v = self.filters[f](v)
124 yield v
124 yield v
125
125
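
A toy expansion following the rules in templater's docstring; the map-file
line and keyword values are hypothetical:

    # map file contains:  changeset = 'by {author|person}: {desc|firstline}\n'
    # t = templater('map', filters=common_filters)
    # "".join(t('changeset',
    #           author='Jane Doe <jane@example.com>',
    #           desc='fix bug\n\nlong details'))
    # -> 'by Jane Doe: fix bug\n'
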
126 agescales = [("second", 1),
126 agescales = [("second", 1),
127 ("minute", 60),
127 ("minute", 60),
128 ("hour", 3600),
128 ("hour", 3600),
129 ("day", 3600 * 24),
129 ("day", 3600 * 24),
130 ("week", 3600 * 24 * 7),
130 ("week", 3600 * 24 * 7),
131 ("month", 3600 * 24 * 30),
131 ("month", 3600 * 24 * 30),
132 ("year", 3600 * 24 * 365)]
132 ("year", 3600 * 24 * 365)]
133
133
134 agescales.reverse()
134 agescales.reverse()
135
135
136 def age(date):
136 def age(date):
137 '''turn a (timestamp, tzoff) tuple into an age string.'''
137 '''turn a (timestamp, tzoff) tuple into an age string.'''
138
138
139 def plural(t, c):
139 def plural(t, c):
140 if c == 1:
140 if c == 1:
141 return t
141 return t
142 return t + "s"
142 return t + "s"
143 def fmt(t, c):
143 def fmt(t, c):
144 return "%d %s" % (c, plural(t, c))
144 return "%d %s" % (c, plural(t, c))
145
145
146 now = time.time()
146 now = time.time()
147 then = date[0]
147 then = date[0]
148 delta = max(1, int(now - then))
148 delta = max(1, int(now - then))
149
149
150 for t, s in agescales:
150 for t, s in agescales:
151 n = delta / s
151 n = delta / s
152 if n >= 2 or s == 1:
152 if n >= 2 or s == 1:
153 return fmt(t, n)
153 return fmt(t, n)
154
154
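
Example outputs (hedged, since they depend on the clock at call time); note
that a unit is only chosen once the delta reaches two of it:

    # age((time.time() - 7260, 0))  -> '2 hours'
    # age((time.time() - 90, 0))    -> '90 seconds'  (under two minutes)
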
155 def stringify(thing):
155 def stringify(thing):
156 '''turn nested template iterator into string.'''
156 '''turn nested template iterator into string.'''
157 if hasattr(thing, '__iter__'):
157 if hasattr(thing, '__iter__'):
158 return "".join([stringify(t) for t in thing if t is not None])
158 return "".join([stringify(t) for t in thing if t is not None])
159 return str(thing)
159 return str(thing)
160
160
161 para_re = None
161 para_re = None
162 space_re = None
162 space_re = None
163
163
164 def fill(text, width):
164 def fill(text, width):
165 '''fill many paragraphs.'''
165 '''fill many paragraphs.'''
166 global para_re, space_re
166 global para_re, space_re
167 if para_re is None:
167 if para_re is None:
168 para_re = re.compile('(\n\n|\n\\s*[-*]\\s*)', re.M)
168 para_re = re.compile('(\n\n|\n\\s*[-*]\\s*)', re.M)
169 space_re = re.compile(r' +')
169 space_re = re.compile(r' +')
170
170
171 def findparas():
171 def findparas():
172 start = 0
172 start = 0
173 while True:
173 while True:
174 m = para_re.search(text, start)
174 m = para_re.search(text, start)
175 if not m:
175 if not m:
176 w = len(text)
176 w = len(text)
177 while w > start and text[w-1].isspace(): w -= 1
177 while w > start and text[w-1].isspace(): w -= 1
178 yield text[start:w], text[w:]
178 yield text[start:w], text[w:]
179 break
179 break
180 yield text[start:m.start(0)], m.group(1)
180 yield text[start:m.start(0)], m.group(1)
181 start = m.end(1)
181 start = m.end(1)
182
182
183 return "".join([space_re.sub(' ', textwrap.fill(para, width)) + rest
183 return "".join([space_re.sub(' ', textwrap.fill(para, width)) + rest
184 for para, rest in findparas()])
184 for para, rest in findparas()])
185
185
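
For instance, fill rewraps each paragraph and collapses runs of spaces, but
keeps the paragraph separator intact (a sketch; exact wrapping follows
textwrap):

    # fill('one  two\n\nthree', 10)  -> 'one two\n\nthree'
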
186 def firstline(text):
186 def firstline(text):
187 '''return the first line of text'''
187 '''return the first line of text'''
188 try:
188 try:
189 return text.splitlines(1)[0].rstrip('\r\n')
189 return text.splitlines(1)[0].rstrip('\r\n')
190 except IndexError:
190 except IndexError:
191 return ''
191 return ''
192
192
193 def isodate(date):
193 def isodate(date):
194 '''turn a (timestamp, tzoff) tuple into an iso 8601 date and time.'''
194 '''turn a (timestamp, tzoff) tuple into an iso 8601 date and time.'''
195 return util.datestr(date, format='%Y-%m-%d %H:%M')
195 return util.datestr(date, format='%Y-%m-%d %H:%M')
196
196
197 def hgdate(date):
197 def hgdate(date):
198 '''turn a (timestamp, tzoff) tuple into an hg cset timestamp.'''
198 '''turn a (timestamp, tzoff) tuple into an hg cset timestamp.'''
199 return "%d %d" % date
199 return "%d %d" % date
200
200
201 def nl2br(text):
201 def nl2br(text):
202 '''replace raw newlines with xhtml line breaks.'''
202 '''replace raw newlines with xhtml line breaks.'''
203 return text.replace('\n', '<br/>\n')
203 return text.replace('\n', '<br/>\n')
204
204
205 def obfuscate(text):
205 def obfuscate(text):
206 text = unicode(text, util._encoding, 'replace')
206 text = unicode(text, util._encoding, 'replace')
207 return ''.join(['&#%d;' % ord(c) for c in text])
207 return ''.join(['&#%d;' % ord(c) for c in text])
208
208
209 def domain(author):
209 def domain(author):
210 '''get domain of author, or empty string if none.'''
210 '''get domain of author, or empty string if none.'''
211 f = author.find('@')
211 f = author.find('@')
212 if f == -1: return ''
212 if f == -1: return ''
213 author = author[f+1:]
213 author = author[f+1:]
214 f = author.find('>')
214 f = author.find('>')
215 if f >= 0: author = author[:f]
215 if f >= 0: author = author[:f]
216 return author
216 return author
217
217
218 def email(author):
218 def email(author):
219 '''get email of author.'''
219 '''get email of author.'''
220 r = author.find('>')
220 r = author.find('>')
221 if r == -1: r = None
221 if r == -1: r = None
222 return author[author.find('<')+1:r]
222 return author[author.find('<')+1:r]
223
223
224 def person(author):
224 def person(author):
225 '''get name of author, or else username.'''
225 '''get name of author, or else username.'''
226 f = author.find('<')
226 f = author.find('<')
227 if f == -1: return util.shortuser(author)
227 if f == -1: return util.shortuser(author)
228 return author[:f].rstrip()
228 return author[:f].rstrip()
229
229
230 def shortdate(date):
230 def shortdate(date):
231 '''turn (timestamp, tzoff) tuple into iso 8601 date.'''
231 '''turn (timestamp, tzoff) tuple into iso 8601 date.'''
232 return util.datestr(date, format='%Y-%m-%d', timezone=False)
232 return util.datestr(date, format='%Y-%m-%d', timezone=False)
233
233
234 def indent(text, prefix):
234 def indent(text, prefix):
235 '''indent each non-empty line of text after first with prefix.'''
235 '''indent each non-empty line of text after first with prefix.'''
236 lines = text.splitlines()
236 lines = text.splitlines()
237 num_lines = len(lines)
237 num_lines = len(lines)
238 def indenter():
238 def indenter():
239 for i in xrange(num_lines):
239 for i in xrange(num_lines):
240 l = lines[i]
240 l = lines[i]
241 if i and l.strip():
241 if i and l.strip():
242 yield prefix
242 yield prefix
243 yield l
243 yield l
244 if i < num_lines - 1 or text.endswith('\n'):
244 if i < num_lines - 1 or text.endswith('\n'):
245 yield '\n'
245 yield '\n'
246 return "".join(indenter())
246 return "".join(indenter())
247
247
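
A quick check of the behaviour described in indent's docstring:

    # indent('a\nb\n', '\t')  -> 'a\n\tb\n'  (first line left alone)
    # indent('a\n\nb', '\t')  -> 'a\n\n\tb'  (empty line not prefixed)
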
248 def permissions(flags):
248 def permissions(flags):
249 if "l" in flags:
249 if "l" in flags:
250 return "lrwxrwxrwx"
250 return "lrwxrwxrwx"
251 if "x" in flags:
251 if "x" in flags:
252 return "-rwxr-xr-x"
252 return "-rwxr-xr-x"
253 return "-rw-r--r--"
253 return "-rw-r--r--"
254
254
255 common_filters = {
255 common_filters = {
256 "addbreaks": nl2br,
256 "addbreaks": nl2br,
257 "basename": os.path.basename,
257 "basename": os.path.basename,
258 "age": age,
258 "age": age,
259 "date": lambda x: util.datestr(x),
259 "date": lambda x: util.datestr(x),
260 "domain": domain,
260 "domain": domain,
261 "email": email,
261 "email": email,
262 "escape": lambda x: cgi.escape(x, True),
262 "escape": lambda x: cgi.escape(x, True),
263 "fill68": lambda x: fill(x, width=68),
263 "fill68": lambda x: fill(x, width=68),
264 "fill76": lambda x: fill(x, width=76),
264 "fill76": lambda x: fill(x, width=76),
265 "firstline": firstline,
265 "firstline": firstline,
266 "tabindent": lambda x: indent(x, '\t'),
266 "tabindent": lambda x: indent(x, '\t'),
267 "hgdate": hgdate,
267 "hgdate": hgdate,
268 "isodate": isodate,
268 "isodate": isodate,
269 "obfuscate": obfuscate,
269 "obfuscate": obfuscate,
270 "permissions": permissions,
270 "permissions": permissions,
271 "person": person,
271 "person": person,
272 "rfc822date": lambda x: util.datestr(x, "%a, %d %b %Y %H:%M:%S"),
272 "rfc822date": lambda x: util.datestr(x, "%a, %d %b %Y %H:%M:%S"),
273 "rfc3339date": lambda x: util.datestr(x, "%Y-%m-%dT%H:%M:%S", True, "%+03d:%02d"),
273 "rfc3339date": lambda x: util.datestr(x, "%Y-%m-%dT%H:%M:%S", True, "%+03d:%02d"),
274 "short": lambda x: x[:12],
274 "short": lambda x: x[:12],
275 "shortdate": shortdate,
275 "shortdate": shortdate,
276 "stringify": stringify,
276 "stringify": stringify,
277 "strip": lambda x: x.strip(),
277 "strip": lambda x: x.strip(),
278 "urlescape": lambda x: urllib.quote(x),
278 "urlescape": lambda x: urllib.quote(x),
279 "user": lambda x: util.shortuser(x),
279 "user": lambda x: util.shortuser(x),
280 "stringescape": lambda x: x.encode('string_escape'),
280 "stringescape": lambda x: x.encode('string_escape'),
281 }
281 }
282
282
283 def templatepath(name=None):
283 def templatepath(name=None):
284 '''return location of template file or directory (if no name).
284 '''return location of template file or directory (if no name).
285 returns None if not found.'''
285 returns None if not found.'''
286
286
287 # executable version (py2exe) doesn't support __file__
287 # executable version (py2exe) doesn't support __file__
288 if hasattr(sys, 'frozen'):
288 if hasattr(sys, 'frozen'):
289 module = sys.executable
289 module = sys.executable
290 else:
290 else:
291 module = __file__
291 module = __file__
292 for f in 'templates', '../templates':
292 for f in 'templates', '../templates':
293 fl = f.split('/')
293 fl = f.split('/')
294 if name: fl.append(name)
294 if name: fl.append(name)
295 p = os.path.join(os.path.dirname(module), *fl)
295 p = os.path.join(os.path.dirname(module), *fl)
296 if (name and os.path.exists(p)) or os.path.isdir(p):
296 if (name and os.path.exists(p)) or os.path.isdir(p):
297 return os.path.normpath(p)
297 return os.path.normpath(p)
298
298
@@ -1,1110 +1,1110 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2 #
2 #
3 # Perforce Defect Tracking Integration Project
3 # Perforce Defect Tracking Integration Project
4 # <http://www.ravenbrook.com/project/p4dti/>
4 # <http://www.ravenbrook.com/project/p4dti/>
5 #
5 #
6 # COVERAGE.PY -- COVERAGE TESTING
6 # COVERAGE.PY -- COVERAGE TESTING
7 #
7 #
8 # Gareth Rees, Ravenbrook Limited, 2001-12-04
8 # Gareth Rees, Ravenbrook Limited, 2001-12-04
9 # Ned Batchelder, 2004-12-12
9 # Ned Batchelder, 2004-12-12
10 # http://nedbatchelder.com/code/modules/coverage.html
10 # http://nedbatchelder.com/code/modules/coverage.html
11 #
11 #
12 #
12 #
13 # 1. INTRODUCTION
13 # 1. INTRODUCTION
14 #
14 #
15 # This module provides coverage testing for Python code.
15 # This module provides coverage testing for Python code.
16 #
16 #
17 # The intended readership is all Python developers.
17 # The intended readership is all Python developers.
18 #
18 #
19 # This document is not confidential.
19 # This document is not confidential.
20 #
20 #
21 # See [GDR 2001-12-04a] for the command-line interface, programmatic
21 # See [GDR 2001-12-04a] for the command-line interface, programmatic
22 # interface and limitations. See [GDR 2001-12-04b] for requirements and
22 # interface and limitations. See [GDR 2001-12-04b] for requirements and
23 # design.
23 # design.
24
24
25 r"""Usage:
25 r"""Usage:
26
26
27 coverage.py -x [-p] MODULE.py [ARG1 ARG2 ...]
27 coverage.py -x [-p] MODULE.py [ARG1 ARG2 ...]
28 Execute module, passing the given command-line arguments, collecting
28 Execute module, passing the given command-line arguments, collecting
29 coverage data. With the -p option, write the data to a file whose name
29 coverage data. With the -p option, write the data to a file whose name
30 includes the machine name and process ID.
30 includes the machine name and process ID.
31
31
32 coverage.py -e
32 coverage.py -e
33 Erase collected coverage data.
33 Erase collected coverage data.
34
34
35 coverage.py -c
35 coverage.py -c
36 Collect data from multiple coverage files (as created by -p option above)
36 Collect data from multiple coverage files (as created by -p option above)
37 and store it into a single file representing the union of the coverage.
37 and store it into a single file representing the union of the coverage.
38
38
39 coverage.py -r [-m] [-o dir1,dir2,...] FILE1 FILE2 ...
39 coverage.py -r [-m] [-o dir1,dir2,...] FILE1 FILE2 ...
40 Report on the statement coverage for the given files. With the -m
40 Report on the statement coverage for the given files. With the -m
41 option, show line numbers of the statements that weren't executed.
41 option, show line numbers of the statements that weren't executed.
42
42
43 coverage.py -a [-d dir] [-o dir1,dir2,...] FILE1 FILE2 ...
43 coverage.py -a [-d dir] [-o dir1,dir2,...] FILE1 FILE2 ...
44 Make annotated copies of the given files, marking statements that
44 Make annotated copies of the given files, marking statements that
45 are executed with > and statements that are missed with !. With
45 are executed with > and statements that are missed with !. With
46 the -d option, make the copies in that directory. Without the -d
46 the -d option, make the copies in that directory. Without the -d
47 option, make each copy in the same directory as the original.
47 option, make each copy in the same directory as the original.
48
48
49 -o dir1,dir2,...
49 -o dir1,dir2,...
50 Omit reporting or annotating files when their filename path starts with
50 Omit reporting or annotating files when their filename path starts with
51 a directory listed in the omit list.
51 a directory listed in the omit list.
52 e.g. python coverage.py -i -r -o c:\python23,lib\enthought\traits
52 e.g. python coverage.py -i -r -o c:\python23,lib\enthought\traits
53
53
54 Coverage data is saved in the file .coverage by default. Set the
54 Coverage data is saved in the file .coverage by default. Set the
55 COVERAGE_FILE environment variable to save it somewhere else."""
55 COVERAGE_FILE environment variable to save it somewhere else."""
56
56
57 __version__ = "2.77.20070729" # see detailed history at the end of this file.
57 __version__ = "2.77.20070729" # see detailed history at the end of this file.
58
58
59 import compiler
59 import compiler
60 import compiler.visitor
60 import compiler.visitor
61 import glob
61 import glob
62 import os
62 import os
63 import re
63 import re
64 import string
64 import string
65 import symbol
65 import symbol
66 import sys
66 import sys
67 import threading
67 import threading
68 import token
68 import token
69 import types
69 import types
70 from socket import gethostname
70 from socket import gethostname
71
71
72 # Python version compatibility
72 # Python version compatibility
73 try:
73 try:
74 strclass = basestring # new to 2.3
74 strclass = basestring # new to 2.3
75 except:
75 except:
76 strclass = str
76 strclass = str
77
77
78 # 2. IMPLEMENTATION
78 # 2. IMPLEMENTATION
79 #
79 #
80 # This uses the "singleton" pattern.
80 # This uses the "singleton" pattern.
81 #
81 #
82 # The word "morf" means a module object (from which the source file can
82 # The word "morf" means a module object (from which the source file can
83 # be deduced by suitable manipulation of the __file__ attribute) or a
83 # be deduced by suitable manipulation of the __file__ attribute) or a
84 # filename.
84 # filename.
85 #
85 #
86 # When we generate a coverage report we have to canonicalize every
86 # When we generate a coverage report we have to canonicalize every
87 # filename in the coverage dictionary just in case it refers to the
87 # filename in the coverage dictionary just in case it refers to the
88 # module we are reporting on. It seems a shame to throw away this
88 # module we are reporting on. It seems a shame to throw away this
89 # information so the data in the coverage dictionary is transferred to
89 # information so the data in the coverage dictionary is transferred to
90 # the 'cexecuted' dictionary under the canonical filenames.
90 # the 'cexecuted' dictionary under the canonical filenames.
91 #
91 #
92 # The coverage dictionary is called "c" and the trace function "t". The
92 # The coverage dictionary is called "c" and the trace function "t". The
93 # reason for these short names is that Python looks up variables by name
93 # reason for these short names is that Python looks up variables by name
94 # at runtime and so execution time depends on the length of variables!
94 # at runtime and so execution time depends on the length of variables!
95 # In the bottleneck of this application it's appropriate to abbreviate
95 # In the bottleneck of this application it's appropriate to abbreviate
96 # names to increase speed.
96 # names to increase speed.
97
97
98 class StatementFindingAstVisitor(compiler.visitor.ASTVisitor):
98 class StatementFindingAstVisitor(compiler.visitor.ASTVisitor):
99 """ A visitor for a parsed Abstract Syntax Tree which finds executable
99 """ A visitor for a parsed Abstract Syntax Tree which finds executable
100 statements.
100 statements.
101 """
101 """
102 def __init__(self, statements, excluded, suite_spots):
102 def __init__(self, statements, excluded, suite_spots):
103 compiler.visitor.ASTVisitor.__init__(self)
103 compiler.visitor.ASTVisitor.__init__(self)
104 self.statements = statements
104 self.statements = statements
105 self.excluded = excluded
105 self.excluded = excluded
106 self.suite_spots = suite_spots
106 self.suite_spots = suite_spots
107 self.excluding_suite = 0
107 self.excluding_suite = 0
108
108
109 def doRecursive(self, node):
109 def doRecursive(self, node):
110 for n in node.getChildNodes():
110 for n in node.getChildNodes():
111 self.dispatch(n)
111 self.dispatch(n)
112
112
113 visitStmt = visitModule = doRecursive
113 visitStmt = visitModule = doRecursive
114
114
115 def doCode(self, node):
115 def doCode(self, node):
116 if hasattr(node, 'decorators') and node.decorators:
116 if hasattr(node, 'decorators') and node.decorators:
117 self.dispatch(node.decorators)
117 self.dispatch(node.decorators)
118 self.recordAndDispatch(node.code)
118 self.recordAndDispatch(node.code)
119 else:
119 else:
120 self.doSuite(node, node.code)
120 self.doSuite(node, node.code)
121
121
122 visitFunction = visitClass = doCode
122 visitFunction = visitClass = doCode
123
123
124 def getFirstLine(self, node):
124 def getFirstLine(self, node):
125 # Find the first line in the tree node.
125 # Find the first line in the tree node.
126 lineno = node.lineno
126 lineno = node.lineno
127 for n in node.getChildNodes():
127 for n in node.getChildNodes():
128 f = self.getFirstLine(n)
128 f = self.getFirstLine(n)
129 if lineno and f:
129 if lineno and f:
130 lineno = min(lineno, f)
130 lineno = min(lineno, f)
131 else:
131 else:
132 lineno = lineno or f
132 lineno = lineno or f
133 return lineno
133 return lineno
134
134
135 def getLastLine(self, node):
135 def getLastLine(self, node):
136 # Find the last line in the tree node.
136 # Find the last line in the tree node.
137 lineno = node.lineno
137 lineno = node.lineno
138 for n in node.getChildNodes():
138 for n in node.getChildNodes():
139 lineno = max(lineno, self.getLastLine(n))
139 lineno = max(lineno, self.getLastLine(n))
140 return lineno
140 return lineno
141
141
142 def doStatement(self, node):
142 def doStatement(self, node):
143 self.recordLine(self.getFirstLine(node))
143 self.recordLine(self.getFirstLine(node))
144
144
145 visitAssert = visitAssign = visitAssTuple = visitPrint = \
145 visitAssert = visitAssign = visitAssTuple = visitPrint = \
146 visitPrintnl = visitRaise = visitSubscript = visitDecorators = \
146 visitPrintnl = visitRaise = visitSubscript = visitDecorators = \
147 doStatement
147 doStatement
148
148
149 def visitPass(self, node):
149 def visitPass(self, node):
150 # Pass statements have weird interactions with docstrings. If this
150 # Pass statements have weird interactions with docstrings. If this
151 # pass statement is part of one of those pairs, claim that the statement
151 # pass statement is part of one of those pairs, claim that the statement
152 # is on the later of the two lines.
152 # is on the later of the two lines.
153 l = node.lineno
153 l = node.lineno
154 if l:
154 if l:
155 lines = self.suite_spots.get(l, [l,l])
155 lines = self.suite_spots.get(l, [l,l])
156 self.statements[lines[1]] = 1
156 self.statements[lines[1]] = 1
157
157
158 def visitDiscard(self, node):
158 def visitDiscard(self, node):
159 # Discard nodes are statements that execute an expression, but then
159 # Discard nodes are statements that execute an expression, but then
160 # discard the results. This includes function calls, so we can't
160 # discard the results. This includes function calls, so we can't
161 # ignore them all. But if the expression is a constant, the statement
161 # ignore them all. But if the expression is a constant, the statement
162 # won't be "executed", so don't count it now.
162 # won't be "executed", so don't count it now.
163 if node.expr.__class__.__name__ != 'Const':
163 if node.expr.__class__.__name__ != 'Const':
164 self.doStatement(node)
164 self.doStatement(node)
165
165
166 def recordNodeLine(self, node):
166 def recordNodeLine(self, node):
167 # Stmt nodes often have None, but shouldn't claim the first line of
167 # Stmt nodes often have None, but shouldn't claim the first line of
168 # their children (because the first child might be an ignorable line
168 # their children (because the first child might be an ignorable line
169 # like "global a").
169 # like "global a").
170 if node.__class__.__name__ != 'Stmt':
170 if node.__class__.__name__ != 'Stmt':
171 return self.recordLine(self.getFirstLine(node))
171 return self.recordLine(self.getFirstLine(node))
172 else:
172 else:
173 return 0
173 return 0
174
174
175 def recordLine(self, lineno):
175 def recordLine(self, lineno):
176 # Returns a bool, whether the line is included or excluded.
176 # Returns a bool, whether the line is included or excluded.
177 if lineno:
177 if lineno:
178 # Multi-line tests introducing suites have to get charged to their
178 # Multi-line tests introducing suites have to get charged to their
179 # keyword.
179 # keyword.
180 if lineno in self.suite_spots:
180 if lineno in self.suite_spots:
181 lineno = self.suite_spots[lineno][0]
181 lineno = self.suite_spots[lineno][0]
182 # If we're inside an excluded suite, record that this line was
182 # If we're inside an excluded suite, record that this line was
183 # excluded.
183 # excluded.
184 if self.excluding_suite:
184 if self.excluding_suite:
185 self.excluded[lineno] = 1
185 self.excluded[lineno] = 1
186 return 0
186 return 0
187 # If this line is excluded, or suite_spots maps this line to
187 # If this line is excluded, or suite_spots maps this line to
188 # another line that is excluded, then we're excluded.
188 # another line that is excluded, then we're excluded.
189 elif self.excluded.has_key(lineno) or \
189 elif lineno in self.excluded or \
190 self.suite_spots.has_key(lineno) and \
190 lineno in self.suite_spots and \
191 self.excluded.has_key(self.suite_spots[lineno][1]):
191 self.suite_spots[lineno][1] in self.excluded:
192 return 0
192 return 0
193 # Otherwise, this is an executable line.
193 # Otherwise, this is an executable line.
194 else:
194 else:
195 self.statements[lineno] = 1
195 self.statements[lineno] = 1
196 return 1
196 return 1
197 return 0
197 return 0
198
198
199 default = recordNodeLine
199 default = recordNodeLine
200
200
201 def recordAndDispatch(self, node):
201 def recordAndDispatch(self, node):
202 self.recordNodeLine(node)
202 self.recordNodeLine(node)
203 self.dispatch(node)
203 self.dispatch(node)
204
204
205 def doSuite(self, intro, body, exclude=0):
205 def doSuite(self, intro, body, exclude=0):
206 exsuite = self.excluding_suite
206 exsuite = self.excluding_suite
207 if exclude or (intro and not self.recordNodeLine(intro)):
207 if exclude or (intro and not self.recordNodeLine(intro)):
208 self.excluding_suite = 1
208 self.excluding_suite = 1
209 self.recordAndDispatch(body)
209 self.recordAndDispatch(body)
210 self.excluding_suite = exsuite
210 self.excluding_suite = exsuite
211
211
212 def doPlainWordSuite(self, prevsuite, suite):
212 def doPlainWordSuite(self, prevsuite, suite):
213 # Finding the exclude lines for else's is tricky, because they aren't
213 # Finding the exclude lines for else's is tricky, because they aren't
214 # present in the compiler parse tree. Look at the previous suite,
214 # present in the compiler parse tree. Look at the previous suite,
215 # and find its last line. If any line between there and the else's
215 # and find its last line. If any line between there and the else's
216 # first line is excluded, then we exclude the else.
216 # first line is excluded, then we exclude the else.
217 lastprev = self.getLastLine(prevsuite)
217 lastprev = self.getLastLine(prevsuite)
218 firstelse = self.getFirstLine(suite)
218 firstelse = self.getFirstLine(suite)
219 for l in range(lastprev+1, firstelse):
219 for l in range(lastprev+1, firstelse):
220 if self.suite_spots.has_key(l):
220 if l in self.suite_spots:
221 self.doSuite(None, suite, exclude=self.excluded.has_key(l))
221 self.doSuite(None, suite, exclude=l in self.excluded)
222 break
222 break
223 else:
223 else:
224 self.doSuite(None, suite)
224 self.doSuite(None, suite)
225
225
226 def doElse(self, prevsuite, node):
226 def doElse(self, prevsuite, node):
227 if node.else_:
227 if node.else_:
228 self.doPlainWordSuite(prevsuite, node.else_)
228 self.doPlainWordSuite(prevsuite, node.else_)
229
229
230 def visitFor(self, node):
230 def visitFor(self, node):
231 self.doSuite(node, node.body)
231 self.doSuite(node, node.body)
232 self.doElse(node.body, node)
232 self.doElse(node.body, node)
233
233
234 visitWhile = visitFor
234 visitWhile = visitFor
235
235
236 def visitIf(self, node):
236 def visitIf(self, node):
237 # The first test has to be handled separately from the rest.
237 # The first test has to be handled separately from the rest.
238 # The first test is credited to the line with the "if", but the others
238 # The first test is credited to the line with the "if", but the others
239 # are credited to the line with the test for the elif.
239 # are credited to the line with the test for the elif.
240 self.doSuite(node, node.tests[0][1])
240 self.doSuite(node, node.tests[0][1])
241 for t, n in node.tests[1:]:
241 for t, n in node.tests[1:]:
242 self.doSuite(t, n)
242 self.doSuite(t, n)
243 self.doElse(node.tests[-1][1], node)
243 self.doElse(node.tests[-1][1], node)
244
244
245 def visitTryExcept(self, node):
245 def visitTryExcept(self, node):
246 self.doSuite(node, node.body)
246 self.doSuite(node, node.body)
247 for i in range(len(node.handlers)):
247 for i in range(len(node.handlers)):
248 a, b, h = node.handlers[i]
248 a, b, h = node.handlers[i]
249 if not a:
249 if not a:
250 # It's a plain "except:". Find the previous suite.
250 # It's a plain "except:". Find the previous suite.
251 if i > 0:
251 if i > 0:
252 prev = node.handlers[i-1][2]
252 prev = node.handlers[i-1][2]
253 else:
253 else:
254 prev = node.body
254 prev = node.body
255 self.doPlainWordSuite(prev, h)
255 self.doPlainWordSuite(prev, h)
256 else:
256 else:
257 self.doSuite(a, h)
257 self.doSuite(a, h)
258 self.doElse(node.handlers[-1][2], node)
258 self.doElse(node.handlers[-1][2], node)
259
259
260 def visitTryFinally(self, node):
260 def visitTryFinally(self, node):
261 self.doSuite(node, node.body)
261 self.doSuite(node, node.body)
262 self.doPlainWordSuite(node.body, node.final)
262 self.doPlainWordSuite(node.body, node.final)
263
263
264 def visitWith(self, node):
264 def visitWith(self, node):
265 self.doSuite(node, node.body)
265 self.doSuite(node, node.body)
266
266
267 def visitGlobal(self, node):
267 def visitGlobal(self, node):
268 # "global" statements don't execute like others (they don't call the
268 # "global" statements don't execute like others (they don't call the
269 # trace function), so don't record their line numbers.
269 # trace function), so don't record their line numbers.
270 pass
270 pass
271
271
272 the_coverage = None
272 the_coverage = None
273
273
274 class CoverageException(Exception): pass
274 class CoverageException(Exception): pass
275
275
276 class coverage:
276 class coverage:
277 # Name of the cache file (unless environment variable is set).
277 # Name of the cache file (unless environment variable is set).
278 cache_default = ".coverage"
278 cache_default = ".coverage"
279
279
280 # Environment variable naming the cache file.
280 # Environment variable naming the cache file.
281 cache_env = "COVERAGE_FILE"
281 cache_env = "COVERAGE_FILE"
282
282
283 # A dictionary with an entry for (Python source file name, line number
283 # A dictionary with an entry for (Python source file name, line number
284 # in that file) if that line has been executed.
284 # in that file) if that line has been executed.
285 c = {}
285 c = {}
286
286
287 # A map from canonical Python source file name to a dictionary in
287 # A map from canonical Python source file name to a dictionary in
288 # which there's an entry for each line number that has been
288 # which there's an entry for each line number that has been
289 # executed.
289 # executed.
290 cexecuted = {}
290 cexecuted = {}
291
291
292 # Cache of results of calling the analysis2() method, so that you can
292 # Cache of results of calling the analysis2() method, so that you can
293 # specify both -r and -a without doing double work.
293 # specify both -r and -a without doing double work.
294 analysis_cache = {}
294 analysis_cache = {}
295
295
296 # Cache of results of calling the canonical_filename() method, to
296 # Cache of results of calling the canonical_filename() method, to
297 # avoid duplicating work.
297 # avoid duplicating work.
298 canonical_filename_cache = {}
298 canonical_filename_cache = {}
299
299
300 def __init__(self):
300 def __init__(self):
301 global the_coverage
301 global the_coverage
302 if the_coverage:
302 if the_coverage:
303 raise CoverageException, "Only one coverage object allowed."
303 raise CoverageException, "Only one coverage object allowed."
304 self.usecache = 1
304 self.usecache = 1
305 self.cache = None
305 self.cache = None
306 self.parallel_mode = False
306 self.parallel_mode = False
307 self.exclude_re = ''
307 self.exclude_re = ''
308 self.nesting = 0
308 self.nesting = 0
309 self.cstack = []
309 self.cstack = []
310 self.xstack = []
310 self.xstack = []
311 self.relative_dir = os.path.normcase(os.path.abspath(os.curdir)+os.sep)
311 self.relative_dir = os.path.normcase(os.path.abspath(os.curdir)+os.sep)
312 self.exclude('# *pragma[: ]*[nN][oO] *[cC][oO][vV][eE][rR]')
312 self.exclude('# *pragma[: ]*[nN][oO] *[cC][oO][vV][eE][rR]')
313
313
314 # t(f, x, y). This method is passed to sys.settrace as a trace function.
314 # t(f, x, y). This method is passed to sys.settrace as a trace function.
315 # See [van Rossum 2001-07-20b, 9.2] for an explanation of sys.settrace and
315 # See [van Rossum 2001-07-20b, 9.2] for an explanation of sys.settrace and
316 # the arguments and return value of the trace function.
316 # the arguments and return value of the trace function.
317 # See [van Rossum 2001-07-20a, 3.2] for a description of frame and code
317 # See [van Rossum 2001-07-20a, 3.2] for a description of frame and code
318 # objects.
318 # objects.
319
319
320 def t(self, f, w, unused): #pragma: no cover
320 def t(self, f, w, unused): #pragma: no cover
321 if w == 'line':
321 if w == 'line':
322 #print "Executing %s @ %d" % (f.f_code.co_filename, f.f_lineno)
322 #print "Executing %s @ %d" % (f.f_code.co_filename, f.f_lineno)
323 self.c[(f.f_code.co_filename, f.f_lineno)] = 1
323 self.c[(f.f_code.co_filename, f.f_lineno)] = 1
324 for c in self.cstack:
324 for c in self.cstack:
325 c[(f.f_code.co_filename, f.f_lineno)] = 1
325 c[(f.f_code.co_filename, f.f_lineno)] = 1
326 return self.t
326 return self.t
327
327
328 def help(self, error=None): #pragma: no cover
328 def help(self, error=None): #pragma: no cover
329 if error:
329 if error:
330 print error
330 print error
331 print
331 print
332 print __doc__
332 print __doc__
333 sys.exit(1)
333 sys.exit(1)
334
334
335 def command_line(self, argv, help_fn=None):
335 def command_line(self, argv, help_fn=None):
336 import getopt
336 import getopt
337 help_fn = help_fn or self.help
337 help_fn = help_fn or self.help
338 settings = {}
338 settings = {}
339 optmap = {
339 optmap = {
340 '-a': 'annotate',
340 '-a': 'annotate',
341 '-c': 'collect',
341 '-c': 'collect',
342 '-d:': 'directory=',
342 '-d:': 'directory=',
343 '-e': 'erase',
343 '-e': 'erase',
344 '-h': 'help',
344 '-h': 'help',
345 '-i': 'ignore-errors',
345 '-i': 'ignore-errors',
346 '-m': 'show-missing',
346 '-m': 'show-missing',
347 '-p': 'parallel-mode',
347 '-p': 'parallel-mode',
348 '-r': 'report',
348 '-r': 'report',
349 '-x': 'execute',
349 '-x': 'execute',
350 '-o:': 'omit=',
350 '-o:': 'omit=',
351 }
351 }
352 short_opts = string.join(map(lambda o: o[1:], optmap.keys()), '')
352 short_opts = string.join(map(lambda o: o[1:], optmap.keys()), '')
353 long_opts = optmap.values()
353 long_opts = optmap.values()
354 options, args = getopt.getopt(argv, short_opts, long_opts)
354 options, args = getopt.getopt(argv, short_opts, long_opts)
355 for o, a in options:
355 for o, a in options:
356 if optmap.has_key(o):
356 if o in optmap:
357 settings[optmap[o]] = 1
357 settings[optmap[o]] = 1
358 elif optmap.has_key(o + ':'):
358 elif o + ':' in optmap:
359 settings[optmap[o + ':']] = a
359 settings[optmap[o + ':']] = a
360 elif o[2:] in long_opts:
360 elif o[2:] in long_opts:
361 settings[o[2:]] = 1
361 settings[o[2:]] = 1
362 elif o[2:] + '=' in long_opts:
362 elif o[2:] + '=' in long_opts:
363 settings[o[2:]+'='] = a
363 settings[o[2:]+'='] = a
364 else: #pragma: no cover
364 else: #pragma: no cover
365 pass # Can't get here, because getopt won't return anything unknown.
365 pass # Can't get here, because getopt won't return anything unknown.
366
366
367 if settings.get('help'):
367 if settings.get('help'):
368 help_fn()
368 help_fn()
369
369
370 for i in ['erase', 'execute']:
370 for i in ['erase', 'execute']:
371 for j in ['annotate', 'report', 'collect']:
371 for j in ['annotate', 'report', 'collect']:
372 if settings.get(i) and settings.get(j):
372 if settings.get(i) and settings.get(j):
373 help_fn("You can't specify the '%s' and '%s' "
373 help_fn("You can't specify the '%s' and '%s' "
374 "options at the same time." % (i, j))
374 "options at the same time." % (i, j))
375
375
376 args_needed = (settings.get('execute')
376 args_needed = (settings.get('execute')
377 or settings.get('annotate')
377 or settings.get('annotate')
378 or settings.get('report'))
378 or settings.get('report'))
379 action = (settings.get('erase')
379 action = (settings.get('erase')
380 or settings.get('collect')
380 or settings.get('collect')
381 or args_needed)
381 or args_needed)
382 if not action:
382 if not action:
383 help_fn("You must specify at least one of -e, -x, -c, -r, or -a.")
383 help_fn("You must specify at least one of -e, -x, -c, -r, or -a.")
384 if not args_needed and args:
384 if not args_needed and args:
385 help_fn("Unexpected arguments: %s" % " ".join(args))
385 help_fn("Unexpected arguments: %s" % " ".join(args))
386
386
387 self.parallel_mode = settings.get('parallel-mode')
387 self.parallel_mode = settings.get('parallel-mode')
388 self.get_ready()
388 self.get_ready()
389
389
390 if settings.get('erase'):
390 if settings.get('erase'):
391 self.erase()
391 self.erase()
392 if settings.get('execute'):
392 if settings.get('execute'):
393 if not args:
393 if not args:
394 help_fn("Nothing to do.")
394 help_fn("Nothing to do.")
395 sys.argv = args
395 sys.argv = args
396 self.start()
396 self.start()
397 import __main__
397 import __main__
398 sys.path[0] = os.path.dirname(sys.argv[0])
398 sys.path[0] = os.path.dirname(sys.argv[0])
399 execfile(sys.argv[0], __main__.__dict__)
399 execfile(sys.argv[0], __main__.__dict__)
400 if settings.get('collect'):
400 if settings.get('collect'):
401 self.collect()
401 self.collect()
402 if not args:
402 if not args:
403 args = self.cexecuted.keys()
403 args = self.cexecuted.keys()
404
404
405 ignore_errors = settings.get('ignore-errors')
405 ignore_errors = settings.get('ignore-errors')
406 show_missing = settings.get('show-missing')
406 show_missing = settings.get('show-missing')
407 directory = settings.get('directory=')
407 directory = settings.get('directory=')
408
408
409 omit = settings.get('omit=')
409 omit = settings.get('omit=')
410 if omit is not None:
410 if omit is not None:
411 omit = omit.split(',')
411 omit = omit.split(',')
412 else:
412 else:
413 omit = []
413 omit = []
414
414
415 if settings.get('report'):
415 if settings.get('report'):
416 self.report(args, show_missing, ignore_errors, omit_prefixes=omit)
416 self.report(args, show_missing, ignore_errors, omit_prefixes=omit)
417 if settings.get('annotate'):
417 if settings.get('annotate'):
418 self.annotate(args, directory, ignore_errors, omit_prefixes=omit)
418 self.annotate(args, directory, ignore_errors, omit_prefixes=omit)
419
419
420 def use_cache(self, usecache, cache_file=None):
420 def use_cache(self, usecache, cache_file=None):
421 self.usecache = usecache
421 self.usecache = usecache
422 if cache_file and not self.cache:
422 if cache_file and not self.cache:
423 self.cache_default = cache_file
423 self.cache_default = cache_file
424
424
425 def get_ready(self, parallel_mode=False):
425 def get_ready(self, parallel_mode=False):
426 if self.usecache and not self.cache:
426 if self.usecache and not self.cache:
427 self.cache = os.environ.get(self.cache_env, self.cache_default)
427 self.cache = os.environ.get(self.cache_env, self.cache_default)
428 if self.parallel_mode:
428 if self.parallel_mode:
429 self.cache += "." + gethostname() + "." + str(os.getpid())
429 self.cache += "." + gethostname() + "." + str(os.getpid())
430 self.restore()
430 self.restore()
431 self.analysis_cache = {}
431 self.analysis_cache = {}
432
432
433 def start(self, parallel_mode=False):
433 def start(self, parallel_mode=False):
434 self.get_ready()
434 self.get_ready()
435 if self.nesting == 0: #pragma: no cover
435 if self.nesting == 0: #pragma: no cover
436 sys.settrace(self.t)
436 sys.settrace(self.t)
437 if hasattr(threading, 'settrace'):
437 if hasattr(threading, 'settrace'):
438 threading.settrace(self.t)
438 threading.settrace(self.t)
439 self.nesting += 1
439 self.nesting += 1
440
440
441 def stop(self):
441 def stop(self):
442 self.nesting -= 1
442 self.nesting -= 1
443 if self.nesting == 0: #pragma: no cover
443 if self.nesting == 0: #pragma: no cover
444 sys.settrace(None)
444 sys.settrace(None)
445 if hasattr(threading, 'settrace'):
445 if hasattr(threading, 'settrace'):
446 threading.settrace(None)
446 threading.settrace(None)
447
447
448 def erase(self):
448 def erase(self):
449 self.get_ready()
449 self.get_ready()
450 self.c = {}
450 self.c = {}
451 self.analysis_cache = {}
451 self.analysis_cache = {}
452 self.cexecuted = {}
452 self.cexecuted = {}
453 if self.cache and os.path.exists(self.cache):
453 if self.cache and os.path.exists(self.cache):
454 os.remove(self.cache)
454 os.remove(self.cache)
455
455
456 def exclude(self, re):
456 def exclude(self, re):
457 if self.exclude_re:
457 if self.exclude_re:
458 self.exclude_re += "|"
458 self.exclude_re += "|"
459 self.exclude_re += "(" + re + ")"
459 self.exclude_re += "(" + re + ")"
460
460
461 def begin_recursive(self):
461 def begin_recursive(self):
462 self.cstack.append(self.c)
462 self.cstack.append(self.c)
463 self.xstack.append(self.exclude_re)
463 self.xstack.append(self.exclude_re)
464
464
465 def end_recursive(self):
465 def end_recursive(self):
466 self.c = self.cstack.pop()
466 self.c = self.cstack.pop()
467 self.exclude_re = self.xstack.pop()
467 self.exclude_re = self.xstack.pop()
468
468
469 # save(). Save coverage data to the coverage cache.
469 # save(). Save coverage data to the coverage cache.
470
470
471 def save(self):
471 def save(self):
472 if self.usecache and self.cache:
472 if self.usecache and self.cache:
473 self.canonicalize_filenames()
473 self.canonicalize_filenames()
474 cache = open(self.cache, 'wb')
474 cache = open(self.cache, 'wb')
475 import marshal
475 import marshal
476 marshal.dump(self.cexecuted, cache)
476 marshal.dump(self.cexecuted, cache)
477 cache.close()
477 cache.close()
478
478
479 # restore(). Restore coverage data from the coverage cache (if it exists).
479 # restore(). Restore coverage data from the coverage cache (if it exists).
480
480
481 def restore(self):
481 def restore(self):
482 self.c = {}
482 self.c = {}
483 self.cexecuted = {}
483 self.cexecuted = {}
484 assert self.usecache
484 assert self.usecache
485 if os.path.exists(self.cache):
485 if os.path.exists(self.cache):
486 self.cexecuted = self.restore_file(self.cache)
486 self.cexecuted = self.restore_file(self.cache)
487
487
488 def restore_file(self, file_name):
488 def restore_file(self, file_name):
489 try:
489 try:
490 cache = open(file_name, 'rb')
490 cache = open(file_name, 'rb')
491 import marshal
491 import marshal
492 cexecuted = marshal.load(cache)
492 cexecuted = marshal.load(cache)
493 cache.close()
493 cache.close()
494 if isinstance(cexecuted, types.DictType):
494 if isinstance(cexecuted, types.DictType):
495 return cexecuted
495 return cexecuted
496 else:
496 else:
497 return {}
497 return {}
498 except:
498 except:
499 return {}
499 return {}
500
500
    # collect(). Collect data in multiple files produced by parallel mode

    def collect(self):
        cache_dir, local = os.path.split(self.cache)
        for f in os.listdir(cache_dir or '.'):
            if not f.startswith(local):
                continue

            full_path = os.path.join(cache_dir, f)
            cexecuted = self.restore_file(full_path)
            self.merge_data(cexecuted)

    def merge_data(self, new_data):
        for file_name, file_data in new_data.items():
            if file_name in self.cexecuted:
                self.merge_file_data(self.cexecuted[file_name], file_data)
            else:
                self.cexecuted[file_name] = file_data

    def merge_file_data(self, cache_data, new_data):
        for line_number in new_data.keys():
            if not line_number in cache_data:
                cache_data[line_number] = new_data[line_number]

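    # A minimal sketch of the merge step (data values illustrative): line
    # records from a parallel run are folded into the main map, and the
    # membership tests above use the preferred "key in dict" form rather
    # than dict.has_key(key).
    #
    #   main = {'a.py': {1: 1}}
    #   new  = {'a.py': {2: 1}, 'b.py': {1: 1}}
    #   # after merging new into main:
    #   # main == {'a.py': {1: 1, 2: 1}, 'b.py': {1: 1}}
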
    # canonical_filename(filename). Return a canonical filename for the
    # file (that is, an absolute path with no redundant components and
    # normalized case). See [GDR 2001-12-04b, 3.3].

    def canonical_filename(self, filename):
        if not filename in self.canonical_filename_cache:
            f = filename
            if os.path.isabs(f) and not os.path.exists(f):
                f = os.path.basename(f)
            if not os.path.isabs(f):
                for path in [os.curdir] + sys.path:
                    g = os.path.join(path, f)
                    if os.path.exists(g):
                        f = g
                        break
            cf = os.path.normcase(os.path.abspath(f))
            self.canonical_filename_cache[filename] = cf
        return self.canonical_filename_cache[filename]

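    # A minimal sketch of the canonicalization rule (paths illustrative):
    # relative names are resolved against os.curdir and sys.path, then
    # normalized, so the same source file always maps to one cache key.
    #
    #   import os
    #   os.path.normcase(os.path.abspath('./pkg/../mod.py'))
    #   # -> '<cwd>/mod.py' (also lower-cased on Windows)
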
    # canonicalize_filenames(). Copy results from "c" to "cexecuted",
    # canonicalizing filenames on the way. Clear the "c" map.

    def canonicalize_filenames(self):
        for filename, lineno in self.c.keys():
            if filename == '<string>':
                # Can't do anything useful with exec'd strings, so skip them.
                continue
            f = self.canonical_filename(filename)
            if not f in self.cexecuted:
                self.cexecuted[f] = {}
            self.cexecuted[f][lineno] = 1
        self.c = {}

    # morf_filename(morf). Return the filename for a module or file.

    def morf_filename(self, morf):
        if isinstance(morf, types.ModuleType):
            if not hasattr(morf, '__file__'):
                raise CoverageException, "Module has no __file__ attribute."
            f = morf.__file__
        else:
            f = morf
        return self.canonical_filename(f)

    # analyze_morf(morf). Analyze the module or filename passed as
    # the argument. If the source code can't be found, raise an error.
    # Otherwise, return a tuple of (1) the canonical filename of the
    # source code for the module, (2) a list of lines of statements
    # in the source code, (3) a list of lines of excluded statements,
    # and (4), a map of line numbers to multi-line line number ranges, for
    # statements that cross lines.

    def analyze_morf(self, morf):
        if morf in self.analysis_cache:
            return self.analysis_cache[morf]
        filename = self.morf_filename(morf)
        ext = os.path.splitext(filename)[1]
        if ext == '.pyc':
            if not os.path.exists(filename[0:-1]):
                raise CoverageException, ("No source for compiled code '%s'."
                                          % filename)
            filename = filename[0:-1]
        elif ext != '.py':
            raise CoverageException, "File '%s' not Python source." % filename
        source = open(filename, 'r')
        lines, excluded_lines, line_map = self.find_executable_statements(
            source.read(), exclude=self.exclude_re
            )
        source.close()
        result = filename, lines, excluded_lines, line_map
        self.analysis_cache[morf] = result
        return result

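    # A minimal sketch of the return value (names and numbers illustrative):
    #
    #   filename, statements, excluded, line_map = c.analyze_morf('mod.py')
    #   # statements -> [1, 2, 5, ...]   executable source lines
    #   # excluded   -> [9]              lines matching exclude_re
    #   # line_map   -> {3: (3, 4), 4: (3, 4)}   multi-line statement spans
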
    def first_line_of_tree(self, tree):
        while True:
            if len(tree) == 3 and type(tree[2]) == type(1):
                return tree[2]
            tree = tree[1]

    def last_line_of_tree(self, tree):
        while True:
            if len(tree) == 3 and type(tree[2]) == type(1):
                return tree[2]
            tree = tree[-1]

    def find_docstring_pass_pair(self, tree, spots):
        for i in range(1, len(tree)):
            if self.is_string_constant(tree[i]) and self.is_pass_stmt(tree[i+1]):
                first_line = self.first_line_of_tree(tree[i])
                last_line = self.last_line_of_tree(tree[i+1])
                self.record_multiline(spots, first_line, last_line)

    def is_string_constant(self, tree):
        try:
            return tree[0] == symbol.stmt and tree[1][1][1][0] == symbol.expr_stmt
        except:
            return False

    def is_pass_stmt(self, tree):
        try:
            return tree[0] == symbol.stmt and tree[1][1][1][0] == symbol.pass_stmt
        except:
            return False

    def record_multiline(self, spots, i, j):
        for l in range(i, j+1):
            spots[l] = (i, j)

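    # A minimal sketch: record_multiline fills every line in the span with
    # the same (first, last) pair, so any line of a multi-line statement
    # can be mapped back to the whole statement.
    #
    #   spots = {}
    #   # after record_multiline(spots, 3, 5):
    #   # spots == {3: (3, 5), 4: (3, 5), 5: (3, 5)}
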
    def get_suite_spots(self, tree, spots):
        """ Analyze a parse tree to find suite introducers which span a number
            of lines.
        """
        for i in range(1, len(tree)):
            if type(tree[i]) == type(()):
                if tree[i][0] == symbol.suite:
                    # Found a suite, look back for the colon and keyword.
                    lineno_colon = lineno_word = None
                    for j in range(i-1, 0, -1):
                        if tree[j][0] == token.COLON:
                            # Colons are never executed themselves: we want the
                            # line number of the last token before the colon.
                            lineno_colon = self.last_line_of_tree(tree[j-1])
                        elif tree[j][0] == token.NAME:
                            if tree[j][1] == 'elif':
                                # Find the line number of the first non-terminal
                                # after the keyword.
                                t = tree[j+1]
                                while t and token.ISNONTERMINAL(t[0]):
                                    t = t[1]
                                if t:
                                    lineno_word = t[2]
                            else:
                                lineno_word = tree[j][2]
                            break
                        elif tree[j][0] == symbol.except_clause:
                            # "except" clauses look like:
                            # ('except_clause', ('NAME', 'except', lineno), ...)
                            if tree[j][1][0] == token.NAME:
                                lineno_word = tree[j][1][2]
                                break
                    if lineno_colon and lineno_word:
                        # Found colon and keyword, mark all the lines
                        # between the two with the two line numbers.
                        self.record_multiline(spots, lineno_word, lineno_colon)

                    # "pass" statements are tricky: different versions of Python
                    # treat them differently, especially in the common case of a
                    # function with a doc string and a single pass statement.
                    self.find_docstring_pass_pair(tree[i], spots)

                elif tree[i][0] == symbol.simple_stmt:
                    first_line = self.first_line_of_tree(tree[i])
                    last_line = self.last_line_of_tree(tree[i])
                    if first_line != last_line:
                        self.record_multiline(spots, first_line, last_line)
                self.get_suite_spots(tree[i], spots)

    def find_executable_statements(self, text, exclude=None):
        # Find lines which match an exclusion pattern.
        excluded = {}
        suite_spots = {}
        if exclude:
            reExclude = re.compile(exclude)
            lines = text.split('\n')
            for i in range(len(lines)):
                if reExclude.search(lines[i]):
                    excluded[i+1] = 1

        # Parse the code and analyze the parse tree to find out which statements
        # are multiline, and where suites begin and end.
        import parser
        tree = parser.suite(text+'\n\n').totuple(1)
        self.get_suite_spots(tree, suite_spots)
        #print "Suite spots:", suite_spots

        # Use the compiler module to parse the text and find the executable
        # statements. We add newlines to be impervious to final partial lines.
        statements = {}
        ast = compiler.parse(text+'\n\n')
        visitor = StatementFindingAstVisitor(statements, excluded, suite_spots)
        compiler.walk(ast, visitor, walker=visitor)

        lines = statements.keys()
        lines.sort()
        excluded_lines = excluded.keys()
        excluded_lines.sort()
        return lines, excluded_lines, suite_spots

    # format_lines(statements, lines). Format a list of line numbers
    # for printing by coalescing groups of lines as long as the lines
    # represent consecutive statements. This will coalesce even if
    # there are gaps between statements, so if statements =
    # [1,2,3,4,5,10,11,12,13,14] and lines = [1,2,5,10,11,13,14] then
    # format_lines will return "1-2, 5-11, 13-14".

    def format_lines(self, statements, lines):
        pairs = []
        i = 0
        j = 0
        start = None
        while i < len(statements) and j < len(lines):
            if statements[i] == lines[j]:
                if start == None:
                    start = lines[j]
                end = lines[j]
                j = j + 1
            elif start:
                pairs.append((start, end))
                start = None
            i = i + 1
        if start:
            pairs.append((start, end))
        def stringify(pair):
            start, end = pair
            if start == end:
                return "%d" % start
            else:
                return "%d-%d" % (start, end)
        ret = string.join(map(stringify, pairs), ", ")
        return ret

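    # A minimal sketch using the data from the comment above:
    #
    #   statements = [1,2,3,4,5,10,11,12,13,14]
    #   lines = [1,2,5,10,11,13,14]
    #   c.format_lines(statements, lines)   # -> "1-2, 5-11, 13-14"
    #
    # 5, 10 and 11 coalesce into "5-11" because lines 6-9 are not in
    # statements at all, so those three are consecutive statements.
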
    # Backward compatibility with version 1.
    def analysis(self, morf):
        f, s, _, m, mf = self.analysis2(morf)
        return f, s, m, mf

    def analysis2(self, morf):
        filename, statements, excluded, line_map = self.analyze_morf(morf)
        self.canonicalize_filenames()
        if not filename in self.cexecuted:
            self.cexecuted[filename] = {}
        missing = []
        for line in statements:
            lines = line_map.get(line, [line, line])
            for l in range(lines[0], lines[1]+1):
                if l in self.cexecuted[filename]:
                    break
            else:
                missing.append(line)
        return (filename, statements, excluded, missing,
                self.format_lines(statements, missing))

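    # A minimal sketch of the for/else idiom used above: the else clause
    # runs only when the loop finishes without hitting break, i.e. when
    # no line in a multi-line statement's span was ever executed.
    #
    #   executed = {4: 1}
    #   for l in range(3, 6):
    #       if l in executed:
    #           break            # some line in the span ran
    #   else:
    #       pass                 # no line ran: the statement is missing
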
    def relative_filename(self, filename):
        """ Convert filename to relative filename from self.relative_dir.
        """
        return filename.replace(self.relative_dir, "")

    def morf_name(self, morf):
        """ Return the name of morf as used in report.
        """
        if isinstance(morf, types.ModuleType):
            return morf.__name__
        else:
            return self.relative_filename(os.path.splitext(morf)[0])

    def filter_by_prefix(self, morfs, omit_prefixes):
        """ Return list of morfs where the morf name does not begin
            with any one of the omit_prefixes.
        """
        filtered_morfs = []
        for morf in morfs:
            for prefix in omit_prefixes:
                if self.morf_name(morf).startswith(prefix):
                    break
            else:
                filtered_morfs.append(morf)

        return filtered_morfs

    def morf_name_compare(self, x, y):
        return cmp(self.morf_name(x), self.morf_name(y))

    def report(self, morfs, show_missing=1, ignore_errors=0, file=None, omit_prefixes=[]):
        if not isinstance(morfs, types.ListType):
            morfs = [morfs]
        # On windows, the shell doesn't expand wildcards. Do it here.
        globbed = []
        for morf in morfs:
            if isinstance(morf, strclass):
                globbed.extend(glob.glob(morf))
            else:
                globbed.append(morf)
        morfs = globbed

        morfs = self.filter_by_prefix(morfs, omit_prefixes)
        morfs.sort(self.morf_name_compare)

        max_name = max([5,] + map(len, map(self.morf_name, morfs)))
        fmt_name = "%%- %ds  " % max_name
        fmt_err = fmt_name + "%s: %s"
        header = fmt_name % "Name" + " Stmts   Exec  Cover"
        fmt_coverage = fmt_name + "% 6d % 6d % 5d%%"
        if show_missing:
            header = header + "   Missing"
            fmt_coverage = fmt_coverage + "   %s"
        if not file:
            file = sys.stdout
        print >>file, header
        print >>file, "-" * len(header)
        total_statements = 0
        total_executed = 0
        for morf in morfs:
            name = self.morf_name(morf)
            try:
                _, statements, _, missing, readable = self.analysis2(morf)
                n = len(statements)
                m = n - len(missing)
                if n > 0:
                    pc = 100.0 * m / n
                else:
                    pc = 100.0
                args = (name, n, m, pc)
                if show_missing:
                    args = args + (readable,)
                print >>file, fmt_coverage % args
                total_statements = total_statements + n
                total_executed = total_executed + m
            except KeyboardInterrupt: #pragma: no cover
                raise
            except:
                if not ignore_errors:
                    typ, msg = sys.exc_info()[0:2]
                    print >>file, fmt_err % (name, typ, msg)
        if len(morfs) > 1:
            print >>file, "-" * len(header)
            if total_statements > 0:
                pc = 100.0 * total_executed / total_statements
            else:
                pc = 100.0
            args = ("TOTAL", total_statements, total_executed, pc)
            if show_missing:
                args = args + ("",)
            print >>file, fmt_coverage % args

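    # A minimal sketch of the report layout (names and numbers illustrative):
    #
    #   Name    Stmts   Exec  Cover   Missing
    #   --------------------------------------
    #   mod         5      4    80%   7
    #   TOTAL       5      4    80%
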
    # annotate(morfs, ignore_errors).

    blank_re = re.compile(r"\s*(#|$)")
    else_re = re.compile(r"\s*else\s*:\s*(#|$)")

    def annotate(self, morfs, directory=None, ignore_errors=0, omit_prefixes=[]):
        morfs = self.filter_by_prefix(morfs, omit_prefixes)
        for morf in morfs:
            try:
                filename, statements, excluded, missing, _ = self.analysis2(morf)
                self.annotate_file(filename, statements, excluded, missing, directory)
            except KeyboardInterrupt:
                raise
            except:
                if not ignore_errors:
                    raise

    def annotate_file(self, filename, statements, excluded, missing, directory=None):
        source = open(filename, 'r')
        if directory:
            dest_file = os.path.join(directory,
                                     os.path.basename(filename)
                                     + ',cover')
        else:
            dest_file = filename + ',cover'
        dest = open(dest_file, 'w')
        lineno = 0
        i = 0
        j = 0
        covered = 1
        while 1:
            line = source.readline()
            if line == '':
                break
            lineno = lineno + 1
            while i < len(statements) and statements[i] < lineno:
                i = i + 1
            while j < len(missing) and missing[j] < lineno:
                j = j + 1
            if i < len(statements) and statements[i] == lineno:
                covered = j >= len(missing) or missing[j] > lineno
            if self.blank_re.match(line):
                dest.write('  ')
            elif self.else_re.match(line):
                # Special logic for lines containing only 'else:'.
                # See [GDR 2001-12-04b, 3.2].
                if i >= len(statements) and j >= len(missing):
                    dest.write('! ')
                elif i >= len(statements) or j >= len(missing):
                    dest.write('> ')
                elif statements[i] == missing[j]:
                    dest.write('! ')
                else:
                    dest.write('> ')
            elif lineno in excluded:
                dest.write('- ')
            elif covered:
                dest.write('> ')
            else:
                dest.write('! ')
            dest.write(line)
        source.close()
        dest.close()

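    # A minimal legend for the markers written to the ,cover file:
    #
    #   '> '  executable and covered
    #   '! '  executable but never executed
    #   '- '  excluded by the exclusion regex
    #   '  '  blank or comment-only line
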
# Singleton object.
the_coverage = coverage()

# Module functions call methods in the singleton object.
def use_cache(*args, **kw):
    return the_coverage.use_cache(*args, **kw)

def start(*args, **kw):
    return the_coverage.start(*args, **kw)

def stop(*args, **kw):
    return the_coverage.stop(*args, **kw)

def erase(*args, **kw):
    return the_coverage.erase(*args, **kw)

def begin_recursive(*args, **kw):
    return the_coverage.begin_recursive(*args, **kw)

def end_recursive(*args, **kw):
    return the_coverage.end_recursive(*args, **kw)

def exclude(*args, **kw):
    return the_coverage.exclude(*args, **kw)

def analysis(*args, **kw):
    return the_coverage.analysis(*args, **kw)

def analysis2(*args, **kw):
    return the_coverage.analysis2(*args, **kw)

def report(*args, **kw):
    return the_coverage.report(*args, **kw)

def annotate(*args, **kw):
    return the_coverage.annotate(*args, **kw)

def annotate_file(*args, **kw):
    return the_coverage.annotate_file(*args, **kw)

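# A minimal sketch of the module-level API in use (module name illustrative):
#
#   import coverage
#   coverage.erase()
#   coverage.start()
#   import mymodule          # run the code under measurement
#   coverage.stop()
#   coverage.report(mymodule, show_missing=1)
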
# Save coverage data when Python exits. (The atexit module wasn't
# introduced until Python 2.0, so use sys.exitfunc when it's not
# available.)
try:
    import atexit
    atexit.register(the_coverage.save)
except ImportError:
    sys.exitfunc = the_coverage.save

# Command-line interface.
if __name__ == '__main__':
    the_coverage.command_line(sys.argv[1:])


# A. REFERENCES
#
# [GDR 2001-12-04a] "Statement coverage for Python"; Gareth Rees;
# Ravenbrook Limited; 2001-12-04;
# <http://www.nedbatchelder.com/code/modules/rees-coverage.html>.
#
# [GDR 2001-12-04b] "Statement coverage for Python: design and
# analysis"; Gareth Rees; Ravenbrook Limited; 2001-12-04;
# <http://www.nedbatchelder.com/code/modules/rees-design.html>.
#
# [van Rossum 2001-07-20a] "Python Reference Manual (release 2.1.1)";
# Guido van Rossum; 2001-07-20;
# <http://www.python.org/doc/2.1.1/ref/ref.html>.
#
# [van Rossum 2001-07-20b] "Python Library Reference"; Guido van Rossum;
# 2001-07-20; <http://www.python.org/doc/2.1.1/lib/lib.html>.
#
#
# B. DOCUMENT HISTORY
#
# 2001-12-04 GDR Created.
#
# 2001-12-06 GDR Added command-line interface and source code
# annotation.
#
# 2001-12-09 GDR Moved design and interface to separate documents.
#
# 2001-12-10 GDR Open cache file as binary on Windows. Allow
# simultaneous -e and -x, or -a and -r.
#
# 2001-12-12 GDR Added command-line help. Cache analysis so that it
# only needs to be done once when you specify -a and -r.
#
# 2001-12-13 GDR Improved speed while recording. Portable between
# Python 1.5.2 and 2.1.1.
#
# 2002-01-03 GDR Module-level functions work correctly.
#
# 2002-01-07 GDR Update sys.path when running a file with the -x option,
# so that it matches the value the program would get if it were run on
# its own.
#
# 2004-12-12 NMB Significant code changes.
# - Finding executable statements has been rewritten so that docstrings and
#   other quirks of Python execution aren't mistakenly identified as missing
#   lines.
# - Lines can be excluded from consideration, even entire suites of lines.
# - The filesystem cache of covered lines can be disabled programmatically.
# - Modernized the code.
#
# 2004-12-14 NMB Minor tweaks. Return 'analysis' to its original behavior
# and add 'analysis2'. Add a global for 'annotate', and factor it, adding
# 'annotate_file'.
#
# 2004-12-31 NMB Allow for keyword arguments in the module global functions.
# Thanks, Allen.
#
# 2005-12-02 NMB Call threading.settrace so that all threads are measured.
# Thanks Martin Fuzzey. Add a file argument to report so that reports can be
# captured to a different destination.
#
# 2005-12-03 NMB coverage.py can now measure itself.
#
# 2005-12-04 NMB Adapted Greg Rogers' patch for using relative filenames,
# and sorting and omitting files to report on.
#
# 2006-07-23 NMB Applied Joseph Tate's patch for function decorators.
#
# 2006-08-21 NMB Applied Sigve Tjora and Mark van der Wal's fixes for argument
# handling.
#
# 2006-08-22 NMB Applied Geoff Bache's parallel mode patch.
#
# 2006-08-23 NMB Refactorings to improve testability. Fixes to command-line
# logic for parallel mode and collect.
#
# 2006-08-25 NMB "#pragma: nocover" is excluded by default.
#
# 2006-09-10 NMB Properly ignore docstrings and other constant expressions that
# appear in the middle of a function, a problem reported by Tim Leslie.
# Minor changes to avoid lint warnings.
#
# 2006-09-17 NMB coverage.erase() shouldn't clobber the exclude regex.
# Change how parallel mode is invoked, and fix erase() so that it erases the
# cache when called programmatically.
#
# 2007-07-21 NMB In reports, ignore code executed from strings, since we can't
# do anything useful with it anyway.
# Better file handling on Linux, thanks Guillaume Chazarain.
# Better shell support on Windows, thanks Noel O'Boyle.
# Python 2.2 support maintained, thanks Catherine Proulx.
#
# 2007-07-22 NMB Python 2.5 now fully supported. The method of dealing with
# multi-line statements is now less sensitive to the exact line that Python
# reports during execution. Pass statements are handled specially so that their
# disappearance during execution won't throw off the measurement.
#
# 2007-07-23 NMB Now Python 2.5 is *really* fully supported: the body of the
# new with statement is counted as executable.
#
# 2007-07-29 NMB Better packaging.

# C. COPYRIGHT AND LICENCE
#
# Copyright 2001 Gareth Rees. All rights reserved.
# Copyright 2004-2007 Ned Batchelder. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# 1. Redistributions of source code must retain the above copyright
#    notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
#    notice, this list of conditions and the following disclaimer in the
#    documentation and/or other materials provided with the
#    distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDERS AND CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
# OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
# DAMAGE.
#
# $Id: coverage.py 74 2007-07-29 22:28:35Z nedbat $