##// END OF EJS Templates
check-code: there must also be whitespace between ')' and operator...
Mads Kiilerich -
r18054:b35e3364 default
parent child Browse files
Show More
@@ -1,454 +1,454 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2 #
2 #
3 # check-code - a style and portability checker for Mercurial
3 # check-code - a style and portability checker for Mercurial
4 #
4 #
5 # Copyright 2010 Matt Mackall <mpm@selenic.com>
5 # Copyright 2010 Matt Mackall <mpm@selenic.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 import re, glob, os, sys
10 import re, glob, os, sys
11 import keyword
11 import keyword
12 import optparse
12 import optparse
13
13
def repquote(m):
    """Blank out the body of a quoted string, keeping its shape.

    Word characters become 'x', any other visible character becomes 'o';
    whitespace, newlines and the quote characters survive, so line lengths
    and layout checks still apply to the masked text.
    """
    quote = m.group('quote')
    masked = re.sub(r"\w", "x", m.group('text'))
    masked = re.sub(r"[^\s\nx]", "o", masked)
    return quote + masked + quote
18
18
def reppython(m):
    """Mask a python source match: a '#' comment becomes a same-length
    run of '#', anything else is handed to repquote for string masking."""
    comment = m.group('comment')
    if comment:
        return "#" * len(comment)
    return repquote(m)
24
24
def repcomment(m):
    """Replace a shell-style comment with '#' characters of the same
    length, preserving the leading indentation captured in group 1."""
    indent, body = m.group(1), m.group(2)
    return indent + "#" * len(body)
27
27
def repccomment(m):
    """Mask the interior of a C block comment with 'x', keeping newlines
    (and one leading space per continuation line is masked too), then
    re-attach the closing '*/'."""
    masked = re.sub(r"((?<=\n) )|\S", "x", m.group(2))
    return m.group(1) + masked + "*/"
31
31
def repcallspaces(m):
    """Collapse continuation-line indentation inside a call's argument
    list so wrapped calls don't trip whitespace checks."""
    args = re.sub(r"\n\s+", "\n", m.group(2))
    return m.group(1) + args
35
35
def repinclude(m):
    """Neutralize the header name of a C '#include <...>' directive by
    substituting the fixed placeholder '<foo>'."""
    prefix = m.group(1)
    return prefix + "<foo>"
38
38
def rephere(m):
    """Mask the body of a shell here-document: every non-whitespace
    character becomes 'x'; the delimiter tag (group 1) is kept."""
    body = re.sub(r"\S", "x", m.group(2))
    return m.group(1) + body
42
42
43
43
# Checks applied to legacy shell test scripts.  testpats[0] are errors,
# testpats[1] are warnings (reported only with --warnings).
# NOTE(review): the page this was recovered from collapsed runs of spaces
# inside string literals — verify space counts against upstream.
testpats = [
    [
        (r'pushd|popd', "don't use 'pushd' or 'popd', use 'cd'"),
        (r'\W\$?\(\([^\)\n]*\)\)', "don't use (()) or $(()), use 'expr'"),
        (r'grep.*-q', "don't use 'grep -q', redirect to /dev/null"),
        (r'sed.*-i', "don't use 'sed -i', use a temporary file"),
        (r'\becho\b.*\\n', "don't use 'echo \\n', use printf"),
        (r'echo -n', "don't use 'echo -n', use printf"),
        (r'(^| )wc[^|]*$\n(?!.*\(re\))', "filter wc output"),
        (r'head -c', "don't use 'head -c', use 'dd'"),
        (r'sha1sum', "don't use sha1sum, use $TESTDIR/md5sum.py"),
        (r'ls.*-\w*R', "don't use 'ls -R', use 'find'"),
        # '\\N' written with an explicit double backslash: same bytes as
        # the Python 2 original, but also valid under Python 3 parsing.
        (r'printf.*\\([1-9]|0\d)', "don't use 'printf \\NNN', use Python"),
        (r'printf.*\\x', "don't use printf \\x, use Python"),
        (r'\$\(.*\)', "don't use $(expr), use `expr`"),
        (r'rm -rf \*', "don't use naked rm -rf, target a directory"),
        (r'(^|\|\s*)grep (-\w\s+)*[^|]*[(|]\w',
         "use egrep for extended grep syntax"),
        (r'/bin/', "don't use explicit paths for tools"),
        (r'[^\n]\Z', "no trailing newline"),
        (r'export.*=', "don't export and assign at once"),
        (r'^source\b', "don't use 'source', use '.'"),
        (r'touch -d', "don't use 'touch -d', use 'touch -t' instead"),
        (r'ls +[^|\n-]+ +-', "options to 'ls' must come before filenames"),
        (r'[^>\n]>\s*\$HGRCPATH', "don't overwrite $HGRCPATH, append to it"),
        (r'^stop\(\)', "don't use 'stop' as a shell function name"),
        (r'(\[|\btest\b).*-e ', "don't use 'test -e', use 'test -f'"),
        (r'^alias\b.*=', "don't use alias, use a function"),
        (r'if\s*!', "don't use '!' to negate exit status"),
        (r'/dev/u?random', "don't use entropy, use /dev/zero"),
        (r'do\s*true;\s*done', "don't use true as loop body, use sleep 0"),
        (r'^( *)\t', "don't use tabs to indent"),
    ],
    # warnings
    [
        (r'^function', "don't use 'function', use old style"),
        (r'^diff.*-\w*N', "don't use 'diff -N'"),
        (r'\$PWD', "don't use $PWD, use `pwd`"),
        (r'^([^"\'\n]|("[^"\n]*")|(\'[^\'\n]*\'))*\^', "^ must be quoted"),
    ]
]
85
85
# Filters run over shell test scripts before pattern matching: comments
# and here-document bodies are masked so they cannot trigger checks.
testfilters = [
    (r"( *)(#([^\n]*\S)?)", repcomment),
    (r"<<(\S+)((.|\n)*?\n\1)", rephere),
]
90
90
# Prefix matching a command line in a unified (.t) test.
# NOTE(review): the scrape this was recovered from collapsed space runs;
# upstream may use two spaces before '\$' — verify.
uprefix = r"^ \$ "

# Checks for unified tests; [0] errors, [1] warnings.  The shell checks
# from testpats are appended to these lists by the loop below.
utestpats = [
    [
        (r'^(\S.*|| [$>] .*)[ \t]\n', "trailing whitespace on non-output"),
        (uprefix + r'.*\|\s*sed[^|>\n]*\n',
         "use regex test output patterns instead of sed"),
        (uprefix + r'(true|exit 0)', "explicit zero exit unnecessary"),
        (uprefix + r'.*(?<!\[)\$\?', "explicit exit code checks unnecessary"),
        (uprefix + r'.*\|\| echo.*(fail|error)',
         "explicit exit code checks unnecessary"),
        (uprefix + r'set -e', "don't use set -e"),
        (uprefix + r'\s', "don't indent commands, use > for continued lines"),
        (r'^ saved backup bundle to \$TESTTMP.*\.hg$',
         "use (glob) to match Windows paths too"),
    ],
    # warnings
    []
]
109
109
# Fold every shell-script check into the unified-test dialect: anchor it
# behind the " $ " / " > " command prompt so it only fires on command
# lines, not on recorded output.  Index 0 = errors, 1 = warnings.
for severity in (0, 1):
    for pat, message in testpats[severity]:
        if pat.startswith(r'^'):
            pat = r"^ [$>] (%s)" % pat[1:]
        else:
            pat = r"^ [$>] .*(%s)" % pat
        utestpats[severity].append((pat, message))
117
117
# Filters for unified tests: mask here-documents (whose body lines carry
# the ' > ' continuation prompt) and comments before checking.
utestfilters = [
    (r"<<(\S+)((.|\n)*?\n > \1)", rephere),
    (r"( *)(#([^\n]*\S)?)", repcomment),
]
122
122
# Style and portability checks for Python sources; pypats[0] are errors,
# pypats[1] warnings.  Where the underlying diff changed a pattern, the
# post-change (right-hand) version is kept: ',' and arithmetic-operator
# whitespace checks also fire after a closing ')'.
pypats = [
    [
        (r'^\s*def\s*\w+\s*\(.*,\s*\(',
         "tuple parameter unpacking not available in Python 3+"),
        (r'lambda\s*\(.*,.*\)',
         "tuple parameter unpacking not available in Python 3+"),
        (r'(?<!def)\s+(cmp)\(', "cmp is not available in Python 3+"),
        (r'\breduce\s*\(.*', "reduce is not available in Python 3+"),
        (r'\.has_key\b', "dict.has_key is not available in Python 3+"),
        (r'^\s*\t', "don't use tabs"),
        (r'\S;\s*\n', "semicolon"),
        (r'[^_]_\("[^"]+"\s*%', "don't use % inside _()"),
        (r"[^_]_\('[^']+'\s*%", "don't use % inside _()"),
        (r'(\w|\)),\w', "missing whitespace after ,"),
        (r'(\w|\))[+/*\-<>]\w', "missing whitespace in expression"),
        (r'^\s+\w+=\w+[^,)\n]$', "missing whitespace in assignment"),
        (r'(\s+)try:\n((?:\n|\1\s.*\n)+?)\1except.*?:\n'
         r'((?:\n|\1\s.*\n)+?)\1finally:',
         'no try/except/finally in Python 2.4'),
        (r'(\s+)try:\n((?:\n|\1\s.*\n)*?)\1\s*yield\b.*?'
         r'((?:\n|\1\s.*\n)+?)\1finally:',
         'no yield inside try/finally in Python 2.4'),
        (r'.{81}', "line too long"),
        (r' x+[xo][\'"]\n\s+[\'"]x',
         'string join across lines with no space'),
        (r'[^\n]\Z', "no trailing newline"),
        (r'(\S[ \t]+|^[ \t]+)\n', "trailing whitespace"),
#        (r'^\s+[^_ \n][^_. \n]+_[^_\n]+\s*=',
#         "don't use underbars in identifiers"),
        (r'^\s+(self\.)?[A-za-z][a-z0-9]+[A-Z]\w* = ',
         "don't use camelcase in identifiers"),
        # NOTE(review): the ']' after '#\s' below is unbalanced relative to
        # the '[^\\n' opening — looks like a latent typo; confirm upstream.
        (r'^\s*(if|while|def|class|except|try)\s[^[\n]*:\s*[^\\n]#\s]+',
         "linebreak after :"),
        (r'class\s[^( \n]+:', "old-style class, use class foo(object)"),
        (r'class\s[^( \n]+\(\):',
         "class foo() not available in Python 2.4, use class foo(object)"),
        (r'\b(%s)\(' % '|'.join(keyword.kwlist),
         "Python keyword is not a function"),
        (r',]', "unneeded trailing ',' in list"),
#        (r'class\s[A-Z][^\(]*\((?!Exception)',
#         "don't capitalize non-exception classes"),
#        (r'in range\(', "use xrange"),
#        (r'^\s*print\s+', "avoid using print in core and extensions"),
        (r'[\x80-\xff]', "non-ASCII character literal"),
        # NOTE(review): '("\')' matches the two-char sequence '"' + "'";
        # presumably a character class ["\'] was intended — confirm.
        (r'("\')\.format\(', "str.format() not available in Python 2.4"),
        (r'^\s*with\s+', "with not available in Python 2.4"),
        (r'\.isdisjoint\(', "set.isdisjoint not available in Python 2.4"),
        (r'^\s*except.* as .*:', "except as not available in Python 2.4"),
        (r'^\s*os\.path\.relpath', "relpath not available in Python 2.4"),
        (r'(?<!def)\s+(any|all|format)\(',
         "any/all/format not available in Python 2.4"),
        (r'(?<!def)\s+(callable)\(',
         "callable not available in Python 3, "
         "use getattr(f, '__call__', None)"),
        (r'if\s.*\selse', "if ... else form not available in Python 2.4"),
        (r'^\s*(%s)\s\s' % '|'.join(keyword.kwlist),
         "gratuitous whitespace after Python keyword"),
        (r'([\(\[][ \t]\S)|(\S[ \t][\)\]])',
         "gratuitous whitespace in () or []"),
#        (r'\s\s=', "gratuitous whitespace before ="),
        (r'[^>< ](\+=|-=|!=|<>|<=|>=|<<=|>>=|%=)\S',
         "missing whitespace around operator"),
        (r'[^>< ](\+=|-=|!=|<>|<=|>=|<<=|>>=|%=)\s',
         "missing whitespace around operator"),
        (r'\s(\+=|-=|!=|<>|<=|>=|<<=|>>=|%=)\S',
         "missing whitespace around operator"),
        (r'[^^+=*/!<>&| %-](\s=|=\s)[^= ]',
         "wrong whitespace around ="),
        (r'raise Exception', "don't raise generic exceptions"),
        (r' is\s+(not\s+)?["\'0-9-]', "object comparison with literal"),
        (r' [=!]=\s+(True|False|None)',
         "comparison with singleton, use 'is' or 'is not' instead"),
        (r'^\s*(while|if) [01]:',
         "use True/False for constant Boolean expression"),
        (r'(?:(?<!def)\s+|\()hasattr',
         'hasattr(foo, bar) is broken, '
         'use util.safehasattr(foo, bar) instead'),
        (r'opener\([^)]*\).read\(',
         "use opener.read() instead"),
        (r'BaseException', 'not in Python 2.4, use Exception'),
        (r'os\.path\.relpath', 'os.path.relpath is not in Python 2.5'),
        (r'opener\([^)]*\).write\(',
         "use opener.write() instead"),
        (r'[\s\(](open|file)\([^)]*\)\.read\(',
         "use util.readfile() instead"),
        (r'[\s\(](open|file)\([^)]*\)\.write\(',
         "use util.readfile() instead"),
        (r'^[\s\(]*(open(er)?|file)\([^)]*\)',
         "always assign an opened file to a variable, "
         "and close it afterwards"),
        (r'[\s\(](open|file)\([^)]*\)\.',
         "always assign an opened file to a variable, "
         "and close it afterwards"),
        (r'(?i)descendent', "the proper spelling is descendAnt"),
        (r'\.debug\(\_', "don't mark debug messages for translation"),
        (r'\.strip\(\)\.split\(\)', "no need to strip before splitting"),
        (r'^\s*except\s*:', "warning: naked except clause", r'#.*re-raises'),
        (r':\n( )*( ){1,3}[^ ]', "must indent 4 spaces"),
        (r'ui\.(status|progress|write|note|warn)\([\'\"]x',
         "missing _() in ui message (use () to hide false-positives)"),
    ],
    # warnings
    [
    ]
]
221
221
# Filter for Python sources: one verbose multi-line regex capturing either
# a '#' comment or a (possibly triple-)quoted string; reppython masks
# whichever alternative matched.
pyfilters = [
    (r"""(?msx)(?P<comment>\#.*?$)|
         ((?P<quote>('''|\"\"\"|(?<!')'(?!')|(?<!")"(?!")))
          (?P<text>(([^\\]|\\.)*?))
          (?P=quote))""", reppython),
]
228
228
# Style checks for C sources; cpats[0] errors, cpats[1] warnings (none).
# NOTE(review): the scrape collapsed space runs inside literals (e.g. the
# "don't use spaces to indent" pattern) — verify counts against upstream.
cpats = [
    [
        (r'//', "don't use //-style comments"),
        (r'^ ', "don't use spaces to indent"),
        (r'\S\t', "don't use tabs except for indent"),
        (r'(\S[ \t]+|^[ \t]+)\n', "trailing whitespace"),
        (r'.{81}', "line too long"),
        (r'(while|if|do|for)\(', "use space after while/if/do/for"),
        (r'return\(', "return is not a function"),
        (r' ;', "no space before ;"),
        (r'\w+\* \w+', "use int *foo, not int* foo"),
        (r'\([^\)]+\) \w+', "use (int)foo, not (int) foo"),
        (r'\w+ (\+\+|--)', "use foo++, not foo ++"),
        (r'\w,\w', "missing whitespace after ,"),
        (r'^[^#]\w[+/*]\w', "missing whitespace in expression"),
        (r'^#\s+\w', "use #foo, not # foo"),
        (r'[^\n]\Z', "no trailing newline"),
        (r'^\s*#import\b', "use only #include in standard C code"),
    ],
    # warnings
    []
]
251
251
# Filters for C sources: mask block comments, string literals, include
# targets, and collapse wrapped call arguments before checking.
cfilters = [
    (r'(/\*)(((\*(?!/))|[^*])*)\*/', repccomment),
    (r'''(?P<quote>(?<!")")(?P<text>([^"]|\\")+)"(?!")''', repquote),
    (r'''(#\s*include\s+<)([^>]+)>''', repinclude),
    (r'(\()([^)]+\))', repcallspaces),
]
258
258
# Layering check: mercurial/util.py must not reach up into the ui layer.
inutilpats = [
    [
        (r'\bui\.', "don't use ui in util"),
    ],
    # warnings
    []
]
266
266
# Layering check: mercurial/revlog.py must not depend on the repo object.
inrevlogpats = [
    [
        (r'\brepo\.', "don't use repo in revlog"),
    ],
    # warnings
    []
]
274
274
# Dispatch table: (label, filename regex, pre-filters, pattern lists).
# checkfile() applies every entry whose regex matches the file path.
checks = [
    ('python', r'.*\.(py|cgi)$', pyfilters, pypats),
    ('test script', r'(.*/)?test-[^.~]*$', testfilters, testpats),
    ('c', r'.*\.c$', cfilters, cpats),
    ('unified test', r'.*\.t$', utestfilters, utestpats),
    ('layering violation repo in revlog', r'mercurial/revlog\.py', pyfilters,
     inrevlogpats),
    ('layering violation ui in util', r'mercurial/util\.py', pyfilters,
     inutilpats),
]
285
285
286 class norepeatlogger(object):
286 class norepeatlogger(object):
287 def __init__(self):
287 def __init__(self):
288 self._lastseen = None
288 self._lastseen = None
289
289
290 def log(self, fname, lineno, line, msg, blame):
290 def log(self, fname, lineno, line, msg, blame):
291 """print error related a to given line of a given file.
291 """print error related a to given line of a given file.
292
292
293 The faulty line will also be printed but only once in the case
293 The faulty line will also be printed but only once in the case
294 of multiple errors.
294 of multiple errors.
295
295
296 :fname: filename
296 :fname: filename
297 :lineno: line number
297 :lineno: line number
298 :line: actual content of the line
298 :line: actual content of the line
299 :msg: error message
299 :msg: error message
300 """
300 """
301 msgid = fname, lineno, line
301 msgid = fname, lineno, line
302 if msgid != self._lastseen:
302 if msgid != self._lastseen:
303 if blame:
303 if blame:
304 print "%s:%d (%s):" % (fname, lineno, blame)
304 print "%s:%d (%s):" % (fname, lineno, blame)
305 else:
305 else:
306 print "%s:%d:" % (fname, lineno)
306 print "%s:%d:" % (fname, lineno)
307 print " > %s" % line
307 print " > %s" % line
308 self._lastseen = msgid
308 self._lastseen = msgid
309 print " " + msg
309 print " " + msg
310
310
311 _defaultlogger = norepeatlogger()
311 _defaultlogger = norepeatlogger()
312
312
def getblame(f):
    """Return [(line, user, rev), ...] for file *f*.

    Shells out to 'hg annotate -un' and parses each output line of the
    form 'user rev: content' (the char after ':' and the trailing
    newline are stripped from the content).
    """
    annotated = []
    for raw in os.popen('hg annotate -un %s' % f):
        meta, content = raw.split(':', 1)
        user, rev = meta.split()
        annotated.append((content[1:-1], user, rev))
    return annotated
320
320
def checkfile(f, logfunc=_defaultlogger.log, maxerr=None, warnings=False,
              blame=False, debug=False, lineno=True):
    """checks style and portability of a given file

    :f: filepath
    :logfunc: function used to report error
              logfunc(filename, linenumber, linecontent, errormessage)
    :maxerr: number of error to display before aborting.
             Set to false (default) to report all errors

    return True if no error is found, False otherwise.
    """
    blamecache = None
    result = True
    for name, match, filters, pats in checks:
        if debug:
            print("%s %s" % (name, f))
        fc = 0
        if not re.match(match, f):
            if debug:
                print("Skipping %s for %s it doesn't match %s" % (
                      name, match, f))
            continue
        fp = open(f)
        pre = post = fp.read()
        fp.close()
        # opt-out marker, spelled in two pieces so this file doesn't
        # exempt itself from checking
        if "no-" + "check-code" in pre:
            if debug:
                print("Skipping %s for %s it has no- and check-code" % (
                      name, f))
            break
        # mask comments/strings/heredocs so they can't trigger checks
        for p, r in filters:
            post = re.sub(p, r, post)
        if warnings:
            pats = pats[0] + pats[1]
        else:
            pats = pats[0]
        # print(post) # uncomment to show filtered version

        if debug:
            print("Checking %s for %s" % (name, f))

        prelines = None
        errors = []
        for pat in pats:
            if len(pat) == 3:
                p, msg, ignore = pat
            else:
                p, msg = pat
                ignore = None

            # fix-up regexes for multi-line searches
            po = p
            # \s doesn't match \n
            p = re.sub(r'(?<!\\)\\s', r'[ \\t]', p)
            # [^...] doesn't match newline
            p = re.sub(r'(?<!\\)\[\^', r'[^\\n', p)

            #print(po, '=>', p)

            pos = 0
            n = 0
            for m in re.finditer(p, post, re.MULTILINE):
                if prelines is None:
                    prelines = pre.splitlines()
                    postlines = post.splitlines(True)

                # advance (pos, n) to the line containing the match start;
                # n never rewinds because finditer yields matches in order
                start = m.start()
                while n < len(postlines):
                    step = len(postlines[n])
                    if pos + step > start:
                        break
                    pos += step
                    n += 1
                l = prelines[n]

                # per-line opt-out marker (split so it doesn't match here)
                if "check-code" + "-ignore" in l:
                    if debug:
                        print("Skipping %s for %s:%s (check-code -ignore)" % (
                              name, f, n))
                    continue
                elif ignore and re.search(ignore, l, re.MULTILINE):
                    continue
                bd = ""
                if blame:
                    bd = 'working directory'
                    if not blamecache:
                        blamecache = getblame(f)
                    if n < len(blamecache):
                        bl, bu, br = blamecache[n]
                        if bl == l:
                            bd = '%s@%s' % (bu, br)
                errors.append((f, lineno and n + 1, l, msg, bd))
                result = False

        errors.sort()
        for e in errors:
            logfunc(*e)
            fc += 1
            if maxerr and fc >= maxerr:
                print(" (too many errors, giving up)")
                break

    return result
425
425
if __name__ == "__main__":
    # Command-line driver: check every named file (or everything in the
    # current directory) and exit non-zero if any check failed.
    parser = optparse.OptionParser("%prog [options] [files]")
    parser.add_option("-w", "--warnings", action="store_true",
                      help="include warning-level checks")
    parser.add_option("-p", "--per-file", type="int",
                      help="max warnings per file")
    parser.add_option("-b", "--blame", action="store_true",
                      help="use annotate to generate blame info")
    parser.add_option("", "--debug", action="store_true",
                      help="show debug information")
    parser.add_option("", "--nolineno", action="store_false",
                      dest='lineno', help="don't show line numbers")

    parser.set_defaults(per_file=15, warnings=False, blame=False, debug=False,
                        lineno=True)
    (options, args) = parser.parse_args()

    if len(args) == 0:
        check = glob.glob("*")
    else:
        check = args

    ret = 0
    for f in check:
        if not checkfile(f, maxerr=options.per_file, warnings=options.warnings,
                         blame=options.blame, debug=options.debug,
                         lineno=options.lineno):
            ret = 1
    sys.exit(ret)
@@ -1,1110 +1,1110 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 # $Id: manpage.py 6110 2009-08-31 14:40:33Z grubert $
2 # $Id: manpage.py 6110 2009-08-31 14:40:33Z grubert $
3 # Author: Engelbert Gruber <grubert@users.sourceforge.net>
3 # Author: Engelbert Gruber <grubert@users.sourceforge.net>
4 # Copyright: This module is put into the public domain.
4 # Copyright: This module is put into the public domain.
5
5
6 """
6 """
7 Simple man page writer for reStructuredText.
7 Simple man page writer for reStructuredText.
8
8
9 Man pages (short for "manual pages") contain system documentation on unix-like
9 Man pages (short for "manual pages") contain system documentation on unix-like
10 systems. The pages are grouped in numbered sections:
10 systems. The pages are grouped in numbered sections:
11
11
12 1 executable programs and shell commands
12 1 executable programs and shell commands
13 2 system calls
13 2 system calls
14 3 library functions
14 3 library functions
15 4 special files
15 4 special files
16 5 file formats
16 5 file formats
17 6 games
17 6 games
18 7 miscellaneous
18 7 miscellaneous
19 8 system administration
19 8 system administration
20
20
21 Man pages are written *troff*, a text file formatting system.
21 Man pages are written *troff*, a text file formatting system.
22
22
23 See http://www.tldp.org/HOWTO/Man-Page for a start.
23 See http://www.tldp.org/HOWTO/Man-Page for a start.
24
24
25 Man pages have no subsection only parts.
25 Man pages have no subsection only parts.
26 Standard parts
26 Standard parts
27
27
28 NAME ,
28 NAME ,
29 SYNOPSIS ,
29 SYNOPSIS ,
30 DESCRIPTION ,
30 DESCRIPTION ,
31 OPTIONS ,
31 OPTIONS ,
32 FILES ,
32 FILES ,
33 SEE ALSO ,
33 SEE ALSO ,
34 BUGS ,
34 BUGS ,
35
35
36 and
36 and
37
37
38 AUTHOR .
38 AUTHOR .
39
39
40 A unix-like system keeps an index of the DESCRIPTIONs, which is accesable
40 A unix-like system keeps an index of the DESCRIPTIONs, which is accesable
41 by the command whatis or apropos.
41 by the command whatis or apropos.
42
42
43 """
43 """
44
44
45 __docformat__ = 'reStructuredText'
45 __docformat__ = 'reStructuredText'
46
46
47 import re
47 import re
48
48
49 from docutils import nodes, writers, languages
49 from docutils import nodes, writers, languages
50 try:
50 try:
51 import roman
51 import roman
52 except ImportError:
52 except ImportError:
53 from docutils.utils import roman
53 from docutils.utils import roman
54 import inspect
54 import inspect
55
55
56 FIELD_LIST_INDENT = 7
56 FIELD_LIST_INDENT = 7
57 DEFINITION_LIST_INDENT = 7
57 DEFINITION_LIST_INDENT = 7
58 OPTION_LIST_INDENT = 7
58 OPTION_LIST_INDENT = 7
59 BLOCKQOUTE_INDENT = 3.5
59 BLOCKQOUTE_INDENT = 3.5
60
60
61 # Define two macros so man/roff can calculate the
61 # Define two macros so man/roff can calculate the
62 # indent/unindent margins by itself
62 # indent/unindent margins by itself
63 MACRO_DEF = (r""".
63 MACRO_DEF = (r""".
64 .nr rst2man-indent-level 0
64 .nr rst2man-indent-level 0
65 .
65 .
66 .de1 rstReportMargin
66 .de1 rstReportMargin
67 \\$1 \\n[an-margin]
67 \\$1 \\n[an-margin]
68 level \\n[rst2man-indent-level]
68 level \\n[rst2man-indent-level]
69 level margin: \\n[rst2man-indent\\n[rst2man-indent-level]]
69 level margin: \\n[rst2man-indent\\n[rst2man-indent-level]]
70 -
70 -
71 \\n[rst2man-indent0]
71 \\n[rst2man-indent0]
72 \\n[rst2man-indent1]
72 \\n[rst2man-indent1]
73 \\n[rst2man-indent2]
73 \\n[rst2man-indent2]
74 ..
74 ..
75 .de1 INDENT
75 .de1 INDENT
76 .\" .rstReportMargin pre:
76 .\" .rstReportMargin pre:
77 . RS \\$1
77 . RS \\$1
78 . nr rst2man-indent\\n[rst2man-indent-level] \\n[an-margin]
78 . nr rst2man-indent\\n[rst2man-indent-level] \\n[an-margin]
79 . nr rst2man-indent-level +1
79 . nr rst2man-indent-level +1
80 .\" .rstReportMargin post:
80 .\" .rstReportMargin post:
81 ..
81 ..
82 .de UNINDENT
82 .de UNINDENT
83 . RE
83 . RE
84 .\" indent \\n[an-margin]
84 .\" indent \\n[an-margin]
85 .\" old: \\n[rst2man-indent\\n[rst2man-indent-level]]
85 .\" old: \\n[rst2man-indent\\n[rst2man-indent-level]]
86 .nr rst2man-indent-level -1
86 .nr rst2man-indent-level -1
87 .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]]
87 .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]]
88 .in \\n[rst2man-indent\\n[rst2man-indent-level]]u
88 .in \\n[rst2man-indent\\n[rst2man-indent-level]]u
89 ..
89 ..
90 """)
90 """)
91
91
92 class Writer(writers.Writer):
92 class Writer(writers.Writer):
93
93
94 supported = ('manpage')
94 supported = ('manpage')
95 """Formats this writer supports."""
95 """Formats this writer supports."""
96
96
97 output = None
97 output = None
98 """Final translated form of `document`."""
98 """Final translated form of `document`."""
99
99
100 def __init__(self):
100 def __init__(self):
101 writers.Writer.__init__(self)
101 writers.Writer.__init__(self)
102 self.translator_class = Translator
102 self.translator_class = Translator
103
103
104 def translate(self):
104 def translate(self):
105 visitor = self.translator_class(self.document)
105 visitor = self.translator_class(self.document)
106 self.document.walkabout(visitor)
106 self.document.walkabout(visitor)
107 self.output = visitor.astext()
107 self.output = visitor.astext()
108
108
109
109
110 class Table(object):
110 class Table(object):
111 def __init__(self):
111 def __init__(self):
112 self._rows = []
112 self._rows = []
113 self._options = ['center']
113 self._options = ['center']
114 self._tab_char = '\t'
114 self._tab_char = '\t'
115 self._coldefs = []
115 self._coldefs = []
116 def new_row(self):
116 def new_row(self):
117 self._rows.append([])
117 self._rows.append([])
118 def append_separator(self, separator):
118 def append_separator(self, separator):
119 """Append the separator for table head."""
119 """Append the separator for table head."""
120 self._rows.append([separator])
120 self._rows.append([separator])
121 def append_cell(self, cell_lines):
121 def append_cell(self, cell_lines):
122 """cell_lines is an array of lines"""
122 """cell_lines is an array of lines"""
123 start = 0
123 start = 0
124 if len(cell_lines) > 0 and cell_lines[0] == '.sp\n':
124 if len(cell_lines) > 0 and cell_lines[0] == '.sp\n':
125 start = 1
125 start = 1
126 self._rows[-1].append(cell_lines[start:])
126 self._rows[-1].append(cell_lines[start:])
127 if len(self._coldefs) < len(self._rows[-1]):
127 if len(self._coldefs) < len(self._rows[-1]):
128 self._coldefs.append('l')
128 self._coldefs.append('l')
129 def _minimize_cell(self, cell_lines):
129 def _minimize_cell(self, cell_lines):
130 """Remove leading and trailing blank and ``.sp`` lines"""
130 """Remove leading and trailing blank and ``.sp`` lines"""
131 while (cell_lines and cell_lines[0] in ('\n', '.sp\n')):
131 while (cell_lines and cell_lines[0] in ('\n', '.sp\n')):
132 del cell_lines[0]
132 del cell_lines[0]
133 while (cell_lines and cell_lines[-1] in ('\n', '.sp\n')):
133 while (cell_lines and cell_lines[-1] in ('\n', '.sp\n')):
134 del cell_lines[-1]
134 del cell_lines[-1]
135 def as_list(self):
135 def as_list(self):
136 text = ['.TS\n']
136 text = ['.TS\n']
137 text.append(' '.join(self._options) + ';\n')
137 text.append(' '.join(self._options) + ';\n')
138 text.append('|%s|.\n' % ('|'.join(self._coldefs)))
138 text.append('|%s|.\n' % ('|'.join(self._coldefs)))
139 for row in self._rows:
139 for row in self._rows:
140 # row = array of cells. cell = array of lines.
140 # row = array of cells. cell = array of lines.
141 text.append('_\n') # line above
141 text.append('_\n') # line above
142 text.append('T{\n')
142 text.append('T{\n')
143 for i in range(len(row)):
143 for i in range(len(row)):
144 cell = row[i]
144 cell = row[i]
145 self._minimize_cell(cell)
145 self._minimize_cell(cell)
146 text.extend(cell)
146 text.extend(cell)
147 if not text[-1].endswith('\n'):
147 if not text[-1].endswith('\n'):
148 text[-1] += '\n'
148 text[-1] += '\n'
149 if i < len(row)-1:
149 if i < len(row) - 1:
150 text.append('T}'+self._tab_char+'T{\n')
150 text.append('T}'+self._tab_char+'T{\n')
151 else:
151 else:
152 text.append('T}\n')
152 text.append('T}\n')
153 text.append('_\n')
153 text.append('_\n')
154 text.append('.TE\n')
154 text.append('.TE\n')
155 return text
155 return text
156
156
157 class Translator(nodes.NodeVisitor):
157 class Translator(nodes.NodeVisitor):
158 """"""
158 """"""
159
159
160 words_and_spaces = re.compile(r'\S+| +|\n')
160 words_and_spaces = re.compile(r'\S+| +|\n')
161 document_start = """Man page generated from reStructuredText."""
161 document_start = """Man page generated from reStructuredText."""
162
162
163 def __init__(self, document):
163 def __init__(self, document):
164 nodes.NodeVisitor.__init__(self, document)
164 nodes.NodeVisitor.__init__(self, document)
165 self.settings = settings = document.settings
165 self.settings = settings = document.settings
166 lcode = settings.language_code
166 lcode = settings.language_code
167 arglen = len(inspect.getargspec(languages.get_language)[0])
167 arglen = len(inspect.getargspec(languages.get_language)[0])
168 if arglen == 2:
168 if arglen == 2:
169 self.language = languages.get_language(lcode,
169 self.language = languages.get_language(lcode,
170 self.document.reporter)
170 self.document.reporter)
171 else:
171 else:
172 self.language = languages.get_language(lcode)
172 self.language = languages.get_language(lcode)
173 self.head = []
173 self.head = []
174 self.body = []
174 self.body = []
175 self.foot = []
175 self.foot = []
176 self.section_level = 0
176 self.section_level = 0
177 self.context = []
177 self.context = []
178 self.topic_class = ''
178 self.topic_class = ''
179 self.colspecs = []
179 self.colspecs = []
180 self.compact_p = 1
180 self.compact_p = 1
181 self.compact_simple = None
181 self.compact_simple = None
182 # the list style "*" bullet or "#" numbered
182 # the list style "*" bullet or "#" numbered
183 self._list_char = []
183 self._list_char = []
184 # writing the header .TH and .SH NAME is postboned after
184 # writing the header .TH and .SH NAME is postboned after
185 # docinfo.
185 # docinfo.
186 self._docinfo = {
186 self._docinfo = {
187 "title" : "", "title_upper": "",
187 "title" : "", "title_upper": "",
188 "subtitle" : "",
188 "subtitle" : "",
189 "manual_section" : "", "manual_group" : "",
189 "manual_section" : "", "manual_group" : "",
190 "author" : [],
190 "author" : [],
191 "date" : "",
191 "date" : "",
192 "copyright" : "",
192 "copyright" : "",
193 "version" : "",
193 "version" : "",
194 }
194 }
195 self._docinfo_keys = [] # a list to keep the sequence as in source.
195 self._docinfo_keys = [] # a list to keep the sequence as in source.
196 self._docinfo_names = {} # to get name from text not normalized.
196 self._docinfo_names = {} # to get name from text not normalized.
197 self._in_docinfo = None
197 self._in_docinfo = None
198 self._active_table = None
198 self._active_table = None
199 self._in_literal = False
199 self._in_literal = False
200 self.header_written = 0
200 self.header_written = 0
201 self._line_block = 0
201 self._line_block = 0
202 self.authors = []
202 self.authors = []
203 self.section_level = 0
203 self.section_level = 0
204 self._indent = [0]
204 self._indent = [0]
205 # central definition of simple processing rules
205 # central definition of simple processing rules
206 # what to output on : visit, depart
206 # what to output on : visit, depart
207 # Do not use paragraph requests ``.PP`` because these set indentation.
207 # Do not use paragraph requests ``.PP`` because these set indentation.
208 # use ``.sp``. Remove superfluous ``.sp`` in ``astext``.
208 # use ``.sp``. Remove superfluous ``.sp`` in ``astext``.
209 #
209 #
210 # Fonts are put on a stack, the top one is used.
210 # Fonts are put on a stack, the top one is used.
211 # ``.ft P`` or ``\\fP`` pop from stack.
211 # ``.ft P`` or ``\\fP`` pop from stack.
212 # ``B`` bold, ``I`` italic, ``R`` roman should be available.
212 # ``B`` bold, ``I`` italic, ``R`` roman should be available.
213 # Hopefully ``C`` courier too.
213 # Hopefully ``C`` courier too.
214 self.defs = {
214 self.defs = {
215 'indent' : ('.INDENT %.1f\n', '.UNINDENT\n'),
215 'indent' : ('.INDENT %.1f\n', '.UNINDENT\n'),
216 'definition_list_item' : ('.TP', ''),
216 'definition_list_item' : ('.TP', ''),
217 'field_name' : ('.TP\n.B ', '\n'),
217 'field_name' : ('.TP\n.B ', '\n'),
218 'literal' : ('\\fB', '\\fP'),
218 'literal' : ('\\fB', '\\fP'),
219 'literal_block' : ('.sp\n.nf\n.ft C\n', '\n.ft P\n.fi\n'),
219 'literal_block' : ('.sp\n.nf\n.ft C\n', '\n.ft P\n.fi\n'),
220
220
221 'option_list_item' : ('.TP\n', ''),
221 'option_list_item' : ('.TP\n', ''),
222
222
223 'reference' : (r'\%', r'\:'),
223 'reference' : (r'\%', r'\:'),
224 'emphasis': ('\\fI', '\\fP'),
224 'emphasis': ('\\fI', '\\fP'),
225 'strong' : ('\\fB', '\\fP'),
225 'strong' : ('\\fB', '\\fP'),
226 'term' : ('\n.B ', '\n'),
226 'term' : ('\n.B ', '\n'),
227 'title_reference' : ('\\fI', '\\fP'),
227 'title_reference' : ('\\fI', '\\fP'),
228
228
229 'topic-title' : ('.SS ',),
229 'topic-title' : ('.SS ',),
230 'sidebar-title' : ('.SS ',),
230 'sidebar-title' : ('.SS ',),
231
231
232 'problematic' : ('\n.nf\n', '\n.fi\n'),
232 'problematic' : ('\n.nf\n', '\n.fi\n'),
233 }
233 }
234 # NOTE don't specify the newline before a dot-command, but ensure
234 # NOTE don't specify the newline before a dot-command, but ensure
235 # it is there.
235 # it is there.
236
236
237 def comment_begin(self, text):
237 def comment_begin(self, text):
238 """Return commented version of the passed text WITHOUT end of
238 """Return commented version of the passed text WITHOUT end of
239 line/comment."""
239 line/comment."""
240 prefix = '.\\" '
240 prefix = '.\\" '
241 out_text = ''.join(
241 out_text = ''.join(
242 [(prefix + in_line + '\n')
242 [(prefix + in_line + '\n')
243 for in_line in text.split('\n')])
243 for in_line in text.split('\n')])
244 return out_text
244 return out_text
245
245
246 def comment(self, text):
246 def comment(self, text):
247 """Return commented version of the passed text."""
247 """Return commented version of the passed text."""
248 return self.comment_begin(text)+'.\n'
248 return self.comment_begin(text)+'.\n'
249
249
250 def ensure_eol(self):
250 def ensure_eol(self):
251 """Ensure the last line in body is terminated by new line."""
251 """Ensure the last line in body is terminated by new line."""
252 if self.body[-1][-1] != '\n':
252 if self.body[-1][-1] != '\n':
253 self.body.append('\n')
253 self.body.append('\n')
254
254
255 def astext(self):
255 def astext(self):
256 """Return the final formatted document as a string."""
256 """Return the final formatted document as a string."""
257 if not self.header_written:
257 if not self.header_written:
258 # ensure we get a ".TH" as viewers require it.
258 # ensure we get a ".TH" as viewers require it.
259 self.head.append(self.header())
259 self.head.append(self.header())
260 # filter body
260 # filter body
261 for i in xrange(len(self.body)-1, 0, -1):
261 for i in xrange(len(self.body) - 1, 0, -1):
262 # remove superfluous vertical gaps.
262 # remove superfluous vertical gaps.
263 if self.body[i] == '.sp\n':
263 if self.body[i] == '.sp\n':
264 if self.body[i - 1][:4] in ('.BI ','.IP '):
264 if self.body[i - 1][:4] in ('.BI ','.IP '):
265 self.body[i] = '.\n'
265 self.body[i] = '.\n'
266 elif (self.body[i - 1][:3] == '.B ' and
266 elif (self.body[i - 1][:3] == '.B ' and
267 self.body[i - 2][:4] == '.TP\n'):
267 self.body[i - 2][:4] == '.TP\n'):
268 self.body[i] = '.\n'
268 self.body[i] = '.\n'
269 elif (self.body[i - 1] == '\n' and
269 elif (self.body[i - 1] == '\n' and
270 self.body[i - 2][0] != '.' and
270 self.body[i - 2][0] != '.' and
271 (self.body[i - 3][:7] == '.TP\n.B '
271 (self.body[i - 3][:7] == '.TP\n.B '
272 or self.body[i - 3][:4] == '\n.B ')
272 or self.body[i - 3][:4] == '\n.B ')
273 ):
273 ):
274 self.body[i] = '.\n'
274 self.body[i] = '.\n'
275 return ''.join(self.head + self.body + self.foot)
275 return ''.join(self.head + self.body + self.foot)
276
276
277 def deunicode(self, text):
277 def deunicode(self, text):
278 text = text.replace(u'\xa0', '\\ ')
278 text = text.replace(u'\xa0', '\\ ')
279 text = text.replace(u'\u2020', '\\(dg')
279 text = text.replace(u'\u2020', '\\(dg')
280 return text
280 return text
281
281
282 def visit_Text(self, node):
282 def visit_Text(self, node):
283 text = node.astext()
283 text = node.astext()
284 text = text.replace('\\','\\e')
284 text = text.replace('\\','\\e')
285 replace_pairs = [
285 replace_pairs = [
286 (u'-', ur'\-'),
286 (u'-', ur'\-'),
287 (u'\'', ur'\(aq'),
287 (u'\'', ur'\(aq'),
288 (u'Β΄', ur'\''),
288 (u'Β΄', ur'\''),
289 (u'`', ur'\(ga'),
289 (u'`', ur'\(ga'),
290 ]
290 ]
291 for (in_char, out_markup) in replace_pairs:
291 for (in_char, out_markup) in replace_pairs:
292 text = text.replace(in_char, out_markup)
292 text = text.replace(in_char, out_markup)
293 # unicode
293 # unicode
294 text = self.deunicode(text)
294 text = self.deunicode(text)
295 if self._in_literal:
295 if self._in_literal:
296 # prevent interpretation of "." at line start
296 # prevent interpretation of "." at line start
297 if text[0] == '.':
297 if text[0] == '.':
298 text = '\\&' + text
298 text = '\\&' + text
299 text = text.replace('\n.', '\n\\&.')
299 text = text.replace('\n.', '\n\\&.')
300 self.body.append(text)
300 self.body.append(text)
301
301
302 def depart_Text(self, node):
302 def depart_Text(self, node):
303 pass
303 pass
304
304
305 def list_start(self, node):
305 def list_start(self, node):
306 class enum_char(object):
306 class enum_char(object):
307 enum_style = {
307 enum_style = {
308 'bullet' : '\\(bu',
308 'bullet' : '\\(bu',
309 'emdash' : '\\(em',
309 'emdash' : '\\(em',
310 }
310 }
311
311
312 def __init__(self, style):
312 def __init__(self, style):
313 self._style = style
313 self._style = style
314 if 'start' in node:
314 if 'start' in node:
315 self._cnt = node['start'] - 1
315 self._cnt = node['start'] - 1
316 else:
316 else:
317 self._cnt = 0
317 self._cnt = 0
318 self._indent = 2
318 self._indent = 2
319 if style == 'arabic':
319 if style == 'arabic':
320 # indentation depends on number of childrens
320 # indentation depends on number of childrens
321 # and start value.
321 # and start value.
322 self._indent = len(str(len(node.children)))
322 self._indent = len(str(len(node.children)))
323 self._indent += len(str(self._cnt)) + 1
323 self._indent += len(str(self._cnt)) + 1
324 elif style == 'loweralpha':
324 elif style == 'loweralpha':
325 self._cnt += ord('a') - 1
325 self._cnt += ord('a') - 1
326 self._indent = 3
326 self._indent = 3
327 elif style == 'upperalpha':
327 elif style == 'upperalpha':
328 self._cnt += ord('A') - 1
328 self._cnt += ord('A') - 1
329 self._indent = 3
329 self._indent = 3
330 elif style.endswith('roman'):
330 elif style.endswith('roman'):
331 self._indent = 5
331 self._indent = 5
332
332
333 def next(self):
333 def next(self):
334 if self._style == 'bullet':
334 if self._style == 'bullet':
335 return self.enum_style[self._style]
335 return self.enum_style[self._style]
336 elif self._style == 'emdash':
336 elif self._style == 'emdash':
337 return self.enum_style[self._style]
337 return self.enum_style[self._style]
338 self._cnt += 1
338 self._cnt += 1
339 # TODO add prefix postfix
339 # TODO add prefix postfix
340 if self._style == 'arabic':
340 if self._style == 'arabic':
341 return "%d." % self._cnt
341 return "%d." % self._cnt
342 elif self._style in ('loweralpha', 'upperalpha'):
342 elif self._style in ('loweralpha', 'upperalpha'):
343 return "%c." % self._cnt
343 return "%c." % self._cnt
344 elif self._style.endswith('roman'):
344 elif self._style.endswith('roman'):
345 res = roman.toRoman(self._cnt) + '.'
345 res = roman.toRoman(self._cnt) + '.'
346 if self._style.startswith('upper'):
346 if self._style.startswith('upper'):
347 return res.upper()
347 return res.upper()
348 return res.lower()
348 return res.lower()
349 else:
349 else:
350 return "%d." % self._cnt
350 return "%d." % self._cnt
351 def get_width(self):
351 def get_width(self):
352 return self._indent
352 return self._indent
353 def __repr__(self):
353 def __repr__(self):
354 return 'enum_style-%s' % list(self._style)
354 return 'enum_style-%s' % list(self._style)
355
355
356 if 'enumtype' in node:
356 if 'enumtype' in node:
357 self._list_char.append(enum_char(node['enumtype']))
357 self._list_char.append(enum_char(node['enumtype']))
358 else:
358 else:
359 self._list_char.append(enum_char('bullet'))
359 self._list_char.append(enum_char('bullet'))
360 if len(self._list_char) > 1:
360 if len(self._list_char) > 1:
361 # indent nested lists
361 # indent nested lists
362 self.indent(self._list_char[-2].get_width())
362 self.indent(self._list_char[-2].get_width())
363 else:
363 else:
364 self.indent(self._list_char[-1].get_width())
364 self.indent(self._list_char[-1].get_width())
365
365
366 def list_end(self):
366 def list_end(self):
367 self.dedent()
367 self.dedent()
368 self._list_char.pop()
368 self._list_char.pop()
369
369
370 def header(self):
370 def header(self):
371 tmpl = (".TH %(title_upper)s %(manual_section)s"
371 tmpl = (".TH %(title_upper)s %(manual_section)s"
372 " \"%(date)s\" \"%(version)s\" \"%(manual_group)s\"\n"
372 " \"%(date)s\" \"%(version)s\" \"%(manual_group)s\"\n"
373 ".SH NAME\n"
373 ".SH NAME\n"
374 "%(title)s \- %(subtitle)s\n")
374 "%(title)s \- %(subtitle)s\n")
375 return tmpl % self._docinfo
375 return tmpl % self._docinfo
376
376
377 def append_header(self):
377 def append_header(self):
378 """append header with .TH and .SH NAME"""
378 """append header with .TH and .SH NAME"""
379 # NOTE before everything
379 # NOTE before everything
380 # .TH title_upper section date source manual
380 # .TH title_upper section date source manual
381 if self.header_written:
381 if self.header_written:
382 return
382 return
383 self.body.append(self.header())
383 self.body.append(self.header())
384 self.body.append(MACRO_DEF)
384 self.body.append(MACRO_DEF)
385 self.header_written = 1
385 self.header_written = 1
386
386
387 def visit_address(self, node):
387 def visit_address(self, node):
388 self.visit_docinfo_item(node, 'address')
388 self.visit_docinfo_item(node, 'address')
389
389
390 def depart_address(self, node):
390 def depart_address(self, node):
391 pass
391 pass
392
392
393 def visit_admonition(self, node, name=None):
393 def visit_admonition(self, node, name=None):
394 if name:
394 if name:
395 self.body.append('.IP %s\n' %
395 self.body.append('.IP %s\n' %
396 self.language.labels.get(name, name))
396 self.language.labels.get(name, name))
397
397
398 def depart_admonition(self, node):
398 def depart_admonition(self, node):
399 self.body.append('.RE\n')
399 self.body.append('.RE\n')
400
400
401 def visit_attention(self, node):
401 def visit_attention(self, node):
402 self.visit_admonition(node, 'attention')
402 self.visit_admonition(node, 'attention')
403
403
404 depart_attention = depart_admonition
404 depart_attention = depart_admonition
405
405
406 def visit_docinfo_item(self, node, name):
406 def visit_docinfo_item(self, node, name):
407 if name == 'author':
407 if name == 'author':
408 self._docinfo[name].append(node.astext())
408 self._docinfo[name].append(node.astext())
409 else:
409 else:
410 self._docinfo[name] = node.astext()
410 self._docinfo[name] = node.astext()
411 self._docinfo_keys.append(name)
411 self._docinfo_keys.append(name)
412 raise nodes.SkipNode
412 raise nodes.SkipNode
413
413
414 def depart_docinfo_item(self, node):
414 def depart_docinfo_item(self, node):
415 pass
415 pass
416
416
417 def visit_author(self, node):
417 def visit_author(self, node):
418 self.visit_docinfo_item(node, 'author')
418 self.visit_docinfo_item(node, 'author')
419
419
420 depart_author = depart_docinfo_item
420 depart_author = depart_docinfo_item
421
421
422 def visit_authors(self, node):
422 def visit_authors(self, node):
423 # _author is called anyway.
423 # _author is called anyway.
424 pass
424 pass
425
425
426 def depart_authors(self, node):
426 def depart_authors(self, node):
427 pass
427 pass
428
428
429 def visit_block_quote(self, node):
429 def visit_block_quote(self, node):
430 # BUG/HACK: indent alway uses the _last_ indention,
430 # BUG/HACK: indent alway uses the _last_ indention,
431 # thus we need two of them.
431 # thus we need two of them.
432 self.indent(BLOCKQOUTE_INDENT)
432 self.indent(BLOCKQOUTE_INDENT)
433 self.indent(0)
433 self.indent(0)
434
434
435 def depart_block_quote(self, node):
435 def depart_block_quote(self, node):
436 self.dedent()
436 self.dedent()
437 self.dedent()
437 self.dedent()
438
438
439 def visit_bullet_list(self, node):
439 def visit_bullet_list(self, node):
440 self.list_start(node)
440 self.list_start(node)
441
441
442 def depart_bullet_list(self, node):
442 def depart_bullet_list(self, node):
443 self.list_end()
443 self.list_end()
444
444
445 def visit_caption(self, node):
445 def visit_caption(self, node):
446 pass
446 pass
447
447
448 def depart_caption(self, node):
448 def depart_caption(self, node):
449 pass
449 pass
450
450
451 def visit_caution(self, node):
451 def visit_caution(self, node):
452 self.visit_admonition(node, 'caution')
452 self.visit_admonition(node, 'caution')
453
453
454 depart_caution = depart_admonition
454 depart_caution = depart_admonition
455
455
456 def visit_citation(self, node):
456 def visit_citation(self, node):
457 num, text = node.astext().split(None, 1)
457 num, text = node.astext().split(None, 1)
458 num = num.strip()
458 num = num.strip()
459 self.body.append('.IP [%s] 5\n' % num)
459 self.body.append('.IP [%s] 5\n' % num)
460
460
461 def depart_citation(self, node):
461 def depart_citation(self, node):
462 pass
462 pass
463
463
464 def visit_citation_reference(self, node):
464 def visit_citation_reference(self, node):
465 self.body.append('['+node.astext()+']')
465 self.body.append('['+node.astext()+']')
466 raise nodes.SkipNode
466 raise nodes.SkipNode
467
467
468 def visit_classifier(self, node):
468 def visit_classifier(self, node):
469 pass
469 pass
470
470
471 def depart_classifier(self, node):
471 def depart_classifier(self, node):
472 pass
472 pass
473
473
474 def visit_colspec(self, node):
474 def visit_colspec(self, node):
475 self.colspecs.append(node)
475 self.colspecs.append(node)
476
476
477 def depart_colspec(self, node):
477 def depart_colspec(self, node):
478 pass
478 pass
479
479
480 def write_colspecs(self):
480 def write_colspecs(self):
481 self.body.append("%s.\n" % ('L '*len(self.colspecs)))
481 self.body.append("%s.\n" % ('L '*len(self.colspecs)))
482
482
483 def visit_comment(self, node,
483 def visit_comment(self, node,
484 sub=re.compile('-(?=-)').sub):
484 sub=re.compile('-(?=-)').sub):
485 self.body.append(self.comment(node.astext()))
485 self.body.append(self.comment(node.astext()))
486 raise nodes.SkipNode
486 raise nodes.SkipNode
487
487
488 def visit_contact(self, node):
488 def visit_contact(self, node):
489 self.visit_docinfo_item(node, 'contact')
489 self.visit_docinfo_item(node, 'contact')
490
490
491 depart_contact = depart_docinfo_item
491 depart_contact = depart_docinfo_item
492
492
493 def visit_container(self, node):
493 def visit_container(self, node):
494 pass
494 pass
495
495
496 def depart_container(self, node):
496 def depart_container(self, node):
497 pass
497 pass
498
498
499 def visit_compound(self, node):
499 def visit_compound(self, node):
500 pass
500 pass
501
501
502 def depart_compound(self, node):
502 def depart_compound(self, node):
503 pass
503 pass
504
504
505 def visit_copyright(self, node):
505 def visit_copyright(self, node):
506 self.visit_docinfo_item(node, 'copyright')
506 self.visit_docinfo_item(node, 'copyright')
507
507
508 def visit_danger(self, node):
508 def visit_danger(self, node):
509 self.visit_admonition(node, 'danger')
509 self.visit_admonition(node, 'danger')
510
510
511 depart_danger = depart_admonition
511 depart_danger = depart_admonition
512
512
513 def visit_date(self, node):
513 def visit_date(self, node):
514 self.visit_docinfo_item(node, 'date')
514 self.visit_docinfo_item(node, 'date')
515
515
516 def visit_decoration(self, node):
516 def visit_decoration(self, node):
517 pass
517 pass
518
518
519 def depart_decoration(self, node):
519 def depart_decoration(self, node):
520 pass
520 pass
521
521
522 def visit_definition(self, node):
522 def visit_definition(self, node):
523 pass
523 pass
524
524
525 def depart_definition(self, node):
525 def depart_definition(self, node):
526 pass
526 pass
527
527
528 def visit_definition_list(self, node):
528 def visit_definition_list(self, node):
529 self.indent(DEFINITION_LIST_INDENT)
529 self.indent(DEFINITION_LIST_INDENT)
530
530
531 def depart_definition_list(self, node):
531 def depart_definition_list(self, node):
532 self.dedent()
532 self.dedent()
533
533
534 def visit_definition_list_item(self, node):
534 def visit_definition_list_item(self, node):
535 self.body.append(self.defs['definition_list_item'][0])
535 self.body.append(self.defs['definition_list_item'][0])
536
536
537 def depart_definition_list_item(self, node):
537 def depart_definition_list_item(self, node):
538 self.body.append(self.defs['definition_list_item'][1])
538 self.body.append(self.defs['definition_list_item'][1])
539
539
540 def visit_description(self, node):
540 def visit_description(self, node):
541 pass
541 pass
542
542
543 def depart_description(self, node):
543 def depart_description(self, node):
544 pass
544 pass
545
545
546 def visit_docinfo(self, node):
546 def visit_docinfo(self, node):
547 self._in_docinfo = 1
547 self._in_docinfo = 1
548
548
549 def depart_docinfo(self, node):
549 def depart_docinfo(self, node):
550 self._in_docinfo = None
550 self._in_docinfo = None
551 # NOTE nothing should be written before this
551 # NOTE nothing should be written before this
552 self.append_header()
552 self.append_header()
553
553
554 def visit_doctest_block(self, node):
554 def visit_doctest_block(self, node):
555 self.body.append(self.defs['literal_block'][0])
555 self.body.append(self.defs['literal_block'][0])
556 self._in_literal = True
556 self._in_literal = True
557
557
558 def depart_doctest_block(self, node):
558 def depart_doctest_block(self, node):
559 self._in_literal = False
559 self._in_literal = False
560 self.body.append(self.defs['literal_block'][1])
560 self.body.append(self.defs['literal_block'][1])
561
561
562 def visit_document(self, node):
562 def visit_document(self, node):
563 # no blank line between comment and header.
563 # no blank line between comment and header.
564 self.body.append(self.comment(self.document_start).rstrip()+'\n')
564 self.body.append(self.comment(self.document_start).rstrip()+'\n')
565 # writing header is postboned
565 # writing header is postboned
566 self.header_written = 0
566 self.header_written = 0
567
567
568 def depart_document(self, node):
568 def depart_document(self, node):
569 if self._docinfo['author']:
569 if self._docinfo['author']:
570 self.body.append('.SH AUTHOR\n%s\n'
570 self.body.append('.SH AUTHOR\n%s\n'
571 % ', '.join(self._docinfo['author']))
571 % ', '.join(self._docinfo['author']))
572 skip = ('author', 'copyright', 'date',
572 skip = ('author', 'copyright', 'date',
573 'manual_group', 'manual_section',
573 'manual_group', 'manual_section',
574 'subtitle',
574 'subtitle',
575 'title', 'title_upper', 'version')
575 'title', 'title_upper', 'version')
576 for name in self._docinfo_keys:
576 for name in self._docinfo_keys:
577 if name == 'address':
577 if name == 'address':
578 self.body.append("\n%s:\n%s%s.nf\n%s\n.fi\n%s%s" % (
578 self.body.append("\n%s:\n%s%s.nf\n%s\n.fi\n%s%s" % (
579 self.language.labels.get(name, name),
579 self.language.labels.get(name, name),
580 self.defs['indent'][0] % 0,
580 self.defs['indent'][0] % 0,
581 self.defs['indent'][0] % BLOCKQOUTE_INDENT,
581 self.defs['indent'][0] % BLOCKQOUTE_INDENT,
582 self._docinfo[name],
582 self._docinfo[name],
583 self.defs['indent'][1],
583 self.defs['indent'][1],
584 self.defs['indent'][1]))
584 self.defs['indent'][1]))
585 elif name not in skip:
585 elif name not in skip:
586 if name in self._docinfo_names:
586 if name in self._docinfo_names:
587 label = self._docinfo_names[name]
587 label = self._docinfo_names[name]
588 else:
588 else:
589 label = self.language.labels.get(name, name)
589 label = self.language.labels.get(name, name)
590 self.body.append("\n%s: %s\n" % (label, self._docinfo[name]))
590 self.body.append("\n%s: %s\n" % (label, self._docinfo[name]))
591 if self._docinfo['copyright']:
591 if self._docinfo['copyright']:
592 self.body.append('.SH COPYRIGHT\n%s\n'
592 self.body.append('.SH COPYRIGHT\n%s\n'
593 % self._docinfo['copyright'])
593 % self._docinfo['copyright'])
594 self.body.append(self.comment(
594 self.body.append(self.comment(
595 'Generated by docutils manpage writer.\n'))
595 'Generated by docutils manpage writer.\n'))
596
596
597 def visit_emphasis(self, node):
597 def visit_emphasis(self, node):
598 self.body.append(self.defs['emphasis'][0])
598 self.body.append(self.defs['emphasis'][0])
599
599
600 def depart_emphasis(self, node):
600 def depart_emphasis(self, node):
601 self.body.append(self.defs['emphasis'][1])
601 self.body.append(self.defs['emphasis'][1])
602
602
603 def visit_entry(self, node):
603 def visit_entry(self, node):
604 # a cell in a table row
604 # a cell in a table row
605 if 'morerows' in node:
605 if 'morerows' in node:
606 self.document.reporter.warning('"table row spanning" not supported',
606 self.document.reporter.warning('"table row spanning" not supported',
607 base_node=node)
607 base_node=node)
608 if 'morecols' in node:
608 if 'morecols' in node:
609 self.document.reporter.warning(
609 self.document.reporter.warning(
610 '"table cell spanning" not supported', base_node=node)
610 '"table cell spanning" not supported', base_node=node)
611 self.context.append(len(self.body))
611 self.context.append(len(self.body))
612
612
613 def depart_entry(self, node):
613 def depart_entry(self, node):
614 start = self.context.pop()
614 start = self.context.pop()
615 self._active_table.append_cell(self.body[start:])
615 self._active_table.append_cell(self.body[start:])
616 del self.body[start:]
616 del self.body[start:]
617
617
618 def visit_enumerated_list(self, node):
618 def visit_enumerated_list(self, node):
619 self.list_start(node)
619 self.list_start(node)
620
620
621 def depart_enumerated_list(self, node):
621 def depart_enumerated_list(self, node):
622 self.list_end()
622 self.list_end()
623
623
624 def visit_error(self, node):
624 def visit_error(self, node):
625 self.visit_admonition(node, 'error')
625 self.visit_admonition(node, 'error')
626
626
627 depart_error = depart_admonition
627 depart_error = depart_admonition
628
628
629 def visit_field(self, node):
629 def visit_field(self, node):
630 pass
630 pass
631
631
632 def depart_field(self, node):
632 def depart_field(self, node):
633 pass
633 pass
634
634
635 def visit_field_body(self, node):
635 def visit_field_body(self, node):
636 if self._in_docinfo:
636 if self._in_docinfo:
637 name_normalized = self._field_name.lower().replace(" ","_")
637 name_normalized = self._field_name.lower().replace(" ","_")
638 self._docinfo_names[name_normalized] = self._field_name
638 self._docinfo_names[name_normalized] = self._field_name
639 self.visit_docinfo_item(node, name_normalized)
639 self.visit_docinfo_item(node, name_normalized)
640 raise nodes.SkipNode
640 raise nodes.SkipNode
641
641
642 def depart_field_body(self, node):
642 def depart_field_body(self, node):
643 pass
643 pass
644
644
645 def visit_field_list(self, node):
645 def visit_field_list(self, node):
646 self.indent(FIELD_LIST_INDENT)
646 self.indent(FIELD_LIST_INDENT)
647
647
648 def depart_field_list(self, node):
648 def depart_field_list(self, node):
649 self.dedent()
649 self.dedent()
650
650
651 def visit_field_name(self, node):
651 def visit_field_name(self, node):
652 if self._in_docinfo:
652 if self._in_docinfo:
653 self._field_name = node.astext()
653 self._field_name = node.astext()
654 raise nodes.SkipNode
654 raise nodes.SkipNode
655 else:
655 else:
656 self.body.append(self.defs['field_name'][0])
656 self.body.append(self.defs['field_name'][0])
657
657
658 def depart_field_name(self, node):
658 def depart_field_name(self, node):
659 self.body.append(self.defs['field_name'][1])
659 self.body.append(self.defs['field_name'][1])
660
660
661 def visit_figure(self, node):
661 def visit_figure(self, node):
662 self.indent(2.5)
662 self.indent(2.5)
663 self.indent(0)
663 self.indent(0)
664
664
665 def depart_figure(self, node):
665 def depart_figure(self, node):
666 self.dedent()
666 self.dedent()
667 self.dedent()
667 self.dedent()
668
668
669 def visit_footer(self, node):
669 def visit_footer(self, node):
670 self.document.reporter.warning('"footer" not supported',
670 self.document.reporter.warning('"footer" not supported',
671 base_node=node)
671 base_node=node)
672
672
673 def depart_footer(self, node):
673 def depart_footer(self, node):
674 pass
674 pass
675
675
676 def visit_footnote(self, node):
676 def visit_footnote(self, node):
677 num, text = node.astext().split(None, 1)
677 num, text = node.astext().split(None, 1)
678 num = num.strip()
678 num = num.strip()
679 self.body.append('.IP [%s] 5\n' % self.deunicode(num))
679 self.body.append('.IP [%s] 5\n' % self.deunicode(num))
680
680
681 def depart_footnote(self, node):
681 def depart_footnote(self, node):
682 pass
682 pass
683
683
684 def footnote_backrefs(self, node):
684 def footnote_backrefs(self, node):
685 self.document.reporter.warning('"footnote_backrefs" not supported',
685 self.document.reporter.warning('"footnote_backrefs" not supported',
686 base_node=node)
686 base_node=node)
687
687
688 def visit_footnote_reference(self, node):
688 def visit_footnote_reference(self, node):
689 self.body.append('['+self.deunicode(node.astext())+']')
689 self.body.append('['+self.deunicode(node.astext())+']')
690 raise nodes.SkipNode
690 raise nodes.SkipNode
691
691
692 def depart_footnote_reference(self, node):
692 def depart_footnote_reference(self, node):
693 pass
693 pass
694
694
695 def visit_generated(self, node):
695 def visit_generated(self, node):
696 pass
696 pass
697
697
698 def depart_generated(self, node):
698 def depart_generated(self, node):
699 pass
699 pass
700
700
701 def visit_header(self, node):
701 def visit_header(self, node):
702 raise NotImplementedError, node.astext()
702 raise NotImplementedError, node.astext()
703
703
704 def depart_header(self, node):
704 def depart_header(self, node):
705 pass
705 pass
706
706
707 def visit_hint(self, node):
707 def visit_hint(self, node):
708 self.visit_admonition(node, 'hint')
708 self.visit_admonition(node, 'hint')
709
709
710 depart_hint = depart_admonition
710 depart_hint = depart_admonition
711
711
712 def visit_subscript(self, node):
712 def visit_subscript(self, node):
713 self.body.append('\\s-2\\d')
713 self.body.append('\\s-2\\d')
714
714
715 def depart_subscript(self, node):
715 def depart_subscript(self, node):
716 self.body.append('\\u\\s0')
716 self.body.append('\\u\\s0')
717
717
718 def visit_superscript(self, node):
718 def visit_superscript(self, node):
719 self.body.append('\\s-2\\u')
719 self.body.append('\\s-2\\u')
720
720
721 def depart_superscript(self, node):
721 def depart_superscript(self, node):
722 self.body.append('\\d\\s0')
722 self.body.append('\\d\\s0')
723
723
724 def visit_attribution(self, node):
724 def visit_attribution(self, node):
725 self.body.append('\\(em ')
725 self.body.append('\\(em ')
726
726
727 def depart_attribution(self, node):
727 def depart_attribution(self, node):
728 self.body.append('\n')
728 self.body.append('\n')
729
729
730 def visit_image(self, node):
730 def visit_image(self, node):
731 self.document.reporter.warning('"image" not supported',
731 self.document.reporter.warning('"image" not supported',
732 base_node=node)
732 base_node=node)
733 text = []
733 text = []
734 if 'alt' in node.attributes:
734 if 'alt' in node.attributes:
735 text.append(node.attributes['alt'])
735 text.append(node.attributes['alt'])
736 if 'uri' in node.attributes:
736 if 'uri' in node.attributes:
737 text.append(node.attributes['uri'])
737 text.append(node.attributes['uri'])
738 self.body.append('[image: %s]\n' % ('/'.join(text)))
738 self.body.append('[image: %s]\n' % ('/'.join(text)))
739 raise nodes.SkipNode
739 raise nodes.SkipNode
740
740
741 def visit_important(self, node):
741 def visit_important(self, node):
742 self.visit_admonition(node, 'important')
742 self.visit_admonition(node, 'important')
743
743
744 depart_important = depart_admonition
744 depart_important = depart_admonition
745
745
746 def visit_label(self, node):
746 def visit_label(self, node):
747 # footnote and citation
747 # footnote and citation
748 if (isinstance(node.parent, nodes.footnote)
748 if (isinstance(node.parent, nodes.footnote)
749 or isinstance(node.parent, nodes.citation)):
749 or isinstance(node.parent, nodes.citation)):
750 raise nodes.SkipNode
750 raise nodes.SkipNode
751 self.document.reporter.warning('"unsupported "label"',
751 self.document.reporter.warning('"unsupported "label"',
752 base_node=node)
752 base_node=node)
753 self.body.append('[')
753 self.body.append('[')
754
754
755 def depart_label(self, node):
755 def depart_label(self, node):
756 self.body.append(']\n')
756 self.body.append(']\n')
757
757
758 def visit_legend(self, node):
758 def visit_legend(self, node):
759 pass
759 pass
760
760
761 def depart_legend(self, node):
761 def depart_legend(self, node):
762 pass
762 pass
763
763
764 # WHAT should we use .INDENT, .UNINDENT ?
764 # WHAT should we use .INDENT, .UNINDENT ?
765 def visit_line_block(self, node):
765 def visit_line_block(self, node):
766 self._line_block += 1
766 self._line_block += 1
767 if self._line_block == 1:
767 if self._line_block == 1:
768 self.body.append('.sp\n')
768 self.body.append('.sp\n')
769 self.body.append('.nf\n')
769 self.body.append('.nf\n')
770 else:
770 else:
771 self.body.append('.in +2\n')
771 self.body.append('.in +2\n')
772
772
773 def depart_line_block(self, node):
773 def depart_line_block(self, node):
774 self._line_block -= 1
774 self._line_block -= 1
775 if self._line_block == 0:
775 if self._line_block == 0:
776 self.body.append('.fi\n')
776 self.body.append('.fi\n')
777 self.body.append('.sp\n')
777 self.body.append('.sp\n')
778 else:
778 else:
779 self.body.append('.in -2\n')
779 self.body.append('.in -2\n')
780
780
781 def visit_line(self, node):
781 def visit_line(self, node):
782 pass
782 pass
783
783
784 def depart_line(self, node):
784 def depart_line(self, node):
785 self.body.append('\n')
785 self.body.append('\n')
786
786
787 def visit_list_item(self, node):
787 def visit_list_item(self, node):
788 # man 7 man argues to use ".IP" instead of ".TP"
788 # man 7 man argues to use ".IP" instead of ".TP"
789 self.body.append('.IP %s %d\n' % (
789 self.body.append('.IP %s %d\n' % (
790 self._list_char[-1].next(),
790 self._list_char[-1].next(),
791 self._list_char[-1].get_width(),))
791 self._list_char[-1].get_width(),))
792
792
793 def depart_list_item(self, node):
793 def depart_list_item(self, node):
794 pass
794 pass
795
795
796 def visit_literal(self, node):
796 def visit_literal(self, node):
797 self.body.append(self.defs['literal'][0])
797 self.body.append(self.defs['literal'][0])
798
798
799 def depart_literal(self, node):
799 def depart_literal(self, node):
800 self.body.append(self.defs['literal'][1])
800 self.body.append(self.defs['literal'][1])
801
801
802 def visit_literal_block(self, node):
802 def visit_literal_block(self, node):
803 self.body.append(self.defs['literal_block'][0])
803 self.body.append(self.defs['literal_block'][0])
804 self._in_literal = True
804 self._in_literal = True
805
805
806 def depart_literal_block(self, node):
806 def depart_literal_block(self, node):
807 self._in_literal = False
807 self._in_literal = False
808 self.body.append(self.defs['literal_block'][1])
808 self.body.append(self.defs['literal_block'][1])
809
809
810 def visit_meta(self, node):
810 def visit_meta(self, node):
811 raise NotImplementedError, node.astext()
811 raise NotImplementedError, node.astext()
812
812
813 def depart_meta(self, node):
813 def depart_meta(self, node):
814 pass
814 pass
815
815
816 def visit_note(self, node):
816 def visit_note(self, node):
817 self.visit_admonition(node, 'note')
817 self.visit_admonition(node, 'note')
818
818
819 depart_note = depart_admonition
819 depart_note = depart_admonition
820
820
821 def indent(self, by=0.5):
821 def indent(self, by=0.5):
822 # if we are in a section ".SH" there already is a .RS
822 # if we are in a section ".SH" there already is a .RS
823 step = self._indent[-1]
823 step = self._indent[-1]
824 self._indent.append(by)
824 self._indent.append(by)
825 self.body.append(self.defs['indent'][0] % step)
825 self.body.append(self.defs['indent'][0] % step)
826
826
827 def dedent(self):
827 def dedent(self):
828 self._indent.pop()
828 self._indent.pop()
829 self.body.append(self.defs['indent'][1])
829 self.body.append(self.defs['indent'][1])
830
830
831 def visit_option_list(self, node):
831 def visit_option_list(self, node):
832 self.indent(OPTION_LIST_INDENT)
832 self.indent(OPTION_LIST_INDENT)
833
833
834 def depart_option_list(self, node):
834 def depart_option_list(self, node):
835 self.dedent()
835 self.dedent()
836
836
837 def visit_option_list_item(self, node):
837 def visit_option_list_item(self, node):
838 # one item of the list
838 # one item of the list
839 self.body.append(self.defs['option_list_item'][0])
839 self.body.append(self.defs['option_list_item'][0])
840
840
841 def depart_option_list_item(self, node):
841 def depart_option_list_item(self, node):
842 self.body.append(self.defs['option_list_item'][1])
842 self.body.append(self.defs['option_list_item'][1])
843
843
844 def visit_option_group(self, node):
844 def visit_option_group(self, node):
845 # as one option could have several forms it is a group
845 # as one option could have several forms it is a group
846 # options without parameter bold only, .B, -v
846 # options without parameter bold only, .B, -v
847 # options with parameter bold italic, .BI, -f file
847 # options with parameter bold italic, .BI, -f file
848 #
848 #
849 # we do not know if .B or .BI
849 # we do not know if .B or .BI
850 self.context.append('.B') # blind guess
850 self.context.append('.B') # blind guess
851 self.context.append(len(self.body)) # to be able to insert later
851 self.context.append(len(self.body)) # to be able to insert later
852 self.context.append(0) # option counter
852 self.context.append(0) # option counter
853
853
854 def depart_option_group(self, node):
854 def depart_option_group(self, node):
855 self.context.pop() # the counter
855 self.context.pop() # the counter
856 start_position = self.context.pop()
856 start_position = self.context.pop()
857 text = self.body[start_position:]
857 text = self.body[start_position:]
858 del self.body[start_position:]
858 del self.body[start_position:]
859 self.body.append('%s%s\n' % (self.context.pop(), ''.join(text)))
859 self.body.append('%s%s\n' % (self.context.pop(), ''.join(text)))
860
860
861 def visit_option(self, node):
861 def visit_option(self, node):
862 # each form of the option will be presented separately
862 # each form of the option will be presented separately
863 if self.context[-1] > 0:
863 if self.context[-1] > 0:
864 self.body.append(', ')
864 self.body.append(', ')
865 if self.context[-3] == '.BI':
865 if self.context[-3] == '.BI':
866 self.body.append('\\')
866 self.body.append('\\')
867 self.body.append(' ')
867 self.body.append(' ')
868
868
869 def depart_option(self, node):
869 def depart_option(self, node):
870 self.context[-1] += 1
870 self.context[-1] += 1
871
871
872 def visit_option_string(self, node):
872 def visit_option_string(self, node):
873 # do not know if .B or .BI
873 # do not know if .B or .BI
874 pass
874 pass
875
875
876 def depart_option_string(self, node):
876 def depart_option_string(self, node):
877 pass
877 pass
878
878
879 def visit_option_argument(self, node):
879 def visit_option_argument(self, node):
880 self.context[-3] = '.BI' # bold/italic alternate
880 self.context[-3] = '.BI' # bold/italic alternate
881 if node['delimiter'] != ' ':
881 if node['delimiter'] != ' ':
882 self.body.append('\\fB%s ' % node['delimiter'])
882 self.body.append('\\fB%s ' % node['delimiter'])
883 elif self.body[len(self.body)-1].endswith('='):
883 elif self.body[len(self.body) - 1].endswith('='):
884 # a blank only means no blank in output, just changing font
884 # a blank only means no blank in output, just changing font
885 self.body.append(' ')
885 self.body.append(' ')
886 else:
886 else:
887 # blank backslash blank, switch font then a blank
887 # blank backslash blank, switch font then a blank
888 self.body.append(' \\ ')
888 self.body.append(' \\ ')
889
889
890 def depart_option_argument(self, node):
890 def depart_option_argument(self, node):
891 pass
891 pass
892
892
893 def visit_organization(self, node):
893 def visit_organization(self, node):
894 self.visit_docinfo_item(node, 'organization')
894 self.visit_docinfo_item(node, 'organization')
895
895
896 def depart_organization(self, node):
896 def depart_organization(self, node):
897 pass
897 pass
898
898
899 def visit_paragraph(self, node):
899 def visit_paragraph(self, node):
900 # ``.PP`` : Start standard indented paragraph.
900 # ``.PP`` : Start standard indented paragraph.
901 # ``.LP`` : Start block paragraph, all except the first.
901 # ``.LP`` : Start block paragraph, all except the first.
902 # ``.P [type]`` : Start paragraph type.
902 # ``.P [type]`` : Start paragraph type.
903 # NOTE don't use paragraph starts because they reset indentation.
903 # NOTE don't use paragraph starts because they reset indentation.
904 # ``.sp`` is only vertical space
904 # ``.sp`` is only vertical space
905 self.ensure_eol()
905 self.ensure_eol()
906 self.body.append('.sp\n')
906 self.body.append('.sp\n')
907
907
908 def depart_paragraph(self, node):
908 def depart_paragraph(self, node):
909 self.body.append('\n')
909 self.body.append('\n')
910
910
911 def visit_problematic(self, node):
911 def visit_problematic(self, node):
912 self.body.append(self.defs['problematic'][0])
912 self.body.append(self.defs['problematic'][0])
913
913
914 def depart_problematic(self, node):
914 def depart_problematic(self, node):
915 self.body.append(self.defs['problematic'][1])
915 self.body.append(self.defs['problematic'][1])
916
916
917 def visit_raw(self, node):
917 def visit_raw(self, node):
918 if node.get('format') == 'manpage':
918 if node.get('format') == 'manpage':
919 self.body.append(node.astext() + "\n")
919 self.body.append(node.astext() + "\n")
920 # Keep non-manpage raw text out of output:
920 # Keep non-manpage raw text out of output:
921 raise nodes.SkipNode
921 raise nodes.SkipNode
922
922
923 def visit_reference(self, node):
923 def visit_reference(self, node):
924 """E.g. link or email address."""
924 """E.g. link or email address."""
925 self.body.append(self.defs['reference'][0])
925 self.body.append(self.defs['reference'][0])
926
926
927 def depart_reference(self, node):
927 def depart_reference(self, node):
928 self.body.append(self.defs['reference'][1])
928 self.body.append(self.defs['reference'][1])
929
929
930 def visit_revision(self, node):
930 def visit_revision(self, node):
931 self.visit_docinfo_item(node, 'revision')
931 self.visit_docinfo_item(node, 'revision')
932
932
933 depart_revision = depart_docinfo_item
933 depart_revision = depart_docinfo_item
934
934
935 def visit_row(self, node):
935 def visit_row(self, node):
936 self._active_table.new_row()
936 self._active_table.new_row()
937
937
938 def depart_row(self, node):
938 def depart_row(self, node):
939 pass
939 pass
940
940
941 def visit_section(self, node):
941 def visit_section(self, node):
942 self.section_level += 1
942 self.section_level += 1
943
943
944 def depart_section(self, node):
944 def depart_section(self, node):
945 self.section_level -= 1
945 self.section_level -= 1
946
946
947 def visit_status(self, node):
947 def visit_status(self, node):
948 self.visit_docinfo_item(node, 'status')
948 self.visit_docinfo_item(node, 'status')
949
949
950 depart_status = depart_docinfo_item
950 depart_status = depart_docinfo_item
951
951
952 def visit_strong(self, node):
952 def visit_strong(self, node):
953 self.body.append(self.defs['strong'][0])
953 self.body.append(self.defs['strong'][0])
954
954
955 def depart_strong(self, node):
955 def depart_strong(self, node):
956 self.body.append(self.defs['strong'][1])
956 self.body.append(self.defs['strong'][1])
957
957
958 def visit_substitution_definition(self, node):
958 def visit_substitution_definition(self, node):
959 """Internal only."""
959 """Internal only."""
960 raise nodes.SkipNode
960 raise nodes.SkipNode
961
961
962 def visit_substitution_reference(self, node):
962 def visit_substitution_reference(self, node):
963 self.document.reporter.warning('"substitution_reference" not supported',
963 self.document.reporter.warning('"substitution_reference" not supported',
964 base_node=node)
964 base_node=node)
965
965
966 def visit_subtitle(self, node):
966 def visit_subtitle(self, node):
967 if isinstance(node.parent, nodes.sidebar):
967 if isinstance(node.parent, nodes.sidebar):
968 self.body.append(self.defs['strong'][0])
968 self.body.append(self.defs['strong'][0])
969 elif isinstance(node.parent, nodes.document):
969 elif isinstance(node.parent, nodes.document):
970 self.visit_docinfo_item(node, 'subtitle')
970 self.visit_docinfo_item(node, 'subtitle')
971 elif isinstance(node.parent, nodes.section):
971 elif isinstance(node.parent, nodes.section):
972 self.body.append(self.defs['strong'][0])
972 self.body.append(self.defs['strong'][0])
973
973
974 def depart_subtitle(self, node):
974 def depart_subtitle(self, node):
975 # document subtitle calls SkipNode
975 # document subtitle calls SkipNode
976 self.body.append(self.defs['strong'][1]+'\n.PP\n')
976 self.body.append(self.defs['strong'][1]+'\n.PP\n')
977
977
978 def visit_system_message(self, node):
978 def visit_system_message(self, node):
979 # TODO add report_level
979 # TODO add report_level
980 #if node['level'] < self.document.reporter['writer'].report_level:
980 #if node['level'] < self.document.reporter['writer'].report_level:
981 # Level is too low to display:
981 # Level is too low to display:
982 # raise nodes.SkipNode
982 # raise nodes.SkipNode
983 attr = {}
983 attr = {}
984 backref_text = ''
984 backref_text = ''
985 if node.hasattr('id'):
985 if node.hasattr('id'):
986 attr['name'] = node['id']
986 attr['name'] = node['id']
987 if node.hasattr('line'):
987 if node.hasattr('line'):
988 line = ', line %s' % node['line']
988 line = ', line %s' % node['line']
989 else:
989 else:
990 line = ''
990 line = ''
991 self.body.append('.IP "System Message: %s/%s (%s:%s)"\n'
991 self.body.append('.IP "System Message: %s/%s (%s:%s)"\n'
992 % (node['type'], node['level'], node['source'], line))
992 % (node['type'], node['level'], node['source'], line))
993
993
994 def depart_system_message(self, node):
994 def depart_system_message(self, node):
995 pass
995 pass
996
996
997 def visit_table(self, node):
997 def visit_table(self, node):
998 self._active_table = Table()
998 self._active_table = Table()
999
999
1000 def depart_table(self, node):
1000 def depart_table(self, node):
1001 self.ensure_eol()
1001 self.ensure_eol()
1002 self.body.extend(self._active_table.as_list())
1002 self.body.extend(self._active_table.as_list())
1003 self._active_table = None
1003 self._active_table = None
1004
1004
1005 def visit_target(self, node):
1005 def visit_target(self, node):
1006 # targets are in-document hyper targets, without any use for man-pages.
1006 # targets are in-document hyper targets, without any use for man-pages.
1007 raise nodes.SkipNode
1007 raise nodes.SkipNode
1008
1008
1009 def visit_tbody(self, node):
1009 def visit_tbody(self, node):
1010 pass
1010 pass
1011
1011
1012 def depart_tbody(self, node):
1012 def depart_tbody(self, node):
1013 pass
1013 pass
1014
1014
1015 def visit_term(self, node):
1015 def visit_term(self, node):
1016 self.body.append(self.defs['term'][0])
1016 self.body.append(self.defs['term'][0])
1017
1017
1018 def depart_term(self, node):
1018 def depart_term(self, node):
1019 self.body.append(self.defs['term'][1])
1019 self.body.append(self.defs['term'][1])
1020
1020
1021 def visit_tgroup(self, node):
1021 def visit_tgroup(self, node):
1022 pass
1022 pass
1023
1023
1024 def depart_tgroup(self, node):
1024 def depart_tgroup(self, node):
1025 pass
1025 pass
1026
1026
1027 def visit_thead(self, node):
1027 def visit_thead(self, node):
1028 # MAYBE double line '='
1028 # MAYBE double line '='
1029 pass
1029 pass
1030
1030
1031 def depart_thead(self, node):
1031 def depart_thead(self, node):
1032 # MAYBE double line '='
1032 # MAYBE double line '='
1033 pass
1033 pass
1034
1034
1035 def visit_tip(self, node):
1035 def visit_tip(self, node):
1036 self.visit_admonition(node, 'tip')
1036 self.visit_admonition(node, 'tip')
1037
1037
1038 depart_tip = depart_admonition
1038 depart_tip = depart_admonition
1039
1039
1040 def visit_title(self, node):
1040 def visit_title(self, node):
1041 if isinstance(node.parent, nodes.topic):
1041 if isinstance(node.parent, nodes.topic):
1042 self.body.append(self.defs['topic-title'][0])
1042 self.body.append(self.defs['topic-title'][0])
1043 elif isinstance(node.parent, nodes.sidebar):
1043 elif isinstance(node.parent, nodes.sidebar):
1044 self.body.append(self.defs['sidebar-title'][0])
1044 self.body.append(self.defs['sidebar-title'][0])
1045 elif isinstance(node.parent, nodes.admonition):
1045 elif isinstance(node.parent, nodes.admonition):
1046 self.body.append('.IP "')
1046 self.body.append('.IP "')
1047 elif self.section_level == 0:
1047 elif self.section_level == 0:
1048 self._docinfo['title'] = node.astext()
1048 self._docinfo['title'] = node.astext()
1049 # document title for .TH
1049 # document title for .TH
1050 self._docinfo['title_upper'] = node.astext().upper()
1050 self._docinfo['title_upper'] = node.astext().upper()
1051 raise nodes.SkipNode
1051 raise nodes.SkipNode
1052 elif self.section_level == 1:
1052 elif self.section_level == 1:
1053 self.body.append('.SH ')
1053 self.body.append('.SH ')
1054 for n in node.traverse(nodes.Text):
1054 for n in node.traverse(nodes.Text):
1055 n.parent.replace(n, nodes.Text(n.astext().upper()))
1055 n.parent.replace(n, nodes.Text(n.astext().upper()))
1056 else:
1056 else:
1057 self.body.append('.SS ')
1057 self.body.append('.SS ')
1058
1058
1059 def depart_title(self, node):
1059 def depart_title(self, node):
1060 if isinstance(node.parent, nodes.admonition):
1060 if isinstance(node.parent, nodes.admonition):
1061 self.body.append('"')
1061 self.body.append('"')
1062 self.body.append('\n')
1062 self.body.append('\n')
1063
1063
1064 def visit_title_reference(self, node):
1064 def visit_title_reference(self, node):
1065 """inline citation reference"""
1065 """inline citation reference"""
1066 self.body.append(self.defs['title_reference'][0])
1066 self.body.append(self.defs['title_reference'][0])
1067
1067
1068 def depart_title_reference(self, node):
1068 def depart_title_reference(self, node):
1069 self.body.append(self.defs['title_reference'][1])
1069 self.body.append(self.defs['title_reference'][1])
1070
1070
1071 def visit_topic(self, node):
1071 def visit_topic(self, node):
1072 pass
1072 pass
1073
1073
1074 def depart_topic(self, node):
1074 def depart_topic(self, node):
1075 pass
1075 pass
1076
1076
1077 def visit_sidebar(self, node):
1077 def visit_sidebar(self, node):
1078 pass
1078 pass
1079
1079
1080 def depart_sidebar(self, node):
1080 def depart_sidebar(self, node):
1081 pass
1081 pass
1082
1082
1083 def visit_rubric(self, node):
1083 def visit_rubric(self, node):
1084 pass
1084 pass
1085
1085
1086 def depart_rubric(self, node):
1086 def depart_rubric(self, node):
1087 pass
1087 pass
1088
1088
1089 def visit_transition(self, node):
1089 def visit_transition(self, node):
1090 # .PP Begin a new paragraph and reset prevailing indent.
1090 # .PP Begin a new paragraph and reset prevailing indent.
1091 # .sp N leaves N lines of blank space.
1091 # .sp N leaves N lines of blank space.
1092 # .ce centers the next line
1092 # .ce centers the next line
1093 self.body.append('\n.sp\n.ce\n----\n')
1093 self.body.append('\n.sp\n.ce\n----\n')
1094
1094
1095 def depart_transition(self, node):
1095 def depart_transition(self, node):
1096 self.body.append('\n.ce 0\n.sp\n')
1096 self.body.append('\n.ce 0\n.sp\n')
1097
1097
1098 def visit_version(self, node):
1098 def visit_version(self, node):
1099 self.visit_docinfo_item(node, 'version')
1099 self.visit_docinfo_item(node, 'version')
1100
1100
1101 def visit_warning(self, node):
1101 def visit_warning(self, node):
1102 self.visit_admonition(node, 'warning')
1102 self.visit_admonition(node, 'warning')
1103
1103
1104 depart_warning = depart_admonition
1104 depart_warning = depart_admonition
1105
1105
1106 def unimplemented_visit(self, node):
1106 def unimplemented_visit(self, node):
1107 raise NotImplementedError('visiting unimplemented node type: %s'
1107 raise NotImplementedError('visiting unimplemented node type: %s'
1108 % node.__class__.__name__)
1108 % node.__class__.__name__)
1109
1109
1110 # vim: set fileencoding=utf-8 et ts=4 ai :
1110 # vim: set fileencoding=utf-8 et ts=4 ai :
@@ -1,61 +1,61 b''
1 # highlight.py - highlight extension implementation file
1 # highlight.py - highlight extension implementation file
2 #
2 #
3 # Copyright 2007-2009 Adam Hupp <adam@hupp.org> and others
3 # Copyright 2007-2009 Adam Hupp <adam@hupp.org> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 #
7 #
8 # The original module was split in an interface and an implementation
8 # The original module was split in an interface and an implementation
9 # file to defer pygments loading and speedup extension setup.
9 # file to defer pygments loading and speedup extension setup.
10
10
11 from mercurial import demandimport
11 from mercurial import demandimport
12 demandimport.ignore.extend(['pkgutil', 'pkg_resources', '__main__'])
12 demandimport.ignore.extend(['pkgutil', 'pkg_resources', '__main__'])
13 from mercurial import util, encoding
13 from mercurial import util, encoding
14
14
15 from pygments import highlight
15 from pygments import highlight
16 from pygments.util import ClassNotFound
16 from pygments.util import ClassNotFound
17 from pygments.lexers import guess_lexer, guess_lexer_for_filename, TextLexer
17 from pygments.lexers import guess_lexer, guess_lexer_for_filename, TextLexer
18 from pygments.formatters import HtmlFormatter
18 from pygments.formatters import HtmlFormatter
19
19
20 SYNTAX_CSS = ('\n<link rel="stylesheet" href="{url}highlightcss" '
20 SYNTAX_CSS = ('\n<link rel="stylesheet" href="{url}highlightcss" '
21 'type="text/css" />')
21 'type="text/css" />')
22
22
23 def pygmentize(field, fctx, style, tmpl):
23 def pygmentize(field, fctx, style, tmpl):
24
24
25 # append a <link ...> to the syntax highlighting css
25 # append a <link ...> to the syntax highlighting css
26 old_header = tmpl.load('header')
26 old_header = tmpl.load('header')
27 if SYNTAX_CSS not in old_header:
27 if SYNTAX_CSS not in old_header:
28 new_header = old_header + SYNTAX_CSS
28 new_header = old_header + SYNTAX_CSS
29 tmpl.cache['header'] = new_header
29 tmpl.cache['header'] = new_header
30
30
31 text = fctx.data()
31 text = fctx.data()
32 if util.binary(text):
32 if util.binary(text):
33 return
33 return
34
34
35 # Pygments is best used with Unicode strings:
35 # Pygments is best used with Unicode strings:
36 # <http://pygments.org/docs/unicode/>
36 # <http://pygments.org/docs/unicode/>
37 text = text.decode(encoding.encoding, 'replace')
37 text = text.decode(encoding.encoding, 'replace')
38
38
39 # To get multi-line strings right, we can't format line-by-line
39 # To get multi-line strings right, we can't format line-by-line
40 try:
40 try:
41 lexer = guess_lexer_for_filename(fctx.path(), text[:1024])
41 lexer = guess_lexer_for_filename(fctx.path(), text[:1024])
42 except (ClassNotFound, ValueError):
42 except (ClassNotFound, ValueError):
43 try:
43 try:
44 lexer = guess_lexer(text[:1024])
44 lexer = guess_lexer(text[:1024])
45 except (ClassNotFound, ValueError):
45 except (ClassNotFound, ValueError):
46 lexer = TextLexer()
46 lexer = TextLexer()
47
47
48 formatter = HtmlFormatter(style=style)
48 formatter = HtmlFormatter(style=style)
49
49
50 colorized = highlight(text, lexer, formatter)
50 colorized = highlight(text, lexer, formatter)
51 # strip wrapping div
51 # strip wrapping div
52 colorized = colorized[:colorized.find('\n</pre>')]
52 colorized = colorized[:colorized.find('\n</pre>')]
53 colorized = colorized[colorized.find('<pre>')+5:]
53 colorized = colorized[colorized.find('<pre>') + 5:]
54 coloriter = (s.encode(encoding.encoding, 'replace')
54 coloriter = (s.encode(encoding.encoding, 'replace')
55 for s in colorized.splitlines())
55 for s in colorized.splitlines())
56
56
57 tmpl.filters['colorize'] = lambda x: coloriter.next()
57 tmpl.filters['colorize'] = lambda x: coloriter.next()
58
58
59 oldl = tmpl.cache[field]
59 oldl = tmpl.cache[field]
60 newl = oldl.replace('line|escape', 'line|colorize')
60 newl = oldl.replace('line|escape', 'line|colorize')
61 tmpl.cache[field] = newl
61 tmpl.cache[field] = newl
@@ -1,335 +1,335 b''
1 # watcher.py - high-level interfaces to the Linux inotify subsystem
1 # watcher.py - high-level interfaces to the Linux inotify subsystem
2
2
3 # Copyright 2006 Bryan O'Sullivan <bos@serpentine.com>
3 # Copyright 2006 Bryan O'Sullivan <bos@serpentine.com>
4
4
5 # This library is free software; you can redistribute it and/or modify
5 # This library is free software; you can redistribute it and/or modify
6 # it under the terms of version 2.1 of the GNU Lesser General Public
6 # it under the terms of version 2.1 of the GNU Lesser General Public
7 # License, or any later version.
7 # License, or any later version.
8
8
9 '''High-level interfaces to the Linux inotify subsystem.
9 '''High-level interfaces to the Linux inotify subsystem.
10
10
11 The inotify subsystem provides an efficient mechanism for file status
11 The inotify subsystem provides an efficient mechanism for file status
12 monitoring and change notification.
12 monitoring and change notification.
13
13
14 The watcher class hides the low-level details of the inotify
14 The watcher class hides the low-level details of the inotify
15 interface, and provides a Pythonic wrapper around it. It generates
15 interface, and provides a Pythonic wrapper around it. It generates
16 events that provide somewhat more information than raw inotify makes
16 events that provide somewhat more information than raw inotify makes
17 available.
17 available.
18
18
19 The autowatcher class is more useful, as it automatically watches
19 The autowatcher class is more useful, as it automatically watches
20 newly-created directories on your behalf.'''
20 newly-created directories on your behalf.'''
21
21
22 __author__ = "Bryan O'Sullivan <bos@serpentine.com>"
22 __author__ = "Bryan O'Sullivan <bos@serpentine.com>"
23
23
24 import _inotify as inotify
24 import _inotify as inotify
25 import array
25 import array
26 import errno
26 import errno
27 import fcntl
27 import fcntl
28 import os
28 import os
29 import termios
29 import termios
30
30
31
31
32 class event(object):
32 class event(object):
33 '''Derived inotify event class.
33 '''Derived inotify event class.
34
34
35 The following fields are available:
35 The following fields are available:
36
36
37 mask: event mask, indicating what kind of event this is
37 mask: event mask, indicating what kind of event this is
38
38
39 cookie: rename cookie, if a rename-related event
39 cookie: rename cookie, if a rename-related event
40
40
41 path: path of the directory in which the event occurred
41 path: path of the directory in which the event occurred
42
42
43 name: name of the directory entry to which the event occurred
43 name: name of the directory entry to which the event occurred
44 (may be None if the event happened to a watched directory)
44 (may be None if the event happened to a watched directory)
45
45
46 fullpath: complete path at which the event occurred
46 fullpath: complete path at which the event occurred
47
47
48 wd: watch descriptor that triggered this event'''
48 wd: watch descriptor that triggered this event'''
49
49
50 __slots__ = (
50 __slots__ = (
51 'cookie',
51 'cookie',
52 'fullpath',
52 'fullpath',
53 'mask',
53 'mask',
54 'name',
54 'name',
55 'path',
55 'path',
56 'raw',
56 'raw',
57 'wd',
57 'wd',
58 )
58 )
59
59
60 def __init__(self, raw, path):
60 def __init__(self, raw, path):
61 self.path = path
61 self.path = path
62 self.raw = raw
62 self.raw = raw
63 if raw.name:
63 if raw.name:
64 self.fullpath = path + '/' + raw.name
64 self.fullpath = path + '/' + raw.name
65 else:
65 else:
66 self.fullpath = path
66 self.fullpath = path
67
67
68 self.wd = raw.wd
68 self.wd = raw.wd
69 self.mask = raw.mask
69 self.mask = raw.mask
70 self.cookie = raw.cookie
70 self.cookie = raw.cookie
71 self.name = raw.name
71 self.name = raw.name
72
72
73 def __repr__(self):
73 def __repr__(self):
74 r = repr(self.raw)
74 r = repr(self.raw)
75 return 'event(path=' + repr(self.path) + ', ' + r[r.find('(')+1:]
75 return 'event(path=' + repr(self.path) + ', ' + r[r.find('(') + 1:]
76
76
77
77
78 _event_props = {
78 _event_props = {
79 'access': 'File was accessed',
79 'access': 'File was accessed',
80 'modify': 'File was modified',
80 'modify': 'File was modified',
81 'attrib': 'Attribute of a directory entry was changed',
81 'attrib': 'Attribute of a directory entry was changed',
82 'close_write': 'File was closed after being written to',
82 'close_write': 'File was closed after being written to',
83 'close_nowrite': 'File was closed without being written to',
83 'close_nowrite': 'File was closed without being written to',
84 'open': 'File was opened',
84 'open': 'File was opened',
85 'moved_from': 'Directory entry was renamed from this name',
85 'moved_from': 'Directory entry was renamed from this name',
86 'moved_to': 'Directory entry was renamed to this name',
86 'moved_to': 'Directory entry was renamed to this name',
87 'create': 'Directory entry was created',
87 'create': 'Directory entry was created',
88 'delete': 'Directory entry was deleted',
88 'delete': 'Directory entry was deleted',
89 'delete_self': 'The watched directory entry was deleted',
89 'delete_self': 'The watched directory entry was deleted',
90 'move_self': 'The watched directory entry was renamed',
90 'move_self': 'The watched directory entry was renamed',
91 'unmount': 'Directory was unmounted, and can no longer be watched',
91 'unmount': 'Directory was unmounted, and can no longer be watched',
92 'q_overflow': 'Kernel dropped events due to queue overflow',
92 'q_overflow': 'Kernel dropped events due to queue overflow',
93 'ignored': 'Directory entry is no longer being watched',
93 'ignored': 'Directory entry is no longer being watched',
94 'isdir': 'Event occurred on a directory',
94 'isdir': 'Event occurred on a directory',
95 }
95 }
96
96
97 for k, v in _event_props.iteritems():
97 for k, v in _event_props.iteritems():
98 mask = getattr(inotify, 'IN_' + k.upper())
98 mask = getattr(inotify, 'IN_' + k.upper())
99 def getter(self):
99 def getter(self):
100 return self.mask & mask
100 return self.mask & mask
101 getter.__name__ = k
101 getter.__name__ = k
102 getter.__doc__ = v
102 getter.__doc__ = v
103 setattr(event, k, property(getter, doc=v))
103 setattr(event, k, property(getter, doc=v))
104
104
105 del _event_props
105 del _event_props
106
106
107
107
108 class watcher(object):
108 class watcher(object):
109 '''Provide a Pythonic interface to the low-level inotify API.
109 '''Provide a Pythonic interface to the low-level inotify API.
110
110
111 Also adds derived information to each event that is not available
111 Also adds derived information to each event that is not available
112 through the normal inotify API, such as directory name.'''
112 through the normal inotify API, such as directory name.'''
113
113
114 __slots__ = (
114 __slots__ = (
115 'fd',
115 'fd',
116 '_paths',
116 '_paths',
117 '_wds',
117 '_wds',
118 )
118 )
119
119
120 def __init__(self):
120 def __init__(self):
121 '''Create a new inotify instance.'''
121 '''Create a new inotify instance.'''
122
122
123 self.fd = inotify.init()
123 self.fd = inotify.init()
124 self._paths = {}
124 self._paths = {}
125 self._wds = {}
125 self._wds = {}
126
126
127 def fileno(self):
127 def fileno(self):
128 '''Return the file descriptor this watcher uses.
128 '''Return the file descriptor this watcher uses.
129
129
130 Useful for passing to select and poll.'''
130 Useful for passing to select and poll.'''
131
131
132 return self.fd
132 return self.fd
133
133
134 def add(self, path, mask):
134 def add(self, path, mask):
135 '''Add or modify a watch.
135 '''Add or modify a watch.
136
136
137 Return the watch descriptor added or modified.'''
137 Return the watch descriptor added or modified.'''
138
138
139 path = os.path.normpath(path)
139 path = os.path.normpath(path)
140 wd = inotify.add_watch(self.fd, path, mask)
140 wd = inotify.add_watch(self.fd, path, mask)
141 self._paths[path] = wd, mask
141 self._paths[path] = wd, mask
142 self._wds[wd] = path, mask
142 self._wds[wd] = path, mask
143 return wd
143 return wd
144
144
145 def remove(self, wd):
145 def remove(self, wd):
146 '''Remove the given watch.'''
146 '''Remove the given watch.'''
147
147
148 inotify.remove_watch(self.fd, wd)
148 inotify.remove_watch(self.fd, wd)
149 self._remove(wd)
149 self._remove(wd)
150
150
151 def _remove(self, wd):
151 def _remove(self, wd):
152 path_mask = self._wds.pop(wd, None)
152 path_mask = self._wds.pop(wd, None)
153 if path_mask is not None:
153 if path_mask is not None:
154 self._paths.pop(path_mask[0])
154 self._paths.pop(path_mask[0])
155
155
156 def path(self, path):
156 def path(self, path):
157 '''Return a (watch descriptor, event mask) pair for the given path.
157 '''Return a (watch descriptor, event mask) pair for the given path.
158
158
159 If the path is not being watched, return None.'''
159 If the path is not being watched, return None.'''
160
160
161 return self._paths.get(path)
161 return self._paths.get(path)
162
162
163 def wd(self, wd):
163 def wd(self, wd):
164 '''Return a (path, event mask) pair for the given watch descriptor.
164 '''Return a (path, event mask) pair for the given watch descriptor.
165
165
166 If the watch descriptor is not valid or not associated with
166 If the watch descriptor is not valid or not associated with
167 this watcher, return None.'''
167 this watcher, return None.'''
168
168
169 return self._wds.get(wd)
169 return self._wds.get(wd)
170
170
171 def read(self, bufsize=None):
171 def read(self, bufsize=None):
172 '''Read a list of queued inotify events.
172 '''Read a list of queued inotify events.
173
173
174 If bufsize is zero, only return those events that can be read
174 If bufsize is zero, only return those events that can be read
175 immediately without blocking. Otherwise, block until events are
175 immediately without blocking. Otherwise, block until events are
176 available.'''
176 available.'''
177
177
178 events = []
178 events = []
179 for evt in inotify.read(self.fd, bufsize):
179 for evt in inotify.read(self.fd, bufsize):
180 events.append(event(evt, self._wds[evt.wd][0]))
180 events.append(event(evt, self._wds[evt.wd][0]))
181 if evt.mask & inotify.IN_IGNORED:
181 if evt.mask & inotify.IN_IGNORED:
182 self._remove(evt.wd)
182 self._remove(evt.wd)
183 elif evt.mask & inotify.IN_UNMOUNT:
183 elif evt.mask & inotify.IN_UNMOUNT:
184 self.close()
184 self.close()
185 return events
185 return events
186
186
187 def close(self):
187 def close(self):
188 '''Shut down this watcher.
188 '''Shut down this watcher.
189
189
190 All subsequent method calls are likely to raise exceptions.'''
190 All subsequent method calls are likely to raise exceptions.'''
191
191
192 os.close(self.fd)
192 os.close(self.fd)
193 self.fd = None
193 self.fd = None
194 self._paths = None
194 self._paths = None
195 self._wds = None
195 self._wds = None
196
196
197 def __len__(self):
197 def __len__(self):
198 '''Return the number of active watches.'''
198 '''Return the number of active watches.'''
199
199
200 return len(self._paths)
200 return len(self._paths)
201
201
202 def __iter__(self):
202 def __iter__(self):
203 '''Yield a (path, watch descriptor, event mask) tuple for each
203 '''Yield a (path, watch descriptor, event mask) tuple for each
204 entry being watched.'''
204 entry being watched.'''
205
205
206 for path, (wd, mask) in self._paths.iteritems():
206 for path, (wd, mask) in self._paths.iteritems():
207 yield path, wd, mask
207 yield path, wd, mask
208
208
209 def __del__(self):
209 def __del__(self):
210 if self.fd is not None:
210 if self.fd is not None:
211 os.close(self.fd)
211 os.close(self.fd)
212
212
213 ignored_errors = [errno.ENOENT, errno.EPERM, errno.ENOTDIR]
213 ignored_errors = [errno.ENOENT, errno.EPERM, errno.ENOTDIR]
214
214
215 def add_iter(self, path, mask, onerror=None):
215 def add_iter(self, path, mask, onerror=None):
216 '''Add or modify watches over path and its subdirectories.
216 '''Add or modify watches over path and its subdirectories.
217
217
218 Yield each added or modified watch descriptor.
218 Yield each added or modified watch descriptor.
219
219
220 To ensure that this method runs to completion, you must
220 To ensure that this method runs to completion, you must
221 iterate over all of its results, even if you do not care what
221 iterate over all of its results, even if you do not care what
222 they are. For example:
222 they are. For example:
223
223
224 for wd in w.add_iter(path, mask):
224 for wd in w.add_iter(path, mask):
225 pass
225 pass
226
226
227 By default, errors are ignored. If optional arg "onerror" is
227 By default, errors are ignored. If optional arg "onerror" is
228 specified, it should be a function; it will be called with one
228 specified, it should be a function; it will be called with one
229 argument, an OSError instance. It can report the error to
229 argument, an OSError instance. It can report the error to
230 continue with the walk, or raise the exception to abort the
230 continue with the walk, or raise the exception to abort the
231 walk.'''
231 walk.'''
232
232
233 # Add the IN_ONLYDIR flag to the event mask, to avoid a possible
233 # Add the IN_ONLYDIR flag to the event mask, to avoid a possible
234 # race when adding a subdirectory. In the time between the
234 # race when adding a subdirectory. In the time between the
235 # event being queued by the kernel and us processing it, the
235 # event being queued by the kernel and us processing it, the
236 # directory may have been deleted, or replaced with a different
236 # directory may have been deleted, or replaced with a different
237 # kind of entry with the same name.
237 # kind of entry with the same name.
238
238
239 submask = mask | inotify.IN_ONLYDIR
239 submask = mask | inotify.IN_ONLYDIR
240
240
241 try:
241 try:
242 yield self.add(path, mask)
242 yield self.add(path, mask)
243 except OSError, err:
243 except OSError, err:
244 if onerror and err.errno not in self.ignored_errors:
244 if onerror and err.errno not in self.ignored_errors:
245 onerror(err)
245 onerror(err)
246 for root, dirs, names in os.walk(path, topdown=False, onerror=onerror):
246 for root, dirs, names in os.walk(path, topdown=False, onerror=onerror):
247 for d in dirs:
247 for d in dirs:
248 try:
248 try:
249 yield self.add(root + '/' + d, submask)
249 yield self.add(root + '/' + d, submask)
250 except OSError, err:
250 except OSError, err:
251 if onerror and err.errno not in self.ignored_errors:
251 if onerror and err.errno not in self.ignored_errors:
252 onerror(err)
252 onerror(err)
253
253
254 def add_all(self, path, mask, onerror=None):
254 def add_all(self, path, mask, onerror=None):
255 '''Add or modify watches over path and its subdirectories.
255 '''Add or modify watches over path and its subdirectories.
256
256
257 Return a list of added or modified watch descriptors.
257 Return a list of added or modified watch descriptors.
258
258
259 By default, errors are ignored. If optional arg "onerror" is
259 By default, errors are ignored. If optional arg "onerror" is
260 specified, it should be a function; it will be called with one
260 specified, it should be a function; it will be called with one
261 argument, an OSError instance. It can report the error to
261 argument, an OSError instance. It can report the error to
262 continue with the walk, or raise the exception to abort the
262 continue with the walk, or raise the exception to abort the
263 walk.'''
263 walk.'''
264
264
265 return [w for w in self.add_iter(path, mask, onerror)]
265 return [w for w in self.add_iter(path, mask, onerror)]
266
266
267
267
268 class autowatcher(watcher):
268 class autowatcher(watcher):
269 '''watcher class that automatically watches newly created directories.'''
269 '''watcher class that automatically watches newly created directories.'''
270
270
271 __slots__ = (
271 __slots__ = (
272 'addfilter',
272 'addfilter',
273 )
273 )
274
274
275 def __init__(self, addfilter=None):
275 def __init__(self, addfilter=None):
276 '''Create a new inotify instance.
276 '''Create a new inotify instance.
277
277
278 This instance will automatically watch newly created
278 This instance will automatically watch newly created
279 directories.
279 directories.
280
280
281 If the optional addfilter parameter is not None, it must be a
281 If the optional addfilter parameter is not None, it must be a
282 callable that takes one parameter. It will be called each time
282 callable that takes one parameter. It will be called each time
283 a directory is about to be automatically watched. If it returns
283 a directory is about to be automatically watched. If it returns
284 True, the directory will be watched if it still exists,
284 True, the directory will be watched if it still exists,
285 otherwise, it will be skipped.'''
285 otherwise, it will be skipped.'''
286
286
287 super(autowatcher, self).__init__()
287 super(autowatcher, self).__init__()
288 self.addfilter = addfilter
288 self.addfilter = addfilter
289
289
290 _dir_create_mask = inotify.IN_ISDIR | inotify.IN_CREATE
290 _dir_create_mask = inotify.IN_ISDIR | inotify.IN_CREATE
291
291
292 def read(self, bufsize=None):
292 def read(self, bufsize=None):
293 events = super(autowatcher, self).read(bufsize)
293 events = super(autowatcher, self).read(bufsize)
294 for evt in events:
294 for evt in events:
295 if evt.mask & self._dir_create_mask == self._dir_create_mask:
295 if evt.mask & self._dir_create_mask == self._dir_create_mask:
296 if self.addfilter is None or self.addfilter(evt):
296 if self.addfilter is None or self.addfilter(evt):
297 parentmask = self._wds[evt.wd][1]
297 parentmask = self._wds[evt.wd][1]
298 # See note about race avoidance via IN_ONLYDIR above.
298 # See note about race avoidance via IN_ONLYDIR above.
299 mask = parentmask | inotify.IN_ONLYDIR
299 mask = parentmask | inotify.IN_ONLYDIR
300 try:
300 try:
301 self.add_all(evt.fullpath, mask)
301 self.add_all(evt.fullpath, mask)
302 except OSError, err:
302 except OSError, err:
303 if err.errno not in self.ignored_errors:
303 if err.errno not in self.ignored_errors:
304 raise
304 raise
305 return events
305 return events
306
306
307
307
308 class threshold(object):
308 class threshold(object):
309 '''Class that indicates whether a file descriptor has reached a
309 '''Class that indicates whether a file descriptor has reached a
310 threshold of readable bytes available.
310 threshold of readable bytes available.
311
311
312 This class is not thread-safe.'''
312 This class is not thread-safe.'''
313
313
314 __slots__ = (
314 __slots__ = (
315 'fd',
315 'fd',
316 'threshold',
316 'threshold',
317 '_iocbuf',
317 '_iocbuf',
318 )
318 )
319
319
320 def __init__(self, fd, threshold=1024):
320 def __init__(self, fd, threshold=1024):
321 self.fd = fd
321 self.fd = fd
322 self.threshold = threshold
322 self.threshold = threshold
323 self._iocbuf = array.array('i', [0])
323 self._iocbuf = array.array('i', [0])
324
324
325 def readable(self):
325 def readable(self):
326 '''Return the number of bytes readable on this file descriptor.'''
326 '''Return the number of bytes readable on this file descriptor.'''
327
327
328 fcntl.ioctl(self.fd, termios.FIONREAD, self._iocbuf, True)
328 fcntl.ioctl(self.fd, termios.FIONREAD, self._iocbuf, True)
329 return self._iocbuf[0]
329 return self._iocbuf[0]
330
330
331 def __call__(self):
331 def __call__(self):
332 '''Indicate whether the number of readable bytes has met or
332 '''Indicate whether the number of readable bytes has met or
333 exceeded the threshold.'''
333 exceeded the threshold.'''
334
334
335 return self.readable() >= self.threshold
335 return self.readable() >= self.threshold
@@ -1,3622 +1,3622 b''
1 # mq.py - patch queues for mercurial
1 # mq.py - patch queues for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''manage a stack of patches
8 '''manage a stack of patches
9
9
10 This extension lets you work with a stack of patches in a Mercurial
10 This extension lets you work with a stack of patches in a Mercurial
11 repository. It manages two stacks of patches - all known patches, and
11 repository. It manages two stacks of patches - all known patches, and
12 applied patches (subset of known patches).
12 applied patches (subset of known patches).
13
13
14 Known patches are represented as patch files in the .hg/patches
14 Known patches are represented as patch files in the .hg/patches
15 directory. Applied patches are both patch files and changesets.
15 directory. Applied patches are both patch files and changesets.
16
16
17 Common tasks (use :hg:`help command` for more details)::
17 Common tasks (use :hg:`help command` for more details)::
18
18
19 create new patch qnew
19 create new patch qnew
20 import existing patch qimport
20 import existing patch qimport
21
21
22 print patch series qseries
22 print patch series qseries
23 print applied patches qapplied
23 print applied patches qapplied
24
24
25 add known patch to applied stack qpush
25 add known patch to applied stack qpush
26 remove patch from applied stack qpop
26 remove patch from applied stack qpop
27 refresh contents of top applied patch qrefresh
27 refresh contents of top applied patch qrefresh
28
28
29 By default, mq will automatically use git patches when required to
29 By default, mq will automatically use git patches when required to
30 avoid losing file mode changes, copy records, binary files or empty
30 avoid losing file mode changes, copy records, binary files or empty
31 files creations or deletions. This behaviour can be configured with::
31 files creations or deletions. This behaviour can be configured with::
32
32
33 [mq]
33 [mq]
34 git = auto/keep/yes/no
34 git = auto/keep/yes/no
35
35
36 If set to 'keep', mq will obey the [diff] section configuration while
36 If set to 'keep', mq will obey the [diff] section configuration while
37 preserving existing git patches upon qrefresh. If set to 'yes' or
37 preserving existing git patches upon qrefresh. If set to 'yes' or
38 'no', mq will override the [diff] section and always generate git or
38 'no', mq will override the [diff] section and always generate git or
39 regular patches, possibly losing data in the second case.
39 regular patches, possibly losing data in the second case.
40
40
41 It may be desirable for mq changesets to be kept in the secret phase (see
41 It may be desirable for mq changesets to be kept in the secret phase (see
42 :hg:`help phases`), which can be enabled with the following setting::
42 :hg:`help phases`), which can be enabled with the following setting::
43
43
44 [mq]
44 [mq]
45 secret = True
45 secret = True
46
46
47 You will by default be managing a patch queue named "patches". You can
47 You will by default be managing a patch queue named "patches". You can
48 create other, independent patch queues with the :hg:`qqueue` command.
48 create other, independent patch queues with the :hg:`qqueue` command.
49
49
50 If the working directory contains uncommitted files, qpush, qpop and
50 If the working directory contains uncommitted files, qpush, qpop and
51 qgoto abort immediately. If -f/--force is used, the changes are
51 qgoto abort immediately. If -f/--force is used, the changes are
52 discarded. Setting::
52 discarded. Setting::
53
53
54 [mq]
54 [mq]
55 keepchanges = True
55 keepchanges = True
56
56
57 make them behave as if --keep-changes were passed, and non-conflicting
57 make them behave as if --keep-changes were passed, and non-conflicting
58 local changes will be tolerated and preserved. If incompatible options
58 local changes will be tolerated and preserved. If incompatible options
59 such as -f/--force or --exact are passed, this setting is ignored.
59 such as -f/--force or --exact are passed, this setting is ignored.
60 '''
60 '''
61
61
62 from mercurial.i18n import _
62 from mercurial.i18n import _
63 from mercurial.node import bin, hex, short, nullid, nullrev
63 from mercurial.node import bin, hex, short, nullid, nullrev
64 from mercurial.lock import release
64 from mercurial.lock import release
65 from mercurial import commands, cmdutil, hg, scmutil, util, revset
65 from mercurial import commands, cmdutil, hg, scmutil, util, revset
66 from mercurial import repair, extensions, error, phases
66 from mercurial import repair, extensions, error, phases
67 from mercurial import patch as patchmod
67 from mercurial import patch as patchmod
68 import os, re, errno, shutil
68 import os, re, errno, shutil
69
69
70 commands.norepo += " qclone"
70 commands.norepo += " qclone"
71
71
72 seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
72 seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
73
73
74 cmdtable = {}
74 cmdtable = {}
75 command = cmdutil.command(cmdtable)
75 command = cmdutil.command(cmdtable)
76 testedwith = 'internal'
76 testedwith = 'internal'
77
77
78 # Patch names looks like unix-file names.
78 # Patch names looks like unix-file names.
79 # They must be joinable with queue directory and result in the patch path.
79 # They must be joinable with queue directory and result in the patch path.
80 normname = util.normpath
80 normname = util.normpath
81
81
82 class statusentry(object):
82 class statusentry(object):
83 def __init__(self, node, name):
83 def __init__(self, node, name):
84 self.node, self.name = node, name
84 self.node, self.name = node, name
85 def __repr__(self):
85 def __repr__(self):
86 return hex(self.node) + ':' + self.name
86 return hex(self.node) + ':' + self.name
87
87
88 class patchheader(object):
88 class patchheader(object):
89 def __init__(self, pf, plainmode=False):
89 def __init__(self, pf, plainmode=False):
90 def eatdiff(lines):
90 def eatdiff(lines):
91 while lines:
91 while lines:
92 l = lines[-1]
92 l = lines[-1]
93 if (l.startswith("diff -") or
93 if (l.startswith("diff -") or
94 l.startswith("Index:") or
94 l.startswith("Index:") or
95 l.startswith("===========")):
95 l.startswith("===========")):
96 del lines[-1]
96 del lines[-1]
97 else:
97 else:
98 break
98 break
99 def eatempty(lines):
99 def eatempty(lines):
100 while lines:
100 while lines:
101 if not lines[-1].strip():
101 if not lines[-1].strip():
102 del lines[-1]
102 del lines[-1]
103 else:
103 else:
104 break
104 break
105
105
106 message = []
106 message = []
107 comments = []
107 comments = []
108 user = None
108 user = None
109 date = None
109 date = None
110 parent = None
110 parent = None
111 format = None
111 format = None
112 subject = None
112 subject = None
113 branch = None
113 branch = None
114 nodeid = None
114 nodeid = None
115 diffstart = 0
115 diffstart = 0
116
116
117 for line in file(pf):
117 for line in file(pf):
118 line = line.rstrip()
118 line = line.rstrip()
119 if (line.startswith('diff --git')
119 if (line.startswith('diff --git')
120 or (diffstart and line.startswith('+++ '))):
120 or (diffstart and line.startswith('+++ '))):
121 diffstart = 2
121 diffstart = 2
122 break
122 break
123 diffstart = 0 # reset
123 diffstart = 0 # reset
124 if line.startswith("--- "):
124 if line.startswith("--- "):
125 diffstart = 1
125 diffstart = 1
126 continue
126 continue
127 elif format == "hgpatch":
127 elif format == "hgpatch":
128 # parse values when importing the result of an hg export
128 # parse values when importing the result of an hg export
129 if line.startswith("# User "):
129 if line.startswith("# User "):
130 user = line[7:]
130 user = line[7:]
131 elif line.startswith("# Date "):
131 elif line.startswith("# Date "):
132 date = line[7:]
132 date = line[7:]
133 elif line.startswith("# Parent "):
133 elif line.startswith("# Parent "):
134 parent = line[9:].lstrip()
134 parent = line[9:].lstrip()
135 elif line.startswith("# Branch "):
135 elif line.startswith("# Branch "):
136 branch = line[9:]
136 branch = line[9:]
137 elif line.startswith("# Node ID "):
137 elif line.startswith("# Node ID "):
138 nodeid = line[10:]
138 nodeid = line[10:]
139 elif not line.startswith("# ") and line:
139 elif not line.startswith("# ") and line:
140 message.append(line)
140 message.append(line)
141 format = None
141 format = None
142 elif line == '# HG changeset patch':
142 elif line == '# HG changeset patch':
143 message = []
143 message = []
144 format = "hgpatch"
144 format = "hgpatch"
145 elif (format != "tagdone" and (line.startswith("Subject: ") or
145 elif (format != "tagdone" and (line.startswith("Subject: ") or
146 line.startswith("subject: "))):
146 line.startswith("subject: "))):
147 subject = line[9:]
147 subject = line[9:]
148 format = "tag"
148 format = "tag"
149 elif (format != "tagdone" and (line.startswith("From: ") or
149 elif (format != "tagdone" and (line.startswith("From: ") or
150 line.startswith("from: "))):
150 line.startswith("from: "))):
151 user = line[6:]
151 user = line[6:]
152 format = "tag"
152 format = "tag"
153 elif (format != "tagdone" and (line.startswith("Date: ") or
153 elif (format != "tagdone" and (line.startswith("Date: ") or
154 line.startswith("date: "))):
154 line.startswith("date: "))):
155 date = line[6:]
155 date = line[6:]
156 format = "tag"
156 format = "tag"
157 elif format == "tag" and line == "":
157 elif format == "tag" and line == "":
158 # when looking for tags (subject: from: etc) they
158 # when looking for tags (subject: from: etc) they
159 # end once you find a blank line in the source
159 # end once you find a blank line in the source
160 format = "tagdone"
160 format = "tagdone"
161 elif message or line:
161 elif message or line:
162 message.append(line)
162 message.append(line)
163 comments.append(line)
163 comments.append(line)
164
164
165 eatdiff(message)
165 eatdiff(message)
166 eatdiff(comments)
166 eatdiff(comments)
167 # Remember the exact starting line of the patch diffs before consuming
167 # Remember the exact starting line of the patch diffs before consuming
168 # empty lines, for external use by TortoiseHg and others
168 # empty lines, for external use by TortoiseHg and others
169 self.diffstartline = len(comments)
169 self.diffstartline = len(comments)
170 eatempty(message)
170 eatempty(message)
171 eatempty(comments)
171 eatempty(comments)
172
172
173 # make sure message isn't empty
173 # make sure message isn't empty
174 if format and format.startswith("tag") and subject:
174 if format and format.startswith("tag") and subject:
175 message.insert(0, "")
175 message.insert(0, "")
176 message.insert(0, subject)
176 message.insert(0, subject)
177
177
178 self.message = message
178 self.message = message
179 self.comments = comments
179 self.comments = comments
180 self.user = user
180 self.user = user
181 self.date = date
181 self.date = date
182 self.parent = parent
182 self.parent = parent
183 # nodeid and branch are for external use by TortoiseHg and others
183 # nodeid and branch are for external use by TortoiseHg and others
184 self.nodeid = nodeid
184 self.nodeid = nodeid
185 self.branch = branch
185 self.branch = branch
186 self.haspatch = diffstart > 1
186 self.haspatch = diffstart > 1
187 self.plainmode = plainmode
187 self.plainmode = plainmode
188
188
189 def setuser(self, user):
189 def setuser(self, user):
190 if not self.updateheader(['From: ', '# User '], user):
190 if not self.updateheader(['From: ', '# User '], user):
191 try:
191 try:
192 patchheaderat = self.comments.index('# HG changeset patch')
192 patchheaderat = self.comments.index('# HG changeset patch')
193 self.comments.insert(patchheaderat + 1, '# User ' + user)
193 self.comments.insert(patchheaderat + 1, '# User ' + user)
194 except ValueError:
194 except ValueError:
195 if self.plainmode or self._hasheader(['Date: ']):
195 if self.plainmode or self._hasheader(['Date: ']):
196 self.comments = ['From: ' + user] + self.comments
196 self.comments = ['From: ' + user] + self.comments
197 else:
197 else:
198 tmp = ['# HG changeset patch', '# User ' + user, '']
198 tmp = ['# HG changeset patch', '# User ' + user, '']
199 self.comments = tmp + self.comments
199 self.comments = tmp + self.comments
200 self.user = user
200 self.user = user
201
201
202 def setdate(self, date):
202 def setdate(self, date):
203 if not self.updateheader(['Date: ', '# Date '], date):
203 if not self.updateheader(['Date: ', '# Date '], date):
204 try:
204 try:
205 patchheaderat = self.comments.index('# HG changeset patch')
205 patchheaderat = self.comments.index('# HG changeset patch')
206 self.comments.insert(patchheaderat + 1, '# Date ' + date)
206 self.comments.insert(patchheaderat + 1, '# Date ' + date)
207 except ValueError:
207 except ValueError:
208 if self.plainmode or self._hasheader(['From: ']):
208 if self.plainmode or self._hasheader(['From: ']):
209 self.comments = ['Date: ' + date] + self.comments
209 self.comments = ['Date: ' + date] + self.comments
210 else:
210 else:
211 tmp = ['# HG changeset patch', '# Date ' + date, '']
211 tmp = ['# HG changeset patch', '# Date ' + date, '']
212 self.comments = tmp + self.comments
212 self.comments = tmp + self.comments
213 self.date = date
213 self.date = date
214
214
215 def setparent(self, parent):
215 def setparent(self, parent):
216 if not self.updateheader(['# Parent '], parent):
216 if not self.updateheader(['# Parent '], parent):
217 try:
217 try:
218 patchheaderat = self.comments.index('# HG changeset patch')
218 patchheaderat = self.comments.index('# HG changeset patch')
219 self.comments.insert(patchheaderat + 1, '# Parent ' + parent)
219 self.comments.insert(patchheaderat + 1, '# Parent ' + parent)
220 except ValueError:
220 except ValueError:
221 pass
221 pass
222 self.parent = parent
222 self.parent = parent
223
223
    def setmessage(self, message):
        """Replace the commit message with *message* (a single string),
        removing any previous message lines from the comments first."""
        if self.comments:
            self._delmsg()
        self.message = [message]
        # += extends the existing comments list in place
        self.comments += self.message
229
229
230 def updateheader(self, prefixes, new):
230 def updateheader(self, prefixes, new):
231 '''Update all references to a field in the patch header.
231 '''Update all references to a field in the patch header.
232 Return whether the field is present.'''
232 Return whether the field is present.'''
233 res = False
233 res = False
234 for prefix in prefixes:
234 for prefix in prefixes:
235 for i in xrange(len(self.comments)):
235 for i in xrange(len(self.comments)):
236 if self.comments[i].startswith(prefix):
236 if self.comments[i].startswith(prefix):
237 self.comments[i] = prefix + new
237 self.comments[i] = prefix + new
238 res = True
238 res = True
239 break
239 break
240 return res
240 return res
241
241
242 def _hasheader(self, prefixes):
242 def _hasheader(self, prefixes):
243 '''Check if a header starts with any of the given prefixes.'''
243 '''Check if a header starts with any of the given prefixes.'''
244 for prefix in prefixes:
244 for prefix in prefixes:
245 for comment in self.comments:
245 for comment in self.comments:
246 if comment.startswith(prefix):
246 if comment.startswith(prefix):
247 return True
247 return True
248 return False
248 return False
249
249
250 def __str__(self):
250 def __str__(self):
251 if not self.comments:
251 if not self.comments:
252 return ''
252 return ''
253 return '\n'.join(self.comments) + '\n\n'
253 return '\n'.join(self.comments) + '\n\n'
254
254
    def _delmsg(self):
        '''Remove existing message, keeping the rest of the comments fields.
        If comments contains 'subject: ', message will prepend
        the field and a blank line.'''
        if self.message:
            subj = 'subject: ' + self.message[0].lower()
            for i in xrange(len(self.comments)):
                if subj == self.comments[i].lower():
                    del self.comments[i]
                    # drop the subject line plus the following blank line
                    # from the message; the remainder is removed below
                    self.message = self.message[2:]
                    break
        # Delete each remaining message line from comments, scanning
        # forward; assumes message lines appear in order in comments.
        ci = 0
        for mi in self.message:
            while mi != self.comments[ci]:
                ci += 1
            del self.comments[ci]
271
271
def newcommit(repo, phase, *args, **kwargs):
    """helper dedicated to ensure a commit respect mq.secret setting

    It should be used instead of repo.commit inside the mq source for operation
    creating new changeset.
    """
    repo = repo.unfiltered()
    if phase is None:
        # mq.secret=True makes newly created mq changesets secret
        if repo.ui.configbool('mq', 'secret', False):
            phase = phases.secret
    if phase is not None:
        # temporarily override phases.new-commit; 'backup' is only bound
        # when phase is not None, matching the restore guard below
        backup = repo.ui.backupconfig('phases', 'new-commit')
    # Marking the repository as committing an mq patch can be used
    # to optimize operations like _branchtags().
    repo._committingpatch = True
    try:
        if phase is not None:
            repo.ui.setconfig('phases', 'new-commit', phase)
        return repo.commit(*args, **kwargs)
    finally:
        # always clear the flag and restore the phase config, even on error
        repo._committingpatch = False
        if phase is not None:
            repo.ui.restoreconfig(backup)
295
295
class AbortNoCleanup(error.Abort):
    # Raised to abort an operation while still committing the transaction
    # instead of rolling it back (handled specially in queue.apply).
    pass
298
298
299 class queue(object):
299 class queue(object):
300 def __init__(self, ui, path, patchdir=None):
300 def __init__(self, ui, path, patchdir=None):
301 self.basepath = path
301 self.basepath = path
302 try:
302 try:
303 fh = open(os.path.join(path, 'patches.queue'))
303 fh = open(os.path.join(path, 'patches.queue'))
304 cur = fh.read().rstrip()
304 cur = fh.read().rstrip()
305 fh.close()
305 fh.close()
306 if not cur:
306 if not cur:
307 curpath = os.path.join(path, 'patches')
307 curpath = os.path.join(path, 'patches')
308 else:
308 else:
309 curpath = os.path.join(path, 'patches-' + cur)
309 curpath = os.path.join(path, 'patches-' + cur)
310 except IOError:
310 except IOError:
311 curpath = os.path.join(path, 'patches')
311 curpath = os.path.join(path, 'patches')
312 self.path = patchdir or curpath
312 self.path = patchdir or curpath
313 self.opener = scmutil.opener(self.path)
313 self.opener = scmutil.opener(self.path)
314 self.ui = ui
314 self.ui = ui
315 self.applieddirty = False
315 self.applieddirty = False
316 self.seriesdirty = False
316 self.seriesdirty = False
317 self.added = []
317 self.added = []
318 self.seriespath = "series"
318 self.seriespath = "series"
319 self.statuspath = "status"
319 self.statuspath = "status"
320 self.guardspath = "guards"
320 self.guardspath = "guards"
321 self.activeguards = None
321 self.activeguards = None
322 self.guardsdirty = False
322 self.guardsdirty = False
323 # Handle mq.git as a bool with extended values
323 # Handle mq.git as a bool with extended values
324 try:
324 try:
325 gitmode = ui.configbool('mq', 'git', None)
325 gitmode = ui.configbool('mq', 'git', None)
326 if gitmode is None:
326 if gitmode is None:
327 raise error.ConfigError
327 raise error.ConfigError
328 self.gitmode = gitmode and 'yes' or 'no'
328 self.gitmode = gitmode and 'yes' or 'no'
329 except error.ConfigError:
329 except error.ConfigError:
330 self.gitmode = ui.config('mq', 'git', 'auto').lower()
330 self.gitmode = ui.config('mq', 'git', 'auto').lower()
331 self.plainmode = ui.configbool('mq', 'plain', False)
331 self.plainmode = ui.configbool('mq', 'plain', False)
332
332
    @util.propertycache
    def applied(self):
        """List of statusentry objects parsed from the status file."""
        def parselines(lines):
            for l in lines:
                entry = l.split(':', 1)
                if len(entry) > 1:
                    n, name = entry
                    yield statusentry(bin(n), name)
                elif l.strip():
                    # NOTE(review): this interpolates the split list
                    # ('entry'), not the raw line 'l' — the warning shows
                    # a list repr; confirm whether that is intentional
                    self.ui.warn(_('malformated mq status line: %s\n') % entry)
                # else we ignore empty lines
        try:
            lines = self.opener.read(self.statuspath).splitlines()
            return list(parselines(lines))
        except IOError, e:
            if e.errno == errno.ENOENT:
                # missing status file means no patches are applied
                return []
            raise
351
351
352 @util.propertycache
352 @util.propertycache
353 def fullseries(self):
353 def fullseries(self):
354 try:
354 try:
355 return self.opener.read(self.seriespath).splitlines()
355 return self.opener.read(self.seriespath).splitlines()
356 except IOError, e:
356 except IOError, e:
357 if e.errno == errno.ENOENT:
357 if e.errno == errno.ENOENT:
358 return []
358 return []
359 raise
359 raise
360
360
    @util.propertycache
    def series(self):
        # parseseries() assigns self.series directly, so the attribute it
        # sets replaces this propertycache computation on first access
        self.parseseries()
        return self.series
365
365
    @util.propertycache
    def seriesguards(self):
        # parseseries() assigns self.seriesguards directly, so the attribute
        # it sets replaces this propertycache computation on first access
        self.parseseries()
        return self.seriesguards
370
370
371 def invalidate(self):
371 def invalidate(self):
372 for a in 'applied fullseries series seriesguards'.split():
372 for a in 'applied fullseries series seriesguards'.split():
373 if a in self.__dict__:
373 if a in self.__dict__:
374 delattr(self, a)
374 delattr(self, a)
375 self.applieddirty = False
375 self.applieddirty = False
376 self.seriesdirty = False
376 self.seriesdirty = False
377 self.guardsdirty = False
377 self.guardsdirty = False
378 self.activeguards = None
378 self.activeguards = None
379
379
380 def diffopts(self, opts={}, patchfn=None):
380 def diffopts(self, opts={}, patchfn=None):
381 diffopts = patchmod.diffopts(self.ui, opts)
381 diffopts = patchmod.diffopts(self.ui, opts)
382 if self.gitmode == 'auto':
382 if self.gitmode == 'auto':
383 diffopts.upgrade = True
383 diffopts.upgrade = True
384 elif self.gitmode == 'keep':
384 elif self.gitmode == 'keep':
385 pass
385 pass
386 elif self.gitmode in ('yes', 'no'):
386 elif self.gitmode in ('yes', 'no'):
387 diffopts.git = self.gitmode == 'yes'
387 diffopts.git = self.gitmode == 'yes'
388 else:
388 else:
389 raise util.Abort(_('mq.git option can be auto/keep/yes/no'
389 raise util.Abort(_('mq.git option can be auto/keep/yes/no'
390 ' got %s') % self.gitmode)
390 ' got %s') % self.gitmode)
391 if patchfn:
391 if patchfn:
392 diffopts = self.patchopts(diffopts, patchfn)
392 diffopts = self.patchopts(diffopts, patchfn)
393 return diffopts
393 return diffopts
394
394
395 def patchopts(self, diffopts, *patches):
395 def patchopts(self, diffopts, *patches):
396 """Return a copy of input diff options with git set to true if
396 """Return a copy of input diff options with git set to true if
397 referenced patch is a git patch and should be preserved as such.
397 referenced patch is a git patch and should be preserved as such.
398 """
398 """
399 diffopts = diffopts.copy()
399 diffopts = diffopts.copy()
400 if not diffopts.git and self.gitmode == 'keep':
400 if not diffopts.git and self.gitmode == 'keep':
401 for patchfn in patches:
401 for patchfn in patches:
402 patchf = self.opener(patchfn, 'r')
402 patchf = self.opener(patchfn, 'r')
403 # if the patch was a git patch, refresh it as a git patch
403 # if the patch was a git patch, refresh it as a git patch
404 for line in patchf:
404 for line in patchf:
405 if line.startswith('diff --git'):
405 if line.startswith('diff --git'):
406 diffopts.git = True
406 diffopts.git = True
407 break
407 break
408 patchf.close()
408 patchf.close()
409 return diffopts
409 return diffopts
410
410
    def join(self, *p):
        """Join path components onto the queue's patch directory."""
        return os.path.join(self.path, *p)
413
413
414 def findseries(self, patch):
414 def findseries(self, patch):
415 def matchpatch(l):
415 def matchpatch(l):
416 l = l.split('#', 1)[0]
416 l = l.split('#', 1)[0]
417 return l.strip() == patch
417 return l.strip() == patch
418 for index, l in enumerate(self.fullseries):
418 for index, l in enumerate(self.fullseries):
419 if matchpatch(l):
419 if matchpatch(l):
420 return index
420 return index
421 return None
421 return None
422
422
    # Matches one guard token in a series line: optional leading blank,
    # '#', then '+' or '-' followed by the guard name (which must not
    # start with '-'/'+' and may not contain '#' or whitespace).
    guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
424
424
425 def parseseries(self):
425 def parseseries(self):
426 self.series = []
426 self.series = []
427 self.seriesguards = []
427 self.seriesguards = []
428 for l in self.fullseries:
428 for l in self.fullseries:
429 h = l.find('#')
429 h = l.find('#')
430 if h == -1:
430 if h == -1:
431 patch = l
431 patch = l
432 comment = ''
432 comment = ''
433 elif h == 0:
433 elif h == 0:
434 continue
434 continue
435 else:
435 else:
436 patch = l[:h]
436 patch = l[:h]
437 comment = l[h:]
437 comment = l[h:]
438 patch = patch.strip()
438 patch = patch.strip()
439 if patch:
439 if patch:
440 if patch in self.series:
440 if patch in self.series:
441 raise util.Abort(_('%s appears more than once in %s') %
441 raise util.Abort(_('%s appears more than once in %s') %
442 (patch, self.join(self.seriespath)))
442 (patch, self.join(self.seriespath)))
443 self.series.append(patch)
443 self.series.append(patch)
444 self.seriesguards.append(self.guard_re.findall(comment))
444 self.seriesguards.append(self.guard_re.findall(comment))
445
445
446 def checkguard(self, guard):
446 def checkguard(self, guard):
447 if not guard:
447 if not guard:
448 return _('guard cannot be an empty string')
448 return _('guard cannot be an empty string')
449 bad_chars = '# \t\r\n\f'
449 bad_chars = '# \t\r\n\f'
450 first = guard[0]
450 first = guard[0]
451 if first in '-+':
451 if first in '-+':
452 return (_('guard %r starts with invalid character: %r') %
452 return (_('guard %r starts with invalid character: %r') %
453 (guard, first))
453 (guard, first))
454 for c in bad_chars:
454 for c in bad_chars:
455 if c in guard:
455 if c in guard:
456 return _('invalid character in guard %r: %r') % (guard, c)
456 return _('invalid character in guard %r: %r') % (guard, c)
457
457
458 def setactive(self, guards):
458 def setactive(self, guards):
459 for guard in guards:
459 for guard in guards:
460 bad = self.checkguard(guard)
460 bad = self.checkguard(guard)
461 if bad:
461 if bad:
462 raise util.Abort(bad)
462 raise util.Abort(bad)
463 guards = sorted(set(guards))
463 guards = sorted(set(guards))
464 self.ui.debug('active guards: %s\n' % ' '.join(guards))
464 self.ui.debug('active guards: %s\n' % ' '.join(guards))
465 self.activeguards = guards
465 self.activeguards = guards
466 self.guardsdirty = True
466 self.guardsdirty = True
467
467
468 def active(self):
468 def active(self):
469 if self.activeguards is None:
469 if self.activeguards is None:
470 self.activeguards = []
470 self.activeguards = []
471 try:
471 try:
472 guards = self.opener.read(self.guardspath).split()
472 guards = self.opener.read(self.guardspath).split()
473 except IOError, err:
473 except IOError, err:
474 if err.errno != errno.ENOENT:
474 if err.errno != errno.ENOENT:
475 raise
475 raise
476 guards = []
476 guards = []
477 for i, guard in enumerate(guards):
477 for i, guard in enumerate(guards):
478 bad = self.checkguard(guard)
478 bad = self.checkguard(guard)
479 if bad:
479 if bad:
480 self.ui.warn('%s:%d: %s\n' %
480 self.ui.warn('%s:%d: %s\n' %
481 (self.join(self.guardspath), i + 1, bad))
481 (self.join(self.guardspath), i + 1, bad))
482 else:
482 else:
483 self.activeguards.append(guard)
483 self.activeguards.append(guard)
484 return self.activeguards
484 return self.activeguards
485
485
486 def setguards(self, idx, guards):
486 def setguards(self, idx, guards):
487 for g in guards:
487 for g in guards:
488 if len(g) < 2:
488 if len(g) < 2:
489 raise util.Abort(_('guard %r too short') % g)
489 raise util.Abort(_('guard %r too short') % g)
490 if g[0] not in '-+':
490 if g[0] not in '-+':
491 raise util.Abort(_('guard %r starts with invalid char') % g)
491 raise util.Abort(_('guard %r starts with invalid char') % g)
492 bad = self.checkguard(g[1:])
492 bad = self.checkguard(g[1:])
493 if bad:
493 if bad:
494 raise util.Abort(bad)
494 raise util.Abort(bad)
495 drop = self.guard_re.sub('', self.fullseries[idx])
495 drop = self.guard_re.sub('', self.fullseries[idx])
496 self.fullseries[idx] = drop + ''.join([' #' + g for g in guards])
496 self.fullseries[idx] = drop + ''.join([' #' + g for g in guards])
497 self.parseseries()
497 self.parseseries()
498 self.seriesdirty = True
498 self.seriesdirty = True
499
499
500 def pushable(self, idx):
500 def pushable(self, idx):
501 if isinstance(idx, str):
501 if isinstance(idx, str):
502 idx = self.series.index(idx)
502 idx = self.series.index(idx)
503 patchguards = self.seriesguards[idx]
503 patchguards = self.seriesguards[idx]
504 if not patchguards:
504 if not patchguards:
505 return True, None
505 return True, None
506 guards = self.active()
506 guards = self.active()
507 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
507 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
508 if exactneg:
508 if exactneg:
509 return False, repr(exactneg[0])
509 return False, repr(exactneg[0])
510 pos = [g for g in patchguards if g[0] == '+']
510 pos = [g for g in patchguards if g[0] == '+']
511 exactpos = [g for g in pos if g[1:] in guards]
511 exactpos = [g for g in pos if g[1:] in guards]
512 if pos:
512 if pos:
513 if exactpos:
513 if exactpos:
514 return True, repr(exactpos[0])
514 return True, repr(exactpos[0])
515 return False, ' '.join(map(repr, pos))
515 return False, ' '.join(map(repr, pos))
516 return True, ''
516 return True, ''
517
517
518 def explainpushable(self, idx, all_patches=False):
518 def explainpushable(self, idx, all_patches=False):
519 write = all_patches and self.ui.write or self.ui.warn
519 write = all_patches and self.ui.write or self.ui.warn
520 if all_patches or self.ui.verbose:
520 if all_patches or self.ui.verbose:
521 if isinstance(idx, str):
521 if isinstance(idx, str):
522 idx = self.series.index(idx)
522 idx = self.series.index(idx)
523 pushable, why = self.pushable(idx)
523 pushable, why = self.pushable(idx)
524 if all_patches and pushable:
524 if all_patches and pushable:
525 if why is None:
525 if why is None:
526 write(_('allowing %s - no guards in effect\n') %
526 write(_('allowing %s - no guards in effect\n') %
527 self.series[idx])
527 self.series[idx])
528 else:
528 else:
529 if not why:
529 if not why:
530 write(_('allowing %s - no matching negative guards\n') %
530 write(_('allowing %s - no matching negative guards\n') %
531 self.series[idx])
531 self.series[idx])
532 else:
532 else:
533 write(_('allowing %s - guarded by %s\n') %
533 write(_('allowing %s - guarded by %s\n') %
534 (self.series[idx], why))
534 (self.series[idx], why))
535 if not pushable:
535 if not pushable:
536 if why:
536 if why:
537 write(_('skipping %s - guarded by %s\n') %
537 write(_('skipping %s - guarded by %s\n') %
538 (self.series[idx], why))
538 (self.series[idx], why))
539 else:
539 else:
540 write(_('skipping %s - no matching guards\n') %
540 write(_('skipping %s - no matching guards\n') %
541 self.series[idx])
541 self.series[idx])
542
542
543 def savedirty(self):
543 def savedirty(self):
544 def writelist(items, path):
544 def writelist(items, path):
545 fp = self.opener(path, 'w')
545 fp = self.opener(path, 'w')
546 for i in items:
546 for i in items:
547 fp.write("%s\n" % i)
547 fp.write("%s\n" % i)
548 fp.close()
548 fp.close()
549 if self.applieddirty:
549 if self.applieddirty:
550 writelist(map(str, self.applied), self.statuspath)
550 writelist(map(str, self.applied), self.statuspath)
551 self.applieddirty = False
551 self.applieddirty = False
552 if self.seriesdirty:
552 if self.seriesdirty:
553 writelist(self.fullseries, self.seriespath)
553 writelist(self.fullseries, self.seriespath)
554 self.seriesdirty = False
554 self.seriesdirty = False
555 if self.guardsdirty:
555 if self.guardsdirty:
556 writelist(self.activeguards, self.guardspath)
556 writelist(self.activeguards, self.guardspath)
557 self.guardsdirty = False
557 self.guardsdirty = False
558 if self.added:
558 if self.added:
559 qrepo = self.qrepo()
559 qrepo = self.qrepo()
560 if qrepo:
560 if qrepo:
561 qrepo[None].add(f for f in self.added if f not in qrepo[None])
561 qrepo[None].add(f for f in self.added if f not in qrepo[None])
562 self.added = []
562 self.added = []
563
563
564 def removeundo(self, repo):
564 def removeundo(self, repo):
565 undo = repo.sjoin('undo')
565 undo = repo.sjoin('undo')
566 if not os.path.exists(undo):
566 if not os.path.exists(undo):
567 return
567 return
568 try:
568 try:
569 os.unlink(undo)
569 os.unlink(undo)
570 except OSError, inst:
570 except OSError, inst:
571 self.ui.warn(_('error removing undo: %s\n') % str(inst))
571 self.ui.warn(_('error removing undo: %s\n') % str(inst))
572
572
573 def backup(self, repo, files, copy=False):
573 def backup(self, repo, files, copy=False):
574 # backup local changes in --force case
574 # backup local changes in --force case
575 for f in sorted(files):
575 for f in sorted(files):
576 absf = repo.wjoin(f)
576 absf = repo.wjoin(f)
577 if os.path.lexists(absf):
577 if os.path.lexists(absf):
578 self.ui.note(_('saving current version of %s as %s\n') %
578 self.ui.note(_('saving current version of %s as %s\n') %
579 (f, f + '.orig'))
579 (f, f + '.orig'))
580 if copy:
580 if copy:
581 util.copyfile(absf, absf + '.orig')
581 util.copyfile(absf, absf + '.orig')
582 else:
582 else:
583 util.rename(absf, absf + '.orig')
583 util.rename(absf, absf + '.orig')
584
584
585 def printdiff(self, repo, diffopts, node1, node2=None, files=None,
585 def printdiff(self, repo, diffopts, node1, node2=None, files=None,
586 fp=None, changes=None, opts={}):
586 fp=None, changes=None, opts={}):
587 stat = opts.get('stat')
587 stat = opts.get('stat')
588 m = scmutil.match(repo[node1], files, opts)
588 m = scmutil.match(repo[node1], files, opts)
589 cmdutil.diffordiffstat(self.ui, repo, diffopts, node1, node2, m,
589 cmdutil.diffordiffstat(self.ui, repo, diffopts, node1, node2, m,
590 changes, stat, fp)
590 changes, stat, fp)
591
591
    def mergeone(self, repo, mergeq, head, patch, rev, diffopts):
        """Apply *patch* on top of *head*, merging with *rev* on failure.

        Returns (err, node); err is 0 on success and the patch file is
        rewritten with the merged result.
        """
        # first try just applying the patch
        (err, n) = self.apply(repo, [patch], update_status=False,
                              strict=True, merge=rev)

        if err == 0:
            return (err, n)

        if n is None:
            raise util.Abort(_("apply failed for patch %s") % patch)

        self.ui.warn(_("patch didn't work out, merging %s\n") % patch)

        # apply failed, strip away that rev and merge.
        hg.clean(repo, head)
        self.strip(repo, [n], update=False, backup='strip')

        ctx = repo[rev]
        ret = hg.merge(repo, rev)
        if ret:
            raise util.Abort(_("update returned %d") % ret)
        # commit the merge, reusing the original changeset's metadata
        n = newcommit(repo, None, ctx.description(), ctx.user(), force=True)
        if n is None:
            raise util.Abort(_("repo commit failed"))
        try:
            ph = patchheader(mergeq.join(patch), self.plainmode)
        except Exception:
            raise util.Abort(_("unable to read %s") % patch)

        # rewrite the patch file: original headers plus the merged diff
        diffopts = self.patchopts(diffopts, patch)
        patchf = self.opener(patch, "w")
        comments = str(ph)
        if comments:
            patchf.write(comments)
        self.printdiff(repo, diffopts, head, n, fp=patchf)
        patchf.close()
        self.removeundo(repo)
        return (0, n)
630
630
631 def qparents(self, repo, rev=None):
631 def qparents(self, repo, rev=None):
632 if rev is None:
632 if rev is None:
633 (p1, p2) = repo.dirstate.parents()
633 (p1, p2) = repo.dirstate.parents()
634 if p2 == nullid:
634 if p2 == nullid:
635 return p1
635 return p1
636 if not self.applied:
636 if not self.applied:
637 return None
637 return None
638 return self.applied[-1].node
638 return self.applied[-1].node
639 p1, p2 = repo.changelog.parents(rev)
639 p1, p2 = repo.changelog.parents(rev)
640 if p2 != nullid and p2 in [x.node for x in self.applied]:
640 if p2 != nullid and p2 in [x.node for x in self.applied]:
641 return p2
641 return p2
642 return p1
642 return p1
643
643
    def mergepatch(self, repo, mergeq, series, diffopts):
        """Merge each patch in *series* from *mergeq* into this queue.

        Returns (err, headnode); err is nonzero on the first failure.
        """
        if not self.applied:
            # each of the patches merged in will have two parents. This
            # can confuse the qrefresh, qdiff, and strip code because it
            # needs to know which parent is actually in the patch queue.
            # so, we insert a merge marker with only one parent. This way
            # the first patch in the queue is never a merge patch
            #
            pname = ".hg.patches.merge.marker"
            n = newcommit(repo, None, '[mq]: merge marker', force=True)
            self.removeundo(repo)
            self.applied.append(statusentry(n, pname))
            self.applieddirty = True

        head = self.qparents(repo)

        for patch in series:
            patch = mergeq.lookup(patch, strict=True)
            if not patch:
                self.ui.warn(_("patch %s does not exist\n") % patch)
                return (1, None)
            pushable, reason = self.pushable(patch)
            if not pushable:
                # guarded patches are skipped, not treated as errors
                self.explainpushable(patch, all_patches=True)
                continue
            info = mergeq.isapplied(patch)
            if not info:
                self.ui.warn(_("patch %s is not applied\n") % patch)
                return (1, None)
            rev = info[1]
            err, head = self.mergeone(repo, mergeq, head, patch, rev, diffopts)
            if head:
                self.applied.append(statusentry(head, patch))
                self.applieddirty = True
            if err:
                return (err, head)
        self.savedirty()
        return (0, head)
682
682
683 def patch(self, repo, patchfile):
683 def patch(self, repo, patchfile):
684 '''Apply patchfile to the working directory.
684 '''Apply patchfile to the working directory.
685 patchfile: name of patch file'''
685 patchfile: name of patch file'''
686 files = set()
686 files = set()
687 try:
687 try:
688 fuzz = patchmod.patch(self.ui, repo, patchfile, strip=1,
688 fuzz = patchmod.patch(self.ui, repo, patchfile, strip=1,
689 files=files, eolmode=None)
689 files=files, eolmode=None)
690 return (True, list(files), fuzz)
690 return (True, list(files), fuzz)
691 except Exception, inst:
691 except Exception, inst:
692 self.ui.note(str(inst) + '\n')
692 self.ui.note(str(inst) + '\n')
693 if not self.ui.verbose:
693 if not self.ui.verbose:
694 self.ui.warn(_("patch failed, unable to continue (try -v)\n"))
694 self.ui.warn(_("patch failed, unable to continue (try -v)\n"))
695 self.ui.traceback()
695 self.ui.traceback()
696 return (False, list(files), False)
696 return (False, list(files), False)
697
697
    def apply(self, repo, series, list=False, update_status=True,
              strict=False, patchdir=None, merge=None, all_files=None,
              tobackup=None, keepchanges=False):
        """Apply *series* under wlock/lock inside a 'qpush' transaction.

        Wraps _apply: commits the transaction and saves state on success,
        rolls back and invalidates cached state on unexpected errors.
        """
        wlock = lock = tr = None
        try:
            wlock = repo.wlock()
            lock = repo.lock()
            tr = repo.transaction("qpush")
            try:
                ret = self._apply(repo, series, list, update_status,
                                  strict, patchdir, merge, all_files=all_files,
                                  tobackup=tobackup, keepchanges=keepchanges)
                tr.close()
                self.savedirty()
                return ret
            except AbortNoCleanup:
                # deliberate partial abort: keep the transaction's effects
                tr.close()
                self.savedirty()
                return 2, repo.dirstate.p1()
            except: # re-raises
                try:
                    tr.abort()
                finally:
                    # discard any cached state made stale by the rollback
                    repo.invalidate()
                    repo.dirstate.invalidate()
                    self.invalidate()
                raise
        finally:
            release(tr, lock, wlock)
            self.removeundo(repo)
728
728
    def _apply(self, repo, series, list=False, update_status=True,
               strict=False, patchdir=None, merge=None, all_files=None,
               tobackup=None, keepchanges=False):
        """returns (error, hash)

        error = 1 for unable to read, 2 for patch failed, 3 for patch
        fuzz. tobackup is None or a set of files to backup before they
        are modified by a patch.
        """
        # TODO unify with commands.py
        if not patchdir:
            patchdir = self.path
        err = 0
        n = None
        for patchname in series:
            # skip patches that are excluded by guards
            pushable, reason = self.pushable(patchname)
            if not pushable:
                self.explainpushable(patchname, all_patches=True)
                continue
            self.ui.status(_("applying %s\n") % patchname)
            pf = os.path.join(patchdir, patchname)

            try:
                ph = patchheader(self.join(patchname), self.plainmode)
            except IOError:
                self.ui.warn(_("unable to read %s\n") % patchname)
                err = 1
                break

            # ph.message is a list of lines
            message = ph.message
            if not message:
                # The commit message should not be translated
                message = "imported patch %s\n" % patchname
            else:
                if list:
                    # The commit message should not be translated
                    message.append("\nimported patch %s" % patchname)
                message = '\n'.join(message)

            if ph.haspatch:
                if tobackup:
                    # back up the files this patch is about to touch;
                    # with keepchanges, touching locally-changed files
                    # aborts (caught and downgraded by apply())
                    touched = patchmod.changedfiles(self.ui, repo, pf)
                    touched = set(touched) & tobackup
                    if touched and keepchanges:
                        raise AbortNoCleanup(
                            _("local changes found, refresh first"))
                    self.backup(repo, touched, copy=True)
                    tobackup = tobackup - touched
                (patcherr, files, fuzz) = self.patch(repo, pf)
                if all_files is not None:
                    all_files.update(files)
                # self.patch returns a success flag; invert it into an
                # error flag
                patcherr = not patcherr
            else:
                self.ui.warn(_("patch %s is empty\n") % patchname)
                patcherr, files, fuzz = 0, [], 0

            if merge and files:
                # Mark as removed/merged and update dirstate parent info
                removed = []
                merged = []
                for f in files:
                    if os.path.lexists(repo.wjoin(f)):
                        merged.append(f)
                    else:
                        removed.append(f)
                for f in removed:
                    repo.dirstate.remove(f)
                for f in merged:
                    repo.dirstate.merge(f)
                p1, p2 = repo.dirstate.parents()
                repo.setparents(p1, merge)

            match = scmutil.matchfiles(repo, files or [])
            oldtip = repo['tip']
            n = newcommit(repo, None, message, ph.user, ph.date, match=match,
                          force=True)
            # a commit that changes nothing leaves tip untouched
            if repo['tip'] == oldtip:
                raise util.Abort(_("qpush exactly duplicates child changeset"))
            if n is None:
                raise util.Abort(_("repository commit failed"))

            if update_status:
                self.applied.append(statusentry(n, patchname))

            if patcherr:
                self.ui.warn(_("patch failed, rejects left in working dir\n"))
                err = 2
                break

            # fuzz only stops the push in strict mode
            if fuzz and strict:
                self.ui.warn(_("fuzz found when applying patch, stopping\n"))
                err = 3
                break
        return (err, n)
823
823
    def _cleanup(self, patches, numrevs, keep=False):
        """Drop patches from the queue bookkeeping (and from disk).

        patches: patch names to remove from the series.
        numrevs: number of leading applied entries that were finalized.
        keep: when true, leave the patch files on disk.
        Returns the nodes of the finalized applied entries.
        """
        if not keep:
            # forget the files in the versioned patch repo (if any),
            # then remove them from disk
            r = self.qrepo()
            if r:
                r[None].forget(patches)
            for p in patches:
                os.unlink(self.join(p))

        qfinished = []
        if numrevs:
            # the first numrevs applied entries leave mq control
            qfinished = self.applied[:numrevs]
            del self.applied[:numrevs]
            self.applieddirty = True

        unknown = []

        # delete by descending series index so earlier deletions do not
        # shift the positions of entries still to be removed
        for (i, p) in sorted([(self.findseries(p), p) for p in patches],
                             reverse=True):
            if i is not None:
                del self.fullseries[i]
            else:
                unknown.append(p)

        if unknown:
            if numrevs:
                # revisions were finalized anyway, so only warn
                rev = dict((entry.name, entry.node) for entry in qfinished)
                for p in unknown:
                    msg = _('revision %s refers to unknown patches: %s\n')
                    self.ui.warn(msg % (short(rev[p]), p))
            else:
                msg = _('unknown patches: %s\n')
                raise util.Abort(''.join(msg % p for p in unknown))

        self.parseseries()
        self.seriesdirty = True
        return [entry.node for entry in qfinished]
859 return [entry.node for entry in qfinished]
860
860
861 def _revpatches(self, repo, revs):
861 def _revpatches(self, repo, revs):
862 firstrev = repo[self.applied[0].node].rev()
862 firstrev = repo[self.applied[0].node].rev()
863 patches = []
863 patches = []
864 for i, rev in enumerate(revs):
864 for i, rev in enumerate(revs):
865
865
866 if rev < firstrev:
866 if rev < firstrev:
867 raise util.Abort(_('revision %d is not managed') % rev)
867 raise util.Abort(_('revision %d is not managed') % rev)
868
868
869 ctx = repo[rev]
869 ctx = repo[rev]
870 base = self.applied[i].node
870 base = self.applied[i].node
871 if ctx.node() != base:
871 if ctx.node() != base:
872 msg = _('cannot delete revision %d above applied patches')
872 msg = _('cannot delete revision %d above applied patches')
873 raise util.Abort(msg % rev)
873 raise util.Abort(msg % rev)
874
874
875 patch = self.applied[i].name
875 patch = self.applied[i].name
876 for fmt in ('[mq]: %s', 'imported patch %s'):
876 for fmt in ('[mq]: %s', 'imported patch %s'):
877 if ctx.description() == fmt % patch:
877 if ctx.description() == fmt % patch:
878 msg = _('patch %s finalized without changeset message\n')
878 msg = _('patch %s finalized without changeset message\n')
879 repo.ui.status(msg % patch)
879 repo.ui.status(msg % patch)
880 break
880 break
881
881
882 patches.append(patch)
882 patches.append(patch)
883 return patches
883 return patches
884
884
885 def finish(self, repo, revs):
885 def finish(self, repo, revs):
886 # Manually trigger phase computation to ensure phasedefaults is
886 # Manually trigger phase computation to ensure phasedefaults is
887 # executed before we remove the patches.
887 # executed before we remove the patches.
888 repo._phasecache
888 repo._phasecache
889 patches = self._revpatches(repo, sorted(revs))
889 patches = self._revpatches(repo, sorted(revs))
890 qfinished = self._cleanup(patches, len(patches))
890 qfinished = self._cleanup(patches, len(patches))
891 if qfinished and repo.ui.configbool('mq', 'secret', False):
891 if qfinished and repo.ui.configbool('mq', 'secret', False):
892 # only use this logic when the secret option is added
892 # only use this logic when the secret option is added
893 oldqbase = repo[qfinished[0]]
893 oldqbase = repo[qfinished[0]]
894 tphase = repo.ui.config('phases', 'new-commit', phases.draft)
894 tphase = repo.ui.config('phases', 'new-commit', phases.draft)
895 if oldqbase.phase() > tphase and oldqbase.p1().phase() <= tphase:
895 if oldqbase.phase() > tphase and oldqbase.p1().phase() <= tphase:
896 phases.advanceboundary(repo, tphase, qfinished)
896 phases.advanceboundary(repo, tphase, qfinished)
897
897
    def delete(self, repo, patches, opts):
        """Remove patches from the series (qdelete).

        patches: patch identifiers to delete; must resolve to unapplied
        series entries.
        opts['rev']: revisions whose finalized patches are dropped too.
        opts['keep']: keep the patch files on disk.
        """
        if not patches and not opts.get('rev'):
            raise util.Abort(_('qdelete requires at least one revision or '
                               'patch name'))

        realpatches = []
        for patch in patches:
            # resolve names strictly; refuse applied or unknown patches
            patch = self.lookup(patch, strict=True)
            info = self.isapplied(patch)
            if info:
                raise util.Abort(_("cannot delete applied patch %s") % patch)
            if patch not in self.series:
                raise util.Abort(_("patch %s not in series file") % patch)
            if patch not in realpatches:
                realpatches.append(patch)

        numrevs = 0
        if opts.get('rev'):
            if not self.applied:
                raise util.Abort(_('no patches applied'))
            revs = scmutil.revrange(repo, opts.get('rev'))
            # normalize to ascending order for _revpatches
            if len(revs) > 1 and revs[0] > revs[1]:
                revs.reverse()
            revpatches = self._revpatches(repo, revs)
            realpatches += revpatches
            numrevs = len(revpatches)

        self._cleanup(realpatches, numrevs, opts.get('keep'))
926
926
927 def checktoppatch(self, repo):
927 def checktoppatch(self, repo):
928 if self.applied:
928 if self.applied:
929 top = self.applied[-1].node
929 top = self.applied[-1].node
930 patch = self.applied[-1].name
930 patch = self.applied[-1].name
931 pp = repo.dirstate.parents()
931 pp = repo.dirstate.parents()
932 if top not in pp:
932 if top not in pp:
933 raise util.Abort(_("working directory revision is not qtip"))
933 raise util.Abort(_("working directory revision is not qtip"))
934 return top, patch
934 return top, patch
935 return None, None
935 return None, None
936
936
937 def checksubstate(self, repo, baserev=None):
937 def checksubstate(self, repo, baserev=None):
938 '''return list of subrepos at a different revision than substate.
938 '''return list of subrepos at a different revision than substate.
939 Abort if any subrepos have uncommitted changes.'''
939 Abort if any subrepos have uncommitted changes.'''
940 inclsubs = []
940 inclsubs = []
941 wctx = repo[None]
941 wctx = repo[None]
942 if baserev:
942 if baserev:
943 bctx = repo[baserev]
943 bctx = repo[baserev]
944 else:
944 else:
945 bctx = wctx.parents()[0]
945 bctx = wctx.parents()[0]
946 for s in wctx.substate:
946 for s in wctx.substate:
947 if wctx.sub(s).dirty(True):
947 if wctx.sub(s).dirty(True):
948 raise util.Abort(
948 raise util.Abort(
949 _("uncommitted changes in subrepository %s") % s)
949 _("uncommitted changes in subrepository %s") % s)
950 elif s not in bctx.substate or bctx.sub(s).dirty():
950 elif s not in bctx.substate or bctx.sub(s).dirty():
951 inclsubs.append(s)
951 inclsubs.append(s)
952 return inclsubs
952 return inclsubs
953
953
954 def putsubstate2changes(self, substatestate, changes):
954 def putsubstate2changes(self, substatestate, changes):
955 for files in changes[:3]:
955 for files in changes[:3]:
956 if '.hgsubstate' in files:
956 if '.hgsubstate' in files:
957 return # already listed up
957 return # already listed up
958 # not yet listed up
958 # not yet listed up
959 if substatestate in 'a?':
959 if substatestate in 'a?':
960 changes[1].append('.hgsubstate')
960 changes[1].append('.hgsubstate')
961 elif substatestate in 'r':
961 elif substatestate in 'r':
962 changes[2].append('.hgsubstate')
962 changes[2].append('.hgsubstate')
963 else: # modified
963 else: # modified
964 changes[0].append('.hgsubstate')
964 changes[0].append('.hgsubstate')
965
965
966 def localchangesfound(self, refresh=True):
966 def localchangesfound(self, refresh=True):
967 if refresh:
967 if refresh:
968 raise util.Abort(_("local changes found, refresh first"))
968 raise util.Abort(_("local changes found, refresh first"))
969 else:
969 else:
970 raise util.Abort(_("local changes found"))
970 raise util.Abort(_("local changes found"))
971
971
972 def checklocalchanges(self, repo, force=False, refresh=True):
972 def checklocalchanges(self, repo, force=False, refresh=True):
973 m, a, r, d = repo.status()[:4]
973 m, a, r, d = repo.status()[:4]
974 if (m or a or r or d) and not force:
974 if (m or a or r or d) and not force:
975 self.localchangesfound(refresh)
975 self.localchangesfound(refresh)
976 return m, a, r, d
976 return m, a, r, d
977
977
978 _reserved = ('series', 'status', 'guards', '.', '..')
978 _reserved = ('series', 'status', 'guards', '.', '..')
979 def checkreservedname(self, name):
979 def checkreservedname(self, name):
980 if name in self._reserved:
980 if name in self._reserved:
981 raise util.Abort(_('"%s" cannot be used as the name of a patch')
981 raise util.Abort(_('"%s" cannot be used as the name of a patch')
982 % name)
982 % name)
983 for prefix in ('.hg', '.mq'):
983 for prefix in ('.hg', '.mq'):
984 if name.startswith(prefix):
984 if name.startswith(prefix):
985 raise util.Abort(_('patch name cannot begin with "%s"')
985 raise util.Abort(_('patch name cannot begin with "%s"')
986 % prefix)
986 % prefix)
987 for c in ('#', ':'):
987 for c in ('#', ':'):
988 if c in name:
988 if c in name:
989 raise util.Abort(_('"%s" cannot be used in the name of a patch')
989 raise util.Abort(_('"%s" cannot be used in the name of a patch')
990 % c)
990 % c)
991
991
992 def checkpatchname(self, name, force=False):
992 def checkpatchname(self, name, force=False):
993 self.checkreservedname(name)
993 self.checkreservedname(name)
994 if not force and os.path.exists(self.join(name)):
994 if not force and os.path.exists(self.join(name)):
995 if os.path.isdir(self.join(name)):
995 if os.path.isdir(self.join(name)):
996 raise util.Abort(_('"%s" already exists as a directory')
996 raise util.Abort(_('"%s" already exists as a directory')
997 % name)
997 % name)
998 else:
998 else:
999 raise util.Abort(_('patch "%s" already exists') % name)
999 raise util.Abort(_('patch "%s" already exists') % name)
1000
1000
1001 def checkkeepchanges(self, keepchanges, force):
1001 def checkkeepchanges(self, keepchanges, force):
1002 if force and keepchanges:
1002 if force and keepchanges:
1003 raise util.Abort(_('cannot use both --force and --keep-changes'))
1003 raise util.Abort(_('cannot use both --force and --keep-changes'))
1004
1004
    def new(self, repo, patchfn, *pats, **opts):
        """Create a new patch (qnew): commit outstanding changes and
        write the corresponding patch file.

        options:
        msg: a string or a no-argument function returning a string
        user/date: author information for the new changeset
        git: use git-style diffs
        checkname: validate patchfn via checkpatchname (default True)
        include/exclude: file filters; otherwise all local changes go in
        """
        msg = opts.get('msg')
        user = opts.get('user')
        date = opts.get('date')
        if date:
            date = util.parsedate(date)
        diffopts = self.diffopts({'git': opts.get('git')})
        if opts.get('checkname', True):
            self.checkpatchname(patchfn)
        inclsubs = self.checksubstate(repo)
        if inclsubs:
            inclsubs.append('.hgsubstate')
            substatestate = repo.dirstate['.hgsubstate']
        if opts.get('include') or opts.get('exclude') or pats:
            if inclsubs:
                pats = list(pats or []) + inclsubs
            match = scmutil.match(repo[None], pats, opts)
            # detect missing files in pats
            def badfn(f, msg):
                if f != '.hgsubstate': # .hgsubstate is auto-created
                    raise util.Abort('%s: %s' % (f, msg))
            match.bad = badfn
            changes = repo.status(match=match)
            m, a, r, d = changes[:4]
        else:
            changes = self.checklocalchanges(repo, force=True)
            m, a, r, d = changes
        match = scmutil.matchfiles(repo, m + a + r + inclsubs)
        if len(repo[None].parents()) > 1:
            raise util.Abort(_('cannot manage merge changesets'))
        commitfiles = m + a + r
        self.checktoppatch(repo)
        insert = self.fullseriesend()
        wlock = repo.wlock()
        try:
            try:
                # if patch file write fails, abort early
                p = self.opener(patchfn, "w")
            except IOError, e:
                raise util.Abort(_('cannot write patch "%s": %s')
                                 % (patchfn, e.strerror))
            try:
                # write the patch header (plain or HG-changeset style)
                if self.plainmode:
                    if user:
                        p.write("From: " + user + "\n")
                        if not date:
                            p.write("\n")
                    if date:
                        p.write("Date: %d %d\n\n" % date)
                else:
                    p.write("# HG changeset patch\n")
                    p.write("# Parent "
                            + hex(repo[None].p1().node()) + "\n")
                    if user:
                        p.write("# User " + user + "\n")
                    if date:
                        p.write("# Date %s %s\n\n" % date)
                if util.safehasattr(msg, '__call__'):
                    msg = msg()
                commitmsg = msg and msg or ("[mq]: %s" % patchfn)
                # commit first; the patch text is generated by diffing
                # against the freshly created changeset below
                n = newcommit(repo, None, commitmsg, user, date, match=match,
                              force=True)
                if n is None:
                    raise util.Abort(_("repo commit failed"))
                try:
                    self.fullseries[insert:insert] = [patchfn]
                    self.applied.append(statusentry(n, patchfn))
                    self.parseseries()
                    self.seriesdirty = True
                    self.applieddirty = True
                    if msg:
                        msg = msg + "\n\n"
                        p.write(msg)
                    if commitfiles:
                        parent = self.qparents(repo, n)
                        if inclsubs:
                            self.putsubstate2changes(substatestate, changes)
                        chunks = patchmod.diff(repo, node1=parent, node2=n,
                                               changes=changes, opts=diffopts)
                        for chunk in chunks:
                            p.write(chunk)
                    p.close()
                    r = self.qrepo()
                    if r:
                        r[None].add([patchfn])
                except: # re-raises
                    # undo the commit made above before propagating
                    repo.rollback()
                    raise
            except Exception:
                # remove the partially written patch file on any failure
                patchpath = self.join(patchfn)
                try:
                    os.unlink(patchpath)
                except OSError:
                    self.ui.warn(_('error unlinking %s\n') % patchpath)
                raise
            self.removeundo(repo)
        finally:
            release(wlock)
1106
1106
    def strip(self, repo, revs, update=True, backup="all", force=None):
        """Remove revs from the repository via repair.strip.

        update: when true, refuse on local changes (unless force) and
        move the working directory to the parent of revs[0] first.
        backup: backup mode forwarded to repair.strip.
        """
        wlock = lock = None
        try:
            wlock = repo.wlock()
            lock = repo.lock()

            if update:
                self.checklocalchanges(repo, force=force, refresh=False)
                urev = self.qparents(repo, revs[0])
                hg.clean(repo, urev)
                # persist the new working dir state before stripping
                repo.dirstate.write()

            repair.strip(self.ui, repo, revs, backup)
        finally:
            release(lock, wlock)
1122
1122
1123 def isapplied(self, patch):
1123 def isapplied(self, patch):
1124 """returns (index, rev, patch)"""
1124 """returns (index, rev, patch)"""
1125 for i, a in enumerate(self.applied):
1125 for i, a in enumerate(self.applied):
1126 if a.name == patch:
1126 if a.name == patch:
1127 return (i, a.node, a.name)
1127 return (i, a.node, a.name)
1128 return None
1128 return None
1129
1129
    # if the exact patch name does not exist, we try a few
    # variations. If strict is passed, we try only #1
    #
    # 1) a number (as string) to indicate an offset in the series file
    # 2) a unique substring of the patch name was given
    # 3) patchname[-+]num to indicate an offset in the series file
    def lookup(self, patch, strict=False):
        """Resolve a user-supplied patch identifier to a series name.

        Aborts when nothing matches; the comment above lists the
        fallback spellings that are tried after an exact-name miss.
        """
        def partialname(s):
            # exact series name, then unique substring match, then the
            # qtip/qbase aliases; None when ambiguous or unknown
            if s in self.series:
                return s
            matches = [x for x in self.series if s in x]
            if len(matches) > 1:
                self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
                for m in matches:
                    self.ui.warn(' %s\n' % m)
                return None
            if matches:
                return matches[0]
            if self.series and self.applied:
                if s == 'qtip':
                    return self.series[self.seriesend(True) - 1]
                if s == 'qbase':
                    return self.series[0]
            return None

        if patch in self.series:
            return patch

        if not os.path.isfile(self.join(patch)):
            # not a patch file on disk: maybe a numeric series offset
            try:
                sno = int(patch)
            except (ValueError, OverflowError):
                pass
            else:
                if -len(self.series) <= sno < len(self.series):
                    return self.series[sno]

        if not strict:
            res = partialname(patch)
            if res:
                return res
            # patchname-num: num entries before patchname in the series
            minus = patch.rfind('-')
            if minus >= 0:
                res = partialname(patch[:minus])
                if res:
                    i = self.series.index(res)
                    try:
                        off = int(patch[minus + 1:] or 1)
                    except (ValueError, OverflowError):
                        pass
                    else:
                        if i - off >= 0:
                            return self.series[i - off]
            # patchname+num: num entries after patchname in the series
            plus = patch.rfind('+')
            if plus >= 0:
                res = partialname(patch[:plus])
                if res:
                    i = self.series.index(res)
                    try:
                        off = int(patch[plus + 1:] or 1)
                    except (ValueError, OverflowError):
                        pass
                    else:
                        if i + off < len(self.series):
                            return self.series[i + off]
        raise util.Abort(_("patch %s not in series") % patch)
1196
1196
1197 def push(self, repo, patch=None, force=False, list=False, mergeq=None,
1197 def push(self, repo, patch=None, force=False, list=False, mergeq=None,
1198 all=False, move=False, exact=False, nobackup=False,
1198 all=False, move=False, exact=False, nobackup=False,
1199 keepchanges=False):
1199 keepchanges=False):
1200 self.checkkeepchanges(keepchanges, force)
1200 self.checkkeepchanges(keepchanges, force)
1201 diffopts = self.diffopts()
1201 diffopts = self.diffopts()
1202 wlock = repo.wlock()
1202 wlock = repo.wlock()
1203 try:
1203 try:
1204 heads = []
1204 heads = []
1205 for b, ls in repo.branchmap().iteritems():
1205 for b, ls in repo.branchmap().iteritems():
1206 heads += ls
1206 heads += ls
1207 if not heads:
1207 if not heads:
1208 heads = [nullid]
1208 heads = [nullid]
1209 if repo.dirstate.p1() not in heads and not exact:
1209 if repo.dirstate.p1() not in heads and not exact:
1210 self.ui.status(_("(working directory not at a head)\n"))
1210 self.ui.status(_("(working directory not at a head)\n"))
1211
1211
1212 if not self.series:
1212 if not self.series:
1213 self.ui.warn(_('no patches in series\n'))
1213 self.ui.warn(_('no patches in series\n'))
1214 return 0
1214 return 0
1215
1215
1216 # Suppose our series file is: A B C and the current 'top'
1216 # Suppose our series file is: A B C and the current 'top'
1217 # patch is B. qpush C should be performed (moving forward)
1217 # patch is B. qpush C should be performed (moving forward)
1218 # qpush B is a NOP (no change) qpush A is an error (can't
1218 # qpush B is a NOP (no change) qpush A is an error (can't
1219 # go backwards with qpush)
1219 # go backwards with qpush)
1220 if patch:
1220 if patch:
1221 patch = self.lookup(patch)
1221 patch = self.lookup(patch)
1222 info = self.isapplied(patch)
1222 info = self.isapplied(patch)
1223 if info and info[0] >= len(self.applied) - 1:
1223 if info and info[0] >= len(self.applied) - 1:
1224 self.ui.warn(
1224 self.ui.warn(
1225 _('qpush: %s is already at the top\n') % patch)
1225 _('qpush: %s is already at the top\n') % patch)
1226 return 0
1226 return 0
1227
1227
1228 pushable, reason = self.pushable(patch)
1228 pushable, reason = self.pushable(patch)
1229 if pushable:
1229 if pushable:
1230 if self.series.index(patch) < self.seriesend():
1230 if self.series.index(patch) < self.seriesend():
1231 raise util.Abort(
1231 raise util.Abort(
1232 _("cannot push to a previous patch: %s") % patch)
1232 _("cannot push to a previous patch: %s") % patch)
1233 else:
1233 else:
1234 if reason:
1234 if reason:
1235 reason = _('guarded by %s') % reason
1235 reason = _('guarded by %s') % reason
1236 else:
1236 else:
1237 reason = _('no matching guards')
1237 reason = _('no matching guards')
1238 self.ui.warn(_("cannot push '%s' - %s\n") % (patch, reason))
1238 self.ui.warn(_("cannot push '%s' - %s\n") % (patch, reason))
1239 return 1
1239 return 1
1240 elif all:
1240 elif all:
1241 patch = self.series[-1]
1241 patch = self.series[-1]
1242 if self.isapplied(patch):
1242 if self.isapplied(patch):
1243 self.ui.warn(_('all patches are currently applied\n'))
1243 self.ui.warn(_('all patches are currently applied\n'))
1244 return 0
1244 return 0
1245
1245
1246 # Following the above example, starting at 'top' of B:
1246 # Following the above example, starting at 'top' of B:
1247 # qpush should be performed (pushes C), but a subsequent
1247 # qpush should be performed (pushes C), but a subsequent
1248 # qpush without an argument is an error (nothing to
1248 # qpush without an argument is an error (nothing to
1249 # apply). This allows a loop of "...while hg qpush..." to
1249 # apply). This allows a loop of "...while hg qpush..." to
1250 # work as it detects an error when done
1250 # work as it detects an error when done
1251 start = self.seriesend()
1251 start = self.seriesend()
1252 if start == len(self.series):
1252 if start == len(self.series):
1253 self.ui.warn(_('patch series already fully applied\n'))
1253 self.ui.warn(_('patch series already fully applied\n'))
1254 return 1
1254 return 1
1255 if not force and not keepchanges:
1255 if not force and not keepchanges:
1256 self.checklocalchanges(repo, refresh=self.applied)
1256 self.checklocalchanges(repo, refresh=self.applied)
1257
1257
1258 if exact:
1258 if exact:
1259 if keepchanges:
1259 if keepchanges:
1260 raise util.Abort(
1260 raise util.Abort(
1261 _("cannot use --exact and --keep-changes together"))
1261 _("cannot use --exact and --keep-changes together"))
1262 if move:
1262 if move:
1263 raise util.Abort(_('cannot use --exact and --move '
1263 raise util.Abort(_('cannot use --exact and --move '
1264 'together'))
1264 'together'))
1265 if self.applied:
1265 if self.applied:
1266 raise util.Abort(_('cannot push --exact with applied '
1266 raise util.Abort(_('cannot push --exact with applied '
1267 'patches'))
1267 'patches'))
1268 root = self.series[start]
1268 root = self.series[start]
1269 target = patchheader(self.join(root), self.plainmode).parent
1269 target = patchheader(self.join(root), self.plainmode).parent
1270 if not target:
1270 if not target:
1271 raise util.Abort(
1271 raise util.Abort(
1272 _("%s does not have a parent recorded") % root)
1272 _("%s does not have a parent recorded") % root)
1273 if not repo[target] == repo['.']:
1273 if not repo[target] == repo['.']:
1274 hg.update(repo, target)
1274 hg.update(repo, target)
1275
1275
1276 if move:
1276 if move:
1277 if not patch:
1277 if not patch:
1278 raise util.Abort(_("please specify the patch to move"))
1278 raise util.Abort(_("please specify the patch to move"))
1279 for fullstart, rpn in enumerate(self.fullseries):
1279 for fullstart, rpn in enumerate(self.fullseries):
1280 # strip markers for patch guards
1280 # strip markers for patch guards
1281 if self.guard_re.split(rpn, 1)[0] == self.series[start]:
1281 if self.guard_re.split(rpn, 1)[0] == self.series[start]:
1282 break
1282 break
1283 for i, rpn in enumerate(self.fullseries[fullstart:]):
1283 for i, rpn in enumerate(self.fullseries[fullstart:]):
1284 # strip markers for patch guards
1284 # strip markers for patch guards
1285 if self.guard_re.split(rpn, 1)[0] == patch:
1285 if self.guard_re.split(rpn, 1)[0] == patch:
1286 break
1286 break
1287 index = fullstart + i
1287 index = fullstart + i
1288 assert index < len(self.fullseries)
1288 assert index < len(self.fullseries)
1289 fullpatch = self.fullseries[index]
1289 fullpatch = self.fullseries[index]
1290 del self.fullseries[index]
1290 del self.fullseries[index]
1291 self.fullseries.insert(fullstart, fullpatch)
1291 self.fullseries.insert(fullstart, fullpatch)
1292 self.parseseries()
1292 self.parseseries()
1293 self.seriesdirty = True
1293 self.seriesdirty = True
1294
1294
1295 self.applieddirty = True
1295 self.applieddirty = True
1296 if start > 0:
1296 if start > 0:
1297 self.checktoppatch(repo)
1297 self.checktoppatch(repo)
1298 if not patch:
1298 if not patch:
1299 patch = self.series[start]
1299 patch = self.series[start]
1300 end = start + 1
1300 end = start + 1
1301 else:
1301 else:
1302 end = self.series.index(patch, start) + 1
1302 end = self.series.index(patch, start) + 1
1303
1303
1304 tobackup = set()
1304 tobackup = set()
1305 if (not nobackup and force) or keepchanges:
1305 if (not nobackup and force) or keepchanges:
1306 m, a, r, d = self.checklocalchanges(repo, force=True)
1306 m, a, r, d = self.checklocalchanges(repo, force=True)
1307 if keepchanges:
1307 if keepchanges:
1308 tobackup.update(m + a + r + d)
1308 tobackup.update(m + a + r + d)
1309 else:
1309 else:
1310 tobackup.update(m + a)
1310 tobackup.update(m + a)
1311
1311
1312 s = self.series[start:end]
1312 s = self.series[start:end]
1313 all_files = set()
1313 all_files = set()
1314 try:
1314 try:
1315 if mergeq:
1315 if mergeq:
1316 ret = self.mergepatch(repo, mergeq, s, diffopts)
1316 ret = self.mergepatch(repo, mergeq, s, diffopts)
1317 else:
1317 else:
1318 ret = self.apply(repo, s, list, all_files=all_files,
1318 ret = self.apply(repo, s, list, all_files=all_files,
1319 tobackup=tobackup, keepchanges=keepchanges)
1319 tobackup=tobackup, keepchanges=keepchanges)
1320 except: # re-raises
1320 except: # re-raises
1321 self.ui.warn(_('cleaning up working directory...'))
1321 self.ui.warn(_('cleaning up working directory...'))
1322 node = repo.dirstate.p1()
1322 node = repo.dirstate.p1()
1323 hg.revert(repo, node, None)
1323 hg.revert(repo, node, None)
1324 # only remove unknown files that we know we touched or
1324 # only remove unknown files that we know we touched or
1325 # created while patching
1325 # created while patching
1326 for f in all_files:
1326 for f in all_files:
1327 if f not in repo.dirstate:
1327 if f not in repo.dirstate:
1328 try:
1328 try:
1329 util.unlinkpath(repo.wjoin(f))
1329 util.unlinkpath(repo.wjoin(f))
1330 except OSError, inst:
1330 except OSError, inst:
1331 if inst.errno != errno.ENOENT:
1331 if inst.errno != errno.ENOENT:
1332 raise
1332 raise
1333 self.ui.warn(_('done\n'))
1333 self.ui.warn(_('done\n'))
1334 raise
1334 raise
1335
1335
1336 if not self.applied:
1336 if not self.applied:
1337 return ret[0]
1337 return ret[0]
1338 top = self.applied[-1].name
1338 top = self.applied[-1].name
1339 if ret[0] and ret[0] > 1:
1339 if ret[0] and ret[0] > 1:
1340 msg = _("errors during apply, please fix and refresh %s\n")
1340 msg = _("errors during apply, please fix and refresh %s\n")
1341 self.ui.write(msg % top)
1341 self.ui.write(msg % top)
1342 else:
1342 else:
1343 self.ui.write(_("now at: %s\n") % top)
1343 self.ui.write(_("now at: %s\n") % top)
1344 return ret[0]
1344 return ret[0]
1345
1345
1346 finally:
1346 finally:
1347 wlock.release()
1347 wlock.release()
1348
1348
1349 def pop(self, repo, patch=None, force=False, update=True, all=False,
1349 def pop(self, repo, patch=None, force=False, update=True, all=False,
1350 nobackup=False, keepchanges=False):
1350 nobackup=False, keepchanges=False):
1351 self.checkkeepchanges(keepchanges, force)
1351 self.checkkeepchanges(keepchanges, force)
1352 wlock = repo.wlock()
1352 wlock = repo.wlock()
1353 try:
1353 try:
1354 if patch:
1354 if patch:
1355 # index, rev, patch
1355 # index, rev, patch
1356 info = self.isapplied(patch)
1356 info = self.isapplied(patch)
1357 if not info:
1357 if not info:
1358 patch = self.lookup(patch)
1358 patch = self.lookup(patch)
1359 info = self.isapplied(patch)
1359 info = self.isapplied(patch)
1360 if not info:
1360 if not info:
1361 raise util.Abort(_("patch %s is not applied") % patch)
1361 raise util.Abort(_("patch %s is not applied") % patch)
1362
1362
1363 if not self.applied:
1363 if not self.applied:
1364 # Allow qpop -a to work repeatedly,
1364 # Allow qpop -a to work repeatedly,
1365 # but not qpop without an argument
1365 # but not qpop without an argument
1366 self.ui.warn(_("no patches applied\n"))
1366 self.ui.warn(_("no patches applied\n"))
1367 return not all
1367 return not all
1368
1368
1369 if all:
1369 if all:
1370 start = 0
1370 start = 0
1371 elif patch:
1371 elif patch:
1372 start = info[0] + 1
1372 start = info[0] + 1
1373 else:
1373 else:
1374 start = len(self.applied) - 1
1374 start = len(self.applied) - 1
1375
1375
1376 if start >= len(self.applied):
1376 if start >= len(self.applied):
1377 self.ui.warn(_("qpop: %s is already at the top\n") % patch)
1377 self.ui.warn(_("qpop: %s is already at the top\n") % patch)
1378 return
1378 return
1379
1379
1380 if not update:
1380 if not update:
1381 parents = repo.dirstate.parents()
1381 parents = repo.dirstate.parents()
1382 rr = [x.node for x in self.applied]
1382 rr = [x.node for x in self.applied]
1383 for p in parents:
1383 for p in parents:
1384 if p in rr:
1384 if p in rr:
1385 self.ui.warn(_("qpop: forcing dirstate update\n"))
1385 self.ui.warn(_("qpop: forcing dirstate update\n"))
1386 update = True
1386 update = True
1387 else:
1387 else:
1388 parents = [p.node() for p in repo[None].parents()]
1388 parents = [p.node() for p in repo[None].parents()]
1389 needupdate = False
1389 needupdate = False
1390 for entry in self.applied[start:]:
1390 for entry in self.applied[start:]:
1391 if entry.node in parents:
1391 if entry.node in parents:
1392 needupdate = True
1392 needupdate = True
1393 break
1393 break
1394 update = needupdate
1394 update = needupdate
1395
1395
1396 tobackup = set()
1396 tobackup = set()
1397 if update:
1397 if update:
1398 m, a, r, d = self.checklocalchanges(
1398 m, a, r, d = self.checklocalchanges(
1399 repo, force=force or keepchanges)
1399 repo, force=force or keepchanges)
1400 if force:
1400 if force:
1401 if not nobackup:
1401 if not nobackup:
1402 tobackup.update(m + a)
1402 tobackup.update(m + a)
1403 elif keepchanges:
1403 elif keepchanges:
1404 tobackup.update(m + a + r + d)
1404 tobackup.update(m + a + r + d)
1405
1405
1406 self.applieddirty = True
1406 self.applieddirty = True
1407 end = len(self.applied)
1407 end = len(self.applied)
1408 rev = self.applied[start].node
1408 rev = self.applied[start].node
1409 if update:
1409 if update:
1410 top = self.checktoppatch(repo)[0]
1410 top = self.checktoppatch(repo)[0]
1411
1411
1412 try:
1412 try:
1413 heads = repo.changelog.heads(rev)
1413 heads = repo.changelog.heads(rev)
1414 except error.LookupError:
1414 except error.LookupError:
1415 node = short(rev)
1415 node = short(rev)
1416 raise util.Abort(_('trying to pop unknown node %s') % node)
1416 raise util.Abort(_('trying to pop unknown node %s') % node)
1417
1417
1418 if heads != [self.applied[-1].node]:
1418 if heads != [self.applied[-1].node]:
1419 raise util.Abort(_("popping would remove a revision not "
1419 raise util.Abort(_("popping would remove a revision not "
1420 "managed by this patch queue"))
1420 "managed by this patch queue"))
1421 if not repo[self.applied[-1].node].mutable():
1421 if not repo[self.applied[-1].node].mutable():
1422 raise util.Abort(
1422 raise util.Abort(
1423 _("popping would remove an immutable revision"),
1423 _("popping would remove an immutable revision"),
1424 hint=_('see "hg help phases" for details'))
1424 hint=_('see "hg help phases" for details'))
1425
1425
1426 # we know there are no local changes, so we can make a simplified
1426 # we know there are no local changes, so we can make a simplified
1427 # form of hg.update.
1427 # form of hg.update.
1428 if update:
1428 if update:
1429 qp = self.qparents(repo, rev)
1429 qp = self.qparents(repo, rev)
1430 ctx = repo[qp]
1430 ctx = repo[qp]
1431 m, a, r, d = repo.status(qp, top)[:4]
1431 m, a, r, d = repo.status(qp, top)[:4]
1432 if d:
1432 if d:
1433 raise util.Abort(_("deletions found between repo revs"))
1433 raise util.Abort(_("deletions found between repo revs"))
1434
1434
1435 tobackup = set(a + m + r) & tobackup
1435 tobackup = set(a + m + r) & tobackup
1436 if keepchanges and tobackup:
1436 if keepchanges and tobackup:
1437 self.localchangesfound()
1437 self.localchangesfound()
1438 self.backup(repo, tobackup)
1438 self.backup(repo, tobackup)
1439
1439
1440 for f in a:
1440 for f in a:
1441 try:
1441 try:
1442 util.unlinkpath(repo.wjoin(f))
1442 util.unlinkpath(repo.wjoin(f))
1443 except OSError, e:
1443 except OSError, e:
1444 if e.errno != errno.ENOENT:
1444 if e.errno != errno.ENOENT:
1445 raise
1445 raise
1446 repo.dirstate.drop(f)
1446 repo.dirstate.drop(f)
1447 for f in m + r:
1447 for f in m + r:
1448 fctx = ctx[f]
1448 fctx = ctx[f]
1449 repo.wwrite(f, fctx.data(), fctx.flags())
1449 repo.wwrite(f, fctx.data(), fctx.flags())
1450 repo.dirstate.normal(f)
1450 repo.dirstate.normal(f)
1451 repo.setparents(qp, nullid)
1451 repo.setparents(qp, nullid)
1452 for patch in reversed(self.applied[start:end]):
1452 for patch in reversed(self.applied[start:end]):
1453 self.ui.status(_("popping %s\n") % patch.name)
1453 self.ui.status(_("popping %s\n") % patch.name)
1454 del self.applied[start:end]
1454 del self.applied[start:end]
1455 self.strip(repo, [rev], update=False, backup='strip')
1455 self.strip(repo, [rev], update=False, backup='strip')
1456 if self.applied:
1456 if self.applied:
1457 self.ui.write(_("now at: %s\n") % self.applied[-1].name)
1457 self.ui.write(_("now at: %s\n") % self.applied[-1].name)
1458 else:
1458 else:
1459 self.ui.write(_("patch queue now empty\n"))
1459 self.ui.write(_("patch queue now empty\n"))
1460 finally:
1460 finally:
1461 wlock.release()
1461 wlock.release()
1462
1462
1463 def diff(self, repo, pats, opts):
1463 def diff(self, repo, pats, opts):
1464 top, patch = self.checktoppatch(repo)
1464 top, patch = self.checktoppatch(repo)
1465 if not top:
1465 if not top:
1466 self.ui.write(_("no patches applied\n"))
1466 self.ui.write(_("no patches applied\n"))
1467 return
1467 return
1468 qp = self.qparents(repo, top)
1468 qp = self.qparents(repo, top)
1469 if opts.get('reverse'):
1469 if opts.get('reverse'):
1470 node1, node2 = None, qp
1470 node1, node2 = None, qp
1471 else:
1471 else:
1472 node1, node2 = qp, None
1472 node1, node2 = qp, None
1473 diffopts = self.diffopts(opts, patch)
1473 diffopts = self.diffopts(opts, patch)
1474 self.printdiff(repo, diffopts, node1, node2, files=pats, opts=opts)
1474 self.printdiff(repo, diffopts, node1, node2, files=pats, opts=opts)
1475
1475
1476 def refresh(self, repo, pats=None, **opts):
1476 def refresh(self, repo, pats=None, **opts):
1477 if not self.applied:
1477 if not self.applied:
1478 self.ui.write(_("no patches applied\n"))
1478 self.ui.write(_("no patches applied\n"))
1479 return 1
1479 return 1
1480 msg = opts.get('msg', '').rstrip()
1480 msg = opts.get('msg', '').rstrip()
1481 newuser = opts.get('user')
1481 newuser = opts.get('user')
1482 newdate = opts.get('date')
1482 newdate = opts.get('date')
1483 if newdate:
1483 if newdate:
1484 newdate = '%d %d' % util.parsedate(newdate)
1484 newdate = '%d %d' % util.parsedate(newdate)
1485 wlock = repo.wlock()
1485 wlock = repo.wlock()
1486
1486
1487 try:
1487 try:
1488 self.checktoppatch(repo)
1488 self.checktoppatch(repo)
1489 (top, patchfn) = (self.applied[-1].node, self.applied[-1].name)
1489 (top, patchfn) = (self.applied[-1].node, self.applied[-1].name)
1490 if repo.changelog.heads(top) != [top]:
1490 if repo.changelog.heads(top) != [top]:
1491 raise util.Abort(_("cannot refresh a revision with children"))
1491 raise util.Abort(_("cannot refresh a revision with children"))
1492 if not repo[top].mutable():
1492 if not repo[top].mutable():
1493 raise util.Abort(_("cannot refresh immutable revision"),
1493 raise util.Abort(_("cannot refresh immutable revision"),
1494 hint=_('see "hg help phases" for details'))
1494 hint=_('see "hg help phases" for details'))
1495
1495
1496 cparents = repo.changelog.parents(top)
1496 cparents = repo.changelog.parents(top)
1497 patchparent = self.qparents(repo, top)
1497 patchparent = self.qparents(repo, top)
1498
1498
1499 inclsubs = self.checksubstate(repo, hex(patchparent))
1499 inclsubs = self.checksubstate(repo, hex(patchparent))
1500 if inclsubs:
1500 if inclsubs:
1501 inclsubs.append('.hgsubstate')
1501 inclsubs.append('.hgsubstate')
1502 substatestate = repo.dirstate['.hgsubstate']
1502 substatestate = repo.dirstate['.hgsubstate']
1503
1503
1504 ph = patchheader(self.join(patchfn), self.plainmode)
1504 ph = patchheader(self.join(patchfn), self.plainmode)
1505 diffopts = self.diffopts({'git': opts.get('git')}, patchfn)
1505 diffopts = self.diffopts({'git': opts.get('git')}, patchfn)
1506 if msg:
1506 if msg:
1507 ph.setmessage(msg)
1507 ph.setmessage(msg)
1508 if newuser:
1508 if newuser:
1509 ph.setuser(newuser)
1509 ph.setuser(newuser)
1510 if newdate:
1510 if newdate:
1511 ph.setdate(newdate)
1511 ph.setdate(newdate)
1512 ph.setparent(hex(patchparent))
1512 ph.setparent(hex(patchparent))
1513
1513
1514 # only commit new patch when write is complete
1514 # only commit new patch when write is complete
1515 patchf = self.opener(patchfn, 'w', atomictemp=True)
1515 patchf = self.opener(patchfn, 'w', atomictemp=True)
1516
1516
1517 comments = str(ph)
1517 comments = str(ph)
1518 if comments:
1518 if comments:
1519 patchf.write(comments)
1519 patchf.write(comments)
1520
1520
1521 # update the dirstate in place, strip off the qtip commit
1521 # update the dirstate in place, strip off the qtip commit
1522 # and then commit.
1522 # and then commit.
1523 #
1523 #
1524 # this should really read:
1524 # this should really read:
1525 # mm, dd, aa = repo.status(top, patchparent)[:3]
1525 # mm, dd, aa = repo.status(top, patchparent)[:3]
1526 # but we do it backwards to take advantage of manifest/changelog
1526 # but we do it backwards to take advantage of manifest/changelog
1527 # caching against the next repo.status call
1527 # caching against the next repo.status call
1528 mm, aa, dd = repo.status(patchparent, top)[:3]
1528 mm, aa, dd = repo.status(patchparent, top)[:3]
1529 changes = repo.changelog.read(top)
1529 changes = repo.changelog.read(top)
1530 man = repo.manifest.read(changes[0])
1530 man = repo.manifest.read(changes[0])
1531 aaa = aa[:]
1531 aaa = aa[:]
1532 matchfn = scmutil.match(repo[None], pats, opts)
1532 matchfn = scmutil.match(repo[None], pats, opts)
1533 # in short mode, we only diff the files included in the
1533 # in short mode, we only diff the files included in the
1534 # patch already plus specified files
1534 # patch already plus specified files
1535 if opts.get('short'):
1535 if opts.get('short'):
1536 # if amending a patch, we start with existing
1536 # if amending a patch, we start with existing
1537 # files plus specified files - unfiltered
1537 # files plus specified files - unfiltered
1538 match = scmutil.matchfiles(repo, mm + aa + dd + matchfn.files())
1538 match = scmutil.matchfiles(repo, mm + aa + dd + matchfn.files())
1539 # filter with include/exclude options
1539 # filter with include/exclude options
1540 matchfn = scmutil.match(repo[None], opts=opts)
1540 matchfn = scmutil.match(repo[None], opts=opts)
1541 else:
1541 else:
1542 match = scmutil.matchall(repo)
1542 match = scmutil.matchall(repo)
1543 m, a, r, d = repo.status(match=match)[:4]
1543 m, a, r, d = repo.status(match=match)[:4]
1544 mm = set(mm)
1544 mm = set(mm)
1545 aa = set(aa)
1545 aa = set(aa)
1546 dd = set(dd)
1546 dd = set(dd)
1547
1547
1548 # we might end up with files that were added between
1548 # we might end up with files that were added between
1549 # qtip and the dirstate parent, but then changed in the
1549 # qtip and the dirstate parent, but then changed in the
1550 # local dirstate. in this case, we want them to only
1550 # local dirstate. in this case, we want them to only
1551 # show up in the added section
1551 # show up in the added section
1552 for x in m:
1552 for x in m:
1553 if x not in aa:
1553 if x not in aa:
1554 mm.add(x)
1554 mm.add(x)
1555 # we might end up with files added by the local dirstate that
1555 # we might end up with files added by the local dirstate that
1556 # were deleted by the patch. In this case, they should only
1556 # were deleted by the patch. In this case, they should only
1557 # show up in the changed section.
1557 # show up in the changed section.
1558 for x in a:
1558 for x in a:
1559 if x in dd:
1559 if x in dd:
1560 dd.remove(x)
1560 dd.remove(x)
1561 mm.add(x)
1561 mm.add(x)
1562 else:
1562 else:
1563 aa.add(x)
1563 aa.add(x)
1564 # make sure any files deleted in the local dirstate
1564 # make sure any files deleted in the local dirstate
1565 # are not in the add or change column of the patch
1565 # are not in the add or change column of the patch
1566 forget = []
1566 forget = []
1567 for x in d + r:
1567 for x in d + r:
1568 if x in aa:
1568 if x in aa:
1569 aa.remove(x)
1569 aa.remove(x)
1570 forget.append(x)
1570 forget.append(x)
1571 continue
1571 continue
1572 else:
1572 else:
1573 mm.discard(x)
1573 mm.discard(x)
1574 dd.add(x)
1574 dd.add(x)
1575
1575
1576 m = list(mm)
1576 m = list(mm)
1577 r = list(dd)
1577 r = list(dd)
1578 a = list(aa)
1578 a = list(aa)
1579
1579
1580 # create 'match' that includes the files to be recommited.
1580 # create 'match' that includes the files to be recommited.
1581 # apply matchfn via repo.status to ensure correct case handling.
1581 # apply matchfn via repo.status to ensure correct case handling.
1582 cm, ca, cr, cd = repo.status(patchparent, match=matchfn)[:4]
1582 cm, ca, cr, cd = repo.status(patchparent, match=matchfn)[:4]
1583 allmatches = set(cm + ca + cr + cd)
1583 allmatches = set(cm + ca + cr + cd)
1584 refreshchanges = [x.intersection(allmatches) for x in (mm, aa, dd)]
1584 refreshchanges = [x.intersection(allmatches) for x in (mm, aa, dd)]
1585
1585
1586 files = set(inclsubs)
1586 files = set(inclsubs)
1587 for x in refreshchanges:
1587 for x in refreshchanges:
1588 files.update(x)
1588 files.update(x)
1589 match = scmutil.matchfiles(repo, files)
1589 match = scmutil.matchfiles(repo, files)
1590
1590
1591 bmlist = repo[top].bookmarks()
1591 bmlist = repo[top].bookmarks()
1592
1592
1593 try:
1593 try:
1594 if diffopts.git or diffopts.upgrade:
1594 if diffopts.git or diffopts.upgrade:
1595 copies = {}
1595 copies = {}
1596 for dst in a:
1596 for dst in a:
1597 src = repo.dirstate.copied(dst)
1597 src = repo.dirstate.copied(dst)
1598 # during qfold, the source file for copies may
1598 # during qfold, the source file for copies may
1599 # be removed. Treat this as a simple add.
1599 # be removed. Treat this as a simple add.
1600 if src is not None and src in repo.dirstate:
1600 if src is not None and src in repo.dirstate:
1601 copies.setdefault(src, []).append(dst)
1601 copies.setdefault(src, []).append(dst)
1602 repo.dirstate.add(dst)
1602 repo.dirstate.add(dst)
1603 # remember the copies between patchparent and qtip
1603 # remember the copies between patchparent and qtip
1604 for dst in aaa:
1604 for dst in aaa:
1605 f = repo.file(dst)
1605 f = repo.file(dst)
1606 src = f.renamed(man[dst])
1606 src = f.renamed(man[dst])
1607 if src:
1607 if src:
1608 copies.setdefault(src[0], []).extend(
1608 copies.setdefault(src[0], []).extend(
1609 copies.get(dst, []))
1609 copies.get(dst, []))
1610 if dst in a:
1610 if dst in a:
1611 copies[src[0]].append(dst)
1611 copies[src[0]].append(dst)
1612 # we can't copy a file created by the patch itself
1612 # we can't copy a file created by the patch itself
1613 if dst in copies:
1613 if dst in copies:
1614 del copies[dst]
1614 del copies[dst]
1615 for src, dsts in copies.iteritems():
1615 for src, dsts in copies.iteritems():
1616 for dst in dsts:
1616 for dst in dsts:
1617 repo.dirstate.copy(src, dst)
1617 repo.dirstate.copy(src, dst)
1618 else:
1618 else:
1619 for dst in a:
1619 for dst in a:
1620 repo.dirstate.add(dst)
1620 repo.dirstate.add(dst)
1621 # Drop useless copy information
1621 # Drop useless copy information
1622 for f in list(repo.dirstate.copies()):
1622 for f in list(repo.dirstate.copies()):
1623 repo.dirstate.copy(None, f)
1623 repo.dirstate.copy(None, f)
1624 for f in r:
1624 for f in r:
1625 repo.dirstate.remove(f)
1625 repo.dirstate.remove(f)
1626 # if the patch excludes a modified file, mark that
1626 # if the patch excludes a modified file, mark that
1627 # file with mtime=0 so status can see it.
1627 # file with mtime=0 so status can see it.
1628 mm = []
1628 mm = []
1629 for i in xrange(len(m)-1, -1, -1):
1629 for i in xrange(len(m) - 1, -1, -1):
1630 if not matchfn(m[i]):
1630 if not matchfn(m[i]):
1631 mm.append(m[i])
1631 mm.append(m[i])
1632 del m[i]
1632 del m[i]
1633 for f in m:
1633 for f in m:
1634 repo.dirstate.normal(f)
1634 repo.dirstate.normal(f)
1635 for f in mm:
1635 for f in mm:
1636 repo.dirstate.normallookup(f)
1636 repo.dirstate.normallookup(f)
1637 for f in forget:
1637 for f in forget:
1638 repo.dirstate.drop(f)
1638 repo.dirstate.drop(f)
1639
1639
1640 if not msg:
1640 if not msg:
1641 if not ph.message:
1641 if not ph.message:
1642 message = "[mq]: %s\n" % patchfn
1642 message = "[mq]: %s\n" % patchfn
1643 else:
1643 else:
1644 message = "\n".join(ph.message)
1644 message = "\n".join(ph.message)
1645 else:
1645 else:
1646 message = msg
1646 message = msg
1647
1647
1648 user = ph.user or changes[1]
1648 user = ph.user or changes[1]
1649
1649
1650 oldphase = repo[top].phase()
1650 oldphase = repo[top].phase()
1651
1651
1652 # assumes strip can roll itself back if interrupted
1652 # assumes strip can roll itself back if interrupted
1653 repo.setparents(*cparents)
1653 repo.setparents(*cparents)
1654 self.applied.pop()
1654 self.applied.pop()
1655 self.applieddirty = True
1655 self.applieddirty = True
1656 self.strip(repo, [top], update=False,
1656 self.strip(repo, [top], update=False,
1657 backup='strip')
1657 backup='strip')
1658 except: # re-raises
1658 except: # re-raises
1659 repo.dirstate.invalidate()
1659 repo.dirstate.invalidate()
1660 raise
1660 raise
1661
1661
1662 try:
1662 try:
1663 # might be nice to attempt to roll back strip after this
1663 # might be nice to attempt to roll back strip after this
1664
1664
1665 # Ensure we create a new changeset in the same phase than
1665 # Ensure we create a new changeset in the same phase than
1666 # the old one.
1666 # the old one.
1667 n = newcommit(repo, oldphase, message, user, ph.date,
1667 n = newcommit(repo, oldphase, message, user, ph.date,
1668 match=match, force=True)
1668 match=match, force=True)
1669 # only write patch after a successful commit
1669 # only write patch after a successful commit
1670 c = [list(x) for x in refreshchanges]
1670 c = [list(x) for x in refreshchanges]
1671 if inclsubs:
1671 if inclsubs:
1672 self.putsubstate2changes(substatestate, c)
1672 self.putsubstate2changes(substatestate, c)
1673 chunks = patchmod.diff(repo, patchparent,
1673 chunks = patchmod.diff(repo, patchparent,
1674 changes=c, opts=diffopts)
1674 changes=c, opts=diffopts)
1675 for chunk in chunks:
1675 for chunk in chunks:
1676 patchf.write(chunk)
1676 patchf.write(chunk)
1677 patchf.close()
1677 patchf.close()
1678
1678
1679 marks = repo._bookmarks
1679 marks = repo._bookmarks
1680 for bm in bmlist:
1680 for bm in bmlist:
1681 marks[bm] = n
1681 marks[bm] = n
1682 marks.write()
1682 marks.write()
1683
1683
1684 self.applied.append(statusentry(n, patchfn))
1684 self.applied.append(statusentry(n, patchfn))
1685 except: # re-raises
1685 except: # re-raises
1686 ctx = repo[cparents[0]]
1686 ctx = repo[cparents[0]]
1687 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
1687 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
1688 self.savedirty()
1688 self.savedirty()
1689 self.ui.warn(_('refresh interrupted while patch was popped! '
1689 self.ui.warn(_('refresh interrupted while patch was popped! '
1690 '(revert --all, qpush to recover)\n'))
1690 '(revert --all, qpush to recover)\n'))
1691 raise
1691 raise
1692 finally:
1692 finally:
1693 wlock.release()
1693 wlock.release()
1694 self.removeundo(repo)
1694 self.removeundo(repo)
1695
1695
1696 def init(self, repo, create=False):
1696 def init(self, repo, create=False):
1697 if not create and os.path.isdir(self.path):
1697 if not create and os.path.isdir(self.path):
1698 raise util.Abort(_("patch queue directory already exists"))
1698 raise util.Abort(_("patch queue directory already exists"))
1699 try:
1699 try:
1700 os.mkdir(self.path)
1700 os.mkdir(self.path)
1701 except OSError, inst:
1701 except OSError, inst:
1702 if inst.errno != errno.EEXIST or not create:
1702 if inst.errno != errno.EEXIST or not create:
1703 raise
1703 raise
1704 if create:
1704 if create:
1705 return self.qrepo(create=True)
1705 return self.qrepo(create=True)
1706
1706
1707 def unapplied(self, repo, patch=None):
1707 def unapplied(self, repo, patch=None):
1708 if patch and patch not in self.series:
1708 if patch and patch not in self.series:
1709 raise util.Abort(_("patch %s is not in series file") % patch)
1709 raise util.Abort(_("patch %s is not in series file") % patch)
1710 if not patch:
1710 if not patch:
1711 start = self.seriesend()
1711 start = self.seriesend()
1712 else:
1712 else:
1713 start = self.series.index(patch) + 1
1713 start = self.series.index(patch) + 1
1714 unapplied = []
1714 unapplied = []
1715 for i in xrange(start, len(self.series)):
1715 for i in xrange(start, len(self.series)):
1716 pushable, reason = self.pushable(i)
1716 pushable, reason = self.pushable(i)
1717 if pushable:
1717 if pushable:
1718 unapplied.append((i, self.series[i]))
1718 unapplied.append((i, self.series[i]))
1719 self.explainpushable(i)
1719 self.explainpushable(i)
1720 return unapplied
1720 return unapplied
1721
1721
1722 def qseries(self, repo, missing=None, start=0, length=None, status=None,
1722 def qseries(self, repo, missing=None, start=0, length=None, status=None,
1723 summary=False):
1723 summary=False):
1724 def displayname(pfx, patchname, state):
1724 def displayname(pfx, patchname, state):
1725 if pfx:
1725 if pfx:
1726 self.ui.write(pfx)
1726 self.ui.write(pfx)
1727 if summary:
1727 if summary:
1728 ph = patchheader(self.join(patchname), self.plainmode)
1728 ph = patchheader(self.join(patchname), self.plainmode)
1729 msg = ph.message and ph.message[0] or ''
1729 msg = ph.message and ph.message[0] or ''
1730 if self.ui.formatted():
1730 if self.ui.formatted():
1731 width = self.ui.termwidth() - len(pfx) - len(patchname) - 2
1731 width = self.ui.termwidth() - len(pfx) - len(patchname) - 2
1732 if width > 0:
1732 if width > 0:
1733 msg = util.ellipsis(msg, width)
1733 msg = util.ellipsis(msg, width)
1734 else:
1734 else:
1735 msg = ''
1735 msg = ''
1736 self.ui.write(patchname, label='qseries.' + state)
1736 self.ui.write(patchname, label='qseries.' + state)
1737 self.ui.write(': ')
1737 self.ui.write(': ')
1738 self.ui.write(msg, label='qseries.message.' + state)
1738 self.ui.write(msg, label='qseries.message.' + state)
1739 else:
1739 else:
1740 self.ui.write(patchname, label='qseries.' + state)
1740 self.ui.write(patchname, label='qseries.' + state)
1741 self.ui.write('\n')
1741 self.ui.write('\n')
1742
1742
1743 applied = set([p.name for p in self.applied])
1743 applied = set([p.name for p in self.applied])
1744 if length is None:
1744 if length is None:
1745 length = len(self.series) - start
1745 length = len(self.series) - start
1746 if not missing:
1746 if not missing:
1747 if self.ui.verbose:
1747 if self.ui.verbose:
1748 idxwidth = len(str(start + length - 1))
1748 idxwidth = len(str(start + length - 1))
1749 for i in xrange(start, start + length):
1749 for i in xrange(start, start + length):
1750 patch = self.series[i]
1750 patch = self.series[i]
1751 if patch in applied:
1751 if patch in applied:
1752 char, state = 'A', 'applied'
1752 char, state = 'A', 'applied'
1753 elif self.pushable(i)[0]:
1753 elif self.pushable(i)[0]:
1754 char, state = 'U', 'unapplied'
1754 char, state = 'U', 'unapplied'
1755 else:
1755 else:
1756 char, state = 'G', 'guarded'
1756 char, state = 'G', 'guarded'
1757 pfx = ''
1757 pfx = ''
1758 if self.ui.verbose:
1758 if self.ui.verbose:
1759 pfx = '%*d %s ' % (idxwidth, i, char)
1759 pfx = '%*d %s ' % (idxwidth, i, char)
1760 elif status and status != char:
1760 elif status and status != char:
1761 continue
1761 continue
1762 displayname(pfx, patch, state)
1762 displayname(pfx, patch, state)
1763 else:
1763 else:
1764 msng_list = []
1764 msng_list = []
1765 for root, dirs, files in os.walk(self.path):
1765 for root, dirs, files in os.walk(self.path):
1766 d = root[len(self.path) + 1:]
1766 d = root[len(self.path) + 1:]
1767 for f in files:
1767 for f in files:
1768 fl = os.path.join(d, f)
1768 fl = os.path.join(d, f)
1769 if (fl not in self.series and
1769 if (fl not in self.series and
1770 fl not in (self.statuspath, self.seriespath,
1770 fl not in (self.statuspath, self.seriespath,
1771 self.guardspath)
1771 self.guardspath)
1772 and not fl.startswith('.')):
1772 and not fl.startswith('.')):
1773 msng_list.append(fl)
1773 msng_list.append(fl)
1774 for x in sorted(msng_list):
1774 for x in sorted(msng_list):
1775 pfx = self.ui.verbose and ('D ') or ''
1775 pfx = self.ui.verbose and ('D ') or ''
1776 displayname(pfx, x, 'missing')
1776 displayname(pfx, x, 'missing')
1777
1777
1778 def issaveline(self, l):
1778 def issaveline(self, l):
1779 if l.name == '.hg.patches.save.line':
1779 if l.name == '.hg.patches.save.line':
1780 return True
1780 return True
1781
1781
1782 def qrepo(self, create=False):
1782 def qrepo(self, create=False):
1783 ui = self.ui.copy()
1783 ui = self.ui.copy()
1784 ui.setconfig('paths', 'default', '', overlay=False)
1784 ui.setconfig('paths', 'default', '', overlay=False)
1785 ui.setconfig('paths', 'default-push', '', overlay=False)
1785 ui.setconfig('paths', 'default-push', '', overlay=False)
1786 if create or os.path.isdir(self.join(".hg")):
1786 if create or os.path.isdir(self.join(".hg")):
1787 return hg.repository(ui, path=self.path, create=create)
1787 return hg.repository(ui, path=self.path, create=create)
1788
1788
    def restore(self, repo, rev, delete=None, qupdate=None):
        """Restore queue state recorded by save() in changeset `rev`.

        The series, applied-patch list and (optionally) the queue
        repository's dirstate parents are parsed back out of the
        changeset description.  With `delete`, the save entry itself is
        stripped after restoring; with `qupdate`, the queue repository
        is updated to the recorded parent.  Returns 1 on failure.
        """
        desc = repo[rev].description().strip()
        lines = desc.splitlines()
        i = 0
        datastart = None
        series = []
        applied = []
        qpp = None
        for i, line in enumerate(lines):
            if line == 'Patch Data:':
                # everything after this marker is series/status data
                datastart = i + 1
            elif line.startswith('Dirstate:'):
                # format: "Dirstate: <node1> <node2>" -- drop the
                # 10-char "Dirstate: " prefix and parse the parents
                l = line.rstrip()
                l = l[10:].split(' ')
                qpp = [bin(x) for x in l]
            elif datastart is not None:
                # applied entries are "<node>:<name>"; unapplied series
                # entries start with ':' so n is the empty string
                l = line.rstrip()
                n, name = l.split(':', 1)
                if n:
                    applied.append(statusentry(bin(n), name))
                else:
                    series.append(l)
        if datastart is None:
            self.ui.warn(_("no saved patch data found\n"))
            return 1
        self.ui.warn(_("restoring status: %s\n") % lines[0])
        self.fullseries = series
        self.applied = applied
        self.parseseries()
        self.seriesdirty = True
        self.applieddirty = True
        heads = repo.changelog.heads()
        if delete:
            if rev not in heads:
                # stripping would remove descendants too; refuse
                self.ui.warn(_("save entry has children, leaving it alone\n"))
            else:
                self.ui.warn(_("removing save entry %s\n") % short(rev))
                pp = repo.dirstate.parents()
                if rev in pp:
                    # working directory sits on the save entry; strip
                    # must move it off
                    update = True
                else:
                    update = False
                self.strip(repo, [rev], update=update, backup='strip')
        if qpp:
            self.ui.warn(_("saved queue repository parents: %s %s\n") %
                         (short(qpp[0]), short(qpp[1])))
            if qupdate:
                self.ui.status(_("updating queue directory\n"))
                r = self.qrepo()
                if not r:
                    self.ui.warn(_("unable to load queue repository\n"))
                    return 1
                hg.clean(r, qpp[0])
1842
1842
1843 def save(self, repo, msg=None):
1843 def save(self, repo, msg=None):
1844 if not self.applied:
1844 if not self.applied:
1845 self.ui.warn(_("save: no patches applied, exiting\n"))
1845 self.ui.warn(_("save: no patches applied, exiting\n"))
1846 return 1
1846 return 1
1847 if self.issaveline(self.applied[-1]):
1847 if self.issaveline(self.applied[-1]):
1848 self.ui.warn(_("status is already saved\n"))
1848 self.ui.warn(_("status is already saved\n"))
1849 return 1
1849 return 1
1850
1850
1851 if not msg:
1851 if not msg:
1852 msg = _("hg patches saved state")
1852 msg = _("hg patches saved state")
1853 else:
1853 else:
1854 msg = "hg patches: " + msg.rstrip('\r\n')
1854 msg = "hg patches: " + msg.rstrip('\r\n')
1855 r = self.qrepo()
1855 r = self.qrepo()
1856 if r:
1856 if r:
1857 pp = r.dirstate.parents()
1857 pp = r.dirstate.parents()
1858 msg += "\nDirstate: %s %s" % (hex(pp[0]), hex(pp[1]))
1858 msg += "\nDirstate: %s %s" % (hex(pp[0]), hex(pp[1]))
1859 msg += "\n\nPatch Data:\n"
1859 msg += "\n\nPatch Data:\n"
1860 msg += ''.join('%s\n' % x for x in self.applied)
1860 msg += ''.join('%s\n' % x for x in self.applied)
1861 msg += ''.join(':%s\n' % x for x in self.fullseries)
1861 msg += ''.join(':%s\n' % x for x in self.fullseries)
1862 n = repo.commit(msg, force=True)
1862 n = repo.commit(msg, force=True)
1863 if not n:
1863 if not n:
1864 self.ui.warn(_("repo commit failed\n"))
1864 self.ui.warn(_("repo commit failed\n"))
1865 return 1
1865 return 1
1866 self.applied.append(statusentry(n, '.hg.patches.save.line'))
1866 self.applied.append(statusentry(n, '.hg.patches.save.line'))
1867 self.applieddirty = True
1867 self.applieddirty = True
1868 self.removeundo(repo)
1868 self.removeundo(repo)
1869
1869
1870 def fullseriesend(self):
1870 def fullseriesend(self):
1871 if self.applied:
1871 if self.applied:
1872 p = self.applied[-1].name
1872 p = self.applied[-1].name
1873 end = self.findseries(p)
1873 end = self.findseries(p)
1874 if end is None:
1874 if end is None:
1875 return len(self.fullseries)
1875 return len(self.fullseries)
1876 return end + 1
1876 return end + 1
1877 return 0
1877 return 0
1878
1878
1879 def seriesend(self, all_patches=False):
1879 def seriesend(self, all_patches=False):
1880 """If all_patches is False, return the index of the next pushable patch
1880 """If all_patches is False, return the index of the next pushable patch
1881 in the series, or the series length. If all_patches is True, return the
1881 in the series, or the series length. If all_patches is True, return the
1882 index of the first patch past the last applied one.
1882 index of the first patch past the last applied one.
1883 """
1883 """
1884 end = 0
1884 end = 0
1885 def next(start):
1885 def next(start):
1886 if all_patches or start >= len(self.series):
1886 if all_patches or start >= len(self.series):
1887 return start
1887 return start
1888 for i in xrange(start, len(self.series)):
1888 for i in xrange(start, len(self.series)):
1889 p, reason = self.pushable(i)
1889 p, reason = self.pushable(i)
1890 if p:
1890 if p:
1891 return i
1891 return i
1892 self.explainpushable(i)
1892 self.explainpushable(i)
1893 return len(self.series)
1893 return len(self.series)
1894 if self.applied:
1894 if self.applied:
1895 p = self.applied[-1].name
1895 p = self.applied[-1].name
1896 try:
1896 try:
1897 end = self.series.index(p)
1897 end = self.series.index(p)
1898 except ValueError:
1898 except ValueError:
1899 return 0
1899 return 0
1900 return next(end + 1)
1900 return next(end + 1)
1901 return next(end)
1901 return next(end)
1902
1902
1903 def appliedname(self, index):
1903 def appliedname(self, index):
1904 pname = self.applied[index].name
1904 pname = self.applied[index].name
1905 if not self.ui.verbose:
1905 if not self.ui.verbose:
1906 p = pname
1906 p = pname
1907 else:
1907 else:
1908 p = str(self.series.index(pname)) + " " + pname
1908 p = str(self.series.index(pname)) + " " + pname
1909 return p
1909 return p
1910
1910
    def qimport(self, repo, files, patchname=None, rev=None, existing=None,
                force=None, git=False):
        """Import patch files (`files`) or existing changesets (`rev`)
        into the queue.

        Revisions are converted to patches and inserted at the front of
        the series (newest revision processed first); patch files are
        inserted after the last applied patch.  Returns the list of
        imported patch names.
        """
        def checkseries(patchname):
            # refuse a name that already appears in the series file
            if patchname in self.series:
                raise util.Abort(_('patch %s is already in the series file')
                                 % patchname)

        if rev:
            if files:
                raise util.Abort(_('option "-r" not valid when importing '
                                   'files'))
            rev = scmutil.revrange(repo, rev)
            # process newest first; inserting at index 0 below leaves
            # the series in oldest-first order
            rev.sort(reverse=True)
        elif not files:
            raise util.Abort(_('no files or revisions specified'))
        if (len(files) > 1 or len(rev) > 1) and patchname:
            raise util.Abort(_('option "-n" not valid when importing multiple '
                               'patches'))
        imported = []
        if rev:
            # If mq patches are applied, we can only import revisions
            # that form a linear path to qbase.
            # Otherwise, they should form a linear path to a head.
            heads = repo.changelog.heads(repo.changelog.node(rev[-1]))
            if len(heads) > 1:
                raise util.Abort(_('revision %d is the root of more than one '
                                   'branch') % rev[-1])
            if self.applied:
                base = repo.changelog.node(rev[0])
                if base in [n.node for n in self.applied]:
                    raise util.Abort(_('revision %d is already managed')
                                     % rev[0])
                if heads != [self.applied[-1].node]:
                    raise util.Abort(_('revision %d is not the parent of '
                                       'the queue') % rev[0])
                # imported revisions must chain onto qbase's parent
                base = repo.changelog.rev(self.applied[0].node)
                lastparent = repo.changelog.parentrevs(base)[0]
            else:
                if heads != [repo.changelog.node(rev[0])]:
                    raise util.Abort(_('revision %d has unmanaged children')
                                     % rev[0])
                lastparent = None

            diffopts = self.diffopts({'git': git})
            for r in rev:
                if not repo[r].mutable():
                    raise util.Abort(_('revision %d is not mutable') % r,
                                     hint=_('see "hg help phases" for details'))
                p1, p2 = repo.changelog.parentrevs(r)
                n = repo.changelog.node(r)
                if p2 != nullrev:
                    raise util.Abort(_('cannot import merge revision %d') % r)
                # enforce linearity: each revision must parent the last
                if lastparent and lastparent != r:
                    raise util.Abort(_('revision %d is not the parent of %d')
                                     % (r, lastparent))
                lastparent = p1

                if not patchname:
                    patchname = normname('%d.diff' % r)
                checkseries(patchname)
                self.checkpatchname(patchname, force)
                self.fullseries.insert(0, patchname)

                # write the revision's diff out as the patch file
                patchf = self.opener(patchname, "w")
                cmdutil.export(repo, [n], fp=patchf, opts=diffopts)
                patchf.close()

                se = statusentry(n, patchname)
                self.applied.insert(0, se)

                self.added.append(patchname)
                imported.append(patchname)
                # a user-supplied -n name only applies to the first one
                patchname = None
            if rev and repo.ui.configbool('mq', 'secret', False):
                # if we added anything with --rev, we must move the secret root
                phases.retractboundary(repo, phases.secret, [n])
            self.parseseries()
            self.applieddirty = True
            self.seriesdirty = True

        for i, filename in enumerate(files):
            if existing:
                # -e: register a file already in the patch directory
                if filename == '-':
                    raise util.Abort(_('-e is incompatible with import from -'))
                filename = normname(filename)
                self.checkreservedname(filename)
                originpath = self.join(filename)
                if not os.path.isfile(originpath):
                    raise util.Abort(_("patch %s does not exist") % filename)

                if patchname:
                    self.checkpatchname(patchname, force)

                    self.ui.write(_('renaming %s to %s\n')
                                        % (filename, patchname))
                    util.rename(originpath, self.join(patchname))
                else:
                    patchname = filename

            else:
                # copy patch content from stdin or an external file
                if filename == '-' and not patchname:
                    raise util.Abort(_('need --name to import a patch from -'))
                elif not patchname:
                    patchname = normname(os.path.basename(filename.rstrip('/')))
                self.checkpatchname(patchname, force)
                try:
                    if filename == '-':
                        text = self.ui.fin.read()
                    else:
                        fp = hg.openpath(self.ui, filename)
                        text = fp.read()
                        fp.close()
                except (OSError, IOError):
                    raise util.Abort(_("unable to read file %s") % filename)
                patchf = self.opener(patchname, "w")
                patchf.write(text)
                patchf.close()
            if not force:
                checkseries(patchname)
            if patchname not in self.series:
                # insert after the last applied patch, preserving the
                # order in which files were given
                index = self.fullseriesend() + i
                self.fullseries[index:index] = [patchname]
            self.parseseries()
            self.seriesdirty = True
            self.ui.warn(_("adding %s to series file\n") % patchname)
            self.added.append(patchname)
            imported.append(patchname)
            patchname = None

        self.removeundo(repo)
        return imported
2042
2042
def fixkeepchangesopts(ui, opts):
    """Inject --keep-changes into `opts` when the mq.keepchanges config
    option is set and neither --force nor --exact was given.

    Returns the original mapping untouched otherwise; when the flag is
    added the caller gets a copy, so the input opts are never mutated.
    """
    keep = (ui.configbool('mq', 'keepchanges')
            and not opts.get('force')
            and not opts.get('exact'))
    if not keep:
        return opts
    newopts = dict(opts)
    newopts['keep_changes'] = True
    return newopts
2050
2050
2051 @command("qdelete|qremove|qrm",
2051 @command("qdelete|qremove|qrm",
2052 [('k', 'keep', None, _('keep patch file')),
2052 [('k', 'keep', None, _('keep patch file')),
2053 ('r', 'rev', [],
2053 ('r', 'rev', [],
2054 _('stop managing a revision (DEPRECATED)'), _('REV'))],
2054 _('stop managing a revision (DEPRECATED)'), _('REV'))],
2055 _('hg qdelete [-k] [PATCH]...'))
2055 _('hg qdelete [-k] [PATCH]...'))
2056 def delete(ui, repo, *patches, **opts):
2056 def delete(ui, repo, *patches, **opts):
2057 """remove patches from queue
2057 """remove patches from queue
2058
2058
2059 The patches must not be applied, and at least one patch is required. Exact
2059 The patches must not be applied, and at least one patch is required. Exact
2060 patch identifiers must be given. With -k/--keep, the patch files are
2060 patch identifiers must be given. With -k/--keep, the patch files are
2061 preserved in the patch directory.
2061 preserved in the patch directory.
2062
2062
2063 To stop managing a patch and move it into permanent history,
2063 To stop managing a patch and move it into permanent history,
2064 use the :hg:`qfinish` command."""
2064 use the :hg:`qfinish` command."""
2065 q = repo.mq
2065 q = repo.mq
2066 q.delete(repo, patches, opts)
2066 q.delete(repo, patches, opts)
2067 q.savedirty()
2067 q.savedirty()
2068 return 0
2068 return 0
2069
2069
2070 @command("qapplied",
2070 @command("qapplied",
2071 [('1', 'last', None, _('show only the preceding applied patch'))
2071 [('1', 'last', None, _('show only the preceding applied patch'))
2072 ] + seriesopts,
2072 ] + seriesopts,
2073 _('hg qapplied [-1] [-s] [PATCH]'))
2073 _('hg qapplied [-1] [-s] [PATCH]'))
2074 def applied(ui, repo, patch=None, **opts):
2074 def applied(ui, repo, patch=None, **opts):
2075 """print the patches already applied
2075 """print the patches already applied
2076
2076
2077 Returns 0 on success."""
2077 Returns 0 on success."""
2078
2078
2079 q = repo.mq
2079 q = repo.mq
2080
2080
2081 if patch:
2081 if patch:
2082 if patch not in q.series:
2082 if patch not in q.series:
2083 raise util.Abort(_("patch %s is not in series file") % patch)
2083 raise util.Abort(_("patch %s is not in series file") % patch)
2084 end = q.series.index(patch) + 1
2084 end = q.series.index(patch) + 1
2085 else:
2085 else:
2086 end = q.seriesend(True)
2086 end = q.seriesend(True)
2087
2087
2088 if opts.get('last') and not end:
2088 if opts.get('last') and not end:
2089 ui.write(_("no patches applied\n"))
2089 ui.write(_("no patches applied\n"))
2090 return 1
2090 return 1
2091 elif opts.get('last') and end == 1:
2091 elif opts.get('last') and end == 1:
2092 ui.write(_("only one patch applied\n"))
2092 ui.write(_("only one patch applied\n"))
2093 return 1
2093 return 1
2094 elif opts.get('last'):
2094 elif opts.get('last'):
2095 start = end - 2
2095 start = end - 2
2096 end = 1
2096 end = 1
2097 else:
2097 else:
2098 start = 0
2098 start = 0
2099
2099
2100 q.qseries(repo, length=end, start=start, status='A',
2100 q.qseries(repo, length=end, start=start, status='A',
2101 summary=opts.get('summary'))
2101 summary=opts.get('summary'))
2102
2102
2103
2103
2104 @command("qunapplied",
2104 @command("qunapplied",
2105 [('1', 'first', None, _('show only the first patch'))] + seriesopts,
2105 [('1', 'first', None, _('show only the first patch'))] + seriesopts,
2106 _('hg qunapplied [-1] [-s] [PATCH]'))
2106 _('hg qunapplied [-1] [-s] [PATCH]'))
2107 def unapplied(ui, repo, patch=None, **opts):
2107 def unapplied(ui, repo, patch=None, **opts):
2108 """print the patches not yet applied
2108 """print the patches not yet applied
2109
2109
2110 Returns 0 on success."""
2110 Returns 0 on success."""
2111
2111
2112 q = repo.mq
2112 q = repo.mq
2113 if patch:
2113 if patch:
2114 if patch not in q.series:
2114 if patch not in q.series:
2115 raise util.Abort(_("patch %s is not in series file") % patch)
2115 raise util.Abort(_("patch %s is not in series file") % patch)
2116 start = q.series.index(patch) + 1
2116 start = q.series.index(patch) + 1
2117 else:
2117 else:
2118 start = q.seriesend(True)
2118 start = q.seriesend(True)
2119
2119
2120 if start == len(q.series) and opts.get('first'):
2120 if start == len(q.series) and opts.get('first'):
2121 ui.write(_("all patches applied\n"))
2121 ui.write(_("all patches applied\n"))
2122 return 1
2122 return 1
2123
2123
2124 length = opts.get('first') and 1 or None
2124 length = opts.get('first') and 1 or None
2125 q.qseries(repo, start=start, length=length, status='U',
2125 q.qseries(repo, start=start, length=length, status='U',
2126 summary=opts.get('summary'))
2126 summary=opts.get('summary'))
2127
2127
2128 @command("qimport",
2128 @command("qimport",
2129 [('e', 'existing', None, _('import file in patch directory')),
2129 [('e', 'existing', None, _('import file in patch directory')),
2130 ('n', 'name', '',
2130 ('n', 'name', '',
2131 _('name of patch file'), _('NAME')),
2131 _('name of patch file'), _('NAME')),
2132 ('f', 'force', None, _('overwrite existing files')),
2132 ('f', 'force', None, _('overwrite existing files')),
2133 ('r', 'rev', [],
2133 ('r', 'rev', [],
2134 _('place existing revisions under mq control'), _('REV')),
2134 _('place existing revisions under mq control'), _('REV')),
2135 ('g', 'git', None, _('use git extended diff format')),
2135 ('g', 'git', None, _('use git extended diff format')),
2136 ('P', 'push', None, _('qpush after importing'))],
2136 ('P', 'push', None, _('qpush after importing'))],
2137 _('hg qimport [-e] [-n NAME] [-f] [-g] [-P] [-r REV]... [FILE]...'))
2137 _('hg qimport [-e] [-n NAME] [-f] [-g] [-P] [-r REV]... [FILE]...'))
2138 def qimport(ui, repo, *filename, **opts):
2138 def qimport(ui, repo, *filename, **opts):
2139 """import a patch or existing changeset
2139 """import a patch or existing changeset
2140
2140
2141 The patch is inserted into the series after the last applied
2141 The patch is inserted into the series after the last applied
2142 patch. If no patches have been applied, qimport prepends the patch
2142 patch. If no patches have been applied, qimport prepends the patch
2143 to the series.
2143 to the series.
2144
2144
2145 The patch will have the same name as its source file unless you
2145 The patch will have the same name as its source file unless you
2146 give it a new one with -n/--name.
2146 give it a new one with -n/--name.
2147
2147
2148 You can register an existing patch inside the patch directory with
2148 You can register an existing patch inside the patch directory with
2149 the -e/--existing flag.
2149 the -e/--existing flag.
2150
2150
2151 With -f/--force, an existing patch of the same name will be
2151 With -f/--force, an existing patch of the same name will be
2152 overwritten.
2152 overwritten.
2153
2153
2154 An existing changeset may be placed under mq control with -r/--rev
2154 An existing changeset may be placed under mq control with -r/--rev
2155 (e.g. qimport --rev tip -n patch will place tip under mq control).
2155 (e.g. qimport --rev tip -n patch will place tip under mq control).
2156 With -g/--git, patches imported with --rev will use the git diff
2156 With -g/--git, patches imported with --rev will use the git diff
2157 format. See the diffs help topic for information on why this is
2157 format. See the diffs help topic for information on why this is
2158 important for preserving rename/copy information and permission
2158 important for preserving rename/copy information and permission
2159 changes. Use :hg:`qfinish` to remove changesets from mq control.
2159 changes. Use :hg:`qfinish` to remove changesets from mq control.
2160
2160
2161 To import a patch from standard input, pass - as the patch file.
2161 To import a patch from standard input, pass - as the patch file.
2162 When importing from standard input, a patch name must be specified
2162 When importing from standard input, a patch name must be specified
2163 using the --name flag.
2163 using the --name flag.
2164
2164
2165 To import an existing patch while renaming it::
2165 To import an existing patch while renaming it::
2166
2166
2167 hg qimport -e existing-patch -n new-name
2167 hg qimport -e existing-patch -n new-name
2168
2168
2169 Returns 0 if import succeeded.
2169 Returns 0 if import succeeded.
2170 """
2170 """
2171 lock = repo.lock() # cause this may move phase
2171 lock = repo.lock() # cause this may move phase
2172 try:
2172 try:
2173 q = repo.mq
2173 q = repo.mq
2174 try:
2174 try:
2175 imported = q.qimport(
2175 imported = q.qimport(
2176 repo, filename, patchname=opts.get('name'),
2176 repo, filename, patchname=opts.get('name'),
2177 existing=opts.get('existing'), force=opts.get('force'),
2177 existing=opts.get('existing'), force=opts.get('force'),
2178 rev=opts.get('rev'), git=opts.get('git'))
2178 rev=opts.get('rev'), git=opts.get('git'))
2179 finally:
2179 finally:
2180 q.savedirty()
2180 q.savedirty()
2181 finally:
2181 finally:
2182 lock.release()
2182 lock.release()
2183
2183
2184 if imported and opts.get('push') and not opts.get('rev'):
2184 if imported and opts.get('push') and not opts.get('rev'):
2185 return q.push(repo, imported[-1])
2185 return q.push(repo, imported[-1])
2186 return 0
2186 return 0
2187
2187
def qinit(ui, repo, create):
    """initialize a new queue repository

    This command also creates a series file for ordering patches, and
    an mq-specific .hgignore file in the queue repository, to exclude
    the status and guards files (these contain mostly transient state).

    Returns 0 if initialization succeeded."""
    q = repo.mq
    qrepo = q.init(repo, create)
    q.savedirty()
    if qrepo:
        # Seed the ignore file so transient queue state (status/guards)
        # is never tracked by the nested repository.
        if not os.path.exists(qrepo.wjoin('.hgignore')):
            ignore = qrepo.wopener('.hgignore', 'w')
            for line in ('^\\.hg\n', '^\\.mq\n', 'syntax: glob\n',
                         'status\n', 'guards\n'):
                ignore.write(line)
            ignore.close()
        # An empty series file must exist for a fresh queue.
        if not os.path.exists(qrepo.wjoin('series')):
            qrepo.wopener('series', 'w').close()
        qrepo[None].add(['.hgignore', 'series'])
        commands.add(ui, qrepo)
    return 0
2213
2213
2214 @command("^qinit",
2214 @command("^qinit",
2215 [('c', 'create-repo', None, _('create queue repository'))],
2215 [('c', 'create-repo', None, _('create queue repository'))],
2216 _('hg qinit [-c]'))
2216 _('hg qinit [-c]'))
2217 def init(ui, repo, **opts):
2217 def init(ui, repo, **opts):
2218 """init a new queue repository (DEPRECATED)
2218 """init a new queue repository (DEPRECATED)
2219
2219
2220 The queue repository is unversioned by default. If
2220 The queue repository is unversioned by default. If
2221 -c/--create-repo is specified, qinit will create a separate nested
2221 -c/--create-repo is specified, qinit will create a separate nested
2222 repository for patches (qinit -c may also be run later to convert
2222 repository for patches (qinit -c may also be run later to convert
2223 an unversioned patch repository into a versioned one). You can use
2223 an unversioned patch repository into a versioned one). You can use
2224 qcommit to commit changes to this queue repository.
2224 qcommit to commit changes to this queue repository.
2225
2225
2226 This command is deprecated. Without -c, it's implied by other relevant
2226 This command is deprecated. Without -c, it's implied by other relevant
2227 commands. With -c, use :hg:`init --mq` instead."""
2227 commands. With -c, use :hg:`init --mq` instead."""
2228 return qinit(ui, repo, create=opts.get('create_repo'))
2228 return qinit(ui, repo, create=opts.get('create_repo'))
2229
2229
2230 @command("qclone",
@command("qclone",
         [('', 'pull', None, _('use pull protocol to copy metadata')),
          ('U', 'noupdate', None,
           _('do not update the new working directories')),
          ('', 'uncompressed', None,
           _('use uncompressed transfer (fast over LAN)')),
          ('p', 'patches', '',
           _('location of source patch repository'), _('REPO')),
         ] + commands.remoteopts,
         _('hg qclone [OPTION]... SOURCE [DEST]'))
def clone(ui, source, dest=None, **opts):
    '''clone main and patch repository at same time

    If source is local, destination will have no patches applied. If
    source is remote, this command can not check if patches are
    applied in source, so cannot guarantee that patches are not
    applied in destination. If you clone remote repository, be sure
    before that it has no patches applied.

    Source patch repository is looked for in <src>/.hg/patches by
    default. Use -p <url> to change.

    The patch directory must be a nested Mercurial repository, as
    would be created by :hg:`init --mq`.

    Return 0 on success.
    '''
    def patchdir(repo):
        """compute a patch repo url from a repo object"""
        url = repo.url()
        if url.endswith('/'):
            url = url[:-1]
        return url + '/.hg/patches'

    # main repo (destination and sources)
    if dest is None:
        dest = hg.defaultdest(source)
    sr = hg.peer(ui, opts, ui.expandpath(source))

    # patches repo (source only) -- abort early if it does not exist,
    # before any cloning work has been done
    if opts.get('patches'):
        patchespath = ui.expandpath(opts.get('patches'))
    else:
        patchespath = patchdir(sr)
    try:
        hg.peer(ui, opts, patchespath)
    except error.RepoError:
        raise util.Abort(_('versioned patch repository not found'
                           ' (see init --mq)'))
    qbase, destrev = None, None
    if sr.local():
        repo = sr.local()
        # NOTE(review): qbase is still None here, so repo[qbase] looks up
        # repo[None] -- confirm this is the intended context for the phase
        # check before the real qbase node is assigned below.
        if repo.mq.applied and repo[qbase].phase() != phases.secret:
            qbase = repo.mq.applied[0].node
            if not hg.islocal(dest):
                # clone only the revisions below the first applied patch
                heads = set(repo.heads())
                destrev = list(heads.difference(repo.heads(qbase)))
                destrev.append(repo.changelog.parents(qbase)[0])
    elif sr.capable('lookup'):
        try:
            qbase = sr.lookup('qbase')
        except error.RepoError:
            pass

    ui.note(_('cloning main repository\n'))
    sr, dr = hg.clone(ui, opts, sr.url(), dest,
                      pull=opts.get('pull'),
                      rev=destrev,
                      update=False,
                      stream=opts.get('uncompressed'))

    ui.note(_('cloning patch repository\n'))
    hg.clone(ui, opts, opts.get('patches') or patchdir(sr), patchdir(dr),
             pull=opts.get('pull'), update=not opts.get('noupdate'),
             stream=opts.get('uncompressed'))

    if dr.local():
        repo = dr.local()
        if qbase:
            # destination must not have the source's applied patches as
            # permanent changesets
            ui.note(_('stripping applied patches from destination '
                      'repository\n'))
            repo.mq.strip(repo, [qbase], update=False, backup=None)
        if not opts.get('noupdate'):
            ui.note(_('updating destination repository\n'))
            hg.update(repo, repo.changelog.tip())
2315
2315
@command("qcommit|qci",
         commands.table["^commit|ci"][1],
         _('hg qcommit [OPTION]... [FILE]...'))
def commit(ui, repo, *pats, **opts):
    """commit changes in the queue repository (DEPRECATED)

    This command is deprecated; use :hg:`commit --mq` instead."""
    q = repo.mq
    # the queue repository is itself a Mercurial repository; commit there
    r = q.qrepo()
    if not r:
        raise util.Abort('no queue repository')
    commands.commit(r.ui, r, *pats, **opts)
2328
2328
@command("qseries",
         [('m', 'missing', None, _('print patches not in series')),
         ] + seriesopts,
         _('hg qseries [-ms]'))
def series(ui, repo, **opts):
    """print the entire series file

    Returns 0 on success."""
    repo.mq.qseries(repo, missing=opts.get('missing'),
                    summary=opts.get('summary'))
    return 0
2340
2340
@command("qtop", seriesopts, _('hg qtop [-s]'))
def top(ui, repo, **opts):
    """print the name of the current patch

    Returns 0 on success."""
    q = repo.mq
    # index just past the last applied patch, or 0 when none applied
    t = q.applied and q.seriesend(True) or 0
    if t:
        q.qseries(repo, start=t - 1, length=1, status='A',
                  summary=opts.get('summary'))
    else:
        ui.write(_("no patches applied\n"))
        return 1
2354
2354
@command("qnext", seriesopts, _('hg qnext [-s]'))
def next(ui, repo, **opts):
    """print the name of the next pushable patch

    Returns 0 on success."""
    q = repo.mq
    end = q.seriesend()
    if end == len(q.series):
        ui.write(_("all patches applied\n"))
        return 1
    q.qseries(repo, start=end, length=1, summary=opts.get('summary'))
2366
2366
@command("qprev", seriesopts, _('hg qprev [-s]'))
def prev(ui, repo, **opts):
    """print the name of the preceding applied patch

    Returns 0 on success."""
    q = repo.mq
    l = len(q.applied)
    if l == 1:
        ui.write(_("only one patch applied\n"))
        return 1
    if not l:
        ui.write(_("no patches applied\n"))
        return 1
    # second-to-last applied patch is the one preceding the top
    idx = q.series.index(q.applied[-2].name)
    q.qseries(repo, start=idx, length=1, status='A',
              summary=opts.get('summary'))
2383
2383
def setupheaderopts(ui, opts):
    """Resolve -U/--currentuser and -D/--currentdate into concrete values.

    Mutates *opts* in place: fills 'user' from ui.username() and 'date'
    from the current time, but only when the explicit -u/-d values were
    not given. Returns None.
    """
    if not opts.get('user') and opts.get('currentuser'):
        opts['user'] = ui.username()
    if not opts.get('date') and opts.get('currentdate'):
        opts['date'] = "%d %d" % util.makedate()
2389
2389
@command("^qnew",
         [('e', 'edit', None, _('edit commit message')),
          ('f', 'force', None, _('import uncommitted changes (DEPRECATED)')),
          ('g', 'git', None, _('use git extended diff format')),
          ('U', 'currentuser', None, _('add "From: <current user>" to patch')),
          ('u', 'user', '',
           _('add "From: <USER>" to patch'), _('USER')),
          ('D', 'currentdate', None, _('add "Date: <current date>" to patch')),
          ('d', 'date', '',
           _('add "Date: <DATE>" to patch'), _('DATE'))
          ] + commands.walkopts + commands.commitopts,
         _('hg qnew [-e] [-m TEXT] [-l FILE] PATCH [FILE]...'))
def new(ui, repo, patch, *args, **opts):
    """create a new patch

    qnew creates a new patch on top of the currently-applied patch (if
    any). The patch will be initialized with any outstanding changes
    in the working directory. You may also use -I/--include,
    -X/--exclude, and/or a list of files after the patch name to add
    only changes to matching files to the new patch, leaving the rest
    as uncommitted modifications.

    -u/--user and -d/--date can be used to set the (given) user and
    date, respectively. -U/--currentuser and -D/--currentdate set user
    to current user and date to current date.

    -e/--edit, -m/--message or -l/--logfile set the patch header as
    well as the commit message. If none is specified, the header is
    empty and the commit message is '[mq]: PATCH'.

    Use the -g/--git option to keep the patch in the git extended diff
    format. Read the diffs help topic for more information on why this
    is important for preserving permission changes and copy/rename
    information.

    Returns 0 on successful creation of a new patch.
    """
    msg = cmdutil.logmessage(ui, opts)
    def getmsg():
        # deferred so the editor is only launched when q.new needs the text
        return ui.edit(msg, opts.get('user') or ui.username())
    q = repo.mq
    # pass either the literal message or the editor callback through opts
    if opts.get('edit'):
        opts['msg'] = getmsg
    else:
        opts['msg'] = msg
    setupheaderopts(ui, opts)
    q.new(repo, patch, *args, **opts)
    q.savedirty()
    return 0
2440
2440
@command("^qrefresh",
         [('e', 'edit', None, _('edit commit message')),
          ('g', 'git', None, _('use git extended diff format')),
          ('s', 'short', None,
           _('refresh only files already in the patch and specified files')),
          ('U', 'currentuser', None,
           _('add/update author field in patch with current user')),
          ('u', 'user', '',
           _('add/update author field in patch with given user'), _('USER')),
          ('D', 'currentdate', None,
           _('add/update date field in patch with current date')),
          ('d', 'date', '',
           _('add/update date field in patch with given date'), _('DATE'))
          ] + commands.walkopts + commands.commitopts,
         _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...'))
def refresh(ui, repo, *pats, **opts):
    """update the current patch

    If any file patterns are provided, the refreshed patch will
    contain only the modifications that match those patterns; the
    remaining modifications will remain in the working directory.

    If -s/--short is specified, files currently included in the patch
    will be refreshed just like matched files and remain in the patch.

    If -e/--edit is specified, Mercurial will start your configured editor for
    you to enter a message. In case qrefresh fails, you will find a backup of
    your message in ``.hg/last-message.txt``.

    hg add/remove/copy/rename work as usual, though you might want to
    use git-style patches (-g/--git or [diff] git=1) to track copies
    and renames. See the diffs help topic for more information on the
    git diff format.

    Returns 0 on success.
    """
    q = repo.mq
    message = cmdutil.logmessage(ui, opts)
    if opts.get('edit'):
        if not q.applied:
            ui.write(_("no patches applied\n"))
            return 1
        if message:
            raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
        # seed the editor with the top patch's existing header
        patch = q.applied[-1].name
        ph = patchheader(q.join(patch), q.plainmode)
        message = ui.edit('\n'.join(ph.message), ph.user or ui.username())
        # We don't want to lose the patch message if qrefresh fails (issue2062)
        repo.savecommitmessage(message)
    setupheaderopts(ui, opts)
    wlock = repo.wlock()
    try:
        ret = q.refresh(repo, pats, msg=message, **opts)
        q.savedirty()
        return ret
    finally:
        wlock.release()
2498
2498
@command("^qdiff",
         commands.diffopts + commands.diffopts2 + commands.walkopts,
         _('hg qdiff [OPTION]... [FILE]...'))
def diff(ui, repo, *pats, **opts):
    """diff of the current patch and subsequent modifications

    Shows a diff which includes the current patch as well as any
    changes which have been made in the working directory since the
    last refresh (thus showing what the current patch would become
    after a qrefresh).

    Use :hg:`diff` if you only want to see the changes made since the
    last qrefresh, or :hg:`export qtip` if you want to see changes
    made by the current patch without including changes made since the
    qrefresh.

    Returns 0 on success.
    """
    repo.mq.diff(repo, pats, opts)
    return 0
2519
2519
@command('qfold',
         [('e', 'edit', None, _('edit patch header')),
          ('k', 'keep', None, _('keep folded patch files')),
         ] + commands.commitopts,
         _('hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...'))
def fold(ui, repo, *files, **opts):
    """fold the named patches into the current patch

    Patches must not yet be applied. Each patch will be successively
    applied to the current patch in the order given. If all the
    patches apply successfully, the current patch will be refreshed
    with the new cumulative patch, and the folded patches will be
    deleted. With -k/--keep, the folded patch files will not be
    removed afterwards.

    The header for each folded patch will be concatenated with the
    current patch header, separated by a line of ``* * *``.

    Returns 0 on success."""
    q = repo.mq
    if not files:
        raise util.Abort(_('qfold requires at least one patch name'))
    if not q.checktoppatch(repo)[0]:
        raise util.Abort(_('no patches applied'))
    q.checklocalchanges(repo)

    message = cmdutil.logmessage(ui, opts)
    if opts.get('edit'):
        if message:
            raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))

    parent = q.lookup('qtip')
    patches = []
    messages = []
    # resolve patch names, rejecting duplicates and applied patches
    for f in files:
        p = q.lookup(f)
        if p in patches or p == parent:
            ui.warn(_('skipping already folded patch %s\n') % p)
        if q.isapplied(p):
            raise util.Abort(_('qfold cannot fold already applied patch %s')
                             % p)
        patches.append(p)

    # apply each patch on top of the current one, collecting headers
    for p in patches:
        if not message:
            ph = patchheader(q.join(p), q.plainmode)
            if ph.message:
                messages.append(ph.message)
        pf = q.join(p)
        (patchsuccess, files, fuzz) = q.patch(repo, pf)
        if not patchsuccess:
            raise util.Abort(_('error folding patch %s') % p)

    if not message:
        # concatenate the parent header with each folded patch's header
        ph = patchheader(q.join(parent), q.plainmode)
        message, user = ph.message, ph.user
        for msg in messages:
            message.append('* * *')
            message.extend(msg)
        message = '\n'.join(message)

    if opts.get('edit'):
        message = ui.edit(message, user or ui.username())

    diffopts = q.patchopts(q.diffopts(), *patches)
    wlock = repo.wlock()
    try:
        q.refresh(repo, msg=message, git=diffopts.git)
        q.delete(repo, patches, opts)
        q.savedirty()
    finally:
        wlock.release()
2592
2592
@command("qgoto",
         [('', 'keep-changes', None,
           _('tolerate non-conflicting local changes')),
          ('f', 'force', None, _('overwrite any local changes')),
          ('', 'no-backup', None, _('do not save backup copies of files'))],
         _('hg qgoto [OPTION]... PATCH'))
def goto(ui, repo, patch, **opts):
    '''push or pop patches until named patch is at top of stack

    Returns 0 on success.'''
    opts = fixkeepchangesopts(ui, opts)
    q = repo.mq
    patch = q.lookup(patch)
    nobackup = opts.get('no_backup')
    keepchanges = opts.get('keep_changes')
    # already applied -> pop down to it; otherwise push up to it
    if q.isapplied(patch):
        ret = q.pop(repo, patch, force=opts.get('force'), nobackup=nobackup,
                    keepchanges=keepchanges)
    else:
        ret = q.push(repo, patch, force=opts.get('force'), nobackup=nobackup,
                     keepchanges=keepchanges)
    q.savedirty()
    return ret
2616
2616
@command("qguard",
         [('l', 'list', None, _('list all patches and guards')),
          ('n', 'none', None, _('drop all guards'))],
         _('hg qguard [-l] [-n] [PATCH] [-- [+GUARD]... [-GUARD]...]'))
def guard(ui, repo, *args, **opts):
    '''set or print guards for a patch

    Guards control whether a patch can be pushed. A patch with no
    guards is always pushed. A patch with a positive guard ("+foo") is
    pushed only if the :hg:`qselect` command has activated it. A patch with
    a negative guard ("-foo") is never pushed if the :hg:`qselect` command
    has activated it.

    With no arguments, print the currently active guards.
    With arguments, set guards for the named patch.

    .. note::
       Specifying negative guards now requires '--'.

    To set guards on another patch::

      hg qguard other.patch -- +2.6.17 -stable

    Returns 0 on success.
    '''
    def status(idx):
        # print one series entry with its guards, colorized by state
        guards = q.seriesguards[idx] or ['unguarded']
        if q.series[idx] in applied:
            state = 'applied'
        elif q.pushable(idx)[0]:
            state = 'unapplied'
        else:
            state = 'guarded'
        label = 'qguard.patch qguard.%s qseries.%s' % (state, state)
        ui.write('%s: ' % ui.label(q.series[idx], label))

        for i, guard in enumerate(guards):
            if guard.startswith('+'):
                ui.write(guard, label='qguard.positive')
            elif guard.startswith('-'):
                ui.write(guard, label='qguard.negative')
            else:
                ui.write(guard, label='qguard.unguarded')
            if i != len(guards) - 1:
                ui.write(' ')
        ui.write('\n')
    q = repo.mq
    applied = set(p.name for p in q.applied)
    patch = None
    args = list(args)
    if opts.get('list'):
        if args or opts.get('none'):
            raise util.Abort(_('cannot mix -l/--list with options or '
                               'arguments'))
        for i in xrange(len(q.series)):
            status(i)
        return
    # an argument starting with '+' or '-' is a guard, not a patch name;
    # in that case the guards apply to the topmost applied patch
    if not args or args[0][0:1] in '-+':
        if not q.applied:
            raise util.Abort(_('no patches applied'))
        patch = q.applied[-1].name
    if patch is None and args[0][0:1] not in '-+':
        patch = args.pop(0)
    if patch is None:
        raise util.Abort(_('no patch to work with'))
    if args or opts.get('none'):
        idx = q.findseries(patch)
        if idx is None:
            raise util.Abort(_('no patch named %s') % patch)
        q.setguards(idx, args)
        q.savedirty()
    else:
        status(q.series.index(q.lookup(patch)))
2690
2690
@command("qheader", [], _('hg qheader [PATCH]'))
def header(ui, repo, patch=None):
    """print the header of the topmost or specified patch

    Returns 0 on success."""
    q = repo.mq

    if patch:
        patch = q.lookup(patch)
    else:
        if not q.applied:
            ui.write(_('no patches applied\n'))
            return 1
        patch = q.lookup('qtip')
    ph = patchheader(q.join(patch), q.plainmode)

    ui.write('\n'.join(ph.message) + '\n')
2708
2708
def lastsavename(path):
    """Find the most recent numbered save file for *path*.

    Scans the directory of *path* for entries matching "<base>.<N>" and
    returns a tuple (full path of the highest-numbered match, N), or
    (None, None) when no such entry exists.
    """
    (directory, base) = os.path.split(path)
    names = os.listdir(directory)
    # NOTE(review): the '.' is unescaped, so it matches any character --
    # kept as-is to preserve existing matching behavior
    namere = re.compile("%s.([0-9]+)" % base)
    maxindex = None
    maxname = None
    for f in names:
        m = namere.match(f)
        if m:
            index = int(m.group(1))
            if maxindex is None or index > maxindex:
                maxindex = index
                maxname = f
    if maxname:
        return (os.path.join(directory, maxname), maxindex)
    return (None, None)
2725
2725
def savename(path):
    """Return the next free save-file name for *path* (``<path>.<N+1>``)."""
    last, index = lastsavename(path)
    # no previous save: start numbering from .1
    if last is None:
        index = 0
    return "%s.%d" % (path, index + 1)
2732
2732
@command("^qpush",
         [('', 'keep-changes', None,
           _('tolerate non-conflicting local changes')),
          ('f', 'force', None, _('apply on top of local changes')),
          ('e', 'exact', None,
           _('apply the target patch to its recorded parent')),
          ('l', 'list', None, _('list patch name in commit text')),
          ('a', 'all', None, _('apply all patches')),
          ('m', 'merge', None, _('merge from another queue (DEPRECATED)')),
          ('n', 'name', '',
           _('merge queue name (DEPRECATED)'), _('NAME')),
          ('', 'move', None,
           _('reorder patch series and apply only the patch')),
          ('', 'no-backup', None, _('do not save backup copies of files'))],
         _('hg qpush [-f] [-l] [-a] [--move] [PATCH | INDEX]'))
def push(ui, repo, patch=None, **opts):
    """push the next patch onto the stack

    By default, abort if the working directory contains uncommitted
    changes. With --keep-changes, abort only if the uncommitted files
    overlap with patched files. With -f/--force, backup and patch over
    uncommitted changes.

    Return 0 on success.
    """
    q = repo.mq
    opts = fixkeepchangesopts(ui, opts)

    mergeq = None
    if opts.get('merge'):
        # deprecated merge mode: locate the queue to merge from, either
        # an explicitly named one (-n) or the most recently saved queue
        if opts.get('name'):
            mqpath = repo.join(opts.get('name'))
        else:
            mqpath, dummy = lastsavename(q.path)
        if not mqpath:
            ui.warn(_("no saved queues found, please use -n\n"))
            return 1
        mergeq = queue(ui, repo.path, mqpath)
        ui.warn(_("merging with queue at: %s\n") % mergeq.path)

    return q.push(repo, patch, force=opts.get('force'),
                  list=opts.get('list'), mergeq=mergeq,
                  all=opts.get('all'), move=opts.get('move'),
                  exact=opts.get('exact'), nobackup=opts.get('no_backup'),
                  keepchanges=opts.get('keep_changes'))
2777
2777
@command("^qpop",
         [('a', 'all', None, _('pop all patches')),
          ('n', 'name', '',
           _('queue name to pop (DEPRECATED)'), _('NAME')),
          ('', 'keep-changes', None,
           _('tolerate non-conflicting local changes')),
          ('f', 'force', None, _('forget any local changes to patched files')),
          ('', 'no-backup', None, _('do not save backup copies of files'))],
         _('hg qpop [-a] [-f] [PATCH | INDEX]'))
def pop(ui, repo, patch=None, **opts):
    """pop the current patch off the stack

    Without argument, pops off the top of the patch stack. If given a
    patch name, keeps popping off patches until the named patch is at
    the top of the stack.

    By default, abort if the working directory contains uncommitted
    changes. With --keep-changes, abort only if the uncommitted files
    overlap with patched files. With -f/--force, backup and discard
    changes made to such files.

    Return 0 on success.
    """
    opts = fixkeepchangesopts(ui, opts)
    if opts.get('name'):
        # popping from a named (non-default) queue: leave the working
        # directory alone
        q = queue(ui, repo.path, repo.join(opts.get('name')))
        ui.warn(_('using patch queue: %s\n') % q.path)
        localupdate = False
    else:
        q = repo.mq
        localupdate = True
    ret = q.pop(repo, patch, force=opts.get('force'), update=localupdate,
                all=opts.get('all'), nobackup=opts.get('no_backup'),
                keepchanges=opts.get('keep_changes'))
    q.savedirty()
    return ret
2814
2814
@command("qrename|qmv", [], _('hg qrename PATCH1 [PATCH2]'))
def rename(ui, repo, patch, name=None, **opts):
    """rename a patch

    With one argument, renames the current patch to PATCH1.
    With two arguments, renames PATCH1 to PATCH2.

    Returns 0 on success."""
    q = repo.mq
    # one-argument form: the single argument is the new name and the
    # patch to rename defaults to the topmost applied patch (below)
    if not name:
        name = patch
        patch = None

    if patch:
        patch = q.lookup(patch)
    else:
        if not q.applied:
            ui.write(_('no patches applied\n'))
            return
        patch = q.lookup('qtip')
    absdest = q.join(name)
    # renaming into an existing directory: keep the patch's basename
    if os.path.isdir(absdest):
        name = normname(os.path.join(name, os.path.basename(patch)))
        absdest = q.join(name)
    q.checkpatchname(name)

    ui.note(_('renaming %s to %s\n') % (patch, name))
    # rewrite the series entry, preserving any '#guard' annotations
    i = q.findseries(patch)
    guards = q.guard_re.findall(q.fullseries[i])
    q.fullseries[i] = name + ''.join([' #' + g for g in guards])
    q.parseseries()
    q.seriesdirty = True

    # if the patch is currently applied, update the status entry too
    info = q.isapplied(patch)
    if info:
        q.applied[info[0]] = statusentry(info[1], name)
        q.applieddirty = True

    destdir = os.path.dirname(absdest)
    if not os.path.isdir(destdir):
        os.makedirs(destdir)
    util.rename(q.join(patch), absdest)
    # mirror the rename in the versioned patch queue repository, if any
    r = q.qrepo()
    if r and patch in r.dirstate:
        wctx = r[None]
        wlock = r.wlock()
        try:
            if r.dirstate[patch] == 'a':
                # the old file was only added, never committed: just
                # swap the dirstate entries
                r.dirstate.drop(patch)
                r.dirstate.add(name)
            else:
                # record the rename as a copy + forget
                wctx.copy(patch, name)
                wctx.forget([patch])
        finally:
            wlock.release()

    q.savedirty()
2872
2872
@command("qrestore",
         [('d', 'delete', None, _('delete save entry')),
          ('u', 'update', None, _('update queue working directory'))],
         _('hg qrestore [-d] [-u] REV'))
def restore(ui, repo, rev, **opts):
    """restore the queue state saved by a revision (DEPRECATED)

    This command is deprecated, use :hg:`rebase` instead."""
    q = repo.mq
    # resolve the user-supplied revision to a node before restoring
    node = repo.lookup(rev)
    q.restore(repo, node, delete=opts.get('delete'),
              qupdate=opts.get('update'))
    q.savedirty()
    return 0
2887
2887
@command("qsave",
         [('c', 'copy', None, _('copy patch directory')),
          ('n', 'name', '',
           _('copy directory name'), _('NAME')),
          ('e', 'empty', None, _('clear queue status file')),
          ('f', 'force', None, _('force copy'))] + commands.commitopts,
         _('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]'))
def save(ui, repo, **opts):
    """save current queue state (DEPRECATED)

    This command is deprecated, use :hg:`rebase` instead."""
    q = repo.mq
    ret = q.save(repo, msg=cmdutil.logmessage(ui, opts))
    if ret:
        return ret
    q.savedirty() # save to .hg/patches before copying
    if opts.get('copy'):
        path = q.path
        if opts.get('name'):
            # copy to an explicitly named directory; refuse to clobber
            # unless -f is given
            dest = os.path.join(q.basepath, opts.get('name'))
            if os.path.exists(dest):
                if not os.path.isdir(dest):
                    raise util.Abort(_('destination %s exists and is not '
                                       'a directory') % dest)
                if not opts.get('force'):
                    raise util.Abort(_('destination %s exists, '
                                       'use -f to force') % dest)
        else:
            # pick the next free numbered save name
            dest = savename(path)
        ui.warn(_("copy %s to %s\n") % (path, dest))
        util.copyfiles(path, dest)
    if opts.get('empty'):
        # forget all applied patches, but keep the patch files around
        del q.applied[:]
        q.applieddirty = True
        q.savedirty()
    return 0
2925
2925
@command("strip",
         [
          ('r', 'rev', [], _('strip specified revision (optional, '
                               'can specify revisions without this '
                               'option)'), _('REV')),
          ('f', 'force', None, _('force removal of changesets, discard '
                                 'uncommitted changes (no backup)')),
          ('b', 'backup', None, _('bundle only changesets with local revision'
                                  ' number greater than REV which are not'
                                  ' descendants of REV (DEPRECATED)')),
          ('', 'no-backup', None, _('no backups')),
          ('', 'nobackup', None, _('no backups (DEPRECATED)')),
          ('n', '', None, _('ignored  (DEPRECATED)')),
          ('k', 'keep', None, _("do not modify working copy during strip")),
          ('B', 'bookmark', '', _("remove revs only reachable from given"
                                  " bookmark"))],
          _('hg strip [-k] [-f] [-n] [-B bookmark] [-r] REV...'))
def strip(ui, repo, *revs, **opts):
    """strip changesets and all their descendants from the repository

    The strip command removes the specified changesets and all their
    descendants. If the working directory has uncommitted changes, the
    operation is aborted unless the --force flag is supplied, in which
    case changes will be discarded.

    If a parent of the working directory is stripped, then the working
    directory will automatically be updated to the most recent
    available ancestor of the stripped parent after the operation
    completes.

    Any stripped changesets are stored in ``.hg/strip-backup`` as a
    bundle (see :hg:`help bundle` and :hg:`help unbundle`). They can
    be restored by running :hg:`unbundle .hg/strip-backup/BUNDLE`,
    where BUNDLE is the bundle file created by the strip. Note that
    the local revision numbers will in general be different after the
    restore.

    Use the --no-backup option to discard the backup bundle once the
    operation completes.

    Strip is not a history-rewriting operation and can be used on
    changesets in the public phase. But if the stripped changesets have
    been pushed to a remote repository you will likely pull them again.

    Return 0 on success.
    """
    # backup mode passed down to mq.strip: 'all' (default),
    # 'strip' (-b, deprecated) or 'none' (--no-backup/--nobackup)
    backup = 'all'
    if opts.get('backup'):
        backup = 'strip'
    elif opts.get('no_backup') or opts.get('nobackup'):
        backup = 'none'

    cl = repo.changelog
    # positional REVs and -r REVs are equivalent; resolve them together
    revs = list(revs) + opts.get('rev')
    revs = set(scmutil.revrange(repo, revs))

    if opts.get('bookmark'):
        mark = opts.get('bookmark')
        marks = repo._bookmarks
        if mark not in marks:
            raise util.Abort(_("bookmark '%s' not found") % mark)

        # If the requested bookmark is not the only one pointing to a
        # a revision we have to only delete the bookmark and not strip
        # anything. revsets cannot detect that case.
        uniquebm = True
        for m, n in marks.iteritems():
            if m != mark and n == repo[mark].node():
                uniquebm = False
                break
        if uniquebm:
            # strip everything reachable only from the bookmark
            rsrevs = repo.revs("ancestors(bookmark(%s)) - "
                               "ancestors(head() and not bookmark(%s)) - "
                               "ancestors(bookmark() and not bookmark(%s))",
                               mark, mark, mark)
            revs.update(set(rsrevs))
        if not revs:
            # nothing to strip: just delete the bookmark and report it
            del marks[mark]
            marks.write()
            ui.write(_("bookmark '%s' deleted\n") % mark)

    if not revs:
        raise util.Abort(_('empty revision set'))

    descendants = set(cl.descendants(revs))
    strippedrevs = revs.union(descendants)
    # roots: the minimal set of revisions to hand to mq.strip
    roots = revs.difference(descendants)

    update = False
    # if one of the wdir parent is stripped we'll need
    # to update away to an earlier revision
    for p in repo.dirstate.parents():
        if p != nullid and cl.rev(p) in strippedrevs:
            update = True
            break

    rootnodes = set(cl.node(r) for r in roots)

    q = repo.mq
    if q.applied:
        # refresh queue state if we're about to strip
        # applied patches
        if cl.rev(repo.lookup('qtip')) in strippedrevs:
            q.applieddirty = True
            start = 0
            end = len(q.applied)
            for i, statusentry in enumerate(q.applied):
                if statusentry.node in rootnodes:
                    # if one of the stripped roots is an applied
                    # patch, only part of the queue is stripped
                    start = i
                    break
            del q.applied[start:end]
            q.savedirty()

    revs = list(rootnodes)
    if update and opts.get('keep'):
        # --keep: rebuild the dirstate against the strip destination so
        # the working copy files are left untouched by the update below
        wlock = repo.wlock()
        try:
            urev = repo.mq.qparents(repo, revs[0])
            repo.dirstate.rebuild(urev, repo[urev].manifest())
            repo.dirstate.write()
            update = False
        finally:
            wlock.release()

    if opts.get('bookmark'):
        # the bookmarked revisions are going away: drop the bookmark
        del marks[mark]
        marks.write()
        ui.write(_("bookmark '%s' deleted\n") % mark)

    repo.mq.strip(repo, revs, backup=backup, update=update,
                  force=opts.get('force'))

    return 0
3061
3061
@command("qselect",
         [('n', 'none', None, _('disable all guards')),
          ('s', 'series', None, _('list all guards in series file')),
          ('', 'pop', None, _('pop to before first guarded applied patch')),
          ('', 'reapply', None, _('pop, then reapply patches'))],
         _('hg qselect [OPTION]... [GUARD]...'))
def select(ui, repo, *args, **opts):
    '''set or print guarded patches to push

    Use the :hg:`qguard` command to set or print guards on patch, then use
    qselect to tell mq which guards to use. A patch will be pushed if
    it has no guards or any positive guards match the currently
    selected guard, but will not be pushed if any negative guards
    match the current guard. For example::

        qguard foo.patch -- -stable    (negative guard)
        qguard bar.patch    +stable    (positive guard)
        qselect stable

    This activates the "stable" guard. mq will skip foo.patch (because
    it has a negative match) but push bar.patch (because it has a
    positive match).

    With no arguments, prints the currently active guards.
    With one argument, sets the active guard.

    Use -n/--none to deactivate guards (no other arguments needed).
    When no guards are active, patches with positive guards are
    skipped and patches with negative guards are pushed.

    qselect can change the guards on applied patches. It does not pop
    guarded patches by default. Use --pop to pop back to the last
    applied patch that is not guarded. Use --reapply (which implies
    --pop) to push back to the current patch afterwards, but skip
    guarded patches.

    Use -s/--series to print a list of all guards in the series file
    (no other arguments needed). Use -v for more information.

    Returns 0 on success.'''

    q = repo.mq
    guards = q.active()
    if args or opts.get('none'):
        # changing the active guard set: remember how many patches were
        # unapplied/guarded before, so we can report the difference
        old_unapplied = q.unapplied(repo)
        old_guarded = [i for i in xrange(len(q.applied)) if
                       not q.pushable(i)[0]]
        q.setactive(args)
        q.savedirty()
        if not args:
            ui.status(_('guards deactivated\n'))
        if not opts.get('pop') and not opts.get('reapply'):
            unapplied = q.unapplied(repo)
            guarded = [i for i in xrange(len(q.applied))
                       if not q.pushable(i)[0]]
            if len(unapplied) != len(old_unapplied):
                ui.status(_('number of unguarded, unapplied patches has '
                            'changed from %d to %d\n') %
                          (len(old_unapplied), len(unapplied)))
            if len(guarded) != len(old_guarded):
                ui.status(_('number of guarded, applied patches has changed '
                            'from %d to %d\n') %
                          (len(old_guarded), len(guarded)))
    elif opts.get('series'):
        # -s/--series: tally how often each guard appears in the series
        guards = {}
        noguards = 0
        for gs in q.seriesguards:
            if not gs:
                noguards += 1
            for g in gs:
                guards.setdefault(g, 0)
                guards[g] += 1
        if ui.verbose:
            guards['NONE'] = noguards
        guards = guards.items()
        # sort by guard name, ignoring the leading '+'/'-' sign
        guards.sort(key=lambda x: x[0][1:])
        if guards:
            ui.note(_('guards in series file:\n'))
            for guard, count in guards:
                ui.note('%2d ' % count)
                ui.write(guard, '\n')
        else:
            ui.note(_('no guards in series file\n'))
    else:
        # no arguments: just report the currently active guards
        if guards:
            ui.note(_('active guards:\n'))
            for g in guards:
                ui.write(g, '\n')
        else:
            ui.write(_('no active guards\n'))
    # remember the current top patch so --reapply can push back to it
    reapply = opts.get('reapply') and q.applied and q.appliedname(-1)
    popped = False
    if opts.get('pop') or opts.get('reapply'):
        # pop to just before the first applied patch that is now guarded
        for i in xrange(len(q.applied)):
            pushable, reason = q.pushable(i)
            if not pushable:
                ui.status(_('popping guarded patches\n'))
                popped = True
                if i == 0:
                    q.pop(repo, all=True)
                else:
                    q.pop(repo, str(i - 1))
                break
    if popped:
        try:
            if reapply:
                ui.status(_('reapplying unguarded patches\n'))
                q.push(repo, reapply)
        finally:
            q.savedirty()
3172
3172
3173 @command("qfinish",
3173 @command("qfinish",
3174 [('a', 'applied', None, _('finish all applied changesets'))],
3174 [('a', 'applied', None, _('finish all applied changesets'))],
3175 _('hg qfinish [-a] [REV]...'))
3175 _('hg qfinish [-a] [REV]...'))
3176 def finish(ui, repo, *revrange, **opts):
3176 def finish(ui, repo, *revrange, **opts):
3177 """move applied patches into repository history
3177 """move applied patches into repository history
3178
3178
3179 Finishes the specified revisions (corresponding to applied
3179 Finishes the specified revisions (corresponding to applied
3180 patches) by moving them out of mq control into regular repository
3180 patches) by moving them out of mq control into regular repository
3181 history.
3181 history.
3182
3182
3183 Accepts a revision range or the -a/--applied option. If --applied
3183 Accepts a revision range or the -a/--applied option. If --applied
3184 is specified, all applied mq revisions are removed from mq
3184 is specified, all applied mq revisions are removed from mq
3185 control. Otherwise, the given revisions must be at the base of the
3185 control. Otherwise, the given revisions must be at the base of the
3186 stack of applied patches.
3186 stack of applied patches.
3187
3187
3188 This can be especially useful if your changes have been applied to
3188 This can be especially useful if your changes have been applied to
3189 an upstream repository, or if you are about to push your changes
3189 an upstream repository, or if you are about to push your changes
3190 to upstream.
3190 to upstream.
3191
3191
3192 Returns 0 on success.
3192 Returns 0 on success.
3193 """
3193 """
3194 if not opts.get('applied') and not revrange:
3194 if not opts.get('applied') and not revrange:
3195 raise util.Abort(_('no revisions specified'))
3195 raise util.Abort(_('no revisions specified'))
3196 elif opts.get('applied'):
3196 elif opts.get('applied'):
3197 revrange = ('qbase::qtip',) + revrange
3197 revrange = ('qbase::qtip',) + revrange
3198
3198
3199 q = repo.mq
3199 q = repo.mq
3200 if not q.applied:
3200 if not q.applied:
3201 ui.status(_('no patches applied\n'))
3201 ui.status(_('no patches applied\n'))
3202 return 0
3202 return 0
3203
3203
3204 revs = scmutil.revrange(repo, revrange)
3204 revs = scmutil.revrange(repo, revrange)
3205 if repo['.'].rev() in revs and repo[None].files():
3205 if repo['.'].rev() in revs and repo[None].files():
3206 ui.warn(_('warning: uncommitted changes in the working directory\n'))
3206 ui.warn(_('warning: uncommitted changes in the working directory\n'))
3207 # queue.finish may changes phases but leave the responsibility to lock the
3207 # queue.finish may changes phases but leave the responsibility to lock the
3208 # repo to the caller to avoid deadlock with wlock. This command code is
3208 # repo to the caller to avoid deadlock with wlock. This command code is
3209 # responsibility for this locking.
3209 # responsibility for this locking.
3210 lock = repo.lock()
3210 lock = repo.lock()
3211 try:
3211 try:
3212 q.finish(repo, revs)
3212 q.finish(repo, revs)
3213 q.savedirty()
3213 q.savedirty()
3214 finally:
3214 finally:
3215 lock.release()
3215 lock.release()
3216 return 0
3216 return 0
3217
3217
3218 @command("qqueue",
3218 @command("qqueue",
3219 [('l', 'list', False, _('list all available queues')),
3219 [('l', 'list', False, _('list all available queues')),
3220 ('', 'active', False, _('print name of active queue')),
3220 ('', 'active', False, _('print name of active queue')),
3221 ('c', 'create', False, _('create new queue')),
3221 ('c', 'create', False, _('create new queue')),
3222 ('', 'rename', False, _('rename active queue')),
3222 ('', 'rename', False, _('rename active queue')),
3223 ('', 'delete', False, _('delete reference to queue')),
3223 ('', 'delete', False, _('delete reference to queue')),
3224 ('', 'purge', False, _('delete queue, and remove patch dir')),
3224 ('', 'purge', False, _('delete queue, and remove patch dir')),
3225 ],
3225 ],
3226 _('[OPTION] [QUEUE]'))
3226 _('[OPTION] [QUEUE]'))
3227 def qqueue(ui, repo, name=None, **opts):
3227 def qqueue(ui, repo, name=None, **opts):
3228 '''manage multiple patch queues
3228 '''manage multiple patch queues
3229
3229
3230 Supports switching between different patch queues, as well as creating
3230 Supports switching between different patch queues, as well as creating
3231 new patch queues and deleting existing ones.
3231 new patch queues and deleting existing ones.
3232
3232
3233 Omitting a queue name or specifying -l/--list will show you the registered
3233 Omitting a queue name or specifying -l/--list will show you the registered
3234 queues - by default the "normal" patches queue is registered. The currently
3234 queues - by default the "normal" patches queue is registered. The currently
3235 active queue will be marked with "(active)". Specifying --active will print
3235 active queue will be marked with "(active)". Specifying --active will print
3236 only the name of the active queue.
3236 only the name of the active queue.
3237
3237
3238 To create a new queue, use -c/--create. The queue is automatically made
3238 To create a new queue, use -c/--create. The queue is automatically made
3239 active, except in the case where there are applied patches from the
3239 active, except in the case where there are applied patches from the
3240 currently active queue in the repository. Then the queue will only be
3240 currently active queue in the repository. Then the queue will only be
3241 created and switching will fail.
3241 created and switching will fail.
3242
3242
3243 To delete an existing queue, use --delete. You cannot delete the currently
3243 To delete an existing queue, use --delete. You cannot delete the currently
3244 active queue.
3244 active queue.
3245
3245
3246 Returns 0 on success.
3246 Returns 0 on success.
3247 '''
3247 '''
3248 q = repo.mq
3248 q = repo.mq
3249 _defaultqueue = 'patches'
3249 _defaultqueue = 'patches'
3250 _allqueues = 'patches.queues'
3250 _allqueues = 'patches.queues'
3251 _activequeue = 'patches.queue'
3251 _activequeue = 'patches.queue'
3252
3252
3253 def _getcurrent():
3253 def _getcurrent():
3254 cur = os.path.basename(q.path)
3254 cur = os.path.basename(q.path)
3255 if cur.startswith('patches-'):
3255 if cur.startswith('patches-'):
3256 cur = cur[8:]
3256 cur = cur[8:]
3257 return cur
3257 return cur
3258
3258
3259 def _noqueues():
3259 def _noqueues():
3260 try:
3260 try:
3261 fh = repo.opener(_allqueues, 'r')
3261 fh = repo.opener(_allqueues, 'r')
3262 fh.close()
3262 fh.close()
3263 except IOError:
3263 except IOError:
3264 return True
3264 return True
3265
3265
3266 return False
3266 return False
3267
3267
3268 def _getqueues():
3268 def _getqueues():
3269 current = _getcurrent()
3269 current = _getcurrent()
3270
3270
3271 try:
3271 try:
3272 fh = repo.opener(_allqueues, 'r')
3272 fh = repo.opener(_allqueues, 'r')
3273 queues = [queue.strip() for queue in fh if queue.strip()]
3273 queues = [queue.strip() for queue in fh if queue.strip()]
3274 fh.close()
3274 fh.close()
3275 if current not in queues:
3275 if current not in queues:
3276 queues.append(current)
3276 queues.append(current)
3277 except IOError:
3277 except IOError:
3278 queues = [_defaultqueue]
3278 queues = [_defaultqueue]
3279
3279
3280 return sorted(queues)
3280 return sorted(queues)
3281
3281
3282 def _setactive(name):
3282 def _setactive(name):
3283 if q.applied:
3283 if q.applied:
3284 raise util.Abort(_('new queue created, but cannot make active '
3284 raise util.Abort(_('new queue created, but cannot make active '
3285 'as patches are applied'))
3285 'as patches are applied'))
3286 _setactivenocheck(name)
3286 _setactivenocheck(name)
3287
3287
3288 def _setactivenocheck(name):
3288 def _setactivenocheck(name):
3289 fh = repo.opener(_activequeue, 'w')
3289 fh = repo.opener(_activequeue, 'w')
3290 if name != 'patches':
3290 if name != 'patches':
3291 fh.write(name)
3291 fh.write(name)
3292 fh.close()
3292 fh.close()
3293
3293
3294 def _addqueue(name):
3294 def _addqueue(name):
3295 fh = repo.opener(_allqueues, 'a')
3295 fh = repo.opener(_allqueues, 'a')
3296 fh.write('%s\n' % (name,))
3296 fh.write('%s\n' % (name,))
3297 fh.close()
3297 fh.close()
3298
3298
3299 def _queuedir(name):
3299 def _queuedir(name):
3300 if name == 'patches':
3300 if name == 'patches':
3301 return repo.join('patches')
3301 return repo.join('patches')
3302 else:
3302 else:
3303 return repo.join('patches-' + name)
3303 return repo.join('patches-' + name)
3304
3304
3305 def _validname(name):
3305 def _validname(name):
3306 for n in name:
3306 for n in name:
3307 if n in ':\\/.':
3307 if n in ':\\/.':
3308 return False
3308 return False
3309 return True
3309 return True
3310
3310
3311 def _delete(name):
3311 def _delete(name):
3312 if name not in existing:
3312 if name not in existing:
3313 raise util.Abort(_('cannot delete queue that does not exist'))
3313 raise util.Abort(_('cannot delete queue that does not exist'))
3314
3314
3315 current = _getcurrent()
3315 current = _getcurrent()
3316
3316
3317 if name == current:
3317 if name == current:
3318 raise util.Abort(_('cannot delete currently active queue'))
3318 raise util.Abort(_('cannot delete currently active queue'))
3319
3319
3320 fh = repo.opener('patches.queues.new', 'w')
3320 fh = repo.opener('patches.queues.new', 'w')
3321 for queue in existing:
3321 for queue in existing:
3322 if queue == name:
3322 if queue == name:
3323 continue
3323 continue
3324 fh.write('%s\n' % (queue,))
3324 fh.write('%s\n' % (queue,))
3325 fh.close()
3325 fh.close()
3326 util.rename(repo.join('patches.queues.new'), repo.join(_allqueues))
3326 util.rename(repo.join('patches.queues.new'), repo.join(_allqueues))
3327
3327
3328 if not name or opts.get('list') or opts.get('active'):
3328 if not name or opts.get('list') or opts.get('active'):
3329 current = _getcurrent()
3329 current = _getcurrent()
3330 if opts.get('active'):
3330 if opts.get('active'):
3331 ui.write('%s\n' % (current,))
3331 ui.write('%s\n' % (current,))
3332 return
3332 return
3333 for queue in _getqueues():
3333 for queue in _getqueues():
3334 ui.write('%s' % (queue,))
3334 ui.write('%s' % (queue,))
3335 if queue == current and not ui.quiet:
3335 if queue == current and not ui.quiet:
3336 ui.write(_(' (active)\n'))
3336 ui.write(_(' (active)\n'))
3337 else:
3337 else:
3338 ui.write('\n')
3338 ui.write('\n')
3339 return
3339 return
3340
3340
3341 if not _validname(name):
3341 if not _validname(name):
3342 raise util.Abort(
3342 raise util.Abort(
3343 _('invalid queue name, may not contain the characters ":\\/."'))
3343 _('invalid queue name, may not contain the characters ":\\/."'))
3344
3344
3345 existing = _getqueues()
3345 existing = _getqueues()
3346
3346
3347 if opts.get('create'):
3347 if opts.get('create'):
3348 if name in existing:
3348 if name in existing:
3349 raise util.Abort(_('queue "%s" already exists') % name)
3349 raise util.Abort(_('queue "%s" already exists') % name)
3350 if _noqueues():
3350 if _noqueues():
3351 _addqueue(_defaultqueue)
3351 _addqueue(_defaultqueue)
3352 _addqueue(name)
3352 _addqueue(name)
3353 _setactive(name)
3353 _setactive(name)
3354 elif opts.get('rename'):
3354 elif opts.get('rename'):
3355 current = _getcurrent()
3355 current = _getcurrent()
3356 if name == current:
3356 if name == current:
3357 raise util.Abort(_('can\'t rename "%s" to its current name') % name)
3357 raise util.Abort(_('can\'t rename "%s" to its current name') % name)
3358 if name in existing:
3358 if name in existing:
3359 raise util.Abort(_('queue "%s" already exists') % name)
3359 raise util.Abort(_('queue "%s" already exists') % name)
3360
3360
3361 olddir = _queuedir(current)
3361 olddir = _queuedir(current)
3362 newdir = _queuedir(name)
3362 newdir = _queuedir(name)
3363
3363
3364 if os.path.exists(newdir):
3364 if os.path.exists(newdir):
3365 raise util.Abort(_('non-queue directory "%s" already exists') %
3365 raise util.Abort(_('non-queue directory "%s" already exists') %
3366 newdir)
3366 newdir)
3367
3367
3368 fh = repo.opener('patches.queues.new', 'w')
3368 fh = repo.opener('patches.queues.new', 'w')
3369 for queue in existing:
3369 for queue in existing:
3370 if queue == current:
3370 if queue == current:
3371 fh.write('%s\n' % (name,))
3371 fh.write('%s\n' % (name,))
3372 if os.path.exists(olddir):
3372 if os.path.exists(olddir):
3373 util.rename(olddir, newdir)
3373 util.rename(olddir, newdir)
3374 else:
3374 else:
3375 fh.write('%s\n' % (queue,))
3375 fh.write('%s\n' % (queue,))
3376 fh.close()
3376 fh.close()
3377 util.rename(repo.join('patches.queues.new'), repo.join(_allqueues))
3377 util.rename(repo.join('patches.queues.new'), repo.join(_allqueues))
3378 _setactivenocheck(name)
3378 _setactivenocheck(name)
3379 elif opts.get('delete'):
3379 elif opts.get('delete'):
3380 _delete(name)
3380 _delete(name)
3381 elif opts.get('purge'):
3381 elif opts.get('purge'):
3382 if name in existing:
3382 if name in existing:
3383 _delete(name)
3383 _delete(name)
3384 qdir = _queuedir(name)
3384 qdir = _queuedir(name)
3385 if os.path.exists(qdir):
3385 if os.path.exists(qdir):
3386 shutil.rmtree(qdir)
3386 shutil.rmtree(qdir)
3387 else:
3387 else:
3388 if name not in existing:
3388 if name not in existing:
3389 raise util.Abort(_('use --create to create a new queue'))
3389 raise util.Abort(_('use --create to create a new queue'))
3390 _setactive(name)
3390 _setactive(name)
3391
3391
3392 def mqphasedefaults(repo, roots):
3392 def mqphasedefaults(repo, roots):
3393 """callback used to set mq changeset as secret when no phase data exists"""
3393 """callback used to set mq changeset as secret when no phase data exists"""
3394 if repo.mq.applied:
3394 if repo.mq.applied:
3395 if repo.ui.configbool('mq', 'secret', False):
3395 if repo.ui.configbool('mq', 'secret', False):
3396 mqphase = phases.secret
3396 mqphase = phases.secret
3397 else:
3397 else:
3398 mqphase = phases.draft
3398 mqphase = phases.draft
3399 qbase = repo[repo.mq.applied[0].node]
3399 qbase = repo[repo.mq.applied[0].node]
3400 roots[mqphase].add(qbase.node())
3400 roots[mqphase].add(qbase.node())
3401 return roots
3401 return roots
3402
3402
3403 def reposetup(ui, repo):
3403 def reposetup(ui, repo):
3404 class mqrepo(repo.__class__):
3404 class mqrepo(repo.__class__):
3405 @util.propertycache
3405 @util.propertycache
3406 def mq(self):
3406 def mq(self):
3407 return queue(self.ui, self.path)
3407 return queue(self.ui, self.path)
3408
3408
3409 def abortifwdirpatched(self, errmsg, force=False):
3409 def abortifwdirpatched(self, errmsg, force=False):
3410 if self.mq.applied and not force:
3410 if self.mq.applied and not force:
3411 parents = self.dirstate.parents()
3411 parents = self.dirstate.parents()
3412 patches = [s.node for s in self.mq.applied]
3412 patches = [s.node for s in self.mq.applied]
3413 if parents[0] in patches or parents[1] in patches:
3413 if parents[0] in patches or parents[1] in patches:
3414 raise util.Abort(errmsg)
3414 raise util.Abort(errmsg)
3415
3415
3416 def commit(self, text="", user=None, date=None, match=None,
3416 def commit(self, text="", user=None, date=None, match=None,
3417 force=False, editor=False, extra={}):
3417 force=False, editor=False, extra={}):
3418 self.abortifwdirpatched(
3418 self.abortifwdirpatched(
3419 _('cannot commit over an applied mq patch'),
3419 _('cannot commit over an applied mq patch'),
3420 force)
3420 force)
3421
3421
3422 return super(mqrepo, self).commit(text, user, date, match, force,
3422 return super(mqrepo, self).commit(text, user, date, match, force,
3423 editor, extra)
3423 editor, extra)
3424
3424
3425 def checkpush(self, force, revs):
3425 def checkpush(self, force, revs):
3426 if self.mq.applied and not force:
3426 if self.mq.applied and not force:
3427 outapplied = [e.node for e in self.mq.applied]
3427 outapplied = [e.node for e in self.mq.applied]
3428 if revs:
3428 if revs:
3429 # Assume applied patches have no non-patch descendants and
3429 # Assume applied patches have no non-patch descendants and
3430 # are not on remote already. Filtering any changeset not
3430 # are not on remote already. Filtering any changeset not
3431 # pushed.
3431 # pushed.
3432 heads = set(revs)
3432 heads = set(revs)
3433 for node in reversed(outapplied):
3433 for node in reversed(outapplied):
3434 if node in heads:
3434 if node in heads:
3435 break
3435 break
3436 else:
3436 else:
3437 outapplied.pop()
3437 outapplied.pop()
3438 # looking for pushed and shared changeset
3438 # looking for pushed and shared changeset
3439 for node in outapplied:
3439 for node in outapplied:
3440 if self[node].phase() < phases.secret:
3440 if self[node].phase() < phases.secret:
3441 raise util.Abort(_('source has mq patches applied'))
3441 raise util.Abort(_('source has mq patches applied'))
3442 # no non-secret patches pushed
3442 # no non-secret patches pushed
3443 super(mqrepo, self).checkpush(force, revs)
3443 super(mqrepo, self).checkpush(force, revs)
3444
3444
3445 def _findtags(self):
3445 def _findtags(self):
3446 '''augment tags from base class with patch tags'''
3446 '''augment tags from base class with patch tags'''
3447 result = super(mqrepo, self)._findtags()
3447 result = super(mqrepo, self)._findtags()
3448
3448
3449 q = self.mq
3449 q = self.mq
3450 if not q.applied:
3450 if not q.applied:
3451 return result
3451 return result
3452
3452
3453 mqtags = [(patch.node, patch.name) for patch in q.applied]
3453 mqtags = [(patch.node, patch.name) for patch in q.applied]
3454
3454
3455 try:
3455 try:
3456 # for now ignore filtering business
3456 # for now ignore filtering business
3457 self.unfiltered().changelog.rev(mqtags[-1][0])
3457 self.unfiltered().changelog.rev(mqtags[-1][0])
3458 except error.LookupError:
3458 except error.LookupError:
3459 self.ui.warn(_('mq status file refers to unknown node %s\n')
3459 self.ui.warn(_('mq status file refers to unknown node %s\n')
3460 % short(mqtags[-1][0]))
3460 % short(mqtags[-1][0]))
3461 return result
3461 return result
3462
3462
3463 mqtags.append((mqtags[-1][0], 'qtip'))
3463 mqtags.append((mqtags[-1][0], 'qtip'))
3464 mqtags.append((mqtags[0][0], 'qbase'))
3464 mqtags.append((mqtags[0][0], 'qbase'))
3465 mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
3465 mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
3466 tags = result[0]
3466 tags = result[0]
3467 for patch in mqtags:
3467 for patch in mqtags:
3468 if patch[1] in tags:
3468 if patch[1] in tags:
3469 self.ui.warn(_('tag %s overrides mq patch of the same '
3469 self.ui.warn(_('tag %s overrides mq patch of the same '
3470 'name\n') % patch[1])
3470 'name\n') % patch[1])
3471 else:
3471 else:
3472 tags[patch[1]] = patch[0]
3472 tags[patch[1]] = patch[0]
3473
3473
3474 return result
3474 return result
3475
3475
3476 def _branchtags(self, partial, lrev):
3476 def _branchtags(self, partial, lrev):
3477 q = self.mq
3477 q = self.mq
3478 cl = self.changelog
3478 cl = self.changelog
3479 qbase = None
3479 qbase = None
3480 if not q.applied:
3480 if not q.applied:
3481 if getattr(self, '_committingpatch', False):
3481 if getattr(self, '_committingpatch', False):
3482 # Committing a new patch, must be tip
3482 # Committing a new patch, must be tip
3483 qbase = len(cl) - 1
3483 qbase = len(cl) - 1
3484 else:
3484 else:
3485 qbasenode = q.applied[0].node
3485 qbasenode = q.applied[0].node
3486 try:
3486 try:
3487 qbase = self.unfiltered().changelog.rev(qbasenode)
3487 qbase = self.unfiltered().changelog.rev(qbasenode)
3488 except error.LookupError:
3488 except error.LookupError:
3489 self.ui.warn(_('mq status file refers to unknown node %s\n')
3489 self.ui.warn(_('mq status file refers to unknown node %s\n')
3490 % short(qbasenode))
3490 % short(qbasenode))
3491 if qbase is None:
3491 if qbase is None:
3492 return super(mqrepo, self)._branchtags(partial, lrev)
3492 return super(mqrepo, self)._branchtags(partial, lrev)
3493
3493
3494 start = lrev + 1
3494 start = lrev + 1
3495 if start < qbase:
3495 if start < qbase:
3496 # update the cache (excluding the patches) and save it
3496 # update the cache (excluding the patches) and save it
3497 ctxgen = (self[r] for r in xrange(lrev + 1, qbase))
3497 ctxgen = (self[r] for r in xrange(lrev + 1, qbase))
3498 self._updatebranchcache(partial, ctxgen)
3498 self._updatebranchcache(partial, ctxgen)
3499 self._writebranchcache(partial, cl.node(qbase - 1), qbase - 1)
3499 self._writebranchcache(partial, cl.node(qbase - 1), qbase - 1)
3500 start = qbase
3500 start = qbase
3501 # if start = qbase, the cache is as updated as it should be.
3501 # if start = qbase, the cache is as updated as it should be.
3502 # if start > qbase, the cache includes (part of) the patches.
3502 # if start > qbase, the cache includes (part of) the patches.
3503 # we might as well use it, but we won't save it.
3503 # we might as well use it, but we won't save it.
3504
3504
3505 # update the cache up to the tip
3505 # update the cache up to the tip
3506 ctxgen = (self[r] for r in xrange(start, len(cl)))
3506 ctxgen = (self[r] for r in xrange(start, len(cl)))
3507 self._updatebranchcache(partial, ctxgen)
3507 self._updatebranchcache(partial, ctxgen)
3508
3508
3509 return partial
3509 return partial
3510
3510
3511 if repo.local():
3511 if repo.local():
3512 repo.__class__ = mqrepo
3512 repo.__class__ = mqrepo
3513
3513
3514 repo._phasedefaults.append(mqphasedefaults)
3514 repo._phasedefaults.append(mqphasedefaults)
3515
3515
3516 def mqimport(orig, ui, repo, *args, **kwargs):
3516 def mqimport(orig, ui, repo, *args, **kwargs):
3517 if (util.safehasattr(repo, 'abortifwdirpatched')
3517 if (util.safehasattr(repo, 'abortifwdirpatched')
3518 and not kwargs.get('no_commit', False)):
3518 and not kwargs.get('no_commit', False)):
3519 repo.abortifwdirpatched(_('cannot import over an applied patch'),
3519 repo.abortifwdirpatched(_('cannot import over an applied patch'),
3520 kwargs.get('force'))
3520 kwargs.get('force'))
3521 return orig(ui, repo, *args, **kwargs)
3521 return orig(ui, repo, *args, **kwargs)
3522
3522
3523 def mqinit(orig, ui, *args, **kwargs):
3523 def mqinit(orig, ui, *args, **kwargs):
3524 mq = kwargs.pop('mq', None)
3524 mq = kwargs.pop('mq', None)
3525
3525
3526 if not mq:
3526 if not mq:
3527 return orig(ui, *args, **kwargs)
3527 return orig(ui, *args, **kwargs)
3528
3528
3529 if args:
3529 if args:
3530 repopath = args[0]
3530 repopath = args[0]
3531 if not hg.islocal(repopath):
3531 if not hg.islocal(repopath):
3532 raise util.Abort(_('only a local queue repository '
3532 raise util.Abort(_('only a local queue repository '
3533 'may be initialized'))
3533 'may be initialized'))
3534 else:
3534 else:
3535 repopath = cmdutil.findrepo(os.getcwd())
3535 repopath = cmdutil.findrepo(os.getcwd())
3536 if not repopath:
3536 if not repopath:
3537 raise util.Abort(_('there is no Mercurial repository here '
3537 raise util.Abort(_('there is no Mercurial repository here '
3538 '(.hg not found)'))
3538 '(.hg not found)'))
3539 repo = hg.repository(ui, repopath)
3539 repo = hg.repository(ui, repopath)
3540 return qinit(ui, repo, True)
3540 return qinit(ui, repo, True)
3541
3541
3542 def mqcommand(orig, ui, repo, *args, **kwargs):
3542 def mqcommand(orig, ui, repo, *args, **kwargs):
3543 """Add --mq option to operate on patch repository instead of main"""
3543 """Add --mq option to operate on patch repository instead of main"""
3544
3544
3545 # some commands do not like getting unknown options
3545 # some commands do not like getting unknown options
3546 mq = kwargs.pop('mq', None)
3546 mq = kwargs.pop('mq', None)
3547
3547
3548 if not mq:
3548 if not mq:
3549 return orig(ui, repo, *args, **kwargs)
3549 return orig(ui, repo, *args, **kwargs)
3550
3550
3551 q = repo.mq
3551 q = repo.mq
3552 r = q.qrepo()
3552 r = q.qrepo()
3553 if not r:
3553 if not r:
3554 raise util.Abort(_('no queue repository'))
3554 raise util.Abort(_('no queue repository'))
3555 return orig(r.ui, r, *args, **kwargs)
3555 return orig(r.ui, r, *args, **kwargs)
3556
3556
3557 def summary(orig, ui, repo, *args, **kwargs):
3557 def summary(orig, ui, repo, *args, **kwargs):
3558 r = orig(ui, repo, *args, **kwargs)
3558 r = orig(ui, repo, *args, **kwargs)
3559 q = repo.mq
3559 q = repo.mq
3560 m = []
3560 m = []
3561 a, u = len(q.applied), len(q.unapplied(repo))
3561 a, u = len(q.applied), len(q.unapplied(repo))
3562 if a:
3562 if a:
3563 m.append(ui.label(_("%d applied"), 'qseries.applied') % a)
3563 m.append(ui.label(_("%d applied"), 'qseries.applied') % a)
3564 if u:
3564 if u:
3565 m.append(ui.label(_("%d unapplied"), 'qseries.unapplied') % u)
3565 m.append(ui.label(_("%d unapplied"), 'qseries.unapplied') % u)
3566 if m:
3566 if m:
3567 # i18n: column positioning for "hg summary"
3567 # i18n: column positioning for "hg summary"
3568 ui.write(_("mq: %s\n") % ', '.join(m))
3568 ui.write(_("mq: %s\n") % ', '.join(m))
3569 else:
3569 else:
3570 # i18n: column positioning for "hg summary"
3570 # i18n: column positioning for "hg summary"
3571 ui.note(_("mq: (empty queue)\n"))
3571 ui.note(_("mq: (empty queue)\n"))
3572 return r
3572 return r
3573
3573
3574 def revsetmq(repo, subset, x):
3574 def revsetmq(repo, subset, x):
3575 """``mq()``
3575 """``mq()``
3576 Changesets managed by MQ.
3576 Changesets managed by MQ.
3577 """
3577 """
3578 revset.getargs(x, 0, 0, _("mq takes no arguments"))
3578 revset.getargs(x, 0, 0, _("mq takes no arguments"))
3579 applied = set([repo[r.node].rev() for r in repo.mq.applied])
3579 applied = set([repo[r.node].rev() for r in repo.mq.applied])
3580 return [r for r in subset if r in applied]
3580 return [r for r in subset if r in applied]
3581
3581
3582 # tell hggettext to extract docstrings from these functions:
3582 # tell hggettext to extract docstrings from these functions:
3583 i18nfunctions = [revsetmq]
3583 i18nfunctions = [revsetmq]
3584
3584
3585 def extsetup(ui):
3585 def extsetup(ui):
3586 # Ensure mq wrappers are called first, regardless of extension load order by
3586 # Ensure mq wrappers are called first, regardless of extension load order by
3587 # NOT wrapping in uisetup() and instead deferring to init stage two here.
3587 # NOT wrapping in uisetup() and instead deferring to init stage two here.
3588 mqopt = [('', 'mq', None, _("operate on patch repository"))]
3588 mqopt = [('', 'mq', None, _("operate on patch repository"))]
3589
3589
3590 extensions.wrapcommand(commands.table, 'import', mqimport)
3590 extensions.wrapcommand(commands.table, 'import', mqimport)
3591 extensions.wrapcommand(commands.table, 'summary', summary)
3591 extensions.wrapcommand(commands.table, 'summary', summary)
3592
3592
3593 entry = extensions.wrapcommand(commands.table, 'init', mqinit)
3593 entry = extensions.wrapcommand(commands.table, 'init', mqinit)
3594 entry[1].extend(mqopt)
3594 entry[1].extend(mqopt)
3595
3595
3596 nowrap = set(commands.norepo.split(" "))
3596 nowrap = set(commands.norepo.split(" "))
3597
3597
3598 def dotable(cmdtable):
3598 def dotable(cmdtable):
3599 for cmd in cmdtable.keys():
3599 for cmd in cmdtable.keys():
3600 cmd = cmdutil.parsealiases(cmd)[0]
3600 cmd = cmdutil.parsealiases(cmd)[0]
3601 if cmd in nowrap:
3601 if cmd in nowrap:
3602 continue
3602 continue
3603 entry = extensions.wrapcommand(cmdtable, cmd, mqcommand)
3603 entry = extensions.wrapcommand(cmdtable, cmd, mqcommand)
3604 entry[1].extend(mqopt)
3604 entry[1].extend(mqopt)
3605
3605
3606 dotable(commands.table)
3606 dotable(commands.table)
3607
3607
3608 for extname, extmodule in extensions.extensions():
3608 for extname, extmodule in extensions.extensions():
3609 if extmodule.__file__ != __file__:
3609 if extmodule.__file__ != __file__:
3610 dotable(getattr(extmodule, 'cmdtable', {}))
3610 dotable(getattr(extmodule, 'cmdtable', {}))
3611
3611
3612 revset.symbols['mq'] = revsetmq
3612 revset.symbols['mq'] = revsetmq
3613
3613
3614 colortable = {'qguard.negative': 'red',
3614 colortable = {'qguard.negative': 'red',
3615 'qguard.positive': 'yellow',
3615 'qguard.positive': 'yellow',
3616 'qguard.unguarded': 'green',
3616 'qguard.unguarded': 'green',
3617 'qseries.applied': 'blue bold underline',
3617 'qseries.applied': 'blue bold underline',
3618 'qseries.guarded': 'black bold',
3618 'qseries.guarded': 'black bold',
3619 'qseries.missing': 'red bold',
3619 'qseries.missing': 'red bold',
3620 'qseries.unapplied': 'black bold'}
3620 'qseries.unapplied': 'black bold'}
3621
3621
3622 commands.inferrepo += " qnew qrefresh qdiff qcommit"
3622 commands.inferrepo += " qnew qrefresh qdiff qcommit"
@@ -1,172 +1,172 b''
1 # win32text.py - LF <-> CRLF/CR translation utilities for Windows/Mac users
1 # win32text.py - LF <-> CRLF/CR translation utilities for Windows/Mac users
2 #
2 #
3 # Copyright 2005, 2007-2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2005, 2007-2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''perform automatic newline conversion
8 '''perform automatic newline conversion
9
9
10 Deprecation: The win32text extension requires each user to configure
10 Deprecation: The win32text extension requires each user to configure
11 the extension again and again for each clone since the configuration
11 the extension again and again for each clone since the configuration
12 is not copied when cloning.
12 is not copied when cloning.
13
13
14 We have therefore made the ``eol`` as an alternative. The ``eol``
14 We have therefore made the ``eol`` as an alternative. The ``eol``
15 uses a version controlled file for its configuration and each clone
15 uses a version controlled file for its configuration and each clone
16 will therefore use the right settings from the start.
16 will therefore use the right settings from the start.
17
17
18 To perform automatic newline conversion, use::
18 To perform automatic newline conversion, use::
19
19
20 [extensions]
20 [extensions]
21 win32text =
21 win32text =
22 [encode]
22 [encode]
23 ** = cleverencode:
23 ** = cleverencode:
24 # or ** = macencode:
24 # or ** = macencode:
25
25
26 [decode]
26 [decode]
27 ** = cleverdecode:
27 ** = cleverdecode:
28 # or ** = macdecode:
28 # or ** = macdecode:
29
29
30 If not doing conversion, to make sure you do not commit CRLF/CR by accident::
30 If not doing conversion, to make sure you do not commit CRLF/CR by accident::
31
31
32 [hooks]
32 [hooks]
33 pretxncommit.crlf = python:hgext.win32text.forbidcrlf
33 pretxncommit.crlf = python:hgext.win32text.forbidcrlf
34 # or pretxncommit.cr = python:hgext.win32text.forbidcr
34 # or pretxncommit.cr = python:hgext.win32text.forbidcr
35
35
36 To do the same check on a server to prevent CRLF/CR from being
36 To do the same check on a server to prevent CRLF/CR from being
37 pushed or pulled::
37 pushed or pulled::
38
38
39 [hooks]
39 [hooks]
40 pretxnchangegroup.crlf = python:hgext.win32text.forbidcrlf
40 pretxnchangegroup.crlf = python:hgext.win32text.forbidcrlf
41 # or pretxnchangegroup.cr = python:hgext.win32text.forbidcr
41 # or pretxnchangegroup.cr = python:hgext.win32text.forbidcr
42 '''
42 '''
43
43
44 from mercurial.i18n import _
44 from mercurial.i18n import _
45 from mercurial.node import short
45 from mercurial.node import short
46 from mercurial import util
46 from mercurial import util
47 import re
47 import re
48
48
49 testedwith = 'internal'
49 testedwith = 'internal'
50
50
51 # regexp for single LF without CR preceding.
51 # regexp for single LF without CR preceding.
52 re_single_lf = re.compile('(^|[^\r])\n', re.MULTILINE)
52 re_single_lf = re.compile('(^|[^\r])\n', re.MULTILINE)
53
53
54 newlinestr = {'\r\n': 'CRLF', '\r': 'CR'}
54 newlinestr = {'\r\n': 'CRLF', '\r': 'CR'}
55 filterstr = {'\r\n': 'clever', '\r': 'mac'}
55 filterstr = {'\r\n': 'clever', '\r': 'mac'}
56
56
57 def checknewline(s, newline, ui=None, repo=None, filename=None):
57 def checknewline(s, newline, ui=None, repo=None, filename=None):
58 # warn if already has 'newline' in repository.
58 # warn if already has 'newline' in repository.
59 # it might cause unexpected eol conversion.
59 # it might cause unexpected eol conversion.
60 # see issue 302:
60 # see issue 302:
61 # http://mercurial.selenic.com/bts/issue302
61 # http://mercurial.selenic.com/bts/issue302
62 if newline in s and ui and filename and repo:
62 if newline in s and ui and filename and repo:
63 ui.warn(_('WARNING: %s already has %s line endings\n'
63 ui.warn(_('WARNING: %s already has %s line endings\n'
64 'and does not need EOL conversion by the win32text plugin.\n'
64 'and does not need EOL conversion by the win32text plugin.\n'
65 'Before your next commit, please reconsider your '
65 'Before your next commit, please reconsider your '
66 'encode/decode settings in \nMercurial.ini or %s.\n') %
66 'encode/decode settings in \nMercurial.ini or %s.\n') %
67 (filename, newlinestr[newline], repo.join('hgrc')))
67 (filename, newlinestr[newline], repo.join('hgrc')))
68
68
69 def dumbdecode(s, cmd, **kwargs):
69 def dumbdecode(s, cmd, **kwargs):
70 checknewline(s, '\r\n', **kwargs)
70 checknewline(s, '\r\n', **kwargs)
71 # replace single LF to CRLF
71 # replace single LF to CRLF
72 return re_single_lf.sub('\\1\r\n', s)
72 return re_single_lf.sub('\\1\r\n', s)
73
73
74 def dumbencode(s, cmd):
74 def dumbencode(s, cmd):
75 return s.replace('\r\n', '\n')
75 return s.replace('\r\n', '\n')
76
76
77 def macdumbdecode(s, cmd, **kwargs):
77 def macdumbdecode(s, cmd, **kwargs):
78 checknewline(s, '\r', **kwargs)
78 checknewline(s, '\r', **kwargs)
79 return s.replace('\n', '\r')
79 return s.replace('\n', '\r')
80
80
81 def macdumbencode(s, cmd):
81 def macdumbencode(s, cmd):
82 return s.replace('\r', '\n')
82 return s.replace('\r', '\n')
83
83
84 def cleverdecode(s, cmd, **kwargs):
84 def cleverdecode(s, cmd, **kwargs):
85 if not util.binary(s):
85 if not util.binary(s):
86 return dumbdecode(s, cmd, **kwargs)
86 return dumbdecode(s, cmd, **kwargs)
87 return s
87 return s
88
88
89 def cleverencode(s, cmd):
89 def cleverencode(s, cmd):
90 if not util.binary(s):
90 if not util.binary(s):
91 return dumbencode(s, cmd)
91 return dumbencode(s, cmd)
92 return s
92 return s
93
93
94 def macdecode(s, cmd, **kwargs):
94 def macdecode(s, cmd, **kwargs):
95 if not util.binary(s):
95 if not util.binary(s):
96 return macdumbdecode(s, cmd, **kwargs)
96 return macdumbdecode(s, cmd, **kwargs)
97 return s
97 return s
98
98
99 def macencode(s, cmd):
99 def macencode(s, cmd):
100 if not util.binary(s):
100 if not util.binary(s):
101 return macdumbencode(s, cmd)
101 return macdumbencode(s, cmd)
102 return s
102 return s
103
103
104 _filters = {
104 _filters = {
105 'dumbdecode:': dumbdecode,
105 'dumbdecode:': dumbdecode,
106 'dumbencode:': dumbencode,
106 'dumbencode:': dumbencode,
107 'cleverdecode:': cleverdecode,
107 'cleverdecode:': cleverdecode,
108 'cleverencode:': cleverencode,
108 'cleverencode:': cleverencode,
109 'macdumbdecode:': macdumbdecode,
109 'macdumbdecode:': macdumbdecode,
110 'macdumbencode:': macdumbencode,
110 'macdumbencode:': macdumbencode,
111 'macdecode:': macdecode,
111 'macdecode:': macdecode,
112 'macencode:': macencode,
112 'macencode:': macencode,
113 }
113 }
114
114
115 def forbidnewline(ui, repo, hooktype, node, newline, **kwargs):
115 def forbidnewline(ui, repo, hooktype, node, newline, **kwargs):
116 halt = False
116 halt = False
117 seen = set()
117 seen = set()
118 # we try to walk changesets in reverse order from newest to
118 # we try to walk changesets in reverse order from newest to
119 # oldest, so that if we see a file multiple times, we take the
119 # oldest, so that if we see a file multiple times, we take the
120 # newest version as canonical. this prevents us from blocking a
120 # newest version as canonical. this prevents us from blocking a
121 # changegroup that contains an unacceptable commit followed later
121 # changegroup that contains an unacceptable commit followed later
122 # by a commit that fixes the problem.
122 # by a commit that fixes the problem.
123 tip = repo['tip']
123 tip = repo['tip']
124 for rev in xrange(len(repo)-1, repo[node].rev()-1, -1):
124 for rev in xrange(len(repo) - 1, repo[node].rev() - 1, -1):
125 c = repo[rev]
125 c = repo[rev]
126 for f in c.files():
126 for f in c.files():
127 if f in seen or f not in tip or f not in c:
127 if f in seen or f not in tip or f not in c:
128 continue
128 continue
129 seen.add(f)
129 seen.add(f)
130 data = c[f].data()
130 data = c[f].data()
131 if not util.binary(data) and newline in data:
131 if not util.binary(data) and newline in data:
132 if not halt:
132 if not halt:
133 ui.warn(_('attempt to commit or push text file(s) '
133 ui.warn(_('attempt to commit or push text file(s) '
134 'using %s line endings\n') %
134 'using %s line endings\n') %
135 newlinestr[newline])
135 newlinestr[newline])
136 ui.warn(_('in %s: %s\n') % (short(c.node()), f))
136 ui.warn(_('in %s: %s\n') % (short(c.node()), f))
137 halt = True
137 halt = True
138 if halt and hooktype == 'pretxnchangegroup':
138 if halt and hooktype == 'pretxnchangegroup':
139 crlf = newlinestr[newline].lower()
139 crlf = newlinestr[newline].lower()
140 filter = filterstr[newline]
140 filter = filterstr[newline]
141 ui.warn(_('\nTo prevent this mistake in your local repository,\n'
141 ui.warn(_('\nTo prevent this mistake in your local repository,\n'
142 'add to Mercurial.ini or .hg/hgrc:\n'
142 'add to Mercurial.ini or .hg/hgrc:\n'
143 '\n'
143 '\n'
144 '[hooks]\n'
144 '[hooks]\n'
145 'pretxncommit.%s = python:hgext.win32text.forbid%s\n'
145 'pretxncommit.%s = python:hgext.win32text.forbid%s\n'
146 '\n'
146 '\n'
147 'and also consider adding:\n'
147 'and also consider adding:\n'
148 '\n'
148 '\n'
149 '[extensions]\n'
149 '[extensions]\n'
150 'win32text =\n'
150 'win32text =\n'
151 '[encode]\n'
151 '[encode]\n'
152 '** = %sencode:\n'
152 '** = %sencode:\n'
153 '[decode]\n'
153 '[decode]\n'
154 '** = %sdecode:\n') % (crlf, crlf, filter, filter))
154 '** = %sdecode:\n') % (crlf, crlf, filter, filter))
155 return halt
155 return halt
156
156
157 def forbidcrlf(ui, repo, hooktype, node, **kwargs):
157 def forbidcrlf(ui, repo, hooktype, node, **kwargs):
158 return forbidnewline(ui, repo, hooktype, node, '\r\n', **kwargs)
158 return forbidnewline(ui, repo, hooktype, node, '\r\n', **kwargs)
159
159
160 def forbidcr(ui, repo, hooktype, node, **kwargs):
160 def forbidcr(ui, repo, hooktype, node, **kwargs):
161 return forbidnewline(ui, repo, hooktype, node, '\r', **kwargs)
161 return forbidnewline(ui, repo, hooktype, node, '\r', **kwargs)
162
162
163 def reposetup(ui, repo):
163 def reposetup(ui, repo):
164 if not repo.local():
164 if not repo.local():
165 return
165 return
166 for name, fn in _filters.iteritems():
166 for name, fn in _filters.iteritems():
167 repo.adddatafilter(name, fn)
167 repo.adddatafilter(name, fn)
168
168
169 def extsetup(ui):
169 def extsetup(ui):
170 if ui.configbool('win32text', 'warn', True):
170 if ui.configbool('win32text', 'warn', True):
171 ui.warn(_("win32text is deprecated: "
171 ui.warn(_("win32text is deprecated: "
172 "http://mercurial.selenic.com/wiki/Win32TextExtension\n"))
172 "http://mercurial.selenic.com/wiki/Win32TextExtension\n"))
@@ -1,105 +1,105 b''
1 # ignore.py - ignored file handling for mercurial
1 # ignore.py - ignored file handling for mercurial
2 #
2 #
3 # Copyright 2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 import util, match
9 import util, match
10 import re
10 import re
11
11
12 _commentre = None
12 _commentre = None
13
13
14 def ignorepats(lines):
14 def ignorepats(lines):
15 '''parse lines (iterable) of .hgignore text, returning a tuple of
15 '''parse lines (iterable) of .hgignore text, returning a tuple of
16 (patterns, parse errors). These patterns should be given to compile()
16 (patterns, parse errors). These patterns should be given to compile()
17 to be validated and converted into a match function.'''
17 to be validated and converted into a match function.'''
18 syntaxes = {'re': 'relre:', 'regexp': 'relre:', 'glob': 'relglob:'}
18 syntaxes = {'re': 'relre:', 'regexp': 'relre:', 'glob': 'relglob:'}
19 syntax = 'relre:'
19 syntax = 'relre:'
20 patterns = []
20 patterns = []
21 warnings = []
21 warnings = []
22
22
23 for line in lines:
23 for line in lines:
24 if "#" in line:
24 if "#" in line:
25 global _commentre
25 global _commentre
26 if not _commentre:
26 if not _commentre:
27 _commentre = re.compile(r'((^|[^\\])(\\\\)*)#.*')
27 _commentre = re.compile(r'((^|[^\\])(\\\\)*)#.*')
28 # remove comments prefixed by an even number of escapes
28 # remove comments prefixed by an even number of escapes
29 line = _commentre.sub(r'\1', line)
29 line = _commentre.sub(r'\1', line)
30 # fixup properly escaped comments that survived the above
30 # fixup properly escaped comments that survived the above
31 line = line.replace("\\#", "#")
31 line = line.replace("\\#", "#")
32 line = line.rstrip()
32 line = line.rstrip()
33 if not line:
33 if not line:
34 continue
34 continue
35
35
36 if line.startswith('syntax:'):
36 if line.startswith('syntax:'):
37 s = line[7:].strip()
37 s = line[7:].strip()
38 try:
38 try:
39 syntax = syntaxes[s]
39 syntax = syntaxes[s]
40 except KeyError:
40 except KeyError:
41 warnings.append(_("ignoring invalid syntax '%s'") % s)
41 warnings.append(_("ignoring invalid syntax '%s'") % s)
42 continue
42 continue
43 pat = syntax + line
43 pat = syntax + line
44 for s, rels in syntaxes.iteritems():
44 for s, rels in syntaxes.iteritems():
45 if line.startswith(rels):
45 if line.startswith(rels):
46 pat = line
46 pat = line
47 break
47 break
48 elif line.startswith(s+':'):
48 elif line.startswith(s+':'):
49 pat = rels + line[len(s)+1:]
49 pat = rels + line[len(s) + 1:]
50 break
50 break
51 patterns.append(pat)
51 patterns.append(pat)
52
52
53 return patterns, warnings
53 return patterns, warnings
54
54
55 def ignore(root, files, warn):
55 def ignore(root, files, warn):
56 '''return matcher covering patterns in 'files'.
56 '''return matcher covering patterns in 'files'.
57
57
58 the files parsed for patterns include:
58 the files parsed for patterns include:
59 .hgignore in the repository root
59 .hgignore in the repository root
60 any additional files specified in the [ui] section of ~/.hgrc
60 any additional files specified in the [ui] section of ~/.hgrc
61
61
62 trailing white space is dropped.
62 trailing white space is dropped.
63 the escape character is backslash.
63 the escape character is backslash.
64 comments start with #.
64 comments start with #.
65 empty lines are skipped.
65 empty lines are skipped.
66
66
67 lines can be of the following formats:
67 lines can be of the following formats:
68
68
69 syntax: regexp # defaults following lines to non-rooted regexps
69 syntax: regexp # defaults following lines to non-rooted regexps
70 syntax: glob # defaults following lines to non-rooted globs
70 syntax: glob # defaults following lines to non-rooted globs
71 re:pattern # non-rooted regular expression
71 re:pattern # non-rooted regular expression
72 glob:pattern # non-rooted glob
72 glob:pattern # non-rooted glob
73 pattern # pattern of the current default type'''
73 pattern # pattern of the current default type'''
74
74
75 pats = {}
75 pats = {}
76 for f in files:
76 for f in files:
77 try:
77 try:
78 pats[f] = []
78 pats[f] = []
79 fp = open(f)
79 fp = open(f)
80 pats[f], warnings = ignorepats(fp)
80 pats[f], warnings = ignorepats(fp)
81 fp.close()
81 fp.close()
82 for warning in warnings:
82 for warning in warnings:
83 warn("%s: %s\n" % (f, warning))
83 warn("%s: %s\n" % (f, warning))
84 except IOError, inst:
84 except IOError, inst:
85 if f != files[0]:
85 if f != files[0]:
86 warn(_("skipping unreadable ignore file '%s': %s\n") %
86 warn(_("skipping unreadable ignore file '%s': %s\n") %
87 (f, inst.strerror))
87 (f, inst.strerror))
88
88
89 allpats = []
89 allpats = []
90 for patlist in pats.values():
90 for patlist in pats.values():
91 allpats.extend(patlist)
91 allpats.extend(patlist)
92 if not allpats:
92 if not allpats:
93 return util.never
93 return util.never
94
94
95 try:
95 try:
96 ignorefunc = match.match(root, '', [], allpats)
96 ignorefunc = match.match(root, '', [], allpats)
97 except util.Abort:
97 except util.Abort:
98 # Re-raise an exception where the src is the right file
98 # Re-raise an exception where the src is the right file
99 for f, patlist in pats.iteritems():
99 for f, patlist in pats.iteritems():
100 try:
100 try:
101 match.match(root, '', [], patlist)
101 match.match(root, '', [], patlist)
102 except util.Abort, inst:
102 except util.Abort, inst:
103 raise util.Abort('%s: %s' % (f, inst[0]))
103 raise util.Abort('%s: %s' % (f, inst[0]))
104
104
105 return ignorefunc
105 return ignorefunc
@@ -1,2680 +1,2680 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 from node import bin, hex, nullid, nullrev, short
7 from node import bin, hex, nullid, nullrev, short
8 from i18n import _
8 from i18n import _
9 import peer, changegroup, subrepo, discovery, pushkey, obsolete
9 import peer, changegroup, subrepo, discovery, pushkey, obsolete
10 import changelog, dirstate, filelog, manifest, context, bookmarks, phases
10 import changelog, dirstate, filelog, manifest, context, bookmarks, phases
11 import lock, transaction, store, encoding, base85
11 import lock, transaction, store, encoding, base85
12 import scmutil, util, extensions, hook, error, revset
12 import scmutil, util, extensions, hook, error, revset
13 import match as matchmod
13 import match as matchmod
14 import merge as mergemod
14 import merge as mergemod
15 import tags as tagsmod
15 import tags as tagsmod
16 from lock import release
16 from lock import release
17 import weakref, errno, os, time, inspect
17 import weakref, errno, os, time, inspect
18 propertycache = util.propertycache
18 propertycache = util.propertycache
19 filecache = scmutil.filecache
19 filecache = scmutil.filecache
20
20
21 class repofilecache(filecache):
21 class repofilecache(filecache):
22 """All filecache usage on repo are done for logic that should be unfiltered
22 """All filecache usage on repo are done for logic that should be unfiltered
23 """
23 """
24
24
25 def __get__(self, repo, type=None):
25 def __get__(self, repo, type=None):
26 return super(repofilecache, self).__get__(repo.unfiltered(), type)
26 return super(repofilecache, self).__get__(repo.unfiltered(), type)
27 def __set__(self, repo, value):
27 def __set__(self, repo, value):
28 return super(repofilecache, self).__set__(repo.unfiltered(), value)
28 return super(repofilecache, self).__set__(repo.unfiltered(), value)
29 def __delete__(self, repo):
29 def __delete__(self, repo):
30 return super(repofilecache, self).__delete__(repo.unfiltered())
30 return super(repofilecache, self).__delete__(repo.unfiltered())
31
31
32 class storecache(repofilecache):
32 class storecache(repofilecache):
33 """filecache for files in the store"""
33 """filecache for files in the store"""
34 def join(self, obj, fname):
34 def join(self, obj, fname):
35 return obj.sjoin(fname)
35 return obj.sjoin(fname)
36
36
37 class unfilteredpropertycache(propertycache):
37 class unfilteredpropertycache(propertycache):
38 """propertycache that apply to unfiltered repo only"""
38 """propertycache that apply to unfiltered repo only"""
39
39
40 def __get__(self, repo, type=None):
40 def __get__(self, repo, type=None):
41 return super(unfilteredpropertycache, self).__get__(repo.unfiltered())
41 return super(unfilteredpropertycache, self).__get__(repo.unfiltered())
42
42
43 class filteredpropertycache(propertycache):
43 class filteredpropertycache(propertycache):
44 """propertycache that must take filtering in account"""
44 """propertycache that must take filtering in account"""
45
45
46 def cachevalue(self, obj, value):
46 def cachevalue(self, obj, value):
47 object.__setattr__(obj, self.name, value)
47 object.__setattr__(obj, self.name, value)
48
48
49
49
50 def hasunfilteredcache(repo, name):
50 def hasunfilteredcache(repo, name):
51 """check if an repo and a unfilteredproperty cached value for <name>"""
51 """check if an repo and a unfilteredproperty cached value for <name>"""
52 return name in vars(repo.unfiltered())
52 return name in vars(repo.unfiltered())
53
53
54 def unfilteredmethod(orig):
54 def unfilteredmethod(orig):
55 """decorate method that always need to be run on unfiltered version"""
55 """decorate method that always need to be run on unfiltered version"""
56 def wrapper(repo, *args, **kwargs):
56 def wrapper(repo, *args, **kwargs):
57 return orig(repo.unfiltered(), *args, **kwargs)
57 return orig(repo.unfiltered(), *args, **kwargs)
58 return wrapper
58 return wrapper
59
59
60 MODERNCAPS = set(('lookup', 'branchmap', 'pushkey', 'known', 'getbundle'))
60 MODERNCAPS = set(('lookup', 'branchmap', 'pushkey', 'known', 'getbundle'))
61 LEGACYCAPS = MODERNCAPS.union(set(['changegroupsubset']))
61 LEGACYCAPS = MODERNCAPS.union(set(['changegroupsubset']))
62
62
63 class localpeer(peer.peerrepository):
63 class localpeer(peer.peerrepository):
64 '''peer for a local repo; reflects only the most recent API'''
64 '''peer for a local repo; reflects only the most recent API'''
65
65
66 def __init__(self, repo, caps=MODERNCAPS):
66 def __init__(self, repo, caps=MODERNCAPS):
67 peer.peerrepository.__init__(self)
67 peer.peerrepository.__init__(self)
68 self._repo = repo
68 self._repo = repo
69 self.ui = repo.ui
69 self.ui = repo.ui
70 self._caps = repo._restrictcapabilities(caps)
70 self._caps = repo._restrictcapabilities(caps)
71 self.requirements = repo.requirements
71 self.requirements = repo.requirements
72 self.supportedformats = repo.supportedformats
72 self.supportedformats = repo.supportedformats
73
73
74 def close(self):
74 def close(self):
75 self._repo.close()
75 self._repo.close()
76
76
77 def _capabilities(self):
77 def _capabilities(self):
78 return self._caps
78 return self._caps
79
79
80 def local(self):
80 def local(self):
81 return self._repo
81 return self._repo
82
82
83 def canpush(self):
83 def canpush(self):
84 return True
84 return True
85
85
86 def url(self):
86 def url(self):
87 return self._repo.url()
87 return self._repo.url()
88
88
89 def lookup(self, key):
89 def lookup(self, key):
90 return self._repo.lookup(key)
90 return self._repo.lookup(key)
91
91
92 def branchmap(self):
92 def branchmap(self):
93 return discovery.visiblebranchmap(self._repo)
93 return discovery.visiblebranchmap(self._repo)
94
94
95 def heads(self):
95 def heads(self):
96 return discovery.visibleheads(self._repo)
96 return discovery.visibleheads(self._repo)
97
97
98 def known(self, nodes):
98 def known(self, nodes):
99 return self._repo.known(nodes)
99 return self._repo.known(nodes)
100
100
101 def getbundle(self, source, heads=None, common=None):
101 def getbundle(self, source, heads=None, common=None):
102 return self._repo.getbundle(source, heads=heads, common=common)
102 return self._repo.getbundle(source, heads=heads, common=common)
103
103
104 # TODO We might want to move the next two calls into legacypeer and add
104 # TODO We might want to move the next two calls into legacypeer and add
105 # unbundle instead.
105 # unbundle instead.
106
106
107 def lock(self):
107 def lock(self):
108 return self._repo.lock()
108 return self._repo.lock()
109
109
110 def addchangegroup(self, cg, source, url):
110 def addchangegroup(self, cg, source, url):
111 return self._repo.addchangegroup(cg, source, url)
111 return self._repo.addchangegroup(cg, source, url)
112
112
113 def pushkey(self, namespace, key, old, new):
113 def pushkey(self, namespace, key, old, new):
114 return self._repo.pushkey(namespace, key, old, new)
114 return self._repo.pushkey(namespace, key, old, new)
115
115
116 def listkeys(self, namespace):
116 def listkeys(self, namespace):
117 return self._repo.listkeys(namespace)
117 return self._repo.listkeys(namespace)
118
118
119 def debugwireargs(self, one, two, three=None, four=None, five=None):
119 def debugwireargs(self, one, two, three=None, four=None, five=None):
120 '''used to test argument passing over the wire'''
120 '''used to test argument passing over the wire'''
121 return "%s %s %s %s %s" % (one, two, three, four, five)
121 return "%s %s %s %s %s" % (one, two, three, four, five)
122
122
123 class locallegacypeer(localpeer):
123 class locallegacypeer(localpeer):
124 '''peer extension which implements legacy methods too; used for tests with
124 '''peer extension which implements legacy methods too; used for tests with
125 restricted capabilities'''
125 restricted capabilities'''
126
126
127 def __init__(self, repo):
127 def __init__(self, repo):
128 localpeer.__init__(self, repo, caps=LEGACYCAPS)
128 localpeer.__init__(self, repo, caps=LEGACYCAPS)
129
129
130 def branches(self, nodes):
130 def branches(self, nodes):
131 return self._repo.branches(nodes)
131 return self._repo.branches(nodes)
132
132
133 def between(self, pairs):
133 def between(self, pairs):
134 return self._repo.between(pairs)
134 return self._repo.between(pairs)
135
135
136 def changegroup(self, basenodes, source):
136 def changegroup(self, basenodes, source):
137 return self._repo.changegroup(basenodes, source)
137 return self._repo.changegroup(basenodes, source)
138
138
139 def changegroupsubset(self, bases, heads, source):
139 def changegroupsubset(self, bases, heads, source):
140 return self._repo.changegroupsubset(bases, heads, source)
140 return self._repo.changegroupsubset(bases, heads, source)
141
141
142 class localrepository(object):
142 class localrepository(object):
143
143
144 supportedformats = set(('revlogv1', 'generaldelta'))
144 supportedformats = set(('revlogv1', 'generaldelta'))
145 supported = supportedformats | set(('store', 'fncache', 'shared',
145 supported = supportedformats | set(('store', 'fncache', 'shared',
146 'dotencode'))
146 'dotencode'))
147 openerreqs = set(('revlogv1', 'generaldelta'))
147 openerreqs = set(('revlogv1', 'generaldelta'))
148 requirements = ['revlogv1']
148 requirements = ['revlogv1']
149
149
150 def _baserequirements(self, create):
150 def _baserequirements(self, create):
151 return self.requirements[:]
151 return self.requirements[:]
152
152
153 def __init__(self, baseui, path=None, create=False):
153 def __init__(self, baseui, path=None, create=False):
154 self.wvfs = scmutil.vfs(path, expand=True)
154 self.wvfs = scmutil.vfs(path, expand=True)
155 self.wopener = self.wvfs
155 self.wopener = self.wvfs
156 self.root = self.wvfs.base
156 self.root = self.wvfs.base
157 self.path = self.wvfs.join(".hg")
157 self.path = self.wvfs.join(".hg")
158 self.origroot = path
158 self.origroot = path
159 self.auditor = scmutil.pathauditor(self.root, self._checknested)
159 self.auditor = scmutil.pathauditor(self.root, self._checknested)
160 self.vfs = scmutil.vfs(self.path)
160 self.vfs = scmutil.vfs(self.path)
161 self.opener = self.vfs
161 self.opener = self.vfs
162 self.baseui = baseui
162 self.baseui = baseui
163 self.ui = baseui.copy()
163 self.ui = baseui.copy()
164 # A list of callback to shape the phase if no data were found.
164 # A list of callback to shape the phase if no data were found.
165 # Callback are in the form: func(repo, roots) --> processed root.
165 # Callback are in the form: func(repo, roots) --> processed root.
166 # This list it to be filled by extension during repo setup
166 # This list it to be filled by extension during repo setup
167 self._phasedefaults = []
167 self._phasedefaults = []
168 try:
168 try:
169 self.ui.readconfig(self.join("hgrc"), self.root)
169 self.ui.readconfig(self.join("hgrc"), self.root)
170 extensions.loadall(self.ui)
170 extensions.loadall(self.ui)
171 except IOError:
171 except IOError:
172 pass
172 pass
173
173
174 if not self.vfs.isdir():
174 if not self.vfs.isdir():
175 if create:
175 if create:
176 if not self.wvfs.exists():
176 if not self.wvfs.exists():
177 self.wvfs.makedirs()
177 self.wvfs.makedirs()
178 self.vfs.makedir(notindexed=True)
178 self.vfs.makedir(notindexed=True)
179 requirements = self._baserequirements(create)
179 requirements = self._baserequirements(create)
180 if self.ui.configbool('format', 'usestore', True):
180 if self.ui.configbool('format', 'usestore', True):
181 self.vfs.mkdir("store")
181 self.vfs.mkdir("store")
182 requirements.append("store")
182 requirements.append("store")
183 if self.ui.configbool('format', 'usefncache', True):
183 if self.ui.configbool('format', 'usefncache', True):
184 requirements.append("fncache")
184 requirements.append("fncache")
185 if self.ui.configbool('format', 'dotencode', True):
185 if self.ui.configbool('format', 'dotencode', True):
186 requirements.append('dotencode')
186 requirements.append('dotencode')
187 # create an invalid changelog
187 # create an invalid changelog
188 self.vfs.append(
188 self.vfs.append(
189 "00changelog.i",
189 "00changelog.i",
190 '\0\0\0\2' # represents revlogv2
190 '\0\0\0\2' # represents revlogv2
191 ' dummy changelog to prevent using the old repo layout'
191 ' dummy changelog to prevent using the old repo layout'
192 )
192 )
193 if self.ui.configbool('format', 'generaldelta', False):
193 if self.ui.configbool('format', 'generaldelta', False):
194 requirements.append("generaldelta")
194 requirements.append("generaldelta")
195 requirements = set(requirements)
195 requirements = set(requirements)
196 else:
196 else:
197 raise error.RepoError(_("repository %s not found") % path)
197 raise error.RepoError(_("repository %s not found") % path)
198 elif create:
198 elif create:
199 raise error.RepoError(_("repository %s already exists") % path)
199 raise error.RepoError(_("repository %s already exists") % path)
200 else:
200 else:
201 try:
201 try:
202 requirements = scmutil.readrequires(self.vfs, self.supported)
202 requirements = scmutil.readrequires(self.vfs, self.supported)
203 except IOError, inst:
203 except IOError, inst:
204 if inst.errno != errno.ENOENT:
204 if inst.errno != errno.ENOENT:
205 raise
205 raise
206 requirements = set()
206 requirements = set()
207
207
208 self.sharedpath = self.path
208 self.sharedpath = self.path
209 try:
209 try:
210 s = os.path.realpath(self.opener.read("sharedpath").rstrip('\n'))
210 s = os.path.realpath(self.opener.read("sharedpath").rstrip('\n'))
211 if not os.path.exists(s):
211 if not os.path.exists(s):
212 raise error.RepoError(
212 raise error.RepoError(
213 _('.hg/sharedpath points to nonexistent directory %s') % s)
213 _('.hg/sharedpath points to nonexistent directory %s') % s)
214 self.sharedpath = s
214 self.sharedpath = s
215 except IOError, inst:
215 except IOError, inst:
216 if inst.errno != errno.ENOENT:
216 if inst.errno != errno.ENOENT:
217 raise
217 raise
218
218
219 self.store = store.store(requirements, self.sharedpath, scmutil.vfs)
219 self.store = store.store(requirements, self.sharedpath, scmutil.vfs)
220 self.spath = self.store.path
220 self.spath = self.store.path
221 self.svfs = self.store.vfs
221 self.svfs = self.store.vfs
222 self.sopener = self.svfs
222 self.sopener = self.svfs
223 self.sjoin = self.store.join
223 self.sjoin = self.store.join
224 self.vfs.createmode = self.store.createmode
224 self.vfs.createmode = self.store.createmode
225 self._applyrequirements(requirements)
225 self._applyrequirements(requirements)
226 if create:
226 if create:
227 self._writerequirements()
227 self._writerequirements()
228
228
229
229
230 self._branchcache = None
230 self._branchcache = None
231 self._branchcachetip = None
231 self._branchcachetip = None
232 self.filterpats = {}
232 self.filterpats = {}
233 self._datafilters = {}
233 self._datafilters = {}
234 self._transref = self._lockref = self._wlockref = None
234 self._transref = self._lockref = self._wlockref = None
235
235
236 # A cache for various files under .hg/ that tracks file changes,
236 # A cache for various files under .hg/ that tracks file changes,
237 # (used by the filecache decorator)
237 # (used by the filecache decorator)
238 #
238 #
239 # Maps a property name to its util.filecacheentry
239 # Maps a property name to its util.filecacheentry
240 self._filecache = {}
240 self._filecache = {}
241
241
242 def close(self):
242 def close(self):
243 pass
243 pass
244
244
245 def _restrictcapabilities(self, caps):
245 def _restrictcapabilities(self, caps):
246 return caps
246 return caps
247
247
248 def _applyrequirements(self, requirements):
248 def _applyrequirements(self, requirements):
249 self.requirements = requirements
249 self.requirements = requirements
250 self.sopener.options = dict((r, 1) for r in requirements
250 self.sopener.options = dict((r, 1) for r in requirements
251 if r in self.openerreqs)
251 if r in self.openerreqs)
252
252
253 def _writerequirements(self):
253 def _writerequirements(self):
254 reqfile = self.opener("requires", "w")
254 reqfile = self.opener("requires", "w")
255 for r in self.requirements:
255 for r in self.requirements:
256 reqfile.write("%s\n" % r)
256 reqfile.write("%s\n" % r)
257 reqfile.close()
257 reqfile.close()
258
258
259 def _checknested(self, path):
259 def _checknested(self, path):
260 """Determine if path is a legal nested repository."""
260 """Determine if path is a legal nested repository."""
261 if not path.startswith(self.root):
261 if not path.startswith(self.root):
262 return False
262 return False
263 subpath = path[len(self.root) + 1:]
263 subpath = path[len(self.root) + 1:]
264 normsubpath = util.pconvert(subpath)
264 normsubpath = util.pconvert(subpath)
265
265
266 # XXX: Checking against the current working copy is wrong in
266 # XXX: Checking against the current working copy is wrong in
267 # the sense that it can reject things like
267 # the sense that it can reject things like
268 #
268 #
269 # $ hg cat -r 10 sub/x.txt
269 # $ hg cat -r 10 sub/x.txt
270 #
270 #
271 # if sub/ is no longer a subrepository in the working copy
271 # if sub/ is no longer a subrepository in the working copy
272 # parent revision.
272 # parent revision.
273 #
273 #
274 # However, it can of course also allow things that would have
274 # However, it can of course also allow things that would have
275 # been rejected before, such as the above cat command if sub/
275 # been rejected before, such as the above cat command if sub/
276 # is a subrepository now, but was a normal directory before.
276 # is a subrepository now, but was a normal directory before.
277 # The old path auditor would have rejected by mistake since it
277 # The old path auditor would have rejected by mistake since it
278 # panics when it sees sub/.hg/.
278 # panics when it sees sub/.hg/.
279 #
279 #
280 # All in all, checking against the working copy seems sensible
280 # All in all, checking against the working copy seems sensible
281 # since we want to prevent access to nested repositories on
281 # since we want to prevent access to nested repositories on
282 # the filesystem *now*.
282 # the filesystem *now*.
283 ctx = self[None]
283 ctx = self[None]
284 parts = util.splitpath(subpath)
284 parts = util.splitpath(subpath)
285 while parts:
285 while parts:
286 prefix = '/'.join(parts)
286 prefix = '/'.join(parts)
287 if prefix in ctx.substate:
287 if prefix in ctx.substate:
288 if prefix == normsubpath:
288 if prefix == normsubpath:
289 return True
289 return True
290 else:
290 else:
291 sub = ctx.sub(prefix)
291 sub = ctx.sub(prefix)
292 return sub.checknested(subpath[len(prefix) + 1:])
292 return sub.checknested(subpath[len(prefix) + 1:])
293 else:
293 else:
294 parts.pop()
294 parts.pop()
295 return False
295 return False
296
296
297 def peer(self):
297 def peer(self):
298 return localpeer(self) # not cached to avoid reference cycle
298 return localpeer(self) # not cached to avoid reference cycle
299
299
300 def unfiltered(self):
300 def unfiltered(self):
301 """Return unfiltered version of the repository
301 """Return unfiltered version of the repository
302
302
303 Intended to be ovewritten by filtered repo."""
303 Intended to be ovewritten by filtered repo."""
304 return self
304 return self
305
305
306 @repofilecache('bookmarks')
306 @repofilecache('bookmarks')
307 def _bookmarks(self):
307 def _bookmarks(self):
308 return bookmarks.bmstore(self)
308 return bookmarks.bmstore(self)
309
309
310 @repofilecache('bookmarks.current')
310 @repofilecache('bookmarks.current')
311 def _bookmarkcurrent(self):
311 def _bookmarkcurrent(self):
312 return bookmarks.readcurrent(self)
312 return bookmarks.readcurrent(self)
313
313
314 def bookmarkheads(self, bookmark):
314 def bookmarkheads(self, bookmark):
315 name = bookmark.split('@', 1)[0]
315 name = bookmark.split('@', 1)[0]
316 heads = []
316 heads = []
317 for mark, n in self._bookmarks.iteritems():
317 for mark, n in self._bookmarks.iteritems():
318 if mark.split('@', 1)[0] == name:
318 if mark.split('@', 1)[0] == name:
319 heads.append(n)
319 heads.append(n)
320 return heads
320 return heads
321
321
322 @storecache('phaseroots')
322 @storecache('phaseroots')
323 def _phasecache(self):
323 def _phasecache(self):
324 return phases.phasecache(self, self._phasedefaults)
324 return phases.phasecache(self, self._phasedefaults)
325
325
326 @storecache('obsstore')
326 @storecache('obsstore')
327 def obsstore(self):
327 def obsstore(self):
328 store = obsolete.obsstore(self.sopener)
328 store = obsolete.obsstore(self.sopener)
329 if store and not obsolete._enabled:
329 if store and not obsolete._enabled:
330 # message is rare enough to not be translated
330 # message is rare enough to not be translated
331 msg = 'obsolete feature not enabled but %i markers found!\n'
331 msg = 'obsolete feature not enabled but %i markers found!\n'
332 self.ui.warn(msg % len(list(store)))
332 self.ui.warn(msg % len(list(store)))
333 return store
333 return store
334
334
335 @unfilteredpropertycache
335 @unfilteredpropertycache
336 def hiddenrevs(self):
336 def hiddenrevs(self):
337 """hiddenrevs: revs that should be hidden by command and tools
337 """hiddenrevs: revs that should be hidden by command and tools
338
338
339 This set is carried on the repo to ease initialization and lazy
339 This set is carried on the repo to ease initialization and lazy
340 loading; it'll probably move back to changelog for efficiency and
340 loading; it'll probably move back to changelog for efficiency and
341 consistency reasons.
341 consistency reasons.
342
342
343 Note that the hiddenrevs will needs invalidations when
343 Note that the hiddenrevs will needs invalidations when
344 - a new changesets is added (possible unstable above extinct)
344 - a new changesets is added (possible unstable above extinct)
345 - a new obsolete marker is added (possible new extinct changeset)
345 - a new obsolete marker is added (possible new extinct changeset)
346
346
347 hidden changesets cannot have non-hidden descendants
347 hidden changesets cannot have non-hidden descendants
348 """
348 """
349 hidden = set()
349 hidden = set()
350 if self.obsstore:
350 if self.obsstore:
351 ### hide extinct changeset that are not accessible by any mean
351 ### hide extinct changeset that are not accessible by any mean
352 hiddenquery = 'extinct() - ::(. + bookmark())'
352 hiddenquery = 'extinct() - ::(. + bookmark())'
353 hidden.update(self.revs(hiddenquery))
353 hidden.update(self.revs(hiddenquery))
354 return hidden
354 return hidden
355
355
356 @storecache('00changelog.i')
356 @storecache('00changelog.i')
357 def changelog(self):
357 def changelog(self):
358 c = changelog.changelog(self.sopener)
358 c = changelog.changelog(self.sopener)
359 if 'HG_PENDING' in os.environ:
359 if 'HG_PENDING' in os.environ:
360 p = os.environ['HG_PENDING']
360 p = os.environ['HG_PENDING']
361 if p.startswith(self.root):
361 if p.startswith(self.root):
362 c.readpending('00changelog.i.a')
362 c.readpending('00changelog.i.a')
363 return c
363 return c
364
364
365 @storecache('00manifest.i')
365 @storecache('00manifest.i')
366 def manifest(self):
366 def manifest(self):
367 return manifest.manifest(self.sopener)
367 return manifest.manifest(self.sopener)
368
368
369 @repofilecache('dirstate')
369 @repofilecache('dirstate')
370 def dirstate(self):
370 def dirstate(self):
371 warned = [0]
371 warned = [0]
372 def validate(node):
372 def validate(node):
373 try:
373 try:
374 self.changelog.rev(node)
374 self.changelog.rev(node)
375 return node
375 return node
376 except error.LookupError:
376 except error.LookupError:
377 if not warned[0]:
377 if not warned[0]:
378 warned[0] = True
378 warned[0] = True
379 self.ui.warn(_("warning: ignoring unknown"
379 self.ui.warn(_("warning: ignoring unknown"
380 " working parent %s!\n") % short(node))
380 " working parent %s!\n") % short(node))
381 return nullid
381 return nullid
382
382
383 return dirstate.dirstate(self.opener, self.ui, self.root, validate)
383 return dirstate.dirstate(self.opener, self.ui, self.root, validate)
384
384
385 def __getitem__(self, changeid):
385 def __getitem__(self, changeid):
386 if changeid is None:
386 if changeid is None:
387 return context.workingctx(self)
387 return context.workingctx(self)
388 return context.changectx(self, changeid)
388 return context.changectx(self, changeid)
389
389
390 def __contains__(self, changeid):
390 def __contains__(self, changeid):
391 try:
391 try:
392 return bool(self.lookup(changeid))
392 return bool(self.lookup(changeid))
393 except error.RepoLookupError:
393 except error.RepoLookupError:
394 return False
394 return False
395
395
396 def __nonzero__(self):
396 def __nonzero__(self):
397 return True
397 return True
398
398
399 def __len__(self):
399 def __len__(self):
400 return len(self.changelog)
400 return len(self.changelog)
401
401
402 def __iter__(self):
402 def __iter__(self):
403 return iter(self.changelog)
403 return iter(self.changelog)
404
404
405 def revs(self, expr, *args):
405 def revs(self, expr, *args):
406 '''Return a list of revisions matching the given revset'''
406 '''Return a list of revisions matching the given revset'''
407 expr = revset.formatspec(expr, *args)
407 expr = revset.formatspec(expr, *args)
408 m = revset.match(None, expr)
408 m = revset.match(None, expr)
409 return [r for r in m(self, list(self))]
409 return [r for r in m(self, list(self))]
410
410
411 def set(self, expr, *args):
411 def set(self, expr, *args):
412 '''
412 '''
413 Yield a context for each matching revision, after doing arg
413 Yield a context for each matching revision, after doing arg
414 replacement via revset.formatspec
414 replacement via revset.formatspec
415 '''
415 '''
416 for r in self.revs(expr, *args):
416 for r in self.revs(expr, *args):
417 yield self[r]
417 yield self[r]
418
418
419 def url(self):
419 def url(self):
420 return 'file:' + self.root
420 return 'file:' + self.root
421
421
422 def hook(self, name, throw=False, **args):
422 def hook(self, name, throw=False, **args):
423 return hook.hook(self.ui, self, name, throw, **args)
423 return hook.hook(self.ui, self, name, throw, **args)
424
424
425 @unfilteredmethod
425 @unfilteredmethod
426 def _tag(self, names, node, message, local, user, date, extra={}):
426 def _tag(self, names, node, message, local, user, date, extra={}):
427 if isinstance(names, str):
427 if isinstance(names, str):
428 names = (names,)
428 names = (names,)
429
429
430 branches = self.branchmap()
430 branches = self.branchmap()
431 for name in names:
431 for name in names:
432 self.hook('pretag', throw=True, node=hex(node), tag=name,
432 self.hook('pretag', throw=True, node=hex(node), tag=name,
433 local=local)
433 local=local)
434 if name in branches:
434 if name in branches:
435 self.ui.warn(_("warning: tag %s conflicts with existing"
435 self.ui.warn(_("warning: tag %s conflicts with existing"
436 " branch name\n") % name)
436 " branch name\n") % name)
437
437
438 def writetags(fp, names, munge, prevtags):
438 def writetags(fp, names, munge, prevtags):
439 fp.seek(0, 2)
439 fp.seek(0, 2)
440 if prevtags and prevtags[-1] != '\n':
440 if prevtags and prevtags[-1] != '\n':
441 fp.write('\n')
441 fp.write('\n')
442 for name in names:
442 for name in names:
443 m = munge and munge(name) or name
443 m = munge and munge(name) or name
444 if (self._tagscache.tagtypes and
444 if (self._tagscache.tagtypes and
445 name in self._tagscache.tagtypes):
445 name in self._tagscache.tagtypes):
446 old = self.tags().get(name, nullid)
446 old = self.tags().get(name, nullid)
447 fp.write('%s %s\n' % (hex(old), m))
447 fp.write('%s %s\n' % (hex(old), m))
448 fp.write('%s %s\n' % (hex(node), m))
448 fp.write('%s %s\n' % (hex(node), m))
449 fp.close()
449 fp.close()
450
450
451 prevtags = ''
451 prevtags = ''
452 if local:
452 if local:
453 try:
453 try:
454 fp = self.opener('localtags', 'r+')
454 fp = self.opener('localtags', 'r+')
455 except IOError:
455 except IOError:
456 fp = self.opener('localtags', 'a')
456 fp = self.opener('localtags', 'a')
457 else:
457 else:
458 prevtags = fp.read()
458 prevtags = fp.read()
459
459
460 # local tags are stored in the current charset
460 # local tags are stored in the current charset
461 writetags(fp, names, None, prevtags)
461 writetags(fp, names, None, prevtags)
462 for name in names:
462 for name in names:
463 self.hook('tag', node=hex(node), tag=name, local=local)
463 self.hook('tag', node=hex(node), tag=name, local=local)
464 return
464 return
465
465
466 try:
466 try:
467 fp = self.wfile('.hgtags', 'rb+')
467 fp = self.wfile('.hgtags', 'rb+')
468 except IOError, e:
468 except IOError, e:
469 if e.errno != errno.ENOENT:
469 if e.errno != errno.ENOENT:
470 raise
470 raise
471 fp = self.wfile('.hgtags', 'ab')
471 fp = self.wfile('.hgtags', 'ab')
472 else:
472 else:
473 prevtags = fp.read()
473 prevtags = fp.read()
474
474
475 # committed tags are stored in UTF-8
475 # committed tags are stored in UTF-8
476 writetags(fp, names, encoding.fromlocal, prevtags)
476 writetags(fp, names, encoding.fromlocal, prevtags)
477
477
478 fp.close()
478 fp.close()
479
479
480 self.invalidatecaches()
480 self.invalidatecaches()
481
481
482 if '.hgtags' not in self.dirstate:
482 if '.hgtags' not in self.dirstate:
483 self[None].add(['.hgtags'])
483 self[None].add(['.hgtags'])
484
484
485 m = matchmod.exact(self.root, '', ['.hgtags'])
485 m = matchmod.exact(self.root, '', ['.hgtags'])
486 tagnode = self.commit(message, user, date, extra=extra, match=m)
486 tagnode = self.commit(message, user, date, extra=extra, match=m)
487
487
488 for name in names:
488 for name in names:
489 self.hook('tag', node=hex(node), tag=name, local=local)
489 self.hook('tag', node=hex(node), tag=name, local=local)
490
490
491 return tagnode
491 return tagnode
492
492
493 def tag(self, names, node, message, local, user, date):
493 def tag(self, names, node, message, local, user, date):
494 '''tag a revision with one or more symbolic names.
494 '''tag a revision with one or more symbolic names.
495
495
496 names is a list of strings or, when adding a single tag, names may be a
496 names is a list of strings or, when adding a single tag, names may be a
497 string.
497 string.
498
498
499 if local is True, the tags are stored in a per-repository file.
499 if local is True, the tags are stored in a per-repository file.
500 otherwise, they are stored in the .hgtags file, and a new
500 otherwise, they are stored in the .hgtags file, and a new
501 changeset is committed with the change.
501 changeset is committed with the change.
502
502
503 keyword arguments:
503 keyword arguments:
504
504
505 local: whether to store tags in non-version-controlled file
505 local: whether to store tags in non-version-controlled file
506 (default False)
506 (default False)
507
507
508 message: commit message to use if committing
508 message: commit message to use if committing
509
509
510 user: name of user to use if committing
510 user: name of user to use if committing
511
511
512 date: date tuple to use if committing'''
512 date: date tuple to use if committing'''
513
513
514 if not local:
514 if not local:
515 for x in self.status()[:5]:
515 for x in self.status()[:5]:
516 if '.hgtags' in x:
516 if '.hgtags' in x:
517 raise util.Abort(_('working copy of .hgtags is changed '
517 raise util.Abort(_('working copy of .hgtags is changed '
518 '(please commit .hgtags manually)'))
518 '(please commit .hgtags manually)'))
519
519
520 self.tags() # instantiate the cache
520 self.tags() # instantiate the cache
521 self._tag(names, node, message, local, user, date)
521 self._tag(names, node, message, local, user, date)
522
522
523 @filteredpropertycache
523 @filteredpropertycache
524 def _tagscache(self):
524 def _tagscache(self):
525 '''Returns a tagscache object that contains various tags related
525 '''Returns a tagscache object that contains various tags related
526 caches.'''
526 caches.'''
527
527
528 # This simplifies its cache management by having one decorated
528 # This simplifies its cache management by having one decorated
529 # function (this one) and the rest simply fetch things from it.
529 # function (this one) and the rest simply fetch things from it.
530 class tagscache(object):
530 class tagscache(object):
531 def __init__(self):
531 def __init__(self):
532 # These two define the set of tags for this repository. tags
532 # These two define the set of tags for this repository. tags
533 # maps tag name to node; tagtypes maps tag name to 'global' or
533 # maps tag name to node; tagtypes maps tag name to 'global' or
534 # 'local'. (Global tags are defined by .hgtags across all
534 # 'local'. (Global tags are defined by .hgtags across all
535 # heads, and local tags are defined in .hg/localtags.)
535 # heads, and local tags are defined in .hg/localtags.)
536 # They constitute the in-memory cache of tags.
536 # They constitute the in-memory cache of tags.
537 self.tags = self.tagtypes = None
537 self.tags = self.tagtypes = None
538
538
539 self.nodetagscache = self.tagslist = None
539 self.nodetagscache = self.tagslist = None
540
540
541 cache = tagscache()
541 cache = tagscache()
542 cache.tags, cache.tagtypes = self._findtags()
542 cache.tags, cache.tagtypes = self._findtags()
543
543
544 return cache
544 return cache
545
545
546 def tags(self):
546 def tags(self):
547 '''return a mapping of tag to node'''
547 '''return a mapping of tag to node'''
548 t = {}
548 t = {}
549 if self.changelog.filteredrevs:
549 if self.changelog.filteredrevs:
550 tags, tt = self._findtags()
550 tags, tt = self._findtags()
551 else:
551 else:
552 tags = self._tagscache.tags
552 tags = self._tagscache.tags
553 for k, v in tags.iteritems():
553 for k, v in tags.iteritems():
554 try:
554 try:
555 # ignore tags to unknown nodes
555 # ignore tags to unknown nodes
556 self.changelog.rev(v)
556 self.changelog.rev(v)
557 t[k] = v
557 t[k] = v
558 except (error.LookupError, ValueError):
558 except (error.LookupError, ValueError):
559 pass
559 pass
560 return t
560 return t
561
561
562 def _findtags(self):
562 def _findtags(self):
563 '''Do the hard work of finding tags. Return a pair of dicts
563 '''Do the hard work of finding tags. Return a pair of dicts
564 (tags, tagtypes) where tags maps tag name to node, and tagtypes
564 (tags, tagtypes) where tags maps tag name to node, and tagtypes
565 maps tag name to a string like \'global\' or \'local\'.
565 maps tag name to a string like \'global\' or \'local\'.
566 Subclasses or extensions are free to add their own tags, but
566 Subclasses or extensions are free to add their own tags, but
567 should be aware that the returned dicts will be retained for the
567 should be aware that the returned dicts will be retained for the
568 duration of the localrepo object.'''
568 duration of the localrepo object.'''
569
569
570 # XXX what tagtype should subclasses/extensions use? Currently
570 # XXX what tagtype should subclasses/extensions use? Currently
571 # mq and bookmarks add tags, but do not set the tagtype at all.
571 # mq and bookmarks add tags, but do not set the tagtype at all.
572 # Should each extension invent its own tag type? Should there
572 # Should each extension invent its own tag type? Should there
573 # be one tagtype for all such "virtual" tags? Or is the status
573 # be one tagtype for all such "virtual" tags? Or is the status
574 # quo fine?
574 # quo fine?
575
575
576 alltags = {} # map tag name to (node, hist)
576 alltags = {} # map tag name to (node, hist)
577 tagtypes = {}
577 tagtypes = {}
578
578
579 tagsmod.findglobaltags(self.ui, self, alltags, tagtypes)
579 tagsmod.findglobaltags(self.ui, self, alltags, tagtypes)
580 tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)
580 tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)
581
581
582 # Build the return dicts. Have to re-encode tag names because
582 # Build the return dicts. Have to re-encode tag names because
583 # the tags module always uses UTF-8 (in order not to lose info
583 # the tags module always uses UTF-8 (in order not to lose info
584 # writing to the cache), but the rest of Mercurial wants them in
584 # writing to the cache), but the rest of Mercurial wants them in
585 # local encoding.
585 # local encoding.
586 tags = {}
586 tags = {}
587 for (name, (node, hist)) in alltags.iteritems():
587 for (name, (node, hist)) in alltags.iteritems():
588 if node != nullid:
588 if node != nullid:
589 tags[encoding.tolocal(name)] = node
589 tags[encoding.tolocal(name)] = node
590 tags['tip'] = self.changelog.tip()
590 tags['tip'] = self.changelog.tip()
591 tagtypes = dict([(encoding.tolocal(name), value)
591 tagtypes = dict([(encoding.tolocal(name), value)
592 for (name, value) in tagtypes.iteritems()])
592 for (name, value) in tagtypes.iteritems()])
593 return (tags, tagtypes)
593 return (tags, tagtypes)
594
594
595 def tagtype(self, tagname):
595 def tagtype(self, tagname):
596 '''
596 '''
597 return the type of the given tag. result can be:
597 return the type of the given tag. result can be:
598
598
599 'local' : a local tag
599 'local' : a local tag
600 'global' : a global tag
600 'global' : a global tag
601 None : tag does not exist
601 None : tag does not exist
602 '''
602 '''
603
603
604 return self._tagscache.tagtypes.get(tagname)
604 return self._tagscache.tagtypes.get(tagname)
605
605
606 def tagslist(self):
606 def tagslist(self):
607 '''return a list of tags ordered by revision'''
607 '''return a list of tags ordered by revision'''
608 if not self._tagscache.tagslist:
608 if not self._tagscache.tagslist:
609 l = []
609 l = []
610 for t, n in self.tags().iteritems():
610 for t, n in self.tags().iteritems():
611 r = self.changelog.rev(n)
611 r = self.changelog.rev(n)
612 l.append((r, t, n))
612 l.append((r, t, n))
613 self._tagscache.tagslist = [(t, n) for r, t, n in sorted(l)]
613 self._tagscache.tagslist = [(t, n) for r, t, n in sorted(l)]
614
614
615 return self._tagscache.tagslist
615 return self._tagscache.tagslist
616
616
617 def nodetags(self, node):
617 def nodetags(self, node):
618 '''return the tags associated with a node'''
618 '''return the tags associated with a node'''
619 if not self._tagscache.nodetagscache:
619 if not self._tagscache.nodetagscache:
620 nodetagscache = {}
620 nodetagscache = {}
621 for t, n in self._tagscache.tags.iteritems():
621 for t, n in self._tagscache.tags.iteritems():
622 nodetagscache.setdefault(n, []).append(t)
622 nodetagscache.setdefault(n, []).append(t)
623 for tags in nodetagscache.itervalues():
623 for tags in nodetagscache.itervalues():
624 tags.sort()
624 tags.sort()
625 self._tagscache.nodetagscache = nodetagscache
625 self._tagscache.nodetagscache = nodetagscache
626 return self._tagscache.nodetagscache.get(node, [])
626 return self._tagscache.nodetagscache.get(node, [])
627
627
628 def nodebookmarks(self, node):
628 def nodebookmarks(self, node):
629 marks = []
629 marks = []
630 for bookmark, n in self._bookmarks.iteritems():
630 for bookmark, n in self._bookmarks.iteritems():
631 if n == node:
631 if n == node:
632 marks.append(bookmark)
632 marks.append(bookmark)
633 return sorted(marks)
633 return sorted(marks)
634
634
635 def _branchtags(self, partial, lrev):
635 def _branchtags(self, partial, lrev):
636 # TODO: rename this function?
636 # TODO: rename this function?
637 tiprev = len(self) - 1
637 tiprev = len(self) - 1
638 if lrev != tiprev:
638 if lrev != tiprev:
639 ctxgen = (self[r] for r in self.changelog.revs(lrev + 1, tiprev))
639 ctxgen = (self[r] for r in self.changelog.revs(lrev + 1, tiprev))
640 self._updatebranchcache(partial, ctxgen)
640 self._updatebranchcache(partial, ctxgen)
641 self._writebranchcache(partial, self.changelog.tip(), tiprev)
641 self._writebranchcache(partial, self.changelog.tip(), tiprev)
642
642
643 return partial
643 return partial
644
644
645 @unfilteredmethod # Until we get a smarter cache management
645 @unfilteredmethod # Until we get a smarter cache management
646 def updatebranchcache(self):
646 def updatebranchcache(self):
647 tip = self.changelog.tip()
647 tip = self.changelog.tip()
648 if self._branchcache is not None and self._branchcachetip == tip:
648 if self._branchcache is not None and self._branchcachetip == tip:
649 return
649 return
650
650
651 oldtip = self._branchcachetip
651 oldtip = self._branchcachetip
652 self._branchcachetip = tip
652 self._branchcachetip = tip
653 if oldtip is None or oldtip not in self.changelog.nodemap:
653 if oldtip is None or oldtip not in self.changelog.nodemap:
654 partial, last, lrev = self._readbranchcache()
654 partial, last, lrev = self._readbranchcache()
655 else:
655 else:
656 lrev = self.changelog.rev(oldtip)
656 lrev = self.changelog.rev(oldtip)
657 partial = self._branchcache
657 partial = self._branchcache
658
658
659 self._branchtags(partial, lrev)
659 self._branchtags(partial, lrev)
660 # this private cache holds all heads (not just the branch tips)
660 # this private cache holds all heads (not just the branch tips)
661 self._branchcache = partial
661 self._branchcache = partial
662
662
663 def branchmap(self):
663 def branchmap(self):
664 '''returns a dictionary {branch: [branchheads]}'''
664 '''returns a dictionary {branch: [branchheads]}'''
665 if self.changelog.filteredrevs:
665 if self.changelog.filteredrevs:
666 # some changeset are excluded we can't use the cache
666 # some changeset are excluded we can't use the cache
667 branchmap = {}
667 branchmap = {}
668 self._updatebranchcache(branchmap, (self[r] for r in self))
668 self._updatebranchcache(branchmap, (self[r] for r in self))
669 return branchmap
669 return branchmap
670 else:
670 else:
671 self.updatebranchcache()
671 self.updatebranchcache()
672 return self._branchcache
672 return self._branchcache
673
673
674
674
675 def _branchtip(self, heads):
675 def _branchtip(self, heads):
676 '''return the tipmost branch head in heads'''
676 '''return the tipmost branch head in heads'''
677 tip = heads[-1]
677 tip = heads[-1]
678 for h in reversed(heads):
678 for h in reversed(heads):
679 if not self[h].closesbranch():
679 if not self[h].closesbranch():
680 tip = h
680 tip = h
681 break
681 break
682 return tip
682 return tip
683
683
684 def branchtip(self, branch):
684 def branchtip(self, branch):
685 '''return the tip node for a given branch'''
685 '''return the tip node for a given branch'''
686 if branch not in self.branchmap():
686 if branch not in self.branchmap():
687 raise error.RepoLookupError(_("unknown branch '%s'") % branch)
687 raise error.RepoLookupError(_("unknown branch '%s'") % branch)
688 return self._branchtip(self.branchmap()[branch])
688 return self._branchtip(self.branchmap()[branch])
689
689
690 def branchtags(self):
690 def branchtags(self):
691 '''return a dict where branch names map to the tipmost head of
691 '''return a dict where branch names map to the tipmost head of
692 the branch, open heads come before closed'''
692 the branch, open heads come before closed'''
693 bt = {}
693 bt = {}
694 for bn, heads in self.branchmap().iteritems():
694 for bn, heads in self.branchmap().iteritems():
695 bt[bn] = self._branchtip(heads)
695 bt[bn] = self._branchtip(heads)
696 return bt
696 return bt
697
697
698 @unfilteredmethod # Until we get a smarter cache management
698 @unfilteredmethod # Until we get a smarter cache management
699 def _readbranchcache(self):
699 def _readbranchcache(self):
700 partial = {}
700 partial = {}
701 try:
701 try:
702 f = self.opener("cache/branchheads")
702 f = self.opener("cache/branchheads")
703 lines = f.read().split('\n')
703 lines = f.read().split('\n')
704 f.close()
704 f.close()
705 except (IOError, OSError):
705 except (IOError, OSError):
706 return {}, nullid, nullrev
706 return {}, nullid, nullrev
707
707
708 try:
708 try:
709 last, lrev = lines.pop(0).split(" ", 1)
709 last, lrev = lines.pop(0).split(" ", 1)
710 last, lrev = bin(last), int(lrev)
710 last, lrev = bin(last), int(lrev)
711 if lrev >= len(self) or self[lrev].node() != last:
711 if lrev >= len(self) or self[lrev].node() != last:
712 # invalidate the cache
712 # invalidate the cache
713 raise ValueError('invalidating branch cache (tip differs)')
713 raise ValueError('invalidating branch cache (tip differs)')
714 for l in lines:
714 for l in lines:
715 if not l:
715 if not l:
716 continue
716 continue
717 node, label = l.split(" ", 1)
717 node, label = l.split(" ", 1)
718 label = encoding.tolocal(label.strip())
718 label = encoding.tolocal(label.strip())
719 if not node in self:
719 if not node in self:
720 raise ValueError('invalidating branch cache because node '+
720 raise ValueError('invalidating branch cache because node '+
721 '%s does not exist' % node)
721 '%s does not exist' % node)
722 partial.setdefault(label, []).append(bin(node))
722 partial.setdefault(label, []).append(bin(node))
723 except KeyboardInterrupt:
723 except KeyboardInterrupt:
724 raise
724 raise
725 except Exception, inst:
725 except Exception, inst:
726 if self.ui.debugflag:
726 if self.ui.debugflag:
727 self.ui.warn(str(inst), '\n')
727 self.ui.warn(str(inst), '\n')
728 partial, last, lrev = {}, nullid, nullrev
728 partial, last, lrev = {}, nullid, nullrev
729 return partial, last, lrev
729 return partial, last, lrev
730
730
731 @unfilteredmethod # Until we get a smarter cache management
731 @unfilteredmethod # Until we get a smarter cache management
732 def _writebranchcache(self, branches, tip, tiprev):
732 def _writebranchcache(self, branches, tip, tiprev):
733 try:
733 try:
734 f = self.opener("cache/branchheads", "w", atomictemp=True)
734 f = self.opener("cache/branchheads", "w", atomictemp=True)
735 f.write("%s %s\n" % (hex(tip), tiprev))
735 f.write("%s %s\n" % (hex(tip), tiprev))
736 for label, nodes in branches.iteritems():
736 for label, nodes in branches.iteritems():
737 for node in nodes:
737 for node in nodes:
738 f.write("%s %s\n" % (hex(node), encoding.fromlocal(label)))
738 f.write("%s %s\n" % (hex(node), encoding.fromlocal(label)))
739 f.close()
739 f.close()
740 except (IOError, OSError):
740 except (IOError, OSError):
741 pass
741 pass
742
742
@unfilteredmethod # Until we get a smarter cache management
def _updatebranchcache(self, partial, ctxgen):
    """Given a branchhead cache, partial, that may have extra nodes or be
    missing heads, and a generator of nodes that are at least a superset of
    heads missing, this function updates partial to be correct.
    """
    # Gather candidate heads per branch from the incoming changesets.
    newbranches = {}
    for c in ctxgen:
        newbranches.setdefault(c.branch(), []).append(c.node())
    # If older branchheads are reachable from new ones, they aren't
    # really branchheads.  Checking parents alone is insufficient:
    # 1 (branch a) -> 2 (branch b) -> 3 (branch a)
    for branch, newnodes in newbranches.iteritems():
        bheads = partial.setdefault(branch, [])
        # Drop candidate heads no longer present in the repo (e.g. after
        # a strip).  Avoid 'node in self' here because that recurses
        # back into branchcache code.
        bheadrevs = [self.changelog.rev(node) for node in bheads
                     if self.changelog.hasnode(node)]
        newheadrevs = [self.changelog.rev(node) for node in newnodes
                       if self.changelog.hasnode(node)]
        # NOTE(review): assumes newheadrevs is non-empty whenever
        # bheadrevs is — min([]) would raise here.  Confirm callers.
        ctxisnew = bheadrevs and min(newheadrevs) > max(bheadrevs)
        # De-duplicate: a node can be in both newheadrevs and bheadrevs,
        # e.g. after stripping a node whose parent was already a head
        # (they were on different branches).
        bheadrevs = sorted(set(bheadrevs).union(newheadrevs))

        # Starting from tip means fewer passes over reachable.  If the
        # new candidates cannot be ancestors of existing heads, the
        # existing heads' ancestors need not be examined at all.
        if ctxisnew:
            iterrevs = sorted(newheadrevs)
        else:
            iterrevs = list(bheadrevs)

        # Prune two kinds of non-heads: old heads superseded by a new
        # head, and new candidates that have an existing head as a
        # descendant.
        while iterrevs:
            latest = iterrevs.pop()
            if latest not in bheadrevs:
                continue
            ancestors = set(self.changelog.ancestors([latest],
                                                     bheadrevs[0]))
            if ancestors:
                bheadrevs = [b for b in bheadrevs if b not in ancestors]
        partial[branch] = [self.changelog.node(rev) for rev in bheadrevs]

    # Branches can vanish entirely when their last commit is stripped.
    # Such a branch may not appear in newbranches (stripping the last
    # commit of a branch yields candidates on the parent branch), so
    # filter out branches with no surviving heads here.
    for branch in partial.keys():
        nodes = [head for head in partial[branch]
                 if self.changelog.hasnode(head)]
        if not nodes:
            del partial[branch]
803
803
def lookup(self, key):
    """Resolve *key* (revision, hash, tag, ...) to a changeset node."""
    return self[key].node()
806
806
def lookupbranch(self, key, remote=None):
    """Return the branch name *key* denotes.

    If *key* is already a branch name in the (remote, if given)
    repository, return it unchanged; otherwise resolve *key* as a
    changeset and return that changeset's branch.
    """
    repo = remote or self
    if key in repo.branchmap():
        return key

    # prefer the remote only when it is usable locally
    repo = (remote and remote.local()) and remote or self
    return repo[key].branch()
814
814
def known(self, nodes):
    """Map each node to True when it is present and not secret-phase."""
    nm = self.changelog.nodemap
    pc = self._phasecache
    result = []
    for node in nodes:
        rev = nm.get(node)
        # unknown nodes and secret changesets are both reported False
        visible = not (rev is None or pc.phase(self, rev) >= phases.secret)
        result.append(visible)
    return result
824
824
def local(self):
    """Identify this repository as local by returning itself."""
    return self
827
827
def cancopy(self):
    # delegate to local() rather than hard-coding True so that
    # statichttprepo's override of local() takes effect
    return self.local()
830
830
def join(self, f):
    """Join *f* onto the repository metadata path (self.path)."""
    return os.path.join(self.path, f)
833
833
def wjoin(self, f):
    """Join *f* onto the repository root path (self.root)."""
    return os.path.join(self.root, f)
836
836
def file(self, f):
    """Return the filelog for tracked file *f* (a leading '/' is dropped)."""
    if f[0] == '/':
        f = f[1:]
    return filelog.filelog(self.sopener, f)
841
841
def changectx(self, changeid):
    """Return the change context for *changeid* (same as ``self[changeid]``)."""
    return self[changeid]
844
844
def parents(self, changeid=None):
    '''get list of changectxs for parents of changeid'''
    return self[changeid].parents()
848
848
def setparents(self, p1, p2=nullid):
    """Set the dirstate parents to (*p1*, *p2*), preserving copy records.

    The dirstate cannot adjust copy records itself because that
    requires the parents' manifests; keep them only for entries
    that were added relative to the first parent.
    """
    copies = self.dirstate.setparents(p1, p2)
    if copies:
        pctx = self[p1]
        for f in copies:
            if f not in pctx and copies[f] in pctx:
                self.dirstate.copy(copies[f], f)
859
859
def filectx(self, path, changeid=None, fileid=None):
    """changeid can be a changeset revision, node, or tag.
    fileid can be a file revision or node."""
    return context.filectx(self, path, changeid, fileid)
864
864
def getcwd(self):
    """Return the current working directory as seen by the dirstate."""
    return self.dirstate.getcwd()
867
867
def pathto(self, f, cwd=None):
    """Return *f* expressed relative to *cwd* (delegates to the dirstate)."""
    return self.dirstate.pathto(f, cwd)
870
870
def wfile(self, f, mode='r'):
    """Open working-directory file *f* via the working-dir opener."""
    return self.wopener(f, mode)
873
873
874 def _link(self, f):
874 def _link(self, f):
875 return os.path.islink(self.wjoin(f))
875 return os.path.islink(self.wjoin(f))
876
876
def _loadfilter(self, filter):
    """Load and cache the (matcher, fn, params) list for config section
    *filter* (e.g. 'encode'/'decode').

    A command of '!' disables the pattern.  Commands matching a
    registered data filter name use that filter; anything else is
    run as a shell filter through util.filter.
    """
    if filter not in self.filterpats:
        l = []
        for pat, cmd in self.ui.configitems(filter):
            if cmd == '!':
                continue
            mf = matchmod.match(self.root, '', [pat])
            fn = None
            params = cmd
            for name, filterfn in self._datafilters.iteritems():
                if cmd.startswith(name):
                    fn = filterfn
                    params = cmd[len(name):].lstrip()
                    break
            if not fn:
                fn = lambda s, c, **kwargs: util.filter(s, c)
            # wrap old-style filters that take no keyword arguments
            if not inspect.getargspec(fn)[2]:
                oldfn = fn
                fn = lambda s, c, **kwargs: oldfn(s, c)
            l.append((mf, fn, params))
        self.filterpats[filter] = l
    return self.filterpats[filter]
900
900
901 def _filter(self, filterpats, filename, data):
901 def _filter(self, filterpats, filename, data):
902 for mf, fn, cmd in filterpats:
902 for mf, fn, cmd in filterpats:
903 if mf(filename):
903 if mf(filename):
904 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
904 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
905 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
905 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
906 break
906 break
907
907
908 return data
908 return data
909
909
@unfilteredpropertycache
def _encodefilterpats(self):
    # cached filter patterns for the [encode] config section
    return self._loadfilter('encode')
913
913
@unfilteredpropertycache
def _decodefilterpats(self):
    # cached filter patterns for the [decode] config section
    return self._loadfilter('decode')
917
917
def adddatafilter(self, name, filter):
    """Register data filter *filter* under *name* for _loadfilter lookup."""
    self._datafilters[name] = filter
920
920
def wread(self, filename):
    """Read *filename* from the working directory with encode filters
    applied.  A symlink is read as its target string, not its content."""
    if self._link(filename):
        data = os.readlink(self.wjoin(filename))
    else:
        data = self.wopener.read(filename)
    return self._filter(self._encodefilterpats, filename, data)
927
927
def wwrite(self, filename, data, flags):
    """Write *data* to working-directory *filename* with decode filters.

    flags: 'l' writes a symlink instead of a regular file; 'x' also
    marks the written file executable.
    """
    data = self._filter(self._decodefilterpats, filename, data)
    if 'l' in flags:
        self.wopener.symlink(data, filename)
    else:
        self.wopener.write(filename, data)
        if 'x' in flags:
            util.setflags(self.wjoin(filename), False, True)
936
936
def wwritedata(self, filename, data):
    """Return *data* run through the decode filters for *filename*."""
    return self._filter(self._decodefilterpats, filename, data)
939
939
def transaction(self, desc):
    """Open a store transaction described by *desc*.

    Nests into a running transaction when one exists.  Raises
    RepoError if an abandoned journal is found (hg recover needed).
    """
    tr = self._transref and self._transref() or None
    if tr and tr.running():
        return tr.nest()

    # a leftover journal means a previous transaction never finished
    if os.path.exists(self.sjoin("journal")):
        raise error.RepoError(
            _("abandoned transaction found - run hg recover"))

    self._writejournal(desc)
    renames = [(x, undoname(x)) for x in self._journalfiles()]

    tr = transaction.transaction(self.ui.warn, self.sopener,
                                 self.sjoin("journal"),
                                 aftertrans(renames),
                                 self.store.createmode)
    # hold only a weak reference so lock release can collect it
    self._transref = weakref.ref(tr)
    return tr
959
959
960 def _journalfiles(self):
960 def _journalfiles(self):
961 return (self.sjoin('journal'), self.join('journal.dirstate'),
961 return (self.sjoin('journal'), self.join('journal.dirstate'),
962 self.join('journal.branch'), self.join('journal.desc'),
962 self.join('journal.branch'), self.join('journal.desc'),
963 self.join('journal.bookmarks'),
963 self.join('journal.bookmarks'),
964 self.sjoin('journal.phaseroots'))
964 self.sjoin('journal.phaseroots'))
965
965
def undofiles(self):
    """Return the undo-file counterpart of every journal file."""
    return [undoname(x) for x in self._journalfiles()]
968
968
def _writejournal(self, desc):
    """Snapshot dirstate, branch, description, bookmarks and phaseroots
    into journal.* files so an aborted transaction can be undone."""
    self.opener.write("journal.dirstate",
                      self.opener.tryread("dirstate"))
    self.opener.write("journal.branch",
                      encoding.fromlocal(self.dirstate.branch()))
    self.opener.write("journal.desc",
                      "%d\n%s\n" % (len(self), desc))
    self.opener.write("journal.bookmarks",
                      self.opener.tryread("bookmarks"))
    self.sopener.write("journal.phaseroots",
                       self.sopener.tryread("phaseroots"))
980
980
def recover(self):
    """Roll back an interrupted transaction, if any.

    Returns True when a journal was found and rolled back, False
    otherwise.  Takes the store lock for the duration.
    """
    lock = self.lock()
    try:
        if os.path.exists(self.sjoin("journal")):
            self.ui.status(_("rolling back interrupted transaction\n"))
            transaction.rollback(self.sopener, self.sjoin("journal"),
                                 self.ui.warn)
            self.invalidate()
            return True
        else:
            self.ui.warn(_("no interrupted transaction available\n"))
            return False
    finally:
        lock.release()
995
995
def rollback(self, dryrun=False, force=False):
    """Undo the last transaction; returns 0 on success, 1 when there is
    no rollback information.  Takes both wlock and lock."""
    wlock = lock = None
    try:
        wlock = self.wlock()
        lock = self.lock()
        if os.path.exists(self.sjoin("undo")):
            return self._rollback(dryrun, force)
        else:
            self.ui.warn(_("no rollback information available\n"))
            return 1
    finally:
        release(lock, wlock)
1008
1008
@unfilteredmethod # Until we get smarter cache management
def _rollback(self, dryrun, force):
    """Perform the actual rollback of the last transaction.

    Reads undo.desc to describe what is being undone, refuses (unless
    *force*) to roll back someone else's commit while not checked out
    on tip, restores the saved bookmarks/phaseroots/dirstate/branch
    state, and returns 0.
    """
    ui = self.ui
    try:
        args = self.opener.read('undo.desc').splitlines()
        (oldlen, desc, detail) = (int(args[0]), args[1], None)
        if len(args) >= 3:
            detail = args[2]
        oldtip = oldlen - 1

        if detail and ui.verbose:
            msg = (_('repository tip rolled back to revision %s'
                     ' (undo %s: %s)\n')
                   % (oldtip, desc, detail))
        else:
            msg = (_('repository tip rolled back to revision %s'
                     ' (undo %s)\n')
                   % (oldtip, desc))
    except IOError:
        msg = _('rolling back unknown transaction\n')
        desc = None

    if not force and self['.'] != self['tip'] and desc == 'commit':
        raise util.Abort(
            _('rollback of last commit while not checked out '
              'may lose data'), hint=_('use -f to force'))

    ui.status(msg)
    if dryrun:
        return 0

    parents = self.dirstate.parents()
    transaction.rollback(self.sopener, self.sjoin('undo'), ui.warn)
    if os.path.exists(self.join('undo.bookmarks')):
        util.rename(self.join('undo.bookmarks'),
                    self.join('bookmarks'))
    if os.path.exists(self.sjoin('undo.phaseroots')):
        util.rename(self.sjoin('undo.phaseroots'),
                    self.sjoin('phaseroots'))
    self.invalidate()

    # Discard all cache entries to force reloading everything.
    self._filecache.clear()

    parentgone = (parents[0] not in self.changelog.nodemap or
                  parents[1] not in self.changelog.nodemap)
    if parentgone:
        # the dirstate parents were rolled away too: restore the saved
        # dirstate and branch
        util.rename(self.join('undo.dirstate'), self.join('dirstate'))
        try:
            branch = self.opener.read('undo.branch')
            self.dirstate.setbranch(encoding.tolocal(branch))
        except IOError:
            ui.warn(_('named branch could not be reset: '
                      'current branch is still \'%s\'\n')
                    % self.dirstate.branch())

        self.dirstate.invalidate()
        parents = tuple([p.rev() for p in self.parents()])
        if len(parents) > 1:
            ui.status(_('working directory now based on '
                        'revisions %d and %d\n') % parents)
        else:
            ui.status(_('working directory now based on '
                        'revision %d\n') % parents)
    # TODO: if we know which new heads may result from this rollback, pass
    # them to destroy(), which will prevent the branchhead cache from being
    # invalidated.
    self.destroyed()
    return 0
1078
1078
def invalidatecaches(self):
    """Drop the in-memory tag, branch and obsolescence caches."""
    if '_tagscache' in vars(self):
        # can't use delattr on proxy
        del self.__dict__['_tagscache']

    self.unfiltered()._branchcache = None # in UTF-8
    self.unfiltered()._branchcachetip = None
    obsolete.clearobscaches(self)
1088
1088
def invalidatedirstate(self):
    '''Invalidates the dirstate, causing the next call to dirstate
    to check if it was modified since the last time it was read,
    rereading it if it has.

    This is different to dirstate.invalidate() that it doesn't always
    rereads the dirstate. Use dirstate.invalidate() if you want to
    explicitly read the dirstate again (i.e. restoring it to a previous
    known good state).'''
    if hasunfilteredcache(self, 'dirstate'):
        # clear the dirstate's own file caches first
        for k in self.dirstate._filecache:
            try:
                delattr(self.dirstate, k)
            except AttributeError:
                pass
        delattr(self.unfiltered(), 'dirstate')
1105
1105
def invalidate(self):
    """Drop every cached filecache entry (except dirstate) and all
    derived caches, forcing re-reads from disk."""
    unfiltered = self.unfiltered() # all filecaches are stored on unfiltered
    for k in self._filecache:
        # dirstate is invalidated separately in invalidatedirstate()
        if k == 'dirstate':
            continue

        try:
            delattr(unfiltered, k)
        except AttributeError:
            # never materialized: nothing to drop
            pass
    self.invalidatecaches()
1118
1118
1119 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
1119 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
1120 try:
1120 try:
1121 l = lock.lock(lockname, 0, releasefn, desc=desc)
1121 l = lock.lock(lockname, 0, releasefn, desc=desc)
1122 except error.LockHeld, inst:
1122 except error.LockHeld, inst:
1123 if not wait:
1123 if not wait:
1124 raise
1124 raise
1125 self.ui.warn(_("waiting for lock on %s held by %r\n") %
1125 self.ui.warn(_("waiting for lock on %s held by %r\n") %
1126 (desc, inst.locker))
1126 (desc, inst.locker))
1127 # default to 600 seconds timeout
1127 # default to 600 seconds timeout
1128 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
1128 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
1129 releasefn, desc=desc)
1129 releasefn, desc=desc)
1130 if acquirefn:
1130 if acquirefn:
1131 acquirefn()
1131 acquirefn()
1132 return l
1132 return l
1133
1133
1134 def _afterlock(self, callback):
1134 def _afterlock(self, callback):
1135 """add a callback to the current repository lock.
1135 """add a callback to the current repository lock.
1136
1136
1137 The callback will be executed on lock release."""
1137 The callback will be executed on lock release."""
1138 l = self._lockref and self._lockref()
1138 l = self._lockref and self._lockref()
1139 if l:
1139 if l:
1140 l.postrelease.append(callback)
1140 l.postrelease.append(callback)
1141 else:
1141 else:
1142 callback()
1142 callback()
1143
1143
def lock(self, wait=True):
    '''Lock the repository store (.hg/store) and return a weak reference
    to the lock. Use this before modifying the store (e.g. committing or
    stripping). If you are opening a transaction, get a lock as well.)'''
    l = self._lockref and self._lockref()
    if l is not None and l.held:
        # re-enter the already-held lock
        l.lock()
        return l

    def unlock():
        # flush store, phases and (non-dirstate) filecache state on release
        self.store.write()
        if hasunfilteredcache(self, '_phasecache'):
            self._phasecache.write()
        for k, ce in self._filecache.items():
            if k == 'dirstate':
                continue
            ce.refresh()

    l = self._lock(self.sjoin("lock"), wait, unlock,
                   self.invalidate, _('repository %s') % self.origroot)
    self._lockref = weakref.ref(l)
    return l
1166
1166
def wlock(self, wait=True):
    '''Lock the non-store parts of the repository (everything under
    .hg except .hg/store) and return a weak reference to the lock.
    Use this before modifying files in .hg.'''
    l = self._wlockref and self._wlockref()
    if l is not None and l.held:
        # re-enter the already-held lock
        l.lock()
        return l

    def unlock():
        # flush the dirstate and refresh its cache entry on release
        self.dirstate.write()
        ce = self._filecache.get('dirstate')
        if ce:
            ce.refresh()

    l = self._lock(self.join("wlock"), wait, unlock,
                   self.invalidatedirstate, _('working directory of %s') %
                   self.origroot)
    self._wlockref = weakref.ref(l)
    return l
1187
1187
1188 def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
1188 def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
1189 """
1189 """
1190 commit an individual file as part of a larger transaction
1190 commit an individual file as part of a larger transaction
1191 """
1191 """
1192
1192
1193 fname = fctx.path()
1193 fname = fctx.path()
1194 text = fctx.data()
1194 text = fctx.data()
1195 flog = self.file(fname)
1195 flog = self.file(fname)
1196 fparent1 = manifest1.get(fname, nullid)
1196 fparent1 = manifest1.get(fname, nullid)
1197 fparent2 = fparent2o = manifest2.get(fname, nullid)
1197 fparent2 = fparent2o = manifest2.get(fname, nullid)
1198
1198
1199 meta = {}
1199 meta = {}
1200 copy = fctx.renamed()
1200 copy = fctx.renamed()
1201 if copy and copy[0] != fname:
1201 if copy and copy[0] != fname:
1202 # Mark the new revision of this file as a copy of another
1202 # Mark the new revision of this file as a copy of another
1203 # file. This copy data will effectively act as a parent
1203 # file. This copy data will effectively act as a parent
1204 # of this new revision. If this is a merge, the first
1204 # of this new revision. If this is a merge, the first
1205 # parent will be the nullid (meaning "look up the copy data")
1205 # parent will be the nullid (meaning "look up the copy data")
1206 # and the second one will be the other parent. For example:
1206 # and the second one will be the other parent. For example:
1207 #
1207 #
1208 # 0 --- 1 --- 3 rev1 changes file foo
1208 # 0 --- 1 --- 3 rev1 changes file foo
1209 # \ / rev2 renames foo to bar and changes it
1209 # \ / rev2 renames foo to bar and changes it
1210 # \- 2 -/ rev3 should have bar with all changes and
1210 # \- 2 -/ rev3 should have bar with all changes and
1211 # should record that bar descends from
1211 # should record that bar descends from
1212 # bar in rev2 and foo in rev1
1212 # bar in rev2 and foo in rev1
1213 #
1213 #
1214 # this allows this merge to succeed:
1214 # this allows this merge to succeed:
1215 #
1215 #
1216 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
1216 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
1217 # \ / merging rev3 and rev4 should use bar@rev2
1217 # \ / merging rev3 and rev4 should use bar@rev2
1218 # \- 2 --- 4 as the merge base
1218 # \- 2 --- 4 as the merge base
1219 #
1219 #
1220
1220
1221 cfname = copy[0]
1221 cfname = copy[0]
1222 crev = manifest1.get(cfname)
1222 crev = manifest1.get(cfname)
1223 newfparent = fparent2
1223 newfparent = fparent2
1224
1224
1225 if manifest2: # branch merge
1225 if manifest2: # branch merge
1226 if fparent2 == nullid or crev is None: # copied on remote side
1226 if fparent2 == nullid or crev is None: # copied on remote side
1227 if cfname in manifest2:
1227 if cfname in manifest2:
1228 crev = manifest2[cfname]
1228 crev = manifest2[cfname]
1229 newfparent = fparent1
1229 newfparent = fparent1
1230
1230
1231 # find source in nearest ancestor if we've lost track
1231 # find source in nearest ancestor if we've lost track
1232 if not crev:
1232 if not crev:
1233 self.ui.debug(" %s: searching for copy revision for %s\n" %
1233 self.ui.debug(" %s: searching for copy revision for %s\n" %
1234 (fname, cfname))
1234 (fname, cfname))
1235 for ancestor in self[None].ancestors():
1235 for ancestor in self[None].ancestors():
1236 if cfname in ancestor:
1236 if cfname in ancestor:
1237 crev = ancestor[cfname].filenode()
1237 crev = ancestor[cfname].filenode()
1238 break
1238 break
1239
1239
1240 if crev:
1240 if crev:
1241 self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
1241 self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
1242 meta["copy"] = cfname
1242 meta["copy"] = cfname
1243 meta["copyrev"] = hex(crev)
1243 meta["copyrev"] = hex(crev)
1244 fparent1, fparent2 = nullid, newfparent
1244 fparent1, fparent2 = nullid, newfparent
1245 else:
1245 else:
1246 self.ui.warn(_("warning: can't find ancestor for '%s' "
1246 self.ui.warn(_("warning: can't find ancestor for '%s' "
1247 "copied from '%s'!\n") % (fname, cfname))
1247 "copied from '%s'!\n") % (fname, cfname))
1248
1248
1249 elif fparent2 != nullid:
1249 elif fparent2 != nullid:
1250 # is one parent an ancestor of the other?
1250 # is one parent an ancestor of the other?
1251 fparentancestor = flog.ancestor(fparent1, fparent2)
1251 fparentancestor = flog.ancestor(fparent1, fparent2)
1252 if fparentancestor == fparent1:
1252 if fparentancestor == fparent1:
1253 fparent1, fparent2 = fparent2, nullid
1253 fparent1, fparent2 = fparent2, nullid
1254 elif fparentancestor == fparent2:
1254 elif fparentancestor == fparent2:
1255 fparent2 = nullid
1255 fparent2 = nullid
1256
1256
1257 # is the file changed?
1257 # is the file changed?
1258 if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
1258 if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
1259 changelist.append(fname)
1259 changelist.append(fname)
1260 return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
1260 return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
1261
1261
1262 # are just the flags changed during merge?
1262 # are just the flags changed during merge?
1263 if fparent1 != fparent2o and manifest1.flags(fname) != fctx.flags():
1263 if fparent1 != fparent2o and manifest1.flags(fname) != fctx.flags():
1264 changelist.append(fname)
1264 changelist.append(fname)
1265
1265
1266 return fparent1
1266 return fparent1
1267
1267
1268 @unfilteredmethod
1268 @unfilteredmethod
1269 def commit(self, text="", user=None, date=None, match=None, force=False,
1269 def commit(self, text="", user=None, date=None, match=None, force=False,
1270 editor=False, extra={}):
1270 editor=False, extra={}):
1271 """Add a new revision to current repository.
1271 """Add a new revision to current repository.
1272
1272
1273 Revision information is gathered from the working directory,
1273 Revision information is gathered from the working directory,
1274 match can be used to filter the committed files. If editor is
1274 match can be used to filter the committed files. If editor is
1275 supplied, it is called to get a commit message.
1275 supplied, it is called to get a commit message.
1276 """
1276 """
1277
1277
1278 def fail(f, msg):
1278 def fail(f, msg):
1279 raise util.Abort('%s: %s' % (f, msg))
1279 raise util.Abort('%s: %s' % (f, msg))
1280
1280
1281 if not match:
1281 if not match:
1282 match = matchmod.always(self.root, '')
1282 match = matchmod.always(self.root, '')
1283
1283
1284 if not force:
1284 if not force:
1285 vdirs = []
1285 vdirs = []
1286 match.dir = vdirs.append
1286 match.dir = vdirs.append
1287 match.bad = fail
1287 match.bad = fail
1288
1288
1289 wlock = self.wlock()
1289 wlock = self.wlock()
1290 try:
1290 try:
1291 wctx = self[None]
1291 wctx = self[None]
1292 merge = len(wctx.parents()) > 1
1292 merge = len(wctx.parents()) > 1
1293
1293
1294 if (not force and merge and match and
1294 if (not force and merge and match and
1295 (match.files() or match.anypats())):
1295 (match.files() or match.anypats())):
1296 raise util.Abort(_('cannot partially commit a merge '
1296 raise util.Abort(_('cannot partially commit a merge '
1297 '(do not specify files or patterns)'))
1297 '(do not specify files or patterns)'))
1298
1298
1299 changes = self.status(match=match, clean=force)
1299 changes = self.status(match=match, clean=force)
1300 if force:
1300 if force:
1301 changes[0].extend(changes[6]) # mq may commit unchanged files
1301 changes[0].extend(changes[6]) # mq may commit unchanged files
1302
1302
1303 # check subrepos
1303 # check subrepos
1304 subs = []
1304 subs = []
1305 commitsubs = set()
1305 commitsubs = set()
1306 newstate = wctx.substate.copy()
1306 newstate = wctx.substate.copy()
1307 # only manage subrepos and .hgsubstate if .hgsub is present
1307 # only manage subrepos and .hgsubstate if .hgsub is present
1308 if '.hgsub' in wctx:
1308 if '.hgsub' in wctx:
1309 # we'll decide whether to track this ourselves, thanks
1309 # we'll decide whether to track this ourselves, thanks
1310 if '.hgsubstate' in changes[0]:
1310 if '.hgsubstate' in changes[0]:
1311 changes[0].remove('.hgsubstate')
1311 changes[0].remove('.hgsubstate')
1312 if '.hgsubstate' in changes[2]:
1312 if '.hgsubstate' in changes[2]:
1313 changes[2].remove('.hgsubstate')
1313 changes[2].remove('.hgsubstate')
1314
1314
1315 # compare current state to last committed state
1315 # compare current state to last committed state
1316 # build new substate based on last committed state
1316 # build new substate based on last committed state
1317 oldstate = wctx.p1().substate
1317 oldstate = wctx.p1().substate
1318 for s in sorted(newstate.keys()):
1318 for s in sorted(newstate.keys()):
1319 if not match(s):
1319 if not match(s):
1320 # ignore working copy, use old state if present
1320 # ignore working copy, use old state if present
1321 if s in oldstate:
1321 if s in oldstate:
1322 newstate[s] = oldstate[s]
1322 newstate[s] = oldstate[s]
1323 continue
1323 continue
1324 if not force:
1324 if not force:
1325 raise util.Abort(
1325 raise util.Abort(
1326 _("commit with new subrepo %s excluded") % s)
1326 _("commit with new subrepo %s excluded") % s)
1327 if wctx.sub(s).dirty(True):
1327 if wctx.sub(s).dirty(True):
1328 if not self.ui.configbool('ui', 'commitsubrepos'):
1328 if not self.ui.configbool('ui', 'commitsubrepos'):
1329 raise util.Abort(
1329 raise util.Abort(
1330 _("uncommitted changes in subrepo %s") % s,
1330 _("uncommitted changes in subrepo %s") % s,
1331 hint=_("use --subrepos for recursive commit"))
1331 hint=_("use --subrepos for recursive commit"))
1332 subs.append(s)
1332 subs.append(s)
1333 commitsubs.add(s)
1333 commitsubs.add(s)
1334 else:
1334 else:
1335 bs = wctx.sub(s).basestate()
1335 bs = wctx.sub(s).basestate()
1336 newstate[s] = (newstate[s][0], bs, newstate[s][2])
1336 newstate[s] = (newstate[s][0], bs, newstate[s][2])
1337 if oldstate.get(s, (None, None, None))[1] != bs:
1337 if oldstate.get(s, (None, None, None))[1] != bs:
1338 subs.append(s)
1338 subs.append(s)
1339
1339
1340 # check for removed subrepos
1340 # check for removed subrepos
1341 for p in wctx.parents():
1341 for p in wctx.parents():
1342 r = [s for s in p.substate if s not in newstate]
1342 r = [s for s in p.substate if s not in newstate]
1343 subs += [s for s in r if match(s)]
1343 subs += [s for s in r if match(s)]
1344 if subs:
1344 if subs:
1345 if (not match('.hgsub') and
1345 if (not match('.hgsub') and
1346 '.hgsub' in (wctx.modified() + wctx.added())):
1346 '.hgsub' in (wctx.modified() + wctx.added())):
1347 raise util.Abort(
1347 raise util.Abort(
1348 _("can't commit subrepos without .hgsub"))
1348 _("can't commit subrepos without .hgsub"))
1349 changes[0].insert(0, '.hgsubstate')
1349 changes[0].insert(0, '.hgsubstate')
1350
1350
1351 elif '.hgsub' in changes[2]:
1351 elif '.hgsub' in changes[2]:
1352 # clean up .hgsubstate when .hgsub is removed
1352 # clean up .hgsubstate when .hgsub is removed
1353 if ('.hgsubstate' in wctx and
1353 if ('.hgsubstate' in wctx and
1354 '.hgsubstate' not in changes[0] + changes[1] + changes[2]):
1354 '.hgsubstate' not in changes[0] + changes[1] + changes[2]):
1355 changes[2].insert(0, '.hgsubstate')
1355 changes[2].insert(0, '.hgsubstate')
1356
1356
1357 # make sure all explicit patterns are matched
1357 # make sure all explicit patterns are matched
1358 if not force and match.files():
1358 if not force and match.files():
1359 matched = set(changes[0] + changes[1] + changes[2])
1359 matched = set(changes[0] + changes[1] + changes[2])
1360
1360
1361 for f in match.files():
1361 for f in match.files():
1362 f = self.dirstate.normalize(f)
1362 f = self.dirstate.normalize(f)
1363 if f == '.' or f in matched or f in wctx.substate:
1363 if f == '.' or f in matched or f in wctx.substate:
1364 continue
1364 continue
1365 if f in changes[3]: # missing
1365 if f in changes[3]: # missing
1366 fail(f, _('file not found!'))
1366 fail(f, _('file not found!'))
1367 if f in vdirs: # visited directory
1367 if f in vdirs: # visited directory
1368 d = f + '/'
1368 d = f + '/'
1369 for mf in matched:
1369 for mf in matched:
1370 if mf.startswith(d):
1370 if mf.startswith(d):
1371 break
1371 break
1372 else:
1372 else:
1373 fail(f, _("no match under directory!"))
1373 fail(f, _("no match under directory!"))
1374 elif f not in self.dirstate:
1374 elif f not in self.dirstate:
1375 fail(f, _("file not tracked!"))
1375 fail(f, _("file not tracked!"))
1376
1376
1377 if (not force and not extra.get("close") and not merge
1377 if (not force and not extra.get("close") and not merge
1378 and not (changes[0] or changes[1] or changes[2])
1378 and not (changes[0] or changes[1] or changes[2])
1379 and wctx.branch() == wctx.p1().branch()):
1379 and wctx.branch() == wctx.p1().branch()):
1380 return None
1380 return None
1381
1381
1382 if merge and changes[3]:
1382 if merge and changes[3]:
1383 raise util.Abort(_("cannot commit merge with missing files"))
1383 raise util.Abort(_("cannot commit merge with missing files"))
1384
1384
1385 ms = mergemod.mergestate(self)
1385 ms = mergemod.mergestate(self)
1386 for f in changes[0]:
1386 for f in changes[0]:
1387 if f in ms and ms[f] == 'u':
1387 if f in ms and ms[f] == 'u':
1388 raise util.Abort(_("unresolved merge conflicts "
1388 raise util.Abort(_("unresolved merge conflicts "
1389 "(see hg help resolve)"))
1389 "(see hg help resolve)"))
1390
1390
1391 cctx = context.workingctx(self, text, user, date, extra, changes)
1391 cctx = context.workingctx(self, text, user, date, extra, changes)
1392 if editor:
1392 if editor:
1393 cctx._text = editor(self, cctx, subs)
1393 cctx._text = editor(self, cctx, subs)
1394 edited = (text != cctx._text)
1394 edited = (text != cctx._text)
1395
1395
1396 # commit subs and write new state
1396 # commit subs and write new state
1397 if subs:
1397 if subs:
1398 for s in sorted(commitsubs):
1398 for s in sorted(commitsubs):
1399 sub = wctx.sub(s)
1399 sub = wctx.sub(s)
1400 self.ui.status(_('committing subrepository %s\n') %
1400 self.ui.status(_('committing subrepository %s\n') %
1401 subrepo.subrelpath(sub))
1401 subrepo.subrelpath(sub))
1402 sr = sub.commit(cctx._text, user, date)
1402 sr = sub.commit(cctx._text, user, date)
1403 newstate[s] = (newstate[s][0], sr)
1403 newstate[s] = (newstate[s][0], sr)
1404 subrepo.writestate(self, newstate)
1404 subrepo.writestate(self, newstate)
1405
1405
1406 # Save commit message in case this transaction gets rolled back
1406 # Save commit message in case this transaction gets rolled back
1407 # (e.g. by a pretxncommit hook). Leave the content alone on
1407 # (e.g. by a pretxncommit hook). Leave the content alone on
1408 # the assumption that the user will use the same editor again.
1408 # the assumption that the user will use the same editor again.
1409 msgfn = self.savecommitmessage(cctx._text)
1409 msgfn = self.savecommitmessage(cctx._text)
1410
1410
1411 p1, p2 = self.dirstate.parents()
1411 p1, p2 = self.dirstate.parents()
1412 hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
1412 hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
1413 try:
1413 try:
1414 self.hook("precommit", throw=True, parent1=hookp1,
1414 self.hook("precommit", throw=True, parent1=hookp1,
1415 parent2=hookp2)
1415 parent2=hookp2)
1416 ret = self.commitctx(cctx, True)
1416 ret = self.commitctx(cctx, True)
1417 except: # re-raises
1417 except: # re-raises
1418 if edited:
1418 if edited:
1419 self.ui.write(
1419 self.ui.write(
1420 _('note: commit message saved in %s\n') % msgfn)
1420 _('note: commit message saved in %s\n') % msgfn)
1421 raise
1421 raise
1422
1422
1423 # update bookmarks, dirstate and mergestate
1423 # update bookmarks, dirstate and mergestate
1424 bookmarks.update(self, [p1, p2], ret)
1424 bookmarks.update(self, [p1, p2], ret)
1425 for f in changes[0] + changes[1]:
1425 for f in changes[0] + changes[1]:
1426 self.dirstate.normal(f)
1426 self.dirstate.normal(f)
1427 for f in changes[2]:
1427 for f in changes[2]:
1428 self.dirstate.drop(f)
1428 self.dirstate.drop(f)
1429 self.dirstate.setparents(ret)
1429 self.dirstate.setparents(ret)
1430 ms.reset()
1430 ms.reset()
1431 finally:
1431 finally:
1432 wlock.release()
1432 wlock.release()
1433
1433
1434 def commithook(node=hex(ret), parent1=hookp1, parent2=hookp2):
1434 def commithook(node=hex(ret), parent1=hookp1, parent2=hookp2):
1435 self.hook("commit", node=node, parent1=parent1, parent2=parent2)
1435 self.hook("commit", node=node, parent1=parent1, parent2=parent2)
1436 self._afterlock(commithook)
1436 self._afterlock(commithook)
1437 return ret
1437 return ret
1438
1438
1439 @unfilteredmethod
1439 @unfilteredmethod
1440 def commitctx(self, ctx, error=False):
1440 def commitctx(self, ctx, error=False):
1441 """Add a new revision to current repository.
1441 """Add a new revision to current repository.
1442 Revision information is passed via the context argument.
1442 Revision information is passed via the context argument.
1443 """
1443 """
1444
1444
1445 tr = lock = None
1445 tr = lock = None
1446 removed = list(ctx.removed())
1446 removed = list(ctx.removed())
1447 p1, p2 = ctx.p1(), ctx.p2()
1447 p1, p2 = ctx.p1(), ctx.p2()
1448 user = ctx.user()
1448 user = ctx.user()
1449
1449
1450 lock = self.lock()
1450 lock = self.lock()
1451 try:
1451 try:
1452 tr = self.transaction("commit")
1452 tr = self.transaction("commit")
1453 trp = weakref.proxy(tr)
1453 trp = weakref.proxy(tr)
1454
1454
1455 if ctx.files():
1455 if ctx.files():
1456 m1 = p1.manifest().copy()
1456 m1 = p1.manifest().copy()
1457 m2 = p2.manifest()
1457 m2 = p2.manifest()
1458
1458
1459 # check in files
1459 # check in files
1460 new = {}
1460 new = {}
1461 changed = []
1461 changed = []
1462 linkrev = len(self)
1462 linkrev = len(self)
1463 for f in sorted(ctx.modified() + ctx.added()):
1463 for f in sorted(ctx.modified() + ctx.added()):
1464 self.ui.note(f + "\n")
1464 self.ui.note(f + "\n")
1465 try:
1465 try:
1466 fctx = ctx[f]
1466 fctx = ctx[f]
1467 new[f] = self._filecommit(fctx, m1, m2, linkrev, trp,
1467 new[f] = self._filecommit(fctx, m1, m2, linkrev, trp,
1468 changed)
1468 changed)
1469 m1.set(f, fctx.flags())
1469 m1.set(f, fctx.flags())
1470 except OSError, inst:
1470 except OSError, inst:
1471 self.ui.warn(_("trouble committing %s!\n") % f)
1471 self.ui.warn(_("trouble committing %s!\n") % f)
1472 raise
1472 raise
1473 except IOError, inst:
1473 except IOError, inst:
1474 errcode = getattr(inst, 'errno', errno.ENOENT)
1474 errcode = getattr(inst, 'errno', errno.ENOENT)
1475 if error or errcode and errcode != errno.ENOENT:
1475 if error or errcode and errcode != errno.ENOENT:
1476 self.ui.warn(_("trouble committing %s!\n") % f)
1476 self.ui.warn(_("trouble committing %s!\n") % f)
1477 raise
1477 raise
1478 else:
1478 else:
1479 removed.append(f)
1479 removed.append(f)
1480
1480
1481 # update manifest
1481 # update manifest
1482 m1.update(new)
1482 m1.update(new)
1483 removed = [f for f in sorted(removed) if f in m1 or f in m2]
1483 removed = [f for f in sorted(removed) if f in m1 or f in m2]
1484 drop = [f for f in removed if f in m1]
1484 drop = [f for f in removed if f in m1]
1485 for f in drop:
1485 for f in drop:
1486 del m1[f]
1486 del m1[f]
1487 mn = self.manifest.add(m1, trp, linkrev, p1.manifestnode(),
1487 mn = self.manifest.add(m1, trp, linkrev, p1.manifestnode(),
1488 p2.manifestnode(), (new, drop))
1488 p2.manifestnode(), (new, drop))
1489 files = changed + removed
1489 files = changed + removed
1490 else:
1490 else:
1491 mn = p1.manifestnode()
1491 mn = p1.manifestnode()
1492 files = []
1492 files = []
1493
1493
1494 # update changelog
1494 # update changelog
1495 self.changelog.delayupdate()
1495 self.changelog.delayupdate()
1496 n = self.changelog.add(mn, files, ctx.description(),
1496 n = self.changelog.add(mn, files, ctx.description(),
1497 trp, p1.node(), p2.node(),
1497 trp, p1.node(), p2.node(),
1498 user, ctx.date(), ctx.extra().copy())
1498 user, ctx.date(), ctx.extra().copy())
1499 p = lambda: self.changelog.writepending() and self.root or ""
1499 p = lambda: self.changelog.writepending() and self.root or ""
1500 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
1500 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
1501 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
1501 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
1502 parent2=xp2, pending=p)
1502 parent2=xp2, pending=p)
1503 self.changelog.finalize(trp)
1503 self.changelog.finalize(trp)
1504 # set the new commit is proper phase
1504 # set the new commit is proper phase
1505 targetphase = phases.newcommitphase(self.ui)
1505 targetphase = phases.newcommitphase(self.ui)
1506 if targetphase:
1506 if targetphase:
1507 # retract boundary do not alter parent changeset.
1507 # retract boundary do not alter parent changeset.
1508 # if a parent have higher the resulting phase will
1508 # if a parent have higher the resulting phase will
1509 # be compliant anyway
1509 # be compliant anyway
1510 #
1510 #
1511 # if minimal phase was 0 we don't need to retract anything
1511 # if minimal phase was 0 we don't need to retract anything
1512 phases.retractboundary(self, targetphase, [n])
1512 phases.retractboundary(self, targetphase, [n])
1513 tr.close()
1513 tr.close()
1514 self.updatebranchcache()
1514 self.updatebranchcache()
1515 return n
1515 return n
1516 finally:
1516 finally:
1517 if tr:
1517 if tr:
1518 tr.release()
1518 tr.release()
1519 lock.release()
1519 lock.release()
1520
1520
1521 @unfilteredmethod
1521 @unfilteredmethod
1522 def destroyed(self, newheadnodes=None):
1522 def destroyed(self, newheadnodes=None):
1523 '''Inform the repository that nodes have been destroyed.
1523 '''Inform the repository that nodes have been destroyed.
1524 Intended for use by strip and rollback, so there's a common
1524 Intended for use by strip and rollback, so there's a common
1525 place for anything that has to be done after destroying history.
1525 place for anything that has to be done after destroying history.
1526
1526
1527 If you know the branchheadcache was uptodate before nodes were removed
1527 If you know the branchheadcache was uptodate before nodes were removed
1528 and you also know the set of candidate new heads that may have resulted
1528 and you also know the set of candidate new heads that may have resulted
1529 from the destruction, you can set newheadnodes. This will enable the
1529 from the destruction, you can set newheadnodes. This will enable the
1530 code to update the branchheads cache, rather than having future code
1530 code to update the branchheads cache, rather than having future code
1531 decide it's invalid and regenerating it from scratch.
1531 decide it's invalid and regenerating it from scratch.
1532 '''
1532 '''
1533 # If we have info, newheadnodes, on how to update the branch cache, do
1533 # If we have info, newheadnodes, on how to update the branch cache, do
1534 # it, Otherwise, since nodes were destroyed, the cache is stale and this
1534 # it, Otherwise, since nodes were destroyed, the cache is stale and this
1535 # will be caught the next time it is read.
1535 # will be caught the next time it is read.
1536 if newheadnodes:
1536 if newheadnodes:
1537 tiprev = len(self) - 1
1537 tiprev = len(self) - 1
1538 ctxgen = (self[node] for node in newheadnodes
1538 ctxgen = (self[node] for node in newheadnodes
1539 if self.changelog.hasnode(node))
1539 if self.changelog.hasnode(node))
1540 self._updatebranchcache(self._branchcache, ctxgen)
1540 self._updatebranchcache(self._branchcache, ctxgen)
1541 self._writebranchcache(self._branchcache, self.changelog.tip(),
1541 self._writebranchcache(self._branchcache, self.changelog.tip(),
1542 tiprev)
1542 tiprev)
1543
1543
1544 # Ensure the persistent tag cache is updated. Doing it now
1544 # Ensure the persistent tag cache is updated. Doing it now
1545 # means that the tag cache only has to worry about destroyed
1545 # means that the tag cache only has to worry about destroyed
1546 # heads immediately after a strip/rollback. That in turn
1546 # heads immediately after a strip/rollback. That in turn
1547 # guarantees that "cachetip == currenttip" (comparing both rev
1547 # guarantees that "cachetip == currenttip" (comparing both rev
1548 # and node) always means no nodes have been added or destroyed.
1548 # and node) always means no nodes have been added or destroyed.
1549
1549
1550 # XXX this is suboptimal when qrefresh'ing: we strip the current
1550 # XXX this is suboptimal when qrefresh'ing: we strip the current
1551 # head, refresh the tag cache, then immediately add a new head.
1551 # head, refresh the tag cache, then immediately add a new head.
1552 # But I think doing it this way is necessary for the "instant
1552 # But I think doing it this way is necessary for the "instant
1553 # tag cache retrieval" case to work.
1553 # tag cache retrieval" case to work.
1554 self.invalidatecaches()
1554 self.invalidatecaches()
1555
1555
1556 # Discard all cache entries to force reloading everything.
1556 # Discard all cache entries to force reloading everything.
1557 self._filecache.clear()
1557 self._filecache.clear()
1558
1558
1559 def walk(self, match, node=None):
1559 def walk(self, match, node=None):
1560 '''
1560 '''
1561 walk recursively through the directory tree or a given
1561 walk recursively through the directory tree or a given
1562 changeset, finding all files matched by the match
1562 changeset, finding all files matched by the match
1563 function
1563 function
1564 '''
1564 '''
1565 return self[node].walk(match)
1565 return self[node].walk(match)
1566
1566
1567 def status(self, node1='.', node2=None, match=None,
1567 def status(self, node1='.', node2=None, match=None,
1568 ignored=False, clean=False, unknown=False,
1568 ignored=False, clean=False, unknown=False,
1569 listsubrepos=False):
1569 listsubrepos=False):
1570 """return status of files between two nodes or node and working
1570 """return status of files between two nodes or node and working
1571 directory.
1571 directory.
1572
1572
1573 If node1 is None, use the first dirstate parent instead.
1573 If node1 is None, use the first dirstate parent instead.
1574 If node2 is None, compare node1 with working directory.
1574 If node2 is None, compare node1 with working directory.
1575 """
1575 """
1576
1576
1577 def mfmatches(ctx):
1577 def mfmatches(ctx):
1578 mf = ctx.manifest().copy()
1578 mf = ctx.manifest().copy()
1579 if match.always():
1579 if match.always():
1580 return mf
1580 return mf
1581 for fn in mf.keys():
1581 for fn in mf.keys():
1582 if not match(fn):
1582 if not match(fn):
1583 del mf[fn]
1583 del mf[fn]
1584 return mf
1584 return mf
1585
1585
1586 if isinstance(node1, context.changectx):
1586 if isinstance(node1, context.changectx):
1587 ctx1 = node1
1587 ctx1 = node1
1588 else:
1588 else:
1589 ctx1 = self[node1]
1589 ctx1 = self[node1]
1590 if isinstance(node2, context.changectx):
1590 if isinstance(node2, context.changectx):
1591 ctx2 = node2
1591 ctx2 = node2
1592 else:
1592 else:
1593 ctx2 = self[node2]
1593 ctx2 = self[node2]
1594
1594
1595 working = ctx2.rev() is None
1595 working = ctx2.rev() is None
1596 parentworking = working and ctx1 == self['.']
1596 parentworking = working and ctx1 == self['.']
1597 match = match or matchmod.always(self.root, self.getcwd())
1597 match = match or matchmod.always(self.root, self.getcwd())
1598 listignored, listclean, listunknown = ignored, clean, unknown
1598 listignored, listclean, listunknown = ignored, clean, unknown
1599
1599
1600 # load earliest manifest first for caching reasons
1600 # load earliest manifest first for caching reasons
1601 if not working and ctx2.rev() < ctx1.rev():
1601 if not working and ctx2.rev() < ctx1.rev():
1602 ctx2.manifest()
1602 ctx2.manifest()
1603
1603
1604 if not parentworking:
1604 if not parentworking:
1605 def bad(f, msg):
1605 def bad(f, msg):
1606 # 'f' may be a directory pattern from 'match.files()',
1606 # 'f' may be a directory pattern from 'match.files()',
1607 # so 'f not in ctx1' is not enough
1607 # so 'f not in ctx1' is not enough
1608 if f not in ctx1 and f not in ctx1.dirs():
1608 if f not in ctx1 and f not in ctx1.dirs():
1609 self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
1609 self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
1610 match.bad = bad
1610 match.bad = bad
1611
1611
1612 if working: # we need to scan the working dir
1612 if working: # we need to scan the working dir
1613 subrepos = []
1613 subrepos = []
1614 if '.hgsub' in self.dirstate:
1614 if '.hgsub' in self.dirstate:
1615 subrepos = ctx2.substate.keys()
1615 subrepos = ctx2.substate.keys()
1616 s = self.dirstate.status(match, subrepos, listignored,
1616 s = self.dirstate.status(match, subrepos, listignored,
1617 listclean, listunknown)
1617 listclean, listunknown)
1618 cmp, modified, added, removed, deleted, unknown, ignored, clean = s
1618 cmp, modified, added, removed, deleted, unknown, ignored, clean = s
1619
1619
1620 # check for any possibly clean files
1620 # check for any possibly clean files
1621 if parentworking and cmp:
1621 if parentworking and cmp:
1622 fixup = []
1622 fixup = []
1623 # do a full compare of any files that might have changed
1623 # do a full compare of any files that might have changed
1624 for f in sorted(cmp):
1624 for f in sorted(cmp):
1625 if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
1625 if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
1626 or ctx1[f].cmp(ctx2[f])):
1626 or ctx1[f].cmp(ctx2[f])):
1627 modified.append(f)
1627 modified.append(f)
1628 else:
1628 else:
1629 fixup.append(f)
1629 fixup.append(f)
1630
1630
1631 # update dirstate for files that are actually clean
1631 # update dirstate for files that are actually clean
1632 if fixup:
1632 if fixup:
1633 if listclean:
1633 if listclean:
1634 clean += fixup
1634 clean += fixup
1635
1635
1636 try:
1636 try:
1637 # updating the dirstate is optional
1637 # updating the dirstate is optional
1638 # so we don't wait on the lock
1638 # so we don't wait on the lock
1639 wlock = self.wlock(False)
1639 wlock = self.wlock(False)
1640 try:
1640 try:
1641 for f in fixup:
1641 for f in fixup:
1642 self.dirstate.normal(f)
1642 self.dirstate.normal(f)
1643 finally:
1643 finally:
1644 wlock.release()
1644 wlock.release()
1645 except error.LockError:
1645 except error.LockError:
1646 pass
1646 pass
1647
1647
1648 if not parentworking:
1648 if not parentworking:
1649 mf1 = mfmatches(ctx1)
1649 mf1 = mfmatches(ctx1)
1650 if working:
1650 if working:
1651 # we are comparing working dir against non-parent
1651 # we are comparing working dir against non-parent
1652 # generate a pseudo-manifest for the working dir
1652 # generate a pseudo-manifest for the working dir
1653 mf2 = mfmatches(self['.'])
1653 mf2 = mfmatches(self['.'])
1654 for f in cmp + modified + added:
1654 for f in cmp + modified + added:
1655 mf2[f] = None
1655 mf2[f] = None
1656 mf2.set(f, ctx2.flags(f))
1656 mf2.set(f, ctx2.flags(f))
1657 for f in removed:
1657 for f in removed:
1658 if f in mf2:
1658 if f in mf2:
1659 del mf2[f]
1659 del mf2[f]
1660 else:
1660 else:
1661 # we are comparing two revisions
1661 # we are comparing two revisions
1662 deleted, unknown, ignored = [], [], []
1662 deleted, unknown, ignored = [], [], []
1663 mf2 = mfmatches(ctx2)
1663 mf2 = mfmatches(ctx2)
1664
1664
1665 modified, added, clean = [], [], []
1665 modified, added, clean = [], [], []
1666 withflags = mf1.withflags() | mf2.withflags()
1666 withflags = mf1.withflags() | mf2.withflags()
1667 for fn in mf2:
1667 for fn in mf2:
1668 if fn in mf1:
1668 if fn in mf1:
1669 if (fn not in deleted and
1669 if (fn not in deleted and
1670 ((fn in withflags and mf1.flags(fn) != mf2.flags(fn)) or
1670 ((fn in withflags and mf1.flags(fn) != mf2.flags(fn)) or
1671 (mf1[fn] != mf2[fn] and
1671 (mf1[fn] != mf2[fn] and
1672 (mf2[fn] or ctx1[fn].cmp(ctx2[fn]))))):
1672 (mf2[fn] or ctx1[fn].cmp(ctx2[fn]))))):
1673 modified.append(fn)
1673 modified.append(fn)
1674 elif listclean:
1674 elif listclean:
1675 clean.append(fn)
1675 clean.append(fn)
1676 del mf1[fn]
1676 del mf1[fn]
1677 elif fn not in deleted:
1677 elif fn not in deleted:
1678 added.append(fn)
1678 added.append(fn)
1679 removed = mf1.keys()
1679 removed = mf1.keys()
1680
1680
1681 if working and modified and not self.dirstate._checklink:
1681 if working and modified and not self.dirstate._checklink:
1682 # Symlink placeholders may get non-symlink-like contents
1682 # Symlink placeholders may get non-symlink-like contents
1683 # via user error or dereferencing by NFS or Samba servers,
1683 # via user error or dereferencing by NFS or Samba servers,
1684 # so we filter out any placeholders that don't look like a
1684 # so we filter out any placeholders that don't look like a
1685 # symlink
1685 # symlink
1686 sane = []
1686 sane = []
1687 for f in modified:
1687 for f in modified:
1688 if ctx2.flags(f) == 'l':
1688 if ctx2.flags(f) == 'l':
1689 d = ctx2[f].data()
1689 d = ctx2[f].data()
1690 if len(d) >= 1024 or '\n' in d or util.binary(d):
1690 if len(d) >= 1024 or '\n' in d or util.binary(d):
1691 self.ui.debug('ignoring suspect symlink placeholder'
1691 self.ui.debug('ignoring suspect symlink placeholder'
1692 ' "%s"\n' % f)
1692 ' "%s"\n' % f)
1693 continue
1693 continue
1694 sane.append(f)
1694 sane.append(f)
1695 modified = sane
1695 modified = sane
1696
1696
1697 r = modified, added, removed, deleted, unknown, ignored, clean
1697 r = modified, added, removed, deleted, unknown, ignored, clean
1698
1698
1699 if listsubrepos:
1699 if listsubrepos:
1700 for subpath, sub in subrepo.itersubrepos(ctx1, ctx2):
1700 for subpath, sub in subrepo.itersubrepos(ctx1, ctx2):
1701 if working:
1701 if working:
1702 rev2 = None
1702 rev2 = None
1703 else:
1703 else:
1704 rev2 = ctx2.substate[subpath][1]
1704 rev2 = ctx2.substate[subpath][1]
1705 try:
1705 try:
1706 submatch = matchmod.narrowmatcher(subpath, match)
1706 submatch = matchmod.narrowmatcher(subpath, match)
1707 s = sub.status(rev2, match=submatch, ignored=listignored,
1707 s = sub.status(rev2, match=submatch, ignored=listignored,
1708 clean=listclean, unknown=listunknown,
1708 clean=listclean, unknown=listunknown,
1709 listsubrepos=True)
1709 listsubrepos=True)
1710 for rfiles, sfiles in zip(r, s):
1710 for rfiles, sfiles in zip(r, s):
1711 rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
1711 rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
1712 except error.LookupError:
1712 except error.LookupError:
1713 self.ui.status(_("skipping missing subrepository: %s\n")
1713 self.ui.status(_("skipping missing subrepository: %s\n")
1714 % subpath)
1714 % subpath)
1715
1715
1716 for l in r:
1716 for l in r:
1717 l.sort()
1717 l.sort()
1718 return r
1718 return r
1719
1719
1720 def heads(self, start=None):
1720 def heads(self, start=None):
1721 heads = self.changelog.heads(start)
1721 heads = self.changelog.heads(start)
1722 # sort the output in rev descending order
1722 # sort the output in rev descending order
1723 return sorted(heads, key=self.changelog.rev, reverse=True)
1723 return sorted(heads, key=self.changelog.rev, reverse=True)
1724
1724
1725 def branchheads(self, branch=None, start=None, closed=False):
1725 def branchheads(self, branch=None, start=None, closed=False):
1726 '''return a (possibly filtered) list of heads for the given branch
1726 '''return a (possibly filtered) list of heads for the given branch
1727
1727
1728 Heads are returned in topological order, from newest to oldest.
1728 Heads are returned in topological order, from newest to oldest.
1729 If branch is None, use the dirstate branch.
1729 If branch is None, use the dirstate branch.
1730 If start is not None, return only heads reachable from start.
1730 If start is not None, return only heads reachable from start.
1731 If closed is True, return heads that are marked as closed as well.
1731 If closed is True, return heads that are marked as closed as well.
1732 '''
1732 '''
1733 if branch is None:
1733 if branch is None:
1734 branch = self[None].branch()
1734 branch = self[None].branch()
1735 branches = self.branchmap()
1735 branches = self.branchmap()
1736 if branch not in branches:
1736 if branch not in branches:
1737 return []
1737 return []
1738 # the cache returns heads ordered lowest to highest
1738 # the cache returns heads ordered lowest to highest
1739 bheads = list(reversed(branches[branch]))
1739 bheads = list(reversed(branches[branch]))
1740 if start is not None:
1740 if start is not None:
1741 # filter out the heads that cannot be reached from startrev
1741 # filter out the heads that cannot be reached from startrev
1742 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
1742 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
1743 bheads = [h for h in bheads if h in fbheads]
1743 bheads = [h for h in bheads if h in fbheads]
1744 if not closed:
1744 if not closed:
1745 bheads = [h for h in bheads if not self[h].closesbranch()]
1745 bheads = [h for h in bheads if not self[h].closesbranch()]
1746 return bheads
1746 return bheads
1747
1747
1748 def branches(self, nodes):
1748 def branches(self, nodes):
1749 if not nodes:
1749 if not nodes:
1750 nodes = [self.changelog.tip()]
1750 nodes = [self.changelog.tip()]
1751 b = []
1751 b = []
1752 for n in nodes:
1752 for n in nodes:
1753 t = n
1753 t = n
1754 while True:
1754 while True:
1755 p = self.changelog.parents(n)
1755 p = self.changelog.parents(n)
1756 if p[1] != nullid or p[0] == nullid:
1756 if p[1] != nullid or p[0] == nullid:
1757 b.append((t, n, p[0], p[1]))
1757 b.append((t, n, p[0], p[1]))
1758 break
1758 break
1759 n = p[0]
1759 n = p[0]
1760 return b
1760 return b
1761
1761
1762 def between(self, pairs):
1762 def between(self, pairs):
1763 r = []
1763 r = []
1764
1764
1765 for top, bottom in pairs:
1765 for top, bottom in pairs:
1766 n, l, i = top, [], 0
1766 n, l, i = top, [], 0
1767 f = 1
1767 f = 1
1768
1768
1769 while n != bottom and n != nullid:
1769 while n != bottom and n != nullid:
1770 p = self.changelog.parents(n)[0]
1770 p = self.changelog.parents(n)[0]
1771 if i == f:
1771 if i == f:
1772 l.append(n)
1772 l.append(n)
1773 f = f * 2
1773 f = f * 2
1774 n = p
1774 n = p
1775 i += 1
1775 i += 1
1776
1776
1777 r.append(l)
1777 r.append(l)
1778
1778
1779 return r
1779 return r
1780
1780
1781 def pull(self, remote, heads=None, force=False):
1781 def pull(self, remote, heads=None, force=False):
1782 # don't open transaction for nothing or you break future useful
1782 # don't open transaction for nothing or you break future useful
1783 # rollback call
1783 # rollback call
1784 tr = None
1784 tr = None
1785 trname = 'pull\n' + util.hidepassword(remote.url())
1785 trname = 'pull\n' + util.hidepassword(remote.url())
1786 lock = self.lock()
1786 lock = self.lock()
1787 try:
1787 try:
1788 tmp = discovery.findcommonincoming(self, remote, heads=heads,
1788 tmp = discovery.findcommonincoming(self, remote, heads=heads,
1789 force=force)
1789 force=force)
1790 common, fetch, rheads = tmp
1790 common, fetch, rheads = tmp
1791 if not fetch:
1791 if not fetch:
1792 self.ui.status(_("no changes found\n"))
1792 self.ui.status(_("no changes found\n"))
1793 added = []
1793 added = []
1794 result = 0
1794 result = 0
1795 else:
1795 else:
1796 tr = self.transaction(trname)
1796 tr = self.transaction(trname)
1797 if heads is None and list(common) == [nullid]:
1797 if heads is None and list(common) == [nullid]:
1798 self.ui.status(_("requesting all changes\n"))
1798 self.ui.status(_("requesting all changes\n"))
1799 elif heads is None and remote.capable('changegroupsubset'):
1799 elif heads is None and remote.capable('changegroupsubset'):
1800 # issue1320, avoid a race if remote changed after discovery
1800 # issue1320, avoid a race if remote changed after discovery
1801 heads = rheads
1801 heads = rheads
1802
1802
1803 if remote.capable('getbundle'):
1803 if remote.capable('getbundle'):
1804 cg = remote.getbundle('pull', common=common,
1804 cg = remote.getbundle('pull', common=common,
1805 heads=heads or rheads)
1805 heads=heads or rheads)
1806 elif heads is None:
1806 elif heads is None:
1807 cg = remote.changegroup(fetch, 'pull')
1807 cg = remote.changegroup(fetch, 'pull')
1808 elif not remote.capable('changegroupsubset'):
1808 elif not remote.capable('changegroupsubset'):
1809 raise util.Abort(_("partial pull cannot be done because "
1809 raise util.Abort(_("partial pull cannot be done because "
1810 "other repository doesn't support "
1810 "other repository doesn't support "
1811 "changegroupsubset."))
1811 "changegroupsubset."))
1812 else:
1812 else:
1813 cg = remote.changegroupsubset(fetch, heads, 'pull')
1813 cg = remote.changegroupsubset(fetch, heads, 'pull')
1814 clstart = len(self.changelog)
1814 clstart = len(self.changelog)
1815 result = self.addchangegroup(cg, 'pull', remote.url())
1815 result = self.addchangegroup(cg, 'pull', remote.url())
1816 clend = len(self.changelog)
1816 clend = len(self.changelog)
1817 added = [self.changelog.node(r) for r in xrange(clstart, clend)]
1817 added = [self.changelog.node(r) for r in xrange(clstart, clend)]
1818
1818
1819 # compute target subset
1819 # compute target subset
1820 if heads is None:
1820 if heads is None:
1821 # We pulled every thing possible
1821 # We pulled every thing possible
1822 # sync on everything common
1822 # sync on everything common
1823 subset = common + added
1823 subset = common + added
1824 else:
1824 else:
1825 # We pulled a specific subset
1825 # We pulled a specific subset
1826 # sync on this subset
1826 # sync on this subset
1827 subset = heads
1827 subset = heads
1828
1828
1829 # Get remote phases data from remote
1829 # Get remote phases data from remote
1830 remotephases = remote.listkeys('phases')
1830 remotephases = remote.listkeys('phases')
1831 publishing = bool(remotephases.get('publishing', False))
1831 publishing = bool(remotephases.get('publishing', False))
1832 if remotephases and not publishing:
1832 if remotephases and not publishing:
1833 # remote is new and unpublishing
1833 # remote is new and unpublishing
1834 pheads, _dr = phases.analyzeremotephases(self, subset,
1834 pheads, _dr = phases.analyzeremotephases(self, subset,
1835 remotephases)
1835 remotephases)
1836 phases.advanceboundary(self, phases.public, pheads)
1836 phases.advanceboundary(self, phases.public, pheads)
1837 phases.advanceboundary(self, phases.draft, subset)
1837 phases.advanceboundary(self, phases.draft, subset)
1838 else:
1838 else:
1839 # Remote is old or publishing all common changesets
1839 # Remote is old or publishing all common changesets
1840 # should be seen as public
1840 # should be seen as public
1841 phases.advanceboundary(self, phases.public, subset)
1841 phases.advanceboundary(self, phases.public, subset)
1842
1842
1843 if obsolete._enabled:
1843 if obsolete._enabled:
1844 self.ui.debug('fetching remote obsolete markers\n')
1844 self.ui.debug('fetching remote obsolete markers\n')
1845 remoteobs = remote.listkeys('obsolete')
1845 remoteobs = remote.listkeys('obsolete')
1846 if 'dump0' in remoteobs:
1846 if 'dump0' in remoteobs:
1847 if tr is None:
1847 if tr is None:
1848 tr = self.transaction(trname)
1848 tr = self.transaction(trname)
1849 for key in sorted(remoteobs, reverse=True):
1849 for key in sorted(remoteobs, reverse=True):
1850 if key.startswith('dump'):
1850 if key.startswith('dump'):
1851 data = base85.b85decode(remoteobs[key])
1851 data = base85.b85decode(remoteobs[key])
1852 self.obsstore.mergemarkers(tr, data)
1852 self.obsstore.mergemarkers(tr, data)
1853 if tr is not None:
1853 if tr is not None:
1854 tr.close()
1854 tr.close()
1855 finally:
1855 finally:
1856 if tr is not None:
1856 if tr is not None:
1857 tr.release()
1857 tr.release()
1858 lock.release()
1858 lock.release()
1859
1859
1860 return result
1860 return result
1861
1861
1862 def checkpush(self, force, revs):
1862 def checkpush(self, force, revs):
1863 """Extensions can override this function if additional checks have
1863 """Extensions can override this function if additional checks have
1864 to be performed before pushing, or call it if they override push
1864 to be performed before pushing, or call it if they override push
1865 command.
1865 command.
1866 """
1866 """
1867 pass
1867 pass
1868
1868
1869 def push(self, remote, force=False, revs=None, newbranch=False):
1869 def push(self, remote, force=False, revs=None, newbranch=False):
1870 '''Push outgoing changesets (limited by revs) from the current
1870 '''Push outgoing changesets (limited by revs) from the current
1871 repository to remote. Return an integer:
1871 repository to remote. Return an integer:
1872 - None means nothing to push
1872 - None means nothing to push
1873 - 0 means HTTP error
1873 - 0 means HTTP error
1874 - 1 means we pushed and remote head count is unchanged *or*
1874 - 1 means we pushed and remote head count is unchanged *or*
1875 we have outgoing changesets but refused to push
1875 we have outgoing changesets but refused to push
1876 - other values as described by addchangegroup()
1876 - other values as described by addchangegroup()
1877 '''
1877 '''
1878 # there are two ways to push to remote repo:
1878 # there are two ways to push to remote repo:
1879 #
1879 #
1880 # addchangegroup assumes local user can lock remote
1880 # addchangegroup assumes local user can lock remote
1881 # repo (local filesystem, old ssh servers).
1881 # repo (local filesystem, old ssh servers).
1882 #
1882 #
1883 # unbundle assumes local user cannot lock remote repo (new ssh
1883 # unbundle assumes local user cannot lock remote repo (new ssh
1884 # servers, http servers).
1884 # servers, http servers).
1885
1885
1886 if not remote.canpush():
1886 if not remote.canpush():
1887 raise util.Abort(_("destination does not support push"))
1887 raise util.Abort(_("destination does not support push"))
1888 unfi = self.unfiltered()
1888 unfi = self.unfiltered()
1889 # get local lock as we might write phase data
1889 # get local lock as we might write phase data
1890 locallock = self.lock()
1890 locallock = self.lock()
1891 try:
1891 try:
1892 self.checkpush(force, revs)
1892 self.checkpush(force, revs)
1893 lock = None
1893 lock = None
1894 unbundle = remote.capable('unbundle')
1894 unbundle = remote.capable('unbundle')
1895 if not unbundle:
1895 if not unbundle:
1896 lock = remote.lock()
1896 lock = remote.lock()
1897 try:
1897 try:
1898 # discovery
1898 # discovery
1899 fci = discovery.findcommonincoming
1899 fci = discovery.findcommonincoming
1900 commoninc = fci(unfi, remote, force=force)
1900 commoninc = fci(unfi, remote, force=force)
1901 common, inc, remoteheads = commoninc
1901 common, inc, remoteheads = commoninc
1902 fco = discovery.findcommonoutgoing
1902 fco = discovery.findcommonoutgoing
1903 outgoing = fco(unfi, remote, onlyheads=revs,
1903 outgoing = fco(unfi, remote, onlyheads=revs,
1904 commoninc=commoninc, force=force)
1904 commoninc=commoninc, force=force)
1905
1905
1906
1906
1907 if not outgoing.missing:
1907 if not outgoing.missing:
1908 # nothing to push
1908 # nothing to push
1909 scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
1909 scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
1910 ret = None
1910 ret = None
1911 else:
1911 else:
1912 # something to push
1912 # something to push
1913 if not force:
1913 if not force:
1914 # if self.obsstore == False --> no obsolete
1914 # if self.obsstore == False --> no obsolete
1915 # then, save the iteration
1915 # then, save the iteration
1916 if unfi.obsstore:
1916 if unfi.obsstore:
1917 # this message are here for 80 char limit reason
1917 # this message are here for 80 char limit reason
1918 mso = _("push includes obsolete changeset: %s!")
1918 mso = _("push includes obsolete changeset: %s!")
1919 msu = _("push includes unstable changeset: %s!")
1919 msu = _("push includes unstable changeset: %s!")
1920 msb = _("push includes bumped changeset: %s!")
1920 msb = _("push includes bumped changeset: %s!")
1921 # If we are to push if there is at least one
1921 # If we are to push if there is at least one
1922 # obsolete or unstable changeset in missing, at
1922 # obsolete or unstable changeset in missing, at
1923 # least one of the missinghead will be obsolete or
1923 # least one of the missinghead will be obsolete or
1924 # unstable. So checking heads only is ok
1924 # unstable. So checking heads only is ok
1925 for node in outgoing.missingheads:
1925 for node in outgoing.missingheads:
1926 ctx = unfi[node]
1926 ctx = unfi[node]
1927 if ctx.obsolete():
1927 if ctx.obsolete():
1928 raise util.Abort(mso % ctx)
1928 raise util.Abort(mso % ctx)
1929 elif ctx.unstable():
1929 elif ctx.unstable():
1930 raise util.Abort(msu % ctx)
1930 raise util.Abort(msu % ctx)
1931 elif ctx.bumped():
1931 elif ctx.bumped():
1932 raise util.Abort(msb % ctx)
1932 raise util.Abort(msb % ctx)
1933 discovery.checkheads(unfi, remote, outgoing,
1933 discovery.checkheads(unfi, remote, outgoing,
1934 remoteheads, newbranch,
1934 remoteheads, newbranch,
1935 bool(inc))
1935 bool(inc))
1936
1936
1937 # create a changegroup from local
1937 # create a changegroup from local
1938 if revs is None and not outgoing.excluded:
1938 if revs is None and not outgoing.excluded:
1939 # push everything,
1939 # push everything,
1940 # use the fast path, no race possible on push
1940 # use the fast path, no race possible on push
1941 cg = self._changegroup(outgoing.missing, 'push')
1941 cg = self._changegroup(outgoing.missing, 'push')
1942 else:
1942 else:
1943 cg = self.getlocalbundle('push', outgoing)
1943 cg = self.getlocalbundle('push', outgoing)
1944
1944
1945 # apply changegroup to remote
1945 # apply changegroup to remote
1946 if unbundle:
1946 if unbundle:
1947 # local repo finds heads on server, finds out what
1947 # local repo finds heads on server, finds out what
1948 # revs it must push. once revs transferred, if server
1948 # revs it must push. once revs transferred, if server
1949 # finds it has different heads (someone else won
1949 # finds it has different heads (someone else won
1950 # commit/push race), server aborts.
1950 # commit/push race), server aborts.
1951 if force:
1951 if force:
1952 remoteheads = ['force']
1952 remoteheads = ['force']
1953 # ssh: return remote's addchangegroup()
1953 # ssh: return remote's addchangegroup()
1954 # http: return remote's addchangegroup() or 0 for error
1954 # http: return remote's addchangegroup() or 0 for error
1955 ret = remote.unbundle(cg, remoteheads, 'push')
1955 ret = remote.unbundle(cg, remoteheads, 'push')
1956 else:
1956 else:
1957 # we return an integer indicating remote head count
1957 # we return an integer indicating remote head count
1958 # change
1958 # change
1959 ret = remote.addchangegroup(cg, 'push', self.url())
1959 ret = remote.addchangegroup(cg, 'push', self.url())
1960
1960
1961 if ret:
1961 if ret:
1962 # push succeed, synchronize target of the push
1962 # push succeed, synchronize target of the push
1963 cheads = outgoing.missingheads
1963 cheads = outgoing.missingheads
1964 elif revs is None:
1964 elif revs is None:
1965 # All out push fails. synchronize all common
1965 # All out push fails. synchronize all common
1966 cheads = outgoing.commonheads
1966 cheads = outgoing.commonheads
1967 else:
1967 else:
1968 # I want cheads = heads(::missingheads and ::commonheads)
1968 # I want cheads = heads(::missingheads and ::commonheads)
1969 # (missingheads is revs with secret changeset filtered out)
1969 # (missingheads is revs with secret changeset filtered out)
1970 #
1970 #
1971 # This can be expressed as:
1971 # This can be expressed as:
1972 # cheads = ( (missingheads and ::commonheads)
1972 # cheads = ( (missingheads and ::commonheads)
1973 # + (commonheads and ::missingheads))"
1973 # + (commonheads and ::missingheads))"
1974 # )
1974 # )
1975 #
1975 #
1976 # while trying to push we already computed the following:
1976 # while trying to push we already computed the following:
1977 # common = (::commonheads)
1977 # common = (::commonheads)
1978 # missing = ((commonheads::missingheads) - commonheads)
1978 # missing = ((commonheads::missingheads) - commonheads)
1979 #
1979 #
1980 # We can pick:
1980 # We can pick:
1981 # * missingheads part of common (::commonheads)
1981 # * missingheads part of common (::commonheads)
1982 common = set(outgoing.common)
1982 common = set(outgoing.common)
1983 cheads = [node for node in revs if node in common]
1983 cheads = [node for node in revs if node in common]
1984 # and
1984 # and
1985 # * commonheads parents on missing
1985 # * commonheads parents on missing
1986 revset = unfi.set('%ln and parents(roots(%ln))',
1986 revset = unfi.set('%ln and parents(roots(%ln))',
1987 outgoing.commonheads,
1987 outgoing.commonheads,
1988 outgoing.missing)
1988 outgoing.missing)
1989 cheads.extend(c.node() for c in revset)
1989 cheads.extend(c.node() for c in revset)
1990 # even when we don't push, exchanging phase data is useful
1990 # even when we don't push, exchanging phase data is useful
1991 remotephases = remote.listkeys('phases')
1991 remotephases = remote.listkeys('phases')
1992 if not remotephases: # old server or public only repo
1992 if not remotephases: # old server or public only repo
1993 phases.advanceboundary(self, phases.public, cheads)
1993 phases.advanceboundary(self, phases.public, cheads)
1994 # don't push any phase data as there is nothing to push
1994 # don't push any phase data as there is nothing to push
1995 else:
1995 else:
1996 ana = phases.analyzeremotephases(self, cheads, remotephases)
1996 ana = phases.analyzeremotephases(self, cheads, remotephases)
1997 pheads, droots = ana
1997 pheads, droots = ana
1998 ### Apply remote phase on local
1998 ### Apply remote phase on local
1999 if remotephases.get('publishing', False):
1999 if remotephases.get('publishing', False):
2000 phases.advanceboundary(self, phases.public, cheads)
2000 phases.advanceboundary(self, phases.public, cheads)
2001 else: # publish = False
2001 else: # publish = False
2002 phases.advanceboundary(self, phases.public, pheads)
2002 phases.advanceboundary(self, phases.public, pheads)
2003 phases.advanceboundary(self, phases.draft, cheads)
2003 phases.advanceboundary(self, phases.draft, cheads)
2004 ### Apply local phase on remote
2004 ### Apply local phase on remote
2005
2005
2006 # Get the list of all revs draft on remote by public here.
2006 # Get the list of all revs draft on remote by public here.
2007 # XXX Beware that revset break if droots is not strictly
2007 # XXX Beware that revset break if droots is not strictly
2008 # XXX root we may want to ensure it is but it is costly
2008 # XXX root we may want to ensure it is but it is costly
2009 outdated = unfi.set('heads((%ln::%ln) and public())',
2009 outdated = unfi.set('heads((%ln::%ln) and public())',
2010 droots, cheads)
2010 droots, cheads)
2011 for newremotehead in outdated:
2011 for newremotehead in outdated:
2012 r = remote.pushkey('phases',
2012 r = remote.pushkey('phases',
2013 newremotehead.hex(),
2013 newremotehead.hex(),
2014 str(phases.draft),
2014 str(phases.draft),
2015 str(phases.public))
2015 str(phases.public))
2016 if not r:
2016 if not r:
2017 self.ui.warn(_('updating %s to public failed!\n')
2017 self.ui.warn(_('updating %s to public failed!\n')
2018 % newremotehead)
2018 % newremotehead)
2019 self.ui.debug('try to push obsolete markers to remote\n')
2019 self.ui.debug('try to push obsolete markers to remote\n')
2020 if (obsolete._enabled and self.obsstore and
2020 if (obsolete._enabled and self.obsstore and
2021 'obsolete' in remote.listkeys('namespaces')):
2021 'obsolete' in remote.listkeys('namespaces')):
2022 rslts = []
2022 rslts = []
2023 remotedata = self.listkeys('obsolete')
2023 remotedata = self.listkeys('obsolete')
2024 for key in sorted(remotedata, reverse=True):
2024 for key in sorted(remotedata, reverse=True):
2025 # reverse sort to ensure we end with dump0
2025 # reverse sort to ensure we end with dump0
2026 data = remotedata[key]
2026 data = remotedata[key]
2027 rslts.append(remote.pushkey('obsolete', key, '', data))
2027 rslts.append(remote.pushkey('obsolete', key, '', data))
2028 if [r for r in rslts if not r]:
2028 if [r for r in rslts if not r]:
2029 msg = _('failed to push some obsolete markers!\n')
2029 msg = _('failed to push some obsolete markers!\n')
2030 self.ui.warn(msg)
2030 self.ui.warn(msg)
2031 finally:
2031 finally:
2032 if lock is not None:
2032 if lock is not None:
2033 lock.release()
2033 lock.release()
2034 finally:
2034 finally:
2035 locallock.release()
2035 locallock.release()
2036
2036
2037 self.ui.debug("checking for updated bookmarks\n")
2037 self.ui.debug("checking for updated bookmarks\n")
2038 rb = remote.listkeys('bookmarks')
2038 rb = remote.listkeys('bookmarks')
2039 for k in rb.keys():
2039 for k in rb.keys():
2040 if k in unfi._bookmarks:
2040 if k in unfi._bookmarks:
2041 nr, nl = rb[k], hex(self._bookmarks[k])
2041 nr, nl = rb[k], hex(self._bookmarks[k])
2042 if nr in unfi:
2042 if nr in unfi:
2043 cr = unfi[nr]
2043 cr = unfi[nr]
2044 cl = unfi[nl]
2044 cl = unfi[nl]
2045 if bookmarks.validdest(unfi, cr, cl):
2045 if bookmarks.validdest(unfi, cr, cl):
2046 r = remote.pushkey('bookmarks', k, nr, nl)
2046 r = remote.pushkey('bookmarks', k, nr, nl)
2047 if r:
2047 if r:
2048 self.ui.status(_("updating bookmark %s\n") % k)
2048 self.ui.status(_("updating bookmark %s\n") % k)
2049 else:
2049 else:
2050 self.ui.warn(_('updating bookmark %s'
2050 self.ui.warn(_('updating bookmark %s'
2051 ' failed!\n') % k)
2051 ' failed!\n') % k)
2052
2052
2053 return ret
2053 return ret
2054
2054
2055 def changegroupinfo(self, nodes, source):
2055 def changegroupinfo(self, nodes, source):
2056 if self.ui.verbose or source == 'bundle':
2056 if self.ui.verbose or source == 'bundle':
2057 self.ui.status(_("%d changesets found\n") % len(nodes))
2057 self.ui.status(_("%d changesets found\n") % len(nodes))
2058 if self.ui.debugflag:
2058 if self.ui.debugflag:
2059 self.ui.debug("list of changesets:\n")
2059 self.ui.debug("list of changesets:\n")
2060 for node in nodes:
2060 for node in nodes:
2061 self.ui.debug("%s\n" % hex(node))
2061 self.ui.debug("%s\n" % hex(node))
2062
2062
2063 def changegroupsubset(self, bases, heads, source):
2063 def changegroupsubset(self, bases, heads, source):
2064 """Compute a changegroup consisting of all the nodes that are
2064 """Compute a changegroup consisting of all the nodes that are
2065 descendants of any of the bases and ancestors of any of the heads.
2065 descendants of any of the bases and ancestors of any of the heads.
2066 Return a chunkbuffer object whose read() method will return
2066 Return a chunkbuffer object whose read() method will return
2067 successive changegroup chunks.
2067 successive changegroup chunks.
2068
2068
2069 It is fairly complex as determining which filenodes and which
2069 It is fairly complex as determining which filenodes and which
2070 manifest nodes need to be included for the changeset to be complete
2070 manifest nodes need to be included for the changeset to be complete
2071 is non-trivial.
2071 is non-trivial.
2072
2072
2073 Another wrinkle is doing the reverse, figuring out which changeset in
2073 Another wrinkle is doing the reverse, figuring out which changeset in
2074 the changegroup a particular filenode or manifestnode belongs to.
2074 the changegroup a particular filenode or manifestnode belongs to.
2075 """
2075 """
2076 cl = self.changelog
2076 cl = self.changelog
2077 if not bases:
2077 if not bases:
2078 bases = [nullid]
2078 bases = [nullid]
2079 csets, bases, heads = cl.nodesbetween(bases, heads)
2079 csets, bases, heads = cl.nodesbetween(bases, heads)
2080 # We assume that all ancestors of bases are known
2080 # We assume that all ancestors of bases are known
2081 common = set(cl.ancestors([cl.rev(n) for n in bases]))
2081 common = set(cl.ancestors([cl.rev(n) for n in bases]))
2082 return self._changegroupsubset(common, csets, heads, source)
2082 return self._changegroupsubset(common, csets, heads, source)
2083
2083
2084 def getlocalbundle(self, source, outgoing):
2084 def getlocalbundle(self, source, outgoing):
2085 """Like getbundle, but taking a discovery.outgoing as an argument.
2085 """Like getbundle, but taking a discovery.outgoing as an argument.
2086
2086
2087 This is only implemented for local repos and reuses potentially
2087 This is only implemented for local repos and reuses potentially
2088 precomputed sets in outgoing."""
2088 precomputed sets in outgoing."""
2089 if not outgoing.missing:
2089 if not outgoing.missing:
2090 return None
2090 return None
2091 return self._changegroupsubset(outgoing.common,
2091 return self._changegroupsubset(outgoing.common,
2092 outgoing.missing,
2092 outgoing.missing,
2093 outgoing.missingheads,
2093 outgoing.missingheads,
2094 source)
2094 source)
2095
2095
2096 def getbundle(self, source, heads=None, common=None):
2096 def getbundle(self, source, heads=None, common=None):
2097 """Like changegroupsubset, but returns the set difference between the
2097 """Like changegroupsubset, but returns the set difference between the
2098 ancestors of heads and the ancestors common.
2098 ancestors of heads and the ancestors common.
2099
2099
2100 If heads is None, use the local heads. If common is None, use [nullid].
2100 If heads is None, use the local heads. If common is None, use [nullid].
2101
2101
2102 The nodes in common might not all be known locally due to the way the
2102 The nodes in common might not all be known locally due to the way the
2103 current discovery protocol works.
2103 current discovery protocol works.
2104 """
2104 """
2105 cl = self.changelog
2105 cl = self.changelog
2106 if common:
2106 if common:
2107 nm = cl.nodemap
2107 nm = cl.nodemap
2108 common = [n for n in common if n in nm]
2108 common = [n for n in common if n in nm]
2109 else:
2109 else:
2110 common = [nullid]
2110 common = [nullid]
2111 if not heads:
2111 if not heads:
2112 heads = cl.heads()
2112 heads = cl.heads()
2113 return self.getlocalbundle(source,
2113 return self.getlocalbundle(source,
2114 discovery.outgoing(cl, common, heads))
2114 discovery.outgoing(cl, common, heads))
2115
2115
2116 @unfilteredmethod
2116 @unfilteredmethod
2117 def _changegroupsubset(self, commonrevs, csets, heads, source):
2117 def _changegroupsubset(self, commonrevs, csets, heads, source):
2118
2118
2119 cl = self.changelog
2119 cl = self.changelog
2120 mf = self.manifest
2120 mf = self.manifest
2121 mfs = {} # needed manifests
2121 mfs = {} # needed manifests
2122 fnodes = {} # needed file nodes
2122 fnodes = {} # needed file nodes
2123 changedfiles = set()
2123 changedfiles = set()
2124 fstate = ['', {}]
2124 fstate = ['', {}]
2125 count = [0, 0]
2125 count = [0, 0]
2126
2126
2127 # can we go through the fast path ?
2127 # can we go through the fast path ?
2128 heads.sort()
2128 heads.sort()
2129 if heads == sorted(self.heads()):
2129 if heads == sorted(self.heads()):
2130 return self._changegroup(csets, source)
2130 return self._changegroup(csets, source)
2131
2131
2132 # slow path
2132 # slow path
2133 self.hook('preoutgoing', throw=True, source=source)
2133 self.hook('preoutgoing', throw=True, source=source)
2134 self.changegroupinfo(csets, source)
2134 self.changegroupinfo(csets, source)
2135
2135
2136 # filter any nodes that claim to be part of the known set
2136 # filter any nodes that claim to be part of the known set
2137 def prune(revlog, missing):
2137 def prune(revlog, missing):
2138 rr, rl = revlog.rev, revlog.linkrev
2138 rr, rl = revlog.rev, revlog.linkrev
2139 return [n for n in missing
2139 return [n for n in missing
2140 if rl(rr(n)) not in commonrevs]
2140 if rl(rr(n)) not in commonrevs]
2141
2141
2142 progress = self.ui.progress
2142 progress = self.ui.progress
2143 _bundling = _('bundling')
2143 _bundling = _('bundling')
2144 _changesets = _('changesets')
2144 _changesets = _('changesets')
2145 _manifests = _('manifests')
2145 _manifests = _('manifests')
2146 _files = _('files')
2146 _files = _('files')
2147
2147
2148 def lookup(revlog, x):
2148 def lookup(revlog, x):
2149 if revlog == cl:
2149 if revlog == cl:
2150 c = cl.read(x)
2150 c = cl.read(x)
2151 changedfiles.update(c[3])
2151 changedfiles.update(c[3])
2152 mfs.setdefault(c[0], x)
2152 mfs.setdefault(c[0], x)
2153 count[0] += 1
2153 count[0] += 1
2154 progress(_bundling, count[0],
2154 progress(_bundling, count[0],
2155 unit=_changesets, total=count[1])
2155 unit=_changesets, total=count[1])
2156 return x
2156 return x
2157 elif revlog == mf:
2157 elif revlog == mf:
2158 clnode = mfs[x]
2158 clnode = mfs[x]
2159 mdata = mf.readfast(x)
2159 mdata = mf.readfast(x)
2160 for f, n in mdata.iteritems():
2160 for f, n in mdata.iteritems():
2161 if f in changedfiles:
2161 if f in changedfiles:
2162 fnodes[f].setdefault(n, clnode)
2162 fnodes[f].setdefault(n, clnode)
2163 count[0] += 1
2163 count[0] += 1
2164 progress(_bundling, count[0],
2164 progress(_bundling, count[0],
2165 unit=_manifests, total=count[1])
2165 unit=_manifests, total=count[1])
2166 return clnode
2166 return clnode
2167 else:
2167 else:
2168 progress(_bundling, count[0], item=fstate[0],
2168 progress(_bundling, count[0], item=fstate[0],
2169 unit=_files, total=count[1])
2169 unit=_files, total=count[1])
2170 return fstate[1][x]
2170 return fstate[1][x]
2171
2171
2172 bundler = changegroup.bundle10(lookup)
2172 bundler = changegroup.bundle10(lookup)
2173 reorder = self.ui.config('bundle', 'reorder', 'auto')
2173 reorder = self.ui.config('bundle', 'reorder', 'auto')
2174 if reorder == 'auto':
2174 if reorder == 'auto':
2175 reorder = None
2175 reorder = None
2176 else:
2176 else:
2177 reorder = util.parsebool(reorder)
2177 reorder = util.parsebool(reorder)
2178
2178
2179 def gengroup():
2179 def gengroup():
2180 # Create a changenode group generator that will call our functions
2180 # Create a changenode group generator that will call our functions
2181 # back to lookup the owning changenode and collect information.
2181 # back to lookup the owning changenode and collect information.
2182 count[:] = [0, len(csets)]
2182 count[:] = [0, len(csets)]
2183 for chunk in cl.group(csets, bundler, reorder=reorder):
2183 for chunk in cl.group(csets, bundler, reorder=reorder):
2184 yield chunk
2184 yield chunk
2185 progress(_bundling, None)
2185 progress(_bundling, None)
2186
2186
2187 # Create a generator for the manifestnodes that calls our lookup
2187 # Create a generator for the manifestnodes that calls our lookup
2188 # and data collection functions back.
2188 # and data collection functions back.
2189 for f in changedfiles:
2189 for f in changedfiles:
2190 fnodes[f] = {}
2190 fnodes[f] = {}
2191 count[:] = [0, len(mfs)]
2191 count[:] = [0, len(mfs)]
2192 for chunk in mf.group(prune(mf, mfs), bundler, reorder=reorder):
2192 for chunk in mf.group(prune(mf, mfs), bundler, reorder=reorder):
2193 yield chunk
2193 yield chunk
2194 progress(_bundling, None)
2194 progress(_bundling, None)
2195
2195
2196 mfs.clear()
2196 mfs.clear()
2197
2197
2198 # Go through all our files in order sorted by name.
2198 # Go through all our files in order sorted by name.
2199 count[:] = [0, len(changedfiles)]
2199 count[:] = [0, len(changedfiles)]
2200 for fname in sorted(changedfiles):
2200 for fname in sorted(changedfiles):
2201 filerevlog = self.file(fname)
2201 filerevlog = self.file(fname)
2202 if not len(filerevlog):
2202 if not len(filerevlog):
2203 raise util.Abort(_("empty or missing revlog for %s")
2203 raise util.Abort(_("empty or missing revlog for %s")
2204 % fname)
2204 % fname)
2205 fstate[0] = fname
2205 fstate[0] = fname
2206 fstate[1] = fnodes.pop(fname, {})
2206 fstate[1] = fnodes.pop(fname, {})
2207
2207
2208 nodelist = prune(filerevlog, fstate[1])
2208 nodelist = prune(filerevlog, fstate[1])
2209 if nodelist:
2209 if nodelist:
2210 count[0] += 1
2210 count[0] += 1
2211 yield bundler.fileheader(fname)
2211 yield bundler.fileheader(fname)
2212 for chunk in filerevlog.group(nodelist, bundler, reorder):
2212 for chunk in filerevlog.group(nodelist, bundler, reorder):
2213 yield chunk
2213 yield chunk
2214
2214
2215 # Signal that no more groups are left.
2215 # Signal that no more groups are left.
2216 yield bundler.close()
2216 yield bundler.close()
2217 progress(_bundling, None)
2217 progress(_bundling, None)
2218
2218
2219 if csets:
2219 if csets:
2220 self.hook('outgoing', node=hex(csets[0]), source=source)
2220 self.hook('outgoing', node=hex(csets[0]), source=source)
2221
2221
2222 return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN')
2222 return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN')
2223
2223
2224 def changegroup(self, basenodes, source):
2224 def changegroup(self, basenodes, source):
2225 # to avoid a race we use changegroupsubset() (issue1320)
2225 # to avoid a race we use changegroupsubset() (issue1320)
2226 return self.changegroupsubset(basenodes, self.heads(), source)
2226 return self.changegroupsubset(basenodes, self.heads(), source)
2227
2227
2228 @unfilteredmethod
2228 @unfilteredmethod
2229 def _changegroup(self, nodes, source):
2229 def _changegroup(self, nodes, source):
2230 """Compute the changegroup of all nodes that we have that a recipient
2230 """Compute the changegroup of all nodes that we have that a recipient
2231 doesn't. Return a chunkbuffer object whose read() method will return
2231 doesn't. Return a chunkbuffer object whose read() method will return
2232 successive changegroup chunks.
2232 successive changegroup chunks.
2233
2233
2234 This is much easier than the previous function as we can assume that
2234 This is much easier than the previous function as we can assume that
2235 the recipient has any changenode we aren't sending them.
2235 the recipient has any changenode we aren't sending them.
2236
2236
2237 nodes is the set of nodes to send"""
2237 nodes is the set of nodes to send"""
2238
2238
2239 cl = self.changelog
2239 cl = self.changelog
2240 mf = self.manifest
2240 mf = self.manifest
2241 mfs = {}
2241 mfs = {}
2242 changedfiles = set()
2242 changedfiles = set()
2243 fstate = ['']
2243 fstate = ['']
2244 count = [0, 0]
2244 count = [0, 0]
2245
2245
2246 self.hook('preoutgoing', throw=True, source=source)
2246 self.hook('preoutgoing', throw=True, source=source)
2247 self.changegroupinfo(nodes, source)
2247 self.changegroupinfo(nodes, source)
2248
2248
2249 revset = set([cl.rev(n) for n in nodes])
2249 revset = set([cl.rev(n) for n in nodes])
2250
2250
2251 def gennodelst(log):
2251 def gennodelst(log):
2252 ln, llr = log.node, log.linkrev
2252 ln, llr = log.node, log.linkrev
2253 return [ln(r) for r in log if llr(r) in revset]
2253 return [ln(r) for r in log if llr(r) in revset]
2254
2254
2255 progress = self.ui.progress
2255 progress = self.ui.progress
2256 _bundling = _('bundling')
2256 _bundling = _('bundling')
2257 _changesets = _('changesets')
2257 _changesets = _('changesets')
2258 _manifests = _('manifests')
2258 _manifests = _('manifests')
2259 _files = _('files')
2259 _files = _('files')
2260
2260
2261 def lookup(revlog, x):
2261 def lookup(revlog, x):
2262 if revlog == cl:
2262 if revlog == cl:
2263 c = cl.read(x)
2263 c = cl.read(x)
2264 changedfiles.update(c[3])
2264 changedfiles.update(c[3])
2265 mfs.setdefault(c[0], x)
2265 mfs.setdefault(c[0], x)
2266 count[0] += 1
2266 count[0] += 1
2267 progress(_bundling, count[0],
2267 progress(_bundling, count[0],
2268 unit=_changesets, total=count[1])
2268 unit=_changesets, total=count[1])
2269 return x
2269 return x
2270 elif revlog == mf:
2270 elif revlog == mf:
2271 count[0] += 1
2271 count[0] += 1
2272 progress(_bundling, count[0],
2272 progress(_bundling, count[0],
2273 unit=_manifests, total=count[1])
2273 unit=_manifests, total=count[1])
2274 return cl.node(revlog.linkrev(revlog.rev(x)))
2274 return cl.node(revlog.linkrev(revlog.rev(x)))
2275 else:
2275 else:
2276 progress(_bundling, count[0], item=fstate[0],
2276 progress(_bundling, count[0], item=fstate[0],
2277 total=count[1], unit=_files)
2277 total=count[1], unit=_files)
2278 return cl.node(revlog.linkrev(revlog.rev(x)))
2278 return cl.node(revlog.linkrev(revlog.rev(x)))
2279
2279
2280 bundler = changegroup.bundle10(lookup)
2280 bundler = changegroup.bundle10(lookup)
2281 reorder = self.ui.config('bundle', 'reorder', 'auto')
2281 reorder = self.ui.config('bundle', 'reorder', 'auto')
2282 if reorder == 'auto':
2282 if reorder == 'auto':
2283 reorder = None
2283 reorder = None
2284 else:
2284 else:
2285 reorder = util.parsebool(reorder)
2285 reorder = util.parsebool(reorder)
2286
2286
2287 def gengroup():
2287 def gengroup():
2288 '''yield a sequence of changegroup chunks (strings)'''
2288 '''yield a sequence of changegroup chunks (strings)'''
2289 # construct a list of all changed files
2289 # construct a list of all changed files
2290
2290
2291 count[:] = [0, len(nodes)]
2291 count[:] = [0, len(nodes)]
2292 for chunk in cl.group(nodes, bundler, reorder=reorder):
2292 for chunk in cl.group(nodes, bundler, reorder=reorder):
2293 yield chunk
2293 yield chunk
2294 progress(_bundling, None)
2294 progress(_bundling, None)
2295
2295
2296 count[:] = [0, len(mfs)]
2296 count[:] = [0, len(mfs)]
2297 for chunk in mf.group(gennodelst(mf), bundler, reorder=reorder):
2297 for chunk in mf.group(gennodelst(mf), bundler, reorder=reorder):
2298 yield chunk
2298 yield chunk
2299 progress(_bundling, None)
2299 progress(_bundling, None)
2300
2300
2301 count[:] = [0, len(changedfiles)]
2301 count[:] = [0, len(changedfiles)]
2302 for fname in sorted(changedfiles):
2302 for fname in sorted(changedfiles):
2303 filerevlog = self.file(fname)
2303 filerevlog = self.file(fname)
2304 if not len(filerevlog):
2304 if not len(filerevlog):
2305 raise util.Abort(_("empty or missing revlog for %s")
2305 raise util.Abort(_("empty or missing revlog for %s")
2306 % fname)
2306 % fname)
2307 fstate[0] = fname
2307 fstate[0] = fname
2308 nodelist = gennodelst(filerevlog)
2308 nodelist = gennodelst(filerevlog)
2309 if nodelist:
2309 if nodelist:
2310 count[0] += 1
2310 count[0] += 1
2311 yield bundler.fileheader(fname)
2311 yield bundler.fileheader(fname)
2312 for chunk in filerevlog.group(nodelist, bundler, reorder):
2312 for chunk in filerevlog.group(nodelist, bundler, reorder):
2313 yield chunk
2313 yield chunk
2314 yield bundler.close()
2314 yield bundler.close()
2315 progress(_bundling, None)
2315 progress(_bundling, None)
2316
2316
2317 if nodes:
2317 if nodes:
2318 self.hook('outgoing', node=hex(nodes[0]), source=source)
2318 self.hook('outgoing', node=hex(nodes[0]), source=source)
2319
2319
2320 return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN')
2320 return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN')
2321
2321
2322 @unfilteredmethod
2322 @unfilteredmethod
2323 def addchangegroup(self, source, srctype, url, emptyok=False):
2323 def addchangegroup(self, source, srctype, url, emptyok=False):
2324 """Add the changegroup returned by source.read() to this repo.
2324 """Add the changegroup returned by source.read() to this repo.
2325 srctype is a string like 'push', 'pull', or 'unbundle'. url is
2325 srctype is a string like 'push', 'pull', or 'unbundle'. url is
2326 the URL of the repo where this changegroup is coming from.
2326 the URL of the repo where this changegroup is coming from.
2327
2327
2328 Return an integer summarizing the change to this repo:
2328 Return an integer summarizing the change to this repo:
2329 - nothing changed or no source: 0
2329 - nothing changed or no source: 0
2330 - more heads than before: 1+added heads (2..n)
2330 - more heads than before: 1+added heads (2..n)
2331 - fewer heads than before: -1-removed heads (-2..-n)
2331 - fewer heads than before: -1-removed heads (-2..-n)
2332 - number of heads stays the same: 1
2332 - number of heads stays the same: 1
2333 """
2333 """
2334 def csmap(x):
2334 def csmap(x):
2335 self.ui.debug("add changeset %s\n" % short(x))
2335 self.ui.debug("add changeset %s\n" % short(x))
2336 return len(cl)
2336 return len(cl)
2337
2337
2338 def revmap(x):
2338 def revmap(x):
2339 return cl.rev(x)
2339 return cl.rev(x)
2340
2340
2341 if not source:
2341 if not source:
2342 return 0
2342 return 0
2343
2343
2344 self.hook('prechangegroup', throw=True, source=srctype, url=url)
2344 self.hook('prechangegroup', throw=True, source=srctype, url=url)
2345
2345
2346 changesets = files = revisions = 0
2346 changesets = files = revisions = 0
2347 efiles = set()
2347 efiles = set()
2348
2348
2349 # write changelog data to temp files so concurrent readers will not see
2349 # write changelog data to temp files so concurrent readers will not see
2350 # inconsistent view
2350 # inconsistent view
2351 cl = self.changelog
2351 cl = self.changelog
2352 cl.delayupdate()
2352 cl.delayupdate()
2353 oldheads = cl.heads()
2353 oldheads = cl.heads()
2354
2354
2355 tr = self.transaction("\n".join([srctype, util.hidepassword(url)]))
2355 tr = self.transaction("\n".join([srctype, util.hidepassword(url)]))
2356 try:
2356 try:
2357 trp = weakref.proxy(tr)
2357 trp = weakref.proxy(tr)
2358 # pull off the changeset group
2358 # pull off the changeset group
2359 self.ui.status(_("adding changesets\n"))
2359 self.ui.status(_("adding changesets\n"))
2360 clstart = len(cl)
2360 clstart = len(cl)
2361 class prog(object):
2361 class prog(object):
2362 step = _('changesets')
2362 step = _('changesets')
2363 count = 1
2363 count = 1
2364 ui = self.ui
2364 ui = self.ui
2365 total = None
2365 total = None
2366 def __call__(self):
2366 def __call__(self):
2367 self.ui.progress(self.step, self.count, unit=_('chunks'),
2367 self.ui.progress(self.step, self.count, unit=_('chunks'),
2368 total=self.total)
2368 total=self.total)
2369 self.count += 1
2369 self.count += 1
2370 pr = prog()
2370 pr = prog()
2371 source.callback = pr
2371 source.callback = pr
2372
2372
2373 source.changelogheader()
2373 source.changelogheader()
2374 srccontent = cl.addgroup(source, csmap, trp)
2374 srccontent = cl.addgroup(source, csmap, trp)
2375 if not (srccontent or emptyok):
2375 if not (srccontent or emptyok):
2376 raise util.Abort(_("received changelog group is empty"))
2376 raise util.Abort(_("received changelog group is empty"))
2377 clend = len(cl)
2377 clend = len(cl)
2378 changesets = clend - clstart
2378 changesets = clend - clstart
2379 for c in xrange(clstart, clend):
2379 for c in xrange(clstart, clend):
2380 efiles.update(self[c].files())
2380 efiles.update(self[c].files())
2381 efiles = len(efiles)
2381 efiles = len(efiles)
2382 self.ui.progress(_('changesets'), None)
2382 self.ui.progress(_('changesets'), None)
2383
2383
2384 # pull off the manifest group
2384 # pull off the manifest group
2385 self.ui.status(_("adding manifests\n"))
2385 self.ui.status(_("adding manifests\n"))
2386 pr.step = _('manifests')
2386 pr.step = _('manifests')
2387 pr.count = 1
2387 pr.count = 1
2388 pr.total = changesets # manifests <= changesets
2388 pr.total = changesets # manifests <= changesets
2389 # no need to check for empty manifest group here:
2389 # no need to check for empty manifest group here:
2390 # if the result of the merge of 1 and 2 is the same in 3 and 4,
2390 # if the result of the merge of 1 and 2 is the same in 3 and 4,
2391 # no new manifest will be created and the manifest group will
2391 # no new manifest will be created and the manifest group will
2392 # be empty during the pull
2392 # be empty during the pull
2393 source.manifestheader()
2393 source.manifestheader()
2394 self.manifest.addgroup(source, revmap, trp)
2394 self.manifest.addgroup(source, revmap, trp)
2395 self.ui.progress(_('manifests'), None)
2395 self.ui.progress(_('manifests'), None)
2396
2396
2397 needfiles = {}
2397 needfiles = {}
2398 if self.ui.configbool('server', 'validate', default=False):
2398 if self.ui.configbool('server', 'validate', default=False):
2399 # validate incoming csets have their manifests
2399 # validate incoming csets have their manifests
2400 for cset in xrange(clstart, clend):
2400 for cset in xrange(clstart, clend):
2401 mfest = self.changelog.read(self.changelog.node(cset))[0]
2401 mfest = self.changelog.read(self.changelog.node(cset))[0]
2402 mfest = self.manifest.readdelta(mfest)
2402 mfest = self.manifest.readdelta(mfest)
2403 # store file nodes we must see
2403 # store file nodes we must see
2404 for f, n in mfest.iteritems():
2404 for f, n in mfest.iteritems():
2405 needfiles.setdefault(f, set()).add(n)
2405 needfiles.setdefault(f, set()).add(n)
2406
2406
2407 # process the files
2407 # process the files
2408 self.ui.status(_("adding file changes\n"))
2408 self.ui.status(_("adding file changes\n"))
2409 pr.step = _('files')
2409 pr.step = _('files')
2410 pr.count = 1
2410 pr.count = 1
2411 pr.total = efiles
2411 pr.total = efiles
2412 source.callback = None
2412 source.callback = None
2413
2413
2414 while True:
2414 while True:
2415 chunkdata = source.filelogheader()
2415 chunkdata = source.filelogheader()
2416 if not chunkdata:
2416 if not chunkdata:
2417 break
2417 break
2418 f = chunkdata["filename"]
2418 f = chunkdata["filename"]
2419 self.ui.debug("adding %s revisions\n" % f)
2419 self.ui.debug("adding %s revisions\n" % f)
2420 pr()
2420 pr()
2421 fl = self.file(f)
2421 fl = self.file(f)
2422 o = len(fl)
2422 o = len(fl)
2423 if not fl.addgroup(source, revmap, trp):
2423 if not fl.addgroup(source, revmap, trp):
2424 raise util.Abort(_("received file revlog group is empty"))
2424 raise util.Abort(_("received file revlog group is empty"))
2425 revisions += len(fl) - o
2425 revisions += len(fl) - o
2426 files += 1
2426 files += 1
2427 if f in needfiles:
2427 if f in needfiles:
2428 needs = needfiles[f]
2428 needs = needfiles[f]
2429 for new in xrange(o, len(fl)):
2429 for new in xrange(o, len(fl)):
2430 n = fl.node(new)
2430 n = fl.node(new)
2431 if n in needs:
2431 if n in needs:
2432 needs.remove(n)
2432 needs.remove(n)
2433 if not needs:
2433 if not needs:
2434 del needfiles[f]
2434 del needfiles[f]
2435 self.ui.progress(_('files'), None)
2435 self.ui.progress(_('files'), None)
2436
2436
2437 for f, needs in needfiles.iteritems():
2437 for f, needs in needfiles.iteritems():
2438 fl = self.file(f)
2438 fl = self.file(f)
2439 for n in needs:
2439 for n in needs:
2440 try:
2440 try:
2441 fl.rev(n)
2441 fl.rev(n)
2442 except error.LookupError:
2442 except error.LookupError:
2443 raise util.Abort(
2443 raise util.Abort(
2444 _('missing file data for %s:%s - run hg verify') %
2444 _('missing file data for %s:%s - run hg verify') %
2445 (f, hex(n)))
2445 (f, hex(n)))
2446
2446
2447 dh = 0
2447 dh = 0
2448 if oldheads:
2448 if oldheads:
2449 heads = cl.heads()
2449 heads = cl.heads()
2450 dh = len(heads) - len(oldheads)
2450 dh = len(heads) - len(oldheads)
2451 for h in heads:
2451 for h in heads:
2452 if h not in oldheads and self[h].closesbranch():
2452 if h not in oldheads and self[h].closesbranch():
2453 dh -= 1
2453 dh -= 1
2454 htext = ""
2454 htext = ""
2455 if dh:
2455 if dh:
2456 htext = _(" (%+d heads)") % dh
2456 htext = _(" (%+d heads)") % dh
2457
2457
2458 self.ui.status(_("added %d changesets"
2458 self.ui.status(_("added %d changesets"
2459 " with %d changes to %d files%s\n")
2459 " with %d changes to %d files%s\n")
2460 % (changesets, revisions, files, htext))
2460 % (changesets, revisions, files, htext))
2461 obsolete.clearobscaches(self)
2461 obsolete.clearobscaches(self)
2462
2462
2463 if changesets > 0:
2463 if changesets > 0:
2464 p = lambda: cl.writepending() and self.root or ""
2464 p = lambda: cl.writepending() and self.root or ""
2465 self.hook('pretxnchangegroup', throw=True,
2465 self.hook('pretxnchangegroup', throw=True,
2466 node=hex(cl.node(clstart)), source=srctype,
2466 node=hex(cl.node(clstart)), source=srctype,
2467 url=url, pending=p)
2467 url=url, pending=p)
2468
2468
2469 added = [cl.node(r) for r in xrange(clstart, clend)]
2469 added = [cl.node(r) for r in xrange(clstart, clend)]
2470 publishing = self.ui.configbool('phases', 'publish', True)
2470 publishing = self.ui.configbool('phases', 'publish', True)
2471 if srctype == 'push':
2471 if srctype == 'push':
2472 # Old server can not push the boundary themself.
2472 # Old server can not push the boundary themself.
2473 # New server won't push the boundary if changeset already
2473 # New server won't push the boundary if changeset already
2474 # existed locally as secrete
2474 # existed locally as secrete
2475 #
2475 #
2476 # We should not use added here but the list of all change in
2476 # We should not use added here but the list of all change in
2477 # the bundle
2477 # the bundle
2478 if publishing:
2478 if publishing:
2479 phases.advanceboundary(self, phases.public, srccontent)
2479 phases.advanceboundary(self, phases.public, srccontent)
2480 else:
2480 else:
2481 phases.advanceboundary(self, phases.draft, srccontent)
2481 phases.advanceboundary(self, phases.draft, srccontent)
2482 phases.retractboundary(self, phases.draft, added)
2482 phases.retractboundary(self, phases.draft, added)
2483 elif srctype != 'strip':
2483 elif srctype != 'strip':
2484 # publishing only alter behavior during push
2484 # publishing only alter behavior during push
2485 #
2485 #
2486 # strip should not touch boundary at all
2486 # strip should not touch boundary at all
2487 phases.retractboundary(self, phases.draft, added)
2487 phases.retractboundary(self, phases.draft, added)
2488
2488
2489 # make changelog see real files again
2489 # make changelog see real files again
2490 cl.finalize(trp)
2490 cl.finalize(trp)
2491
2491
2492 tr.close()
2492 tr.close()
2493
2493
2494 if changesets > 0:
2494 if changesets > 0:
2495 self.updatebranchcache()
2495 self.updatebranchcache()
2496 def runhooks():
2496 def runhooks():
2497 # forcefully update the on-disk branch cache
2497 # forcefully update the on-disk branch cache
2498 self.ui.debug("updating the branch cache\n")
2498 self.ui.debug("updating the branch cache\n")
2499 self.hook("changegroup", node=hex(cl.node(clstart)),
2499 self.hook("changegroup", node=hex(cl.node(clstart)),
2500 source=srctype, url=url)
2500 source=srctype, url=url)
2501
2501
2502 for n in added:
2502 for n in added:
2503 self.hook("incoming", node=hex(n), source=srctype,
2503 self.hook("incoming", node=hex(n), source=srctype,
2504 url=url)
2504 url=url)
2505 self._afterlock(runhooks)
2505 self._afterlock(runhooks)
2506
2506
2507 finally:
2507 finally:
2508 tr.release()
2508 tr.release()
2509 # never return 0 here:
2509 # never return 0 here:
2510 if dh < 0:
2510 if dh < 0:
2511 return dh - 1
2511 return dh - 1
2512 else:
2512 else:
2513 return dh + 1
2513 return dh + 1
2514
2514
2515 def stream_in(self, remote, requirements):
2515 def stream_in(self, remote, requirements):
2516 lock = self.lock()
2516 lock = self.lock()
2517 try:
2517 try:
2518 # Save remote branchmap. We will use it later
2518 # Save remote branchmap. We will use it later
2519 # to speed up branchcache creation
2519 # to speed up branchcache creation
2520 rbranchmap = None
2520 rbranchmap = None
2521 if remote.capable("branchmap"):
2521 if remote.capable("branchmap"):
2522 rbranchmap = remote.branchmap()
2522 rbranchmap = remote.branchmap()
2523
2523
2524 fp = remote.stream_out()
2524 fp = remote.stream_out()
2525 l = fp.readline()
2525 l = fp.readline()
2526 try:
2526 try:
2527 resp = int(l)
2527 resp = int(l)
2528 except ValueError:
2528 except ValueError:
2529 raise error.ResponseError(
2529 raise error.ResponseError(
2530 _('unexpected response from remote server:'), l)
2530 _('unexpected response from remote server:'), l)
2531 if resp == 1:
2531 if resp == 1:
2532 raise util.Abort(_('operation forbidden by server'))
2532 raise util.Abort(_('operation forbidden by server'))
2533 elif resp == 2:
2533 elif resp == 2:
2534 raise util.Abort(_('locking the remote repository failed'))
2534 raise util.Abort(_('locking the remote repository failed'))
2535 elif resp != 0:
2535 elif resp != 0:
2536 raise util.Abort(_('the server sent an unknown error code'))
2536 raise util.Abort(_('the server sent an unknown error code'))
2537 self.ui.status(_('streaming all changes\n'))
2537 self.ui.status(_('streaming all changes\n'))
2538 l = fp.readline()
2538 l = fp.readline()
2539 try:
2539 try:
2540 total_files, total_bytes = map(int, l.split(' ', 1))
2540 total_files, total_bytes = map(int, l.split(' ', 1))
2541 except (ValueError, TypeError):
2541 except (ValueError, TypeError):
2542 raise error.ResponseError(
2542 raise error.ResponseError(
2543 _('unexpected response from remote server:'), l)
2543 _('unexpected response from remote server:'), l)
2544 self.ui.status(_('%d files to transfer, %s of data\n') %
2544 self.ui.status(_('%d files to transfer, %s of data\n') %
2545 (total_files, util.bytecount(total_bytes)))
2545 (total_files, util.bytecount(total_bytes)))
2546 handled_bytes = 0
2546 handled_bytes = 0
2547 self.ui.progress(_('clone'), 0, total=total_bytes)
2547 self.ui.progress(_('clone'), 0, total=total_bytes)
2548 start = time.time()
2548 start = time.time()
2549 for i in xrange(total_files):
2549 for i in xrange(total_files):
2550 # XXX doesn't support '\n' or '\r' in filenames
2550 # XXX doesn't support '\n' or '\r' in filenames
2551 l = fp.readline()
2551 l = fp.readline()
2552 try:
2552 try:
2553 name, size = l.split('\0', 1)
2553 name, size = l.split('\0', 1)
2554 size = int(size)
2554 size = int(size)
2555 except (ValueError, TypeError):
2555 except (ValueError, TypeError):
2556 raise error.ResponseError(
2556 raise error.ResponseError(
2557 _('unexpected response from remote server:'), l)
2557 _('unexpected response from remote server:'), l)
2558 if self.ui.debugflag:
2558 if self.ui.debugflag:
2559 self.ui.debug('adding %s (%s)\n' %
2559 self.ui.debug('adding %s (%s)\n' %
2560 (name, util.bytecount(size)))
2560 (name, util.bytecount(size)))
2561 # for backwards compat, name was partially encoded
2561 # for backwards compat, name was partially encoded
2562 ofp = self.sopener(store.decodedir(name), 'w')
2562 ofp = self.sopener(store.decodedir(name), 'w')
2563 for chunk in util.filechunkiter(fp, limit=size):
2563 for chunk in util.filechunkiter(fp, limit=size):
2564 handled_bytes += len(chunk)
2564 handled_bytes += len(chunk)
2565 self.ui.progress(_('clone'), handled_bytes,
2565 self.ui.progress(_('clone'), handled_bytes,
2566 total=total_bytes)
2566 total=total_bytes)
2567 ofp.write(chunk)
2567 ofp.write(chunk)
2568 ofp.close()
2568 ofp.close()
2569 elapsed = time.time() - start
2569 elapsed = time.time() - start
2570 if elapsed <= 0:
2570 if elapsed <= 0:
2571 elapsed = 0.001
2571 elapsed = 0.001
2572 self.ui.progress(_('clone'), None)
2572 self.ui.progress(_('clone'), None)
2573 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2573 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2574 (util.bytecount(total_bytes), elapsed,
2574 (util.bytecount(total_bytes), elapsed,
2575 util.bytecount(total_bytes / elapsed)))
2575 util.bytecount(total_bytes / elapsed)))
2576
2576
2577 # new requirements = old non-format requirements +
2577 # new requirements = old non-format requirements +
2578 # new format-related
2578 # new format-related
2579 # requirements from the streamed-in repository
2579 # requirements from the streamed-in repository
2580 requirements.update(set(self.requirements) - self.supportedformats)
2580 requirements.update(set(self.requirements) - self.supportedformats)
2581 self._applyrequirements(requirements)
2581 self._applyrequirements(requirements)
2582 self._writerequirements()
2582 self._writerequirements()
2583
2583
2584 if rbranchmap:
2584 if rbranchmap:
2585 rbheads = []
2585 rbheads = []
2586 for bheads in rbranchmap.itervalues():
2586 for bheads in rbranchmap.itervalues():
2587 rbheads.extend(bheads)
2587 rbheads.extend(bheads)
2588
2588
2589 self.branchcache = rbranchmap
2589 self.branchcache = rbranchmap
2590 if rbheads:
2590 if rbheads:
2591 rtiprev = max((int(self.changelog.rev(node))
2591 rtiprev = max((int(self.changelog.rev(node))
2592 for node in rbheads))
2592 for node in rbheads))
2593 self._writebranchcache(self.branchcache,
2593 self._writebranchcache(self.branchcache,
2594 self[rtiprev].node(), rtiprev)
2594 self[rtiprev].node(), rtiprev)
2595 self.invalidate()
2595 self.invalidate()
2596 return len(self.heads()) + 1
2596 return len(self.heads()) + 1
2597 finally:
2597 finally:
2598 lock.release()
2598 lock.release()
2599
2599
2600 def clone(self, remote, heads=[], stream=False):
2600 def clone(self, remote, heads=[], stream=False):
2601 '''clone remote repository.
2601 '''clone remote repository.
2602
2602
2603 keyword arguments:
2603 keyword arguments:
2604 heads: list of revs to clone (forces use of pull)
2604 heads: list of revs to clone (forces use of pull)
2605 stream: use streaming clone if possible'''
2605 stream: use streaming clone if possible'''
2606
2606
2607 # now, all clients that can request uncompressed clones can
2607 # now, all clients that can request uncompressed clones can
2608 # read repo formats supported by all servers that can serve
2608 # read repo formats supported by all servers that can serve
2609 # them.
2609 # them.
2610
2610
2611 # if revlog format changes, client will have to check version
2611 # if revlog format changes, client will have to check version
2612 # and format flags on "stream" capability, and use
2612 # and format flags on "stream" capability, and use
2613 # uncompressed only if compatible.
2613 # uncompressed only if compatible.
2614
2614
2615 if not stream:
2615 if not stream:
2616 # if the server explicitly prefers to stream (for fast LANs)
2616 # if the server explicitly prefers to stream (for fast LANs)
2617 stream = remote.capable('stream-preferred')
2617 stream = remote.capable('stream-preferred')
2618
2618
2619 if stream and not heads:
2619 if stream and not heads:
2620 # 'stream' means remote revlog format is revlogv1 only
2620 # 'stream' means remote revlog format is revlogv1 only
2621 if remote.capable('stream'):
2621 if remote.capable('stream'):
2622 return self.stream_in(remote, set(('revlogv1',)))
2622 return self.stream_in(remote, set(('revlogv1',)))
2623 # otherwise, 'streamreqs' contains the remote revlog format
2623 # otherwise, 'streamreqs' contains the remote revlog format
2624 streamreqs = remote.capable('streamreqs')
2624 streamreqs = remote.capable('streamreqs')
2625 if streamreqs:
2625 if streamreqs:
2626 streamreqs = set(streamreqs.split(','))
2626 streamreqs = set(streamreqs.split(','))
2627 # if we support it, stream in and adjust our requirements
2627 # if we support it, stream in and adjust our requirements
2628 if not streamreqs - self.supportedformats:
2628 if not streamreqs - self.supportedformats:
2629 return self.stream_in(remote, streamreqs)
2629 return self.stream_in(remote, streamreqs)
2630 return self.pull(remote, heads)
2630 return self.pull(remote, heads)
2631
2631
2632 def pushkey(self, namespace, key, old, new):
2632 def pushkey(self, namespace, key, old, new):
2633 self.hook('prepushkey', throw=True, namespace=namespace, key=key,
2633 self.hook('prepushkey', throw=True, namespace=namespace, key=key,
2634 old=old, new=new)
2634 old=old, new=new)
2635 self.ui.debug('pushing key for "%s:%s"\n' % (namespace, key))
2635 self.ui.debug('pushing key for "%s:%s"\n' % (namespace, key))
2636 ret = pushkey.push(self, namespace, key, old, new)
2636 ret = pushkey.push(self, namespace, key, old, new)
2637 self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
2637 self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
2638 ret=ret)
2638 ret=ret)
2639 return ret
2639 return ret
2640
2640
2641 def listkeys(self, namespace):
2641 def listkeys(self, namespace):
2642 self.hook('prelistkeys', throw=True, namespace=namespace)
2642 self.hook('prelistkeys', throw=True, namespace=namespace)
2643 self.ui.debug('listing keys for "%s"\n' % namespace)
2643 self.ui.debug('listing keys for "%s"\n' % namespace)
2644 values = pushkey.list(self, namespace)
2644 values = pushkey.list(self, namespace)
2645 self.hook('listkeys', namespace=namespace, values=values)
2645 self.hook('listkeys', namespace=namespace, values=values)
2646 return values
2646 return values
2647
2647
2648 def debugwireargs(self, one, two, three=None, four=None, five=None):
2648 def debugwireargs(self, one, two, three=None, four=None, five=None):
2649 '''used to test argument passing over the wire'''
2649 '''used to test argument passing over the wire'''
2650 return "%s %s %s %s %s" % (one, two, three, four, five)
2650 return "%s %s %s %s %s" % (one, two, three, four, five)
2651
2651
2652 def savecommitmessage(self, text):
2652 def savecommitmessage(self, text):
2653 fp = self.opener('last-message.txt', 'wb')
2653 fp = self.opener('last-message.txt', 'wb')
2654 try:
2654 try:
2655 fp.write(text)
2655 fp.write(text)
2656 finally:
2656 finally:
2657 fp.close()
2657 fp.close()
2658 return self.pathto(fp.name[len(self.root)+1:])
2658 return self.pathto(fp.name[len(self.root) + 1:])
2659
2659
2660 # used to avoid circular references so destructors work
2660 # used to avoid circular references so destructors work
2661 def aftertrans(files):
2661 def aftertrans(files):
2662 renamefiles = [tuple(t) for t in files]
2662 renamefiles = [tuple(t) for t in files]
2663 def a():
2663 def a():
2664 for src, dest in renamefiles:
2664 for src, dest in renamefiles:
2665 try:
2665 try:
2666 util.rename(src, dest)
2666 util.rename(src, dest)
2667 except OSError: # journal file does not yet exist
2667 except OSError: # journal file does not yet exist
2668 pass
2668 pass
2669 return a
2669 return a
2670
2670
2671 def undoname(fn):
2671 def undoname(fn):
2672 base, name = os.path.split(fn)
2672 base, name = os.path.split(fn)
2673 assert name.startswith('journal')
2673 assert name.startswith('journal')
2674 return os.path.join(base, name.replace('journal', 'undo', 1))
2674 return os.path.join(base, name.replace('journal', 'undo', 1))
2675
2675
2676 def instance(ui, path, create):
2676 def instance(ui, path, create):
2677 return localrepository(ui, util.urllocalpath(path), create)
2677 return localrepository(ui, util.urllocalpath(path), create)
2678
2678
2679 def islocal(path):
2679 def islocal(path):
2680 return True
2680 return True
@@ -1,1890 +1,1890 b''
1 # patch.py - patch file parsing routines
1 # patch.py - patch file parsing routines
2 #
2 #
3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
4 # Copyright 2007 Chris Mason <chris.mason@oracle.com>
4 # Copyright 2007 Chris Mason <chris.mason@oracle.com>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 import cStringIO, email.Parser, os, errno, re, posixpath
9 import cStringIO, email.Parser, os, errno, re, posixpath
10 import tempfile, zlib, shutil
10 import tempfile, zlib, shutil
11
11
12 from i18n import _
12 from i18n import _
13 from node import hex, nullid, short
13 from node import hex, nullid, short
14 import base85, mdiff, scmutil, util, diffhelpers, copies, encoding, error
14 import base85, mdiff, scmutil, util, diffhelpers, copies, encoding, error
15 import context
15 import context
16
16
# matches a git-style diff header, capturing the "a/" and "b/" paths,
# e.g. "diff --git a/old b/new" -> ('old', 'new'); raw string so the
# pattern reaches the regex engine verbatim
gitre = re.compile(r'diff --git a/(.*) b/(.*)')
18
18
class PatchError(Exception):
    """Raised for any error while parsing or applying a patch."""
    pass
21
21
22
22
23 # public functions
23 # public functions
24
24
def split(stream):
    '''return an iterator of individual patches from a stream'''

    def isheader(line, inheader):
        # True if line looks like an RFC 2822 header ("Key: value" or,
        # when already inside a header, a whitespace continuation line)
        if inheader and line[0] in (' ', '\t'):
            # continuation
            return True
        if line[0] in (' ', '-', '+'):
            # diff line - don't check for header pattern in there
            return False
        l = line.split(': ', 1)
        return len(l) == 2 and ' ' not in l[0]

    def chunk(lines):
        # repackage accumulated lines as a file-like object
        return cStringIO.StringIO(''.join(lines))

    def hgsplit(stream, cur):
        # split on "# HG changeset patch" markers (hg export output)
        inheader = True

        for line in stream:
            if not line.strip():
                inheader = False
            if not inheader and line.startswith('# HG changeset patch'):
                yield chunk(cur)
                cur = []
                inheader = True

            cur.append(line)

        if cur:
            yield chunk(cur)

    def mboxsplit(stream, cur):
        # split an mbox-style stream on "From " separator lines; each
        # message body is fed back through split() recursively
        for line in stream:
            if line.startswith('From '):
                for c in split(chunk(cur[1:])):
                    yield c
                cur = []

            cur.append(line)

        if cur:
            for c in split(chunk(cur[1:])):
                yield c

    def mimesplit(stream, cur):
        # parse the whole stream as a MIME message and yield only the
        # parts whose content type may carry a patch
        def msgfp(m):
            # flatten a message object back into a file-like object
            fp = cStringIO.StringIO()
            g = email.Generator.Generator(fp, mangle_from_=False)
            g.flatten(m)
            fp.seek(0)
            return fp

        for line in stream:
            cur.append(line)
        c = chunk(cur)

        m = email.Parser.Parser().parse(c)
        if not m.is_multipart():
            yield msgfp(m)
        else:
            ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
            for part in m.walk():
                ct = part.get_content_type()
                if ct not in ok_types:
                    continue
                yield msgfp(part)

    def headersplit(stream, cur):
        # split on the start of each RFC 2822-style header block
        inheader = False

        for line in stream:
            if not inheader and isheader(line, inheader):
                yield chunk(cur)
                cur = []
                inheader = True
            if inheader and not isheader(line, inheader):
                inheader = False

            cur.append(line)

        if cur:
            yield chunk(cur)

    def remainder(cur):
        # no recognizable structure: the whole input is one patch
        yield chunk(cur)

    class fiter(object):
        # adapt an object exposing only readline() to the iterator protocol
        def __init__(self, fp):
            self.fp = fp

        def __iter__(self):
            return self

        def next(self):
            l = self.fp.readline()
            if not l:
                raise StopIteration
            return l

    inheader = False
    cur = []

    # headers whose presence means the email parser should take over
    mimeheaders = ['content-type']

    if not util.safehasattr(stream, 'next'):
        # http responses, for example, have readline but not next
        stream = fiter(stream)

    # sniff leading lines until one of the splitting strategies applies;
    # every line read here is kept in 'cur' and handed to the strategy
    for line in stream:
        cur.append(line)
        if line.startswith('# HG changeset patch'):
            return hgsplit(stream, cur)
        elif line.startswith('From '):
            return mboxsplit(stream, cur)
        elif isheader(line, inheader):
            inheader = True
            if line.split(':', 1)[0].lower() in mimeheaders:
                # let email parser handle this
                return mimesplit(stream, cur)
        elif line.startswith('--- ') and inheader:
            # No evil headers seen by diff start, split by hand
            return headersplit(stream, cur)
        # Not enough info, keep reading

    # if we are here, we have a very plain patch
    return remainder(cur)
151
151
def extract(ui, fileobj):
    '''extract patch from data read from fileobj.

    patch can be a normal patch or contained in an email message.

    return tuple (filename, message, user, date, branch, node, p1, p2).
    Any item in the returned tuple can be None. If filename is None,
    fileobj did not contain a patch. Caller must unlink filename when done.'''

    # attempt to detect the start of a patch
    # (this heuristic is borrowed from quilt)
    diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |'
                        r'retrieving revision [0-9]+(\.[0-9]+)*$|'
                        r'---[ \t].*?^\+\+\+[ \t]|'
                        r'\*\*\*[ \t].*?^---[ \t])', re.MULTILINE|re.DOTALL)

    # the patch text itself is accumulated into a temporary file whose
    # name is returned to (and later unlinked by) the caller
    fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
    tmpfp = os.fdopen(fd, 'w')
    try:
        msg = email.Parser.Parser().parse(fileobj)

        subject = msg['Subject']
        user = msg['From']
        if not subject and not user:
            # Not an email, restore parsed headers if any
            subject = '\n'.join(': '.join(h) for h in msg.items()) + '\n'

        gitsendmail = 'git-send-email' in msg.get('X-Mailer', '')
        # should try to parse msg['Date']
        date = None
        nodeid = None
        branch = None
        parents = []

        if subject:
            if subject.startswith('[PATCH'):
                # strip a leading "[PATCH n/m]" tag from the subject
                pend = subject.find(']')
                if pend >= 0:
                    subject = subject[pend + 1:].lstrip()
            # unfold wrapped subject lines
            subject = re.sub(r'\n[ \t]+', ' ', subject)
            ui.debug('Subject: %s\n' % subject)
        if user:
            ui.debug('From: %s\n' % user)
        diffs_seen = 0
        ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
        message = ''
        for part in msg.walk():
            content_type = part.get_content_type()
            ui.debug('Content-Type: %s\n' % content_type)
            if content_type not in ok_types:
                continue
            payload = part.get_payload(decode=True)
            m = diffre.search(payload)
            if m:
                hgpatch = False
                hgpatchheader = False
                ignoretext = False

                ui.debug('found patch at byte %d\n' % m.start(0))
                diffs_seen += 1
                cfp = cStringIO.StringIO()
                # text before the diff is commit message material, possibly
                # mixed with "# ..." metadata lines from hg export
                for line in payload[:m.start(0)].splitlines():
                    if line.startswith('# HG changeset patch') and not hgpatch:
                        ui.debug('patch generated by hg export\n')
                        hgpatch = True
                        hgpatchheader = True
                        # drop earlier commit message content
                        cfp.seek(0)
                        cfp.truncate()
                        subject = None
                    elif hgpatchheader:
                        if line.startswith('# User '):
                            user = line[7:]
                            ui.debug('From: %s\n' % user)
                        elif line.startswith("# Date "):
                            date = line[7:]
                        elif line.startswith("# Branch "):
                            branch = line[9:]
                        elif line.startswith("# Node ID "):
                            nodeid = line[10:]
                        elif line.startswith("# Parent "):
                            parents.append(line[9:].lstrip())
                        elif not line.startswith("# "):
                            hgpatchheader = False
                    elif line == '---' and gitsendmail:
                        # git-send-email trailer: ignore everything after it
                        ignoretext = True
                    if not hgpatchheader and not ignoretext:
                        cfp.write(line)
                        cfp.write('\n')
                message = cfp.getvalue()
                if tmpfp:
                    tmpfp.write(payload)
                    if not payload.endswith('\n'):
                        tmpfp.write('\n')
            elif not diffs_seen and message and content_type == 'text/plain':
                message += '\n' + payload
    except: # re-raises
        tmpfp.close()
        os.unlink(tmpname)
        raise

    if subject and not message.startswith(subject):
        message = '%s\n%s' % (subject, message)
    tmpfp.close()
    if not diffs_seen:
        # no patch found: the temporary file is useless, remove it
        os.unlink(tmpname)
        return None, message, user, date, branch, None, None, None
    p1 = parents and parents.pop(0) or None
    p2 = parents and parents.pop(0) or None
    return tmpname, message, user, date, branch, nodeid, p1, p2
262
262
class patchmeta(object):
    """Patched file metadata

    'op' is the performed operation within ADD, DELETE, RENAME, MODIFY
    or COPY. 'path' is patched file path. 'oldpath' is set to the
    origin file when 'op' is either COPY or RENAME, None otherwise. If
    file mode is changed, 'mode' is a tuple (islink, isexec) where
    'islink' is True if the file is a symlink and 'isexec' is True if
    the file is executable. Otherwise, 'mode' is None.
    """
    def __init__(self, path):
        self.path = path
        self.oldpath = None
        self.op = 'MODIFY'
        self.mode = None
        self.binary = False

    def setmode(self, mode):
        # decompose a st_mode-style integer into the (islink, isexec)
        # pair; the bit masks are S_IFLNK and the owner-exec bit
        self.mode = (mode & 0o20000, mode & 0o100)

    def copy(self):
        dup = patchmeta(self.path)
        dup.oldpath = self.oldpath
        dup.mode = self.mode
        dup.op = self.op
        dup.binary = self.binary
        return dup

    def _ispatchinga(self, afile):
        # does the diff's "a/" side name this file (or its copy source)?
        if afile == '/dev/null':
            return self.op == 'ADD'
        return afile == 'a/' + (self.oldpath or self.path)

    def _ispatchingb(self, bfile):
        # does the diff's "b/" side name this file?
        if bfile == '/dev/null':
            return self.op == 'DELETE'
        return bfile == 'b/' + self.path

    def ispatching(self, afile, bfile):
        return self._ispatchinga(afile) and self._ispatchingb(bfile)

    def __repr__(self):
        return "<patchmeta %s %r>" % (self.op, self.path)
308
308
def readgitpatch(lr):
    """extract git-style metadata about patches from <patchname>"""

    # Filter patch for git information
    gp = None
    gitpatches = []
    for line in lr:
        line = line.rstrip(' \r\n')
        if line.startswith('diff --git'):
            m = gitre.match(line)
            if m:
                # a new per-file header begins: flush the previous one
                if gp:
                    gitpatches.append(gp)
                dst = m.group(2)
                gp = patchmeta(dst)
        elif gp:
            if line.startswith('--- '):
                # hunk section begins: metadata for this file is complete
                gitpatches.append(gp)
                gp = None
                continue
            if line.startswith('rename from '):
                gp.op = 'RENAME'
                gp.oldpath = line[12:]
            elif line.startswith('rename to '):
                gp.path = line[10:]
            elif line.startswith('copy from '):
                gp.op = 'COPY'
                gp.oldpath = line[10:]
            elif line.startswith('copy to '):
                gp.path = line[8:]
            elif line.startswith('deleted file'):
                gp.op = 'DELETE'
            elif line.startswith('new file mode '):
                gp.op = 'ADD'
                # trailing six characters carry the octal file mode
                gp.setmode(int(line[-6:], 8))
            elif line.startswith('new mode '):
                gp.setmode(int(line[-6:], 8))
            elif line.startswith('GIT binary patch'):
                gp.binary = True
    if gp:
        gitpatches.append(gp)

    return gitpatches
352
352
class linereader(object):
    # simple class to allow pushing lines back into the input stream
    def __init__(self, fp):
        self.fp = fp
        self.buf = []   # pushed-back lines, returned before fp's data

    def push(self, line):
        # queue a line to be returned ahead of the underlying stream
        if line is not None:
            self.buf.append(line)

    def readline(self):
        if self.buf:
            return self.buf.pop(0)
        return self.fp.readline()

    def __iter__(self):
        while True:
            line = self.readline()
            if not line:
                break
            yield line
class abstractbackend(object):
    """Interface for the destinations patched data can be written to."""

    def __init__(self, ui):
        self.ui = ui

    def getfile(self, fname):
        """Return target file data and flags as a (data, (islink,
        isexec)) tuple.
        """
        raise NotImplementedError

    def setfile(self, fname, data, mode, copysource):
        """Write data to target file fname and set its mode. mode is a
        (islink, isexec) tuple. If data is None, the file content should
        be left unchanged. If the file is modified after being copied,
        copysource is set to the original file name.
        """
        raise NotImplementedError

    def unlink(self, fname):
        """Unlink target file."""
        raise NotImplementedError

    def writerej(self, fname, failed, total, lines):
        """Write rejected lines for fname. failed is the number of hunks
        which failed to apply and total the total number of hunks for
        this file.
        """
        # default implementation silently discards rejects
        pass

    def exists(self, fname):
        # True if fname already exists in the patch target
        raise NotImplementedError
408
408
class fsbackend(abstractbackend):
    """Patch backend writing directly to the filesystem under basedir."""

    def __init__(self, ui, basedir):
        super(fsbackend, self).__init__(ui)
        self.opener = scmutil.opener(basedir)

    def _join(self, f):
        # absolute path of f inside the backend root
        return os.path.join(self.opener.base, f)

    def getfile(self, fname):
        path = self._join(fname)
        if os.path.islink(path):
            # a symlink's "data" is its target; links are never exec
            return (os.readlink(path), (True, False))
        isexec = False
        try:
            # owner-exec bit decides the isexec flag
            isexec = os.lstat(path).st_mode & 0100 != 0
        except OSError, e:
            # missing file: fall through and let opener.read raise
            if e.errno != errno.ENOENT:
                raise
        return (self.opener.read(fname), (False, isexec))

    def setfile(self, fname, data, mode, copysource):
        islink, isexec = mode
        if data is None:
            # content unchanged: only the flags need updating
            util.setflags(self._join(fname), islink, isexec)
            return
        if islink:
            self.opener.symlink(data, fname)
        else:
            self.opener.write(fname, data)
            if isexec:
                util.setflags(self._join(fname), False, True)

    def unlink(self, fname):
        try:
            util.unlinkpath(self._join(fname))
        except OSError, inst:
            # an already-missing file is fine; anything else is real
            if inst.errno != errno.ENOENT:
                raise

    def writerej(self, fname, failed, total, lines):
        # rejects are stored next to the target file as <fname>.rej
        fname = fname + ".rej"
        self.ui.warn(
            _("%d out of %d hunks FAILED -- saving rejects to file %s\n") %
            (failed, total, fname))
        fp = self.opener(fname, 'w')
        fp.writelines(lines)
        fp.close()

    def exists(self, fname):
        return os.path.lexists(self._join(fname))
459
459
class workingbackend(fsbackend):
    """fsbackend that additionally keeps the repository dirstate in sync."""

    def __init__(self, ui, repo, similarity):
        super(workingbackend, self).__init__(ui, repo.root)
        self.repo = repo
        # similarity ratio forwarded to addremove() for rename detection
        self.similarity = similarity
        self.removed = set()
        self.changed = set()
        self.copied = []

    def _checkknown(self, fname):
        # refuse to patch files that exist but are not tracked
        if self.repo.dirstate[fname] == '?' and self.exists(fname):
            raise PatchError(_('cannot patch %s: file is not tracked') % fname)

    def setfile(self, fname, data, mode, copysource):
        self._checkknown(fname)
        super(workingbackend, self).setfile(fname, data, mode, copysource)
        if copysource is not None:
            self.copied.append((copysource, fname))
        self.changed.add(fname)

    def unlink(self, fname):
        self._checkknown(fname)
        super(workingbackend, self).unlink(fname)
        self.removed.add(fname)
        self.changed.add(fname)

    def close(self):
        # propagate every recorded change into the dirstate and return
        # the sorted list of touched files
        wctx = self.repo[None]
        addremoved = set(self.changed)
        for src, dst in self.copied:
            scmutil.dirstatecopy(self.ui, self.repo, wctx, src, dst)
        if self.removed:
            wctx.forget(sorted(self.removed))
        for f in self.removed:
            if f not in self.repo.dirstate:
                # File was deleted and no longer belongs to the
                # dirstate, it was probably marked added then
                # deleted, and should not be considered by
                # addremove().
                addremoved.discard(f)
        if addremoved:
            cwd = self.repo.getcwd()
            if cwd:
                addremoved = [util.pathto(self.repo.root, cwd, f)
                              for f in addremoved]
            scmutil.addremove(self.repo, addremoved, similarity=self.similarity)
        return sorted(self.changed)
507
507
class filestore(object):
    """Temporary storage for patched file contents.

    Entries are kept in memory until the memory budget ('maxsize',
    default 4 MiB; a negative value means keep everything in memory)
    is exhausted, after which further entries spill to a temporary
    directory on disk.
    """
    def __init__(self, maxsize=None):
        self.opener = None      # lazily created opener for spilled files
        self.files = {}         # fname -> (ondiskname, mode, copied)
        self.created = 0        # counter generating simple on-disk names
        self.maxsize = 4 * (2 ** 20) if maxsize is None else maxsize
        self.size = 0           # bytes currently held in memory
        self.data = {}          # fname -> (data, mode, copied)

    def setfile(self, fname, data, mode, copied=None):
        fits = self.maxsize < 0 or len(data) + self.size <= self.maxsize
        if fits:
            self.data[fname] = (data, mode, copied)
            self.size += len(data)
            return
        if self.opener is None:
            # first spill: create the backing temporary directory
            root = tempfile.mkdtemp(prefix='hg-patch-')
            self.opener = scmutil.opener(root)
        # Avoid filename issues with these simple names
        fn = str(self.created)
        self.created += 1
        self.opener.write(fn, data)
        self.files[fname] = (fn, mode, copied)

    def getfile(self, fname):
        try:
            return self.data[fname]
        except KeyError:
            pass
        if not self.opener or fname not in self.files:
            raise IOError
        fn, mode, copied = self.files[fname]
        return self.opener.read(fn), mode, copied

    def close(self):
        # remove the spill directory, if one was ever created
        if self.opener:
            shutil.rmtree(self.opener.base)
544
544
class repobackend(abstractbackend):
    """Patch backend applying changes in memory against a changectx.

    Patched contents are written into 'store' instead of the filesystem.
    """

    def __init__(self, ui, repo, ctx, store):
        super(repobackend, self).__init__(ui)
        self.repo = repo
        self.ctx = ctx          # base changectx the patch applies to
        self.store = store      # filestore receiving patched data
        self.changed = set()
        self.removed = set()
        self.copied = {}

    def _checkknown(self, fname):
        # only files present in the base context can be patched
        if fname not in self.ctx:
            raise PatchError(_('cannot patch %s: file is not tracked') % fname)

    def getfile(self, fname):
        try:
            fctx = self.ctx[fname]
        except error.LookupError:
            # callers treat IOError as "no such file"
            raise IOError
        flags = fctx.flags()
        return fctx.data(), ('l' in flags, 'x' in flags)

    def setfile(self, fname, data, mode, copysource):
        if copysource:
            self._checkknown(copysource)
        if data is None:
            # flags-only change: keep the current content
            data = self.ctx[fname].data()
        self.store.setfile(fname, data, mode, copysource)
        self.changed.add(fname)
        if copysource:
            self.copied[fname] = copysource

    def unlink(self, fname):
        self._checkknown(fname)
        self.removed.add(fname)

    def exists(self, fname):
        return fname in self.ctx

    def close(self):
        # every file this backend touched, changed or removed
        return self.changed | self.removed
586
586
# @@ -start,len +start,len @@ or @@ -start +start @@ if len is 1
# Raw string literals: '\d', '\+' and '\*' are regex escapes, not
# string escapes, so spell them r'...' instead of relying on Python
# passing unknown backslash escapes through unchanged.
unidesc = re.compile(r'@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@')
# context-diff range line: '--- start[,end] ----' / '*** start[,end] ****'
contextdesc = re.compile(r'(?:---|\*\*\*) (\d+)(?:,(\d+))? (?:---|\*\*\*)')
# supported line-ending handling modes for patching
eolmodes = ['strict', 'crlf', 'lf', 'auto']
591
591
class patchfile(object):
    """State and logic for applying the hunks of one patched file.

    Current content is read through *backend* (or *store* for
    copies/renames), hunks are matched with optional offset and fuzz,
    and the result -- or any rejected hunks -- is written back through
    the backend on close().
    """
    def __init__(self, ui, gp, backend, store, eolmode='strict'):
        self.fname = gp.path
        self.eolmode = eolmode      # one of eolmodes
        self.eol = None             # EOL style detected from the file
        self.backend = backend
        self.ui = ui
        self.lines = []
        self.exists = False
        self.missing = True
        self.mode = gp.mode
        self.copysource = gp.oldpath
        self.create = gp.op in ('ADD', 'COPY', 'RENAME')
        self.remove = gp.op == 'DELETE'
        try:
            if self.copysource is None:
                data, mode = backend.getfile(self.fname)
                self.exists = True
            else:
                data, mode = store.getfile(self.copysource)[:2]
                self.exists = backend.exists(self.fname)
            self.missing = False
            if data:
                self.lines = mdiff.splitnewlines(data)
            if self.mode is None:
                self.mode = mode
            if self.lines:
                # Normalize line endings
                if self.lines[0].endswith('\r\n'):
                    self.eol = '\r\n'
                elif self.lines[0].endswith('\n'):
                    self.eol = '\n'
                if eolmode != 'strict':
                    nlines = []
                    for l in self.lines:
                        if l.endswith('\r\n'):
                            l = l[:-2] + '\n'
                        nlines.append(l)
                    self.lines = nlines
        except IOError:
            # Missing file is only acceptable when the patch creates it.
            if self.create:
                self.missing = False
            if self.mode is None:
                self.mode = (False, False)
        if self.missing:
            self.ui.warn(_("unable to find '%s' for patching\n") % self.fname)

        self.hash = {}          # line content -> list of line numbers
        self.dirty = 0
        self.offset = 0         # cumulative line delta from applied hunks
        self.skew = 0           # offset where the last fuzzy match landed
        self.rej = []           # hunks that could not be applied
        self.fileprinted = False
        self.printfile(False)
        self.hunks = 0

    def writelines(self, fname, lines, mode):
        """Write lines through the backend, restoring the requested EOLs."""
        if self.eolmode == 'auto':
            eol = self.eol
        elif self.eolmode == 'crlf':
            eol = '\r\n'
        else:
            eol = '\n'

        if self.eolmode != 'strict' and eol and eol != '\n':
            rawlines = []
            for l in lines:
                if l and l[-1] == '\n':
                    l = l[:-1] + eol
                rawlines.append(l)
            lines = rawlines

        self.backend.setfile(fname, ''.join(lines), mode, self.copysource)

    def printfile(self, warn):
        """Emit 'patching file ...' at most once, as warning or note."""
        if self.fileprinted:
            return
        if warn or self.ui.verbose:
            self.fileprinted = True
        s = _("patching file %s\n") % self.fname
        if warn:
            self.ui.warn(s)
        else:
            self.ui.note(s)

    def findlines(self, l, linenum):
        # looks through the hash and finds candidate lines. The
        # result is a list of line numbers sorted based on distance
        # from linenum
        cand = self.hash.get(l, [])
        if len(cand) > 1:
            # resort our list of potentials forward then back.
            cand.sort(key=lambda x: abs(x - linenum))
        return cand

    def write_rej(self):
        # our rejects are a little different from patch(1). This always
        # creates rejects in the same form as the original patch. A file
        # header is inserted so that you can run the reject through patch again
        # without having to type the filename.
        if not self.rej:
            return
        base = os.path.basename(self.fname)
        lines = ["--- %s\n+++ %s\n" % (base, base)]
        for x in self.rej:
            for l in x.hunk:
                lines.append(l)
                if l[-1] != '\n':
                    lines.append("\n\ No newline at end of file\n")
        self.backend.writerej(self.fname, len(self.rej), self.hunks, lines)

    def apply(self, h):
        """Apply hunk *h*; return the fuzz used on success, -1 on reject."""
        if not h.complete():
            raise PatchError(_("bad hunk #%d %s (%d %d %d %d)") %
                            (h.number, h.desc, len(h.a), h.lena, len(h.b),
                            h.lenb))

        self.hunks += 1

        if self.missing:
            self.rej.append(h)
            return -1

        if self.exists and self.create:
            if self.copysource:
                # Fix: apply '%' *outside* _() so the literal format
                # string (the catalog msgid), not the interpolated
                # result, is what gettext looks up.
                self.ui.warn(_("cannot create %s: destination already "
                               "exists\n") % self.fname)
            else:
                self.ui.warn(_("file %s already exists\n") % self.fname)
            self.rej.append(h)
            return -1

        if isinstance(h, binhunk):
            if self.remove:
                self.backend.unlink(self.fname)
            else:
                self.lines[:] = h.new()
                self.offset += len(h.new())
                self.dirty = True
            return 0

        horig = h
        if (self.eolmode in ('crlf', 'lf')
            or self.eolmode == 'auto' and self.eol):
            # If new eols are going to be normalized, then normalize
            # hunk data before patching. Otherwise, preserve input
            # line-endings.
            h = h.getnormalized()

        # fast case first, no offsets, no fuzz
        old, oldstart, new, newstart = h.fuzzit(0, False)
        oldstart += self.offset
        orig_start = oldstart
        # if there's skew we want to emit the "(offset %d lines)" even
        # when the hunk cleanly applies at start + skew, so skip the
        # fast case code
        if (self.skew == 0 and
            diffhelpers.testhunk(old, self.lines, oldstart) == 0):
            if self.remove:
                self.backend.unlink(self.fname)
            else:
                self.lines[oldstart:oldstart + len(old)] = new
                self.offset += len(new) - len(old)
                self.dirty = True
            return 0

        # ok, we couldn't match the hunk. Lets look for offsets and fuzz it
        self.hash = {}
        for x, s in enumerate(self.lines):
            self.hash.setdefault(s, []).append(x)

        for fuzzlen in xrange(3):
            for toponly in [True, False]:
                old, oldstart, new, newstart = h.fuzzit(fuzzlen, toponly)
                oldstart = oldstart + self.offset + self.skew
                oldstart = min(oldstart, len(self.lines))
                if old:
                    cand = self.findlines(old[0][1:], oldstart)
                else:
                    # Only adding lines with no or fuzzed context, just
                    # take the skew in account
                    cand = [oldstart]

                for l in cand:
                    if not old or diffhelpers.testhunk(old, self.lines, l) == 0:
                        self.lines[l : l + len(old)] = new
                        self.offset += len(new) - len(old)
                        self.skew = l - orig_start
                        self.dirty = True
                        offset = l - orig_start - fuzzlen
                        if fuzzlen:
                            msg = _("Hunk #%d succeeded at %d "
                                    "with fuzz %d "
                                    "(offset %d lines).\n")
                            self.printfile(True)
                            self.ui.warn(msg %
                                (h.number, l + 1, fuzzlen, offset))
                        else:
                            msg = _("Hunk #%d succeeded at %d "
                                    "(offset %d lines).\n")
                            self.ui.note(msg % (h.number, l + 1, offset))
                        return fuzzlen
        self.printfile(True)
        self.ui.warn(_("Hunk #%d FAILED at %d\n") % (h.number, orig_start))
        self.rej.append(horig)
        return -1

    def close(self):
        """Flush applied changes and rejects; return the reject count."""
        if self.dirty:
            self.writelines(self.fname, self.lines, self.mode)
        self.write_rej()
        return len(self.rej)
806
806
class hunk(object):
    """One text hunk of a patch, parsed from unified or context format.

    self.a holds old-side lines (with their '-'/' ' markers), self.b
    holds new-side lines (markers stripped), self.hunk holds the raw
    hunk lines starting with the '@@' description.
    """
    def __init__(self, desc, num, lr, context):
        self.number = num
        self.desc = desc
        self.hunk = [desc]
        self.a = []
        self.b = []
        self.starta = self.lena = None
        self.startb = self.lenb = None
        if lr is not None:
            if context:
                self.read_context_hunk(lr)
            else:
                self.read_unified_hunk(lr)

    def getnormalized(self):
        """Return a copy with line endings normalized to LF."""

        def normalize(lines):
            nlines = []
            for line in lines:
                if line.endswith('\r\n'):
                    line = line[:-2] + '\n'
                nlines.append(line)
            return nlines

        # Dummy object, it is rebuilt manually
        nh = hunk(self.desc, self.number, None, None)
        nh.number = self.number
        nh.desc = self.desc
        nh.hunk = self.hunk
        nh.a = normalize(self.a)
        nh.b = normalize(self.b)
        nh.starta = self.starta
        nh.startb = self.startb
        nh.lena = self.lena
        nh.lenb = self.lenb
        return nh

    def read_unified_hunk(self, lr):
        """Parse a unified-format hunk body from line reader *lr*."""
        m = unidesc.match(self.desc)
        if not m:
            raise PatchError(_("bad hunk #%d") % self.number)
        self.starta, self.lena, self.startb, self.lenb = m.groups()
        # a missing length means a one-line range ('@@ -start +start @@')
        if self.lena is None:
            self.lena = 1
        else:
            self.lena = int(self.lena)
        if self.lenb is None:
            self.lenb = 1
        else:
            self.lenb = int(self.lenb)
        self.starta = int(self.starta)
        self.startb = int(self.startb)
        diffhelpers.addlines(lr, self.hunk, self.lena, self.lenb, self.a,
                             self.b)
        # if we hit eof before finishing out the hunk, the last line will
        # be zero length. Lets try to fix it up.
        while len(self.hunk[-1]) == 0:
            del self.hunk[-1]
            del self.a[-1]
            del self.b[-1]
            self.lena -= 1
            self.lenb -= 1
        self._fixnewline(lr)

    def read_context_hunk(self, lr):
        """Parse a context-format hunk, rebuilding a unified equivalent."""
        self.desc = lr.readline()
        m = contextdesc.match(self.desc)
        if not m:
            raise PatchError(_("bad hunk #%d") % self.number)
        self.starta, aend = m.groups()
        self.starta = int(self.starta)
        if aend is None:
            aend = self.starta
        self.lena = int(aend) - self.starta
        if self.starta:
            self.lena += 1
        # old side: '- ' removed, '! ' changed, otherwise context
        for x in xrange(self.lena):
            l = lr.readline()
            if l.startswith('---'):
                # lines addition, old block is empty
                lr.push(l)
                break
            s = l[2:]
            if l.startswith('- ') or l.startswith('! '):
                u = '-' + s
            elif l.startswith(' '):
                u = ' ' + s
            else:
                raise PatchError(_("bad hunk #%d old text line %d") %
                                 (self.number, x))
            self.a.append(u)
            self.hunk.append(u)

        l = lr.readline()
        if l.startswith('\ '):
            # '\ No newline at end of file': strip the trailing newline
            s = self.a[-1][:-1]
            self.a[-1] = s
            self.hunk[-1] = s
            l = lr.readline()
        m = contextdesc.match(l)
        if not m:
            raise PatchError(_("bad hunk #%d") % self.number)
        self.startb, bend = m.groups()
        self.startb = int(self.startb)
        if bend is None:
            bend = self.startb
        self.lenb = int(bend) - self.startb
        if self.startb:
            self.lenb += 1
        hunki = 1
        # new side: '+ ' added, '! ' changed, otherwise context
        for x in xrange(self.lenb):
            l = lr.readline()
            if l.startswith('\ '):
                # XXX: the only way to hit this is with an invalid line range.
                # The no-eol marker is not counted in the line range, but I
                # guess there are diff(1) out there which behave differently.
                s = self.b[-1][:-1]
                self.b[-1] = s
                self.hunk[hunki - 1] = s
                continue
            if not l:
                # line deletions, new block is empty and we hit EOF
                lr.push(l)
                break
            s = l[2:]
            if l.startswith('+ ') or l.startswith('! '):
                u = '+' + s
            elif l.startswith(' '):
                u = ' ' + s
            elif len(self.b) == 0:
                # line deletions, new block is empty
                lr.push(l)
                break
            else:
                raise PatchError(_("bad hunk #%d old text line %d") %
                                 (self.number, x))
            self.b.append(s)
            # merge this new-side line into self.hunk at the right spot,
            # skipping over '-' lines already recorded from the old side
            while True:
                if hunki >= len(self.hunk):
                    h = ""
                else:
                    h = self.hunk[hunki]
                hunki += 1
                if h == u:
                    break
                elif h.startswith('-'):
                    continue
                else:
                    self.hunk.insert(hunki - 1, u)
                    break

        if not self.a:
            # this happens when lines were only added to the hunk
            for x in self.hunk:
                if x.startswith('-') or x.startswith(' '):
                    self.a.append(x)
        if not self.b:
            # this happens when lines were only deleted from the hunk
            for x in self.hunk:
                if x.startswith('+') or x.startswith(' '):
                    self.b.append(x[1:])
        # @@ -start,len +start,len @@
        self.desc = "@@ -%d,%d +%d,%d @@\n" % (self.starta, self.lena,
                                               self.startb, self.lenb)
        self.hunk[0] = self.desc
        self._fixnewline(lr)

    def _fixnewline(self, lr):
        # Consume a trailing '\ No newline at end of file' marker if
        # present; otherwise put the peeked line back.
        l = lr.readline()
        if l.startswith('\ '):
            diffhelpers.fix_newline(self.hunk, self.a, self.b)
        else:
            lr.push(l)

    def complete(self):
        # True once both sides hold the line counts the header promised.
        return len(self.a) == self.lena and len(self.b) == self.lenb

    def _fuzzit(self, old, new, fuzz, toponly):
        # this removes context lines from the top and bottom of list 'l'. It
        # checks the hunk to make sure only context lines are removed, and then
        # returns a new shortened list of lines.
        fuzz = min(fuzz, len(old))
        if fuzz:
            top = 0
            bot = 0
            hlen = len(self.hunk)
            for x in xrange(hlen - 1):
                # the hunk starts with the @@ line, so use x+1
                if self.hunk[x + 1][0] == ' ':
                    top += 1
                else:
                    break
            if not toponly:
                for x in xrange(hlen - 1):
                    if self.hunk[hlen - bot - 1][0] == ' ':
                        bot += 1
                    else:
                        break

            bot = min(fuzz, bot)
            top = min(fuzz, top)
            return old[top:len(old) - bot], new[top:len(new) - bot], top
        return old, new, 0

    def fuzzit(self, fuzz, toponly):
        """Return (old, oldstart, new, newstart) with *fuzz* context removed."""
        old, new, top = self._fuzzit(self.a, self.b, fuzz, toponly)
        oldstart = self.starta + top
        newstart = self.startb + top
        # zero length hunk ranges already have their start decremented
        if self.lena and oldstart > 0:
            oldstart -= 1
        if self.lenb and newstart > 0:
            newstart -= 1
        return old, oldstart, new, newstart
1023
1023
class binhunk(object):
    'A binary patch file. Only understands literals so far.'
    def __init__(self, lr, fname):
        self.text = None        # decoded payload; None until _read succeeds
        self.hunk = ['GIT binary patch\n']
        self._fname = fname
        self._read(lr)

    def complete(self):
        return self.text is not None

    def new(self):
        return [self.text]

    def _read(self, lr):
        """Decode a git 'literal' binary hunk from line reader *lr*."""
        def getline(lr, hunk):
            # Read one raw line, keep it for reject output, and return
            # it without the trailing EOL.
            l = lr.readline()
            hunk.append(l)
            return l.rstrip('\r\n')

        # skip ahead to the 'literal <size>' header
        while True:
            line = getline(lr, self.hunk)
            if not line:
                raise PatchError(_('could not extract "%s" binary data')
                                 % self._fname)
            if line.startswith('literal '):
                break
        size = int(line[8:].rstrip())
        dec = []
        line = getline(lr, self.hunk)
        while len(line) > 1:
            # The first char encodes the decoded byte length of the line:
            # 'A'-'Z' map to 1..26, 'a'-'z' to 27..52 (git base85 framing).
            l = line[0]
            if l <= 'Z' and l >= 'A':
                l = ord(l) - ord('A') + 1
            else:
                l = ord(l) - ord('a') + 27
            try:
                dec.append(base85.b85decode(line[1:])[:l])
            except ValueError, e:
                raise PatchError(_('could not decode "%s" binary patch: %s')
                                 % (self._fname, str(e)))
            line = getline(lr, self.hunk)
        text = zlib.decompress(''.join(dec))
        if len(text) != size:
            raise PatchError(_('"%s" length is %d bytes, should be %d')
                             % (self._fname, len(text), size))
        self.text = text
1071
1071
def parsefilename(str):
    """Extract the file name from a '--- ' / '+++ ' patch header line.

    The name runs from after the four-character prefix up to the first
    tab or, failing that, the first space; trailing junk (timestamps
    etc.) is discarded.
    """
    rest = str[4:].rstrip('\r\n')
    # prefer a tab separator, fall back to a space
    for sep in ('\t', ' '):
        cut = rest.find(sep)
        if cut >= 0:
            return rest[:cut]
    return rest
1081
1081
def pathstrip(path, strip):
    """Split *path* into (stripped-prefix, remainder) after removing
    *strip* leading directory components, patch(1)-style.

    Raises PatchError when the path has fewer than *strip* components.
    """
    if strip == 0:
        return '', path.rstrip()
    limit = len(path) - 1
    pos = 0
    remaining = strip
    while remaining > 0:
        slash = path.find('/', pos)
        if slash == -1:
            raise PatchError(_("unable to strip away %d of %d dirs from %s") %
                             (remaining, strip, path))
        pos = slash + 1
        # swallow any run of consecutive slashes ('//')
        while pos < limit and path[pos] == '/':
            pos += 1
        remaining -= 1
    return path[:pos].lstrip(), path[pos:].rstrip()
1099
1099
def makepatchmeta(backend, afile_orig, bfile_orig, hunk, strip):
    """Build a patchmeta describing which file *hunk* targets.

    Strips *strip* path components from both header names, decides
    between the 'a' and 'b' sides based on which file exists in
    *backend*, and classifies the operation as ADD, DELETE or a plain
    modification. Raises PatchError when no usable name remains.
    """
    nulla = afile_orig == "/dev/null"
    nullb = bfile_orig == "/dev/null"
    # /dev/null plus a zero-length range marks creation/removal
    create = nulla and hunk.starta == 0 and hunk.lena == 0
    remove = nullb and hunk.startb == 0 and hunk.lenb == 0
    abase, afile = pathstrip(afile_orig, strip)
    gooda = not nulla and backend.exists(afile)
    bbase, bfile = pathstrip(bfile_orig, strip)
    if afile == bfile:
        goodb = gooda
    else:
        goodb = not nullb and backend.exists(bfile)
    missing = not goodb and not gooda and not create

    # some diff programs apparently produce patches where the afile is
    # not /dev/null, but afile starts with bfile
    abasedir = afile[:afile.rfind('/') + 1]
    bbasedir = bfile[:bfile.rfind('/') + 1]
    if (missing and abasedir == bbasedir and afile.startswith(bfile)
        and hunk.starta == 0 and hunk.lena == 0):
        create = True
        missing = False

    # If afile is "a/b/foo" and bfile is "a/b/foo.orig" we assume the
    # diff is between a file and its backup. In this case, the original
    # file should be patched (see original mpatch code).
    isbackup = (abase == bbase and bfile.startswith(afile))
    fname = None
    if not missing:
        if gooda and goodb:
            fname = isbackup and afile or bfile
        elif gooda:
            fname = afile

    if not fname:
        if not nullb:
            fname = isbackup and afile or bfile
        elif not nulla:
            fname = afile
        else:
            raise PatchError(_("undefined source and destination files"))

    gp = patchmeta(fname)
    if create:
        gp.op = 'ADD'
    elif remove:
        gp.op = 'DELETE'
    return gp
1148
1148
def scangitpatch(lr, firstline):
    """Pre-scan a git patch for copy/rename metadata.

    Git patches can emit:
    - rename a to b
    - change b
    - copy a to c
    - change c

    Such a sequence cannot be applied as-is: the renamed 'a' would no
    longer exist, and copying from 'b' would pick up its later changes.
    So we scan the whole git patch up front for copy and rename commands,
    allowing the copies to be performed ahead of time.
    """
    offset = 0
    try:
        offset = lr.fp.tell()
        stream = lr.fp
    except IOError:
        # unseekable input: slurp everything into a seekable buffer
        stream = cStringIO.StringIO(lr.fp.read())
    scanner = linereader(stream)
    scanner.push(firstline)
    patches = readgitpatch(scanner)
    stream.seek(offset)
    return patches
1174
1174
def iterhunks(fp):
    """Read a patch and yield the following events:
    - ("file", afile, bfile, firsthunk): select a new target file.
    - ("hunk", hunk): a new hunk is ready to be applied, follows a
    "file" event.
    - ("git", gitchanges): current diff is in git format, gitchanges
    maps filenames to gitpatch records. Unique event.
    """
    afile = ""
    bfile = ""
    state = None
    hunknum = 0
    emitfile = newfile = False
    gitpatches = None

    # our states
    BFILE = 1
    # context is None until we know the diff flavor: True for context
    # diffs, False for unified diffs.
    context = None
    lr = linereader(fp)

    while True:
        x = lr.readline()
        if not x:
            break
        if state == BFILE and (
            (not context and x[0] == '@')
            or (context is not False and x.startswith('***************'))
            or x.startswith('GIT binary patch')):
            # a hunk header for the currently selected file
            gp = None
            if (gitpatches and
                gitpatches[-1].ispatching(afile, bfile)):
                gp = gitpatches.pop()
            if x.startswith('GIT binary patch'):
                h = binhunk(lr, gp.path)
            else:
                if context is None and x.startswith('***************'):
                    context = True
                h = hunk(x, hunknum + 1, lr, context)
            hunknum += 1
            if emitfile:
                # first hunk of a new file: announce the file first
                emitfile = False
                yield 'file', (afile, bfile, h, gp and gp.copy() or None)
            yield 'hunk', h
        elif x.startswith('diff --git'):
            m = gitre.match(x.rstrip(' \r\n'))
            if not m:
                continue
            if gitpatches is None:
                # scan whole input for git metadata
                gitpatches = scangitpatch(lr, x)
                yield 'git', [g.copy() for g in gitpatches
                              if g.op in ('COPY', 'RENAME')]
                gitpatches.reverse()
            afile = 'a/' + m.group(1)
            bfile = 'b/' + m.group(2)
            # emit metadata-only entries (mode changes, deletions...) for
            # files appearing before the one this header introduces
            while gitpatches and not gitpatches[-1].ispatching(afile, bfile):
                gp = gitpatches.pop()
                yield 'file', ('a/' + gp.path, 'b/' + gp.path, None, gp.copy())
            if not gitpatches:
                raise PatchError(_('failed to synchronize metadata for "%s"')
                                 % afile[2:])
            gp = gitpatches[-1]
            newfile = True
        elif x.startswith('---'):
            # check for a unified diff
            l2 = lr.readline()
            if not l2.startswith('+++'):
                lr.push(l2)
                continue
            newfile = True
            context = False
            afile = parsefilename(x)
            bfile = parsefilename(l2)
        elif x.startswith('***'):
            # check for a context diff
            l2 = lr.readline()
            if not l2.startswith('---'):
                lr.push(l2)
                continue
            l3 = lr.readline()
            lr.push(l3)
            if not l3.startswith("***************"):
                lr.push(l2)
                continue
            newfile = True
            context = True
            afile = parsefilename(x)
            bfile = parsefilename(l2)

        if newfile:
            newfile = False
            emitfile = True
            state = BFILE
            hunknum = 0

    # remaining git metadata entries had no hunks at all
    while gitpatches:
        gp = gitpatches.pop()
        yield 'file', ('a/' + gp.path, 'b/' + gp.path, None, gp.copy())
1273
1273
def applydiff(ui, fp, backend, store, strip=1, eolmode='strict'):
    """Read a patch from fp and attempt to apply it.

    Return 0 for a clean patch, -1 if any rejects were found and 1 if
    there was any fuzz.

    With eolmode 'strict', the patch content and patched file are read
    in binary mode. Otherwise, line endings are ignored while patching
    and then normalized according to eolmode.
    """
    return _applydiff(ui, fp, patchfile, backend, store, strip=strip,
                      eolmode=eolmode)
1286
1286
def _applydiff(ui, fp, patcher, backend, store, strip=1,
               eolmode='strict'):
    # Core of applydiff(): consume the events produced by iterhunks(fp)
    # and drive patcher/backend/store accordingly.  Returns 0 on a clean
    # apply, -1 if any hunk was rejected, 1 on fuzz.

    def pstrip(p):
        # strip - 1 because the leading a/ or b/ component was already
        # removed from git metadata paths
        return pathstrip(p, strip - 1)[1]

    rejects = 0
    err = 0
    current_file = None

    for state, values in iterhunks(fp):
        if state == 'hunk':
            if not current_file:
                # file selection failed earlier; skip its hunks
                continue
            ret = current_file.apply(values)
            if ret > 0:
                err = 1
        elif state == 'file':
            if current_file:
                rejects += current_file.close()
            current_file = None
            afile, bfile, first_hunk, gp = values
            if gp:
                gp.path = pstrip(gp.path)
                if gp.oldpath:
                    gp.oldpath = pstrip(gp.oldpath)
            else:
                # non-git patch: derive metadata from the file names
                gp = makepatchmeta(backend, afile, bfile, first_hunk, strip)
            if gp.op == 'RENAME':
                backend.unlink(gp.oldpath)
            if not first_hunk:
                # metadata-only entry (delete, mode change, copy/rename
                # without content changes)
                if gp.op == 'DELETE':
                    backend.unlink(gp.path)
                    continue
                data, mode = None, None
                if gp.op in ('RENAME', 'COPY'):
                    data, mode = store.getfile(gp.oldpath)[:2]
                if gp.mode:
                    mode = gp.mode
                    if gp.op == 'ADD':
                        # Added files without content have no hunk and
                        # must be created
                        data = ''
                if data or mode:
                    if (gp.op in ('ADD', 'RENAME', 'COPY')
                        and backend.exists(gp.path)):
                        raise PatchError(_("cannot create %s: destination "
                                           "already exists") % gp.path)
                    backend.setfile(gp.path, data, mode, gp.oldpath)
                continue
            try:
                current_file = patcher(ui, gp, backend, store,
                                       eolmode=eolmode)
            except PatchError, inst:
                ui.warn(str(inst) + '\n')
                current_file = None
                rejects += 1
                continue
        elif state == 'git':
            # snapshot copy/rename sources before they are modified
            for gp in values:
                path = pstrip(gp.oldpath)
                try:
                    data, mode = backend.getfile(path)
                except IOError, e:
                    if e.errno != errno.ENOENT:
                        raise
                    # The error ignored here will trigger a getfile()
                    # error in a place more appropriate for error
                    # handling, and will not interrupt the patching
                    # process.
                else:
                    store.setfile(path, data, mode)
        else:
            raise util.Abort(_('unsupported parser state: %s') % state)

    if current_file:
        rejects += current_file.close()

    if rejects:
        return -1
    return err
1368
1368
def _externalpatch(ui, repo, patcher, patchname, strip, files,
                   similarity):
    """use <patcher> to apply <patchname> to the working directory.
    returns whether patch was applied with fuzz factor."""

    fuzz = False
    args = []
    cwd = repo.root
    if cwd:
        args.append('-d %s' % util.shellquote(cwd))
    fp = util.popen('%s %s -p%d < %s' % (patcher, ' '.join(args), strip,
                                         util.shellquote(patchname)))
    try:
        # scrape the external patch program's output for file names,
        # fuzz and failure reports
        for line in fp:
            line = line.rstrip()
            ui.note(line + '\n')
            if line.startswith('patching file '):
                pf = util.parsepatchoutput(line)
                printed_file = False
                files.add(pf)
            elif line.find('with fuzz') >= 0:
                fuzz = True
                if not printed_file:
                    ui.warn(pf + '\n')
                    printed_file = True
                ui.warn(line + '\n')
            elif line.find('saving rejects to file') >= 0:
                ui.warn(line + '\n')
            elif line.find('FAILED') >= 0:
                if not printed_file:
                    ui.warn(pf + '\n')
                    printed_file = True
                ui.warn(line + '\n')
    finally:
        if files:
            cfiles = list(files)
            cwd = repo.getcwd()
            if cwd:
                cfiles = [util.pathto(repo.root, cwd, f)
                          for f in cfiles]
            # record added/removed files in the dirstate
            scmutil.addremove(repo, cfiles, similarity=similarity)
    code = fp.close()
    if code:
        raise PatchError(_("patch command failed: %s") %
                         util.explainexit(code)[0])
    return fuzz
1415
1415
def patchbackend(ui, backend, patchobj, strip, files=None, eolmode='strict'):
    """Apply patchobj through the given backend.

    patchobj may be a file name or an already-open file-like object.
    files, when given, is updated with the paths reported by the backend
    on close.  Raises PatchError if the patch does not apply; returns
    True when the patch applied with fuzz.
    """
    if files is None:
        files = set()
    if eolmode is None:
        eolmode = ui.config('patch', 'eol', 'strict')
    normalized = eolmode.lower()
    if normalized not in eolmodes:
        raise util.Abort(_('unsupported line endings type: %s') % eolmode)
    eolmode = normalized

    fstore = filestore()
    try:
        patchfp = open(patchobj, 'rb')
    except TypeError:
        # not a path: assume an open file-like object
        patchfp = patchobj
    try:
        ret = applydiff(ui, patchfp, backend, fstore, strip=strip,
                        eolmode=eolmode)
    finally:
        if patchfp != patchobj:
            patchfp.close()
        files.update(backend.close())
        fstore.close()
    if ret < 0:
        raise PatchError(_('patch failed to apply'))
    return ret > 0
1441
1441
def internalpatch(ui, repo, patchobj, strip, files=None, eolmode='strict',
                  similarity=0):
    """Apply patchobj to the working directory with the builtin patcher.

    Returns whether the patch was applied with fuzz factor.
    """
    wcbackend = workingbackend(ui, repo, similarity)
    return patchbackend(ui, wcbackend, patchobj, strip, files, eolmode)
1448
1448
def patchrepo(ui, repo, ctx, store, patchobj, strip, files=None,
              eolmode='strict'):
    """Apply patchobj on top of ctx, collecting results into store."""
    rbackend = repobackend(ui, repo, ctx, store)
    return patchbackend(ui, rbackend, patchobj, strip, files, eolmode)
1453
1453
def makememctx(repo, parents, text, user, date, branch, files, store,
               editor=None):
    """Build an in-memory changeset context from the files in store.

    branch, when set, is recorded in the changeset extras.  editor, when
    given, is invoked to (re)write the commit message.
    """
    def fctxfn(repo, memctx, path):
        # pull file content and flags back out of the file store
        data, (islink, isexec), copied = store.getfile(path)
        return context.memfilectx(path, data, islink=islink, isexec=isexec,
                                  copied=copied)

    extra = {}
    if branch:
        extra['branch'] = encoding.fromlocal(branch)
    memcs = context.memctx(repo, parents, text, files, fctxfn, user,
                           date, extra)
    if editor:
        memcs._text = editor(repo, memcs, [])
    return memcs
1468
1468
1469 def patch(ui, repo, patchname, strip=1, files=None, eolmode='strict',
1469 def patch(ui, repo, patchname, strip=1, files=None, eolmode='strict',
1470 similarity=0):
1470 similarity=0):
1471 """Apply <patchname> to the working directory.
1471 """Apply <patchname> to the working directory.
1472
1472
1473 'eolmode' specifies how end of lines should be handled. It can be:
1473 'eolmode' specifies how end of lines should be handled. It can be:
1474 - 'strict': inputs are read in binary mode, EOLs are preserved
1474 - 'strict': inputs are read in binary mode, EOLs are preserved
1475 - 'crlf': EOLs are ignored when patching and reset to CRLF
1475 - 'crlf': EOLs are ignored when patching and reset to CRLF
1476 - 'lf': EOLs are ignored when patching and reset to LF
1476 - 'lf': EOLs are ignored when patching and reset to LF
1477 - None: get it from user settings, default to 'strict'
1477 - None: get it from user settings, default to 'strict'
1478 'eolmode' is ignored when using an external patcher program.
1478 'eolmode' is ignored when using an external patcher program.
1479
1479
1480 Returns whether patch was applied with fuzz factor.
1480 Returns whether patch was applied with fuzz factor.
1481 """
1481 """
1482 patcher = ui.config('ui', 'patch')
1482 patcher = ui.config('ui', 'patch')
1483 if files is None:
1483 if files is None:
1484 files = set()
1484 files = set()
1485 try:
1485 try:
1486 if patcher:
1486 if patcher:
1487 return _externalpatch(ui, repo, patcher, patchname, strip,
1487 return _externalpatch(ui, repo, patcher, patchname, strip,
1488 files, similarity)
1488 files, similarity)
1489 return internalpatch(ui, repo, patchname, strip, files, eolmode,
1489 return internalpatch(ui, repo, patchname, strip, files, eolmode,
1490 similarity)
1490 similarity)
1491 except PatchError, err:
1491 except PatchError, err:
1492 raise util.Abort(str(err))
1492 raise util.Abort(str(err))
1493
1493
def changedfiles(ui, repo, patchpath, strip=1):
    """Return the set of file paths modified by the patch at patchpath."""
    backend = fsbackend(ui, repo.root)
    fp = open(patchpath, 'rb')
    try:
        changed = set()
        # walk the 'file' events only; hunks and git metadata carry no
        # additional file names
        for state, values in iterhunks(fp):
            if state == 'file':
                afile, bfile, first_hunk, gp = values
                if gp:
                    gp.path = pathstrip(gp.path, strip - 1)[1]
                    if gp.oldpath:
                        gp.oldpath = pathstrip(gp.oldpath, strip - 1)[1]
                else:
                    gp = makepatchmeta(backend, afile, bfile, first_hunk, strip)
                changed.add(gp.path)
                if gp.op == 'RENAME':
                    # the rename source is touched (removed) as well
                    changed.add(gp.oldpath)
            elif state not in ('hunk', 'git'):
                raise util.Abort(_('unsupported parser state: %s') % state)
        return changed
    finally:
        fp.close()
1516
1516
class GitDiffRequired(Exception):
    # Raised to signal that a change cannot be represented in the plain
    # patch format and the git extended format is required (see the
    # upgrade handling in diff()).
    pass
1519
1519
def diffopts(ui, opts=None, untrusted=False, section='diff'):
    # Build an mdiff.diffopts object: command-line opts (when set) win
    # over values from the given config section.
    def get(key, name=None, getter=ui.configbool):
        return ((opts and opts.get(key)) or
                getter(section, name or key, None, untrusted=untrusted))
    return mdiff.diffopts(
        text=opts and opts.get('text'),
        git=get('git'),
        nodates=get('nodates'),
        showfunc=get('show_function', 'showfunc'),
        ignorews=get('ignore_all_space', 'ignorews'),
        ignorewsamount=get('ignore_space_change', 'ignorewsamount'),
        ignoreblanklines=get('ignore_blank_lines', 'ignoreblanklines'),
        # 'unified' holds a line count, hence the plain string getter
        context=get('unified', getter=ui.config))
1533
1533
def diff(repo, node1=None, node2=None, match=None, changes=None, opts=None,
         losedatafn=None, prefix=''):
    '''yields diff of changes to files between two nodes, or node and
    working directory.

    if node1 is None, use first dirstate parent instead.
    if node2 is None, compare node1 with working directory.

    losedatafn(**kwarg) is a callable run when opts.upgrade=True and
    every time some change cannot be represented with the current
    patch format. Return False to upgrade to git patch format, True to
    accept the loss or raise an exception to abort the diff. It is
    called with the name of current file being diffed as 'fn'. If set
    to None, patches will always be upgraded to git format when
    necessary.

    prefix is a filename prefix that is prepended to all filenames on
    display (used for subrepos).
    '''

    if opts is None:
        opts = mdiff.defaultopts

    if not node1 and not node2:
        node1 = repo.dirstate.p1()

    def lrugetfilectx():
        # small LRU cache (20 entries) of filelogs keyed by file name,
        # avoiding reopening the same filelog for every revision
        cache = {}
        order = util.deque()
        def getfilectx(f, ctx):
            fctx = ctx.filectx(f, filelog=cache.get(f))
            if f not in cache:
                if len(cache) > 20:
                    del cache[order.popleft()]
                cache[f] = fctx.filelog()
            else:
                order.remove(f)
            order.append(f)
            return fctx
        return getfilectx
    getfilectx = lrugetfilectx()

    ctx1 = repo[node1]
    ctx2 = repo[node2]

    if not changes:
        changes = repo.status(ctx1, ctx2, match=match)
    modified, added, removed = changes[:3]

    if not modified and not added and not removed:
        return []

    revs = None
    hexfunc = repo.ui.debugflag and hex or short
    revs = [hexfunc(node) for node in [node1, node2] if node]

    copy = {}
    if opts.git or opts.upgrade:
        # copy/rename data only matters when the git format is (or may
        # become) in use
        copy = copies.pathcopies(ctx1, ctx2)

    def difffn(opts, losedata):
        return trydiff(repo, revs, ctx1, ctx2, modified, added, removed,
                       copy, getfilectx, opts, losedata, prefix)
    if opts.upgrade and not opts.git:
        try:
            def losedata(fn):
                if not losedatafn or not losedatafn(fn=fn):
                    raise GitDiffRequired
            # Buffer the whole output until we are sure it can be generated
            return list(difffn(opts.copy(git=False), losedata))
        except GitDiffRequired:
            # some change needed the git format; rerun with it enabled
            return difffn(opts.copy(git=True), None)
    else:
        return difffn(opts, None)
1608
1608
def difflabel(func, *args, **kw):
    '''yields 2-tuples of (output, label) based on the output of func()'''
    # labels for lines in the per-file header part of a diff
    headprefixes = [('diff', 'diff.diffline'),
                    ('copy', 'diff.extended'),
                    ('rename', 'diff.extended'),
                    ('old', 'diff.extended'),
                    ('new', 'diff.extended'),
                    ('deleted', 'diff.extended'),
                    ('---', 'diff.file_a'),
                    ('+++', 'diff.file_b')]
    # labels for lines in the hunk (content) part of a diff
    textprefixes = [('@', 'diff.hunk'),
                    ('-', 'diff.deleted'),
                    ('+', 'diff.inserted')]
    head = False
    for chunk in func(*args, **kw):
        lines = chunk.split('\n')
        for i, line in enumerate(lines):
            if i != 0:
                yield ('\n', '')
            if head:
                # a '@' hunk header ends the file header section
                if line.startswith('@'):
                    head = False
            else:
                if line and line[0] not in ' +-@\\':
                    head = True
            stripline = line
            if not head and line and line[0] in '+-':
                # highlight trailing whitespace, but only in changed lines
                stripline = line.rstrip()
            prefixes = textprefixes
            if head:
                prefixes = headprefixes
            for prefix, label in prefixes:
                if stripline.startswith(prefix):
                    yield (stripline, label)
                    break
            else:
                yield (line, '')
            if line != stripline:
                # emit the stripped trailing whitespace with its own label
                yield (line[len(stripline):], 'diff.trailingwhitespace')
1649
1649
1650 def diffui(*args, **kw):
1650 def diffui(*args, **kw):
1651 '''like diff(), but yields 2-tuples of (output, label) for ui.write()'''
1651 '''like diff(), but yields 2-tuples of (output, label) for ui.write()'''
1652 return difflabel(diff, *args, **kw)
1652 return difflabel(diff, *args, **kw)
1653
1653
1654 def trydiff(repo, revs, ctx1, ctx2, modified, added, removed,
1654 def trydiff(repo, revs, ctx1, ctx2, modified, added, removed,
1655 copy, getfilectx, opts, losedatafn, prefix):
1655 copy, getfilectx, opts, losedatafn, prefix):
1656
1656
1657 def join(f):
1657 def join(f):
1658 return posixpath.join(prefix, f)
1658 return posixpath.join(prefix, f)
1659
1659
1660 def addmodehdr(header, omode, nmode):
1660 def addmodehdr(header, omode, nmode):
1661 if omode != nmode:
1661 if omode != nmode:
1662 header.append('old mode %s\n' % omode)
1662 header.append('old mode %s\n' % omode)
1663 header.append('new mode %s\n' % nmode)
1663 header.append('new mode %s\n' % nmode)
1664
1664
1665 def addindexmeta(meta, revs):
1665 def addindexmeta(meta, revs):
1666 if opts.git:
1666 if opts.git:
1667 i = len(revs)
1667 i = len(revs)
1668 if i==2:
1668 if i==2:
1669 meta.append('index %s..%s\n' % tuple(revs))
1669 meta.append('index %s..%s\n' % tuple(revs))
1670 elif i==3:
1670 elif i==3:
1671 meta.append('index %s,%s..%s\n' % tuple(revs))
1671 meta.append('index %s,%s..%s\n' % tuple(revs))
1672
1672
1673 def gitindex(text):
1673 def gitindex(text):
1674 if not text:
1674 if not text:
1675 return hex(nullid)
1675 return hex(nullid)
1676 l = len(text)
1676 l = len(text)
1677 s = util.sha1('blob %d\0' % l)
1677 s = util.sha1('blob %d\0' % l)
1678 s.update(text)
1678 s.update(text)
1679 return s.hexdigest()
1679 return s.hexdigest()
1680
1680
1681 def diffline(a, b, revs):
1681 def diffline(a, b, revs):
1682 if opts.git:
1682 if opts.git:
1683 line = 'diff --git a/%s b/%s\n' % (a, b)
1683 line = 'diff --git a/%s b/%s\n' % (a, b)
1684 elif not repo.ui.quiet:
1684 elif not repo.ui.quiet:
1685 if revs:
1685 if revs:
1686 revinfo = ' '.join(["-r %s" % rev for rev in revs])
1686 revinfo = ' '.join(["-r %s" % rev for rev in revs])
1687 line = 'diff %s %s\n' % (revinfo, a)
1687 line = 'diff %s %s\n' % (revinfo, a)
1688 else:
1688 else:
1689 line = 'diff %s\n' % a
1689 line = 'diff %s\n' % a
1690 else:
1690 else:
1691 line = ''
1691 line = ''
1692 return line
1692 return line
1693
1693
1694 date1 = util.datestr(ctx1.date())
1694 date1 = util.datestr(ctx1.date())
1695 man1 = ctx1.manifest()
1695 man1 = ctx1.manifest()
1696
1696
1697 gone = set()
1697 gone = set()
1698 gitmode = {'l': '120000', 'x': '100755', '': '100644'}
1698 gitmode = {'l': '120000', 'x': '100755', '': '100644'}
1699
1699
1700 copyto = dict([(v, k) for k, v in copy.items()])
1700 copyto = dict([(v, k) for k, v in copy.items()])
1701
1701
1702 if opts.git:
1702 if opts.git:
1703 revs = None
1703 revs = None
1704
1704
1705 for f in sorted(modified + added + removed):
1705 for f in sorted(modified + added + removed):
1706 to = None
1706 to = None
1707 tn = None
1707 tn = None
1708 dodiff = True
1708 dodiff = True
1709 header = []
1709 header = []
1710 if f in man1:
1710 if f in man1:
1711 to = getfilectx(f, ctx1).data()
1711 to = getfilectx(f, ctx1).data()
1712 if f not in removed:
1712 if f not in removed:
1713 tn = getfilectx(f, ctx2).data()
1713 tn = getfilectx(f, ctx2).data()
1714 a, b = f, f
1714 a, b = f, f
1715 if opts.git or losedatafn:
1715 if opts.git or losedatafn:
1716 if f in added:
1716 if f in added:
1717 mode = gitmode[ctx2.flags(f)]
1717 mode = gitmode[ctx2.flags(f)]
1718 if f in copy or f in copyto:
1718 if f in copy or f in copyto:
1719 if opts.git:
1719 if opts.git:
1720 if f in copy:
1720 if f in copy:
1721 a = copy[f]
1721 a = copy[f]
1722 else:
1722 else:
1723 a = copyto[f]
1723 a = copyto[f]
1724 omode = gitmode[man1.flags(a)]
1724 omode = gitmode[man1.flags(a)]
1725 addmodehdr(header, omode, mode)
1725 addmodehdr(header, omode, mode)
1726 if a in removed and a not in gone:
1726 if a in removed and a not in gone:
1727 op = 'rename'
1727 op = 'rename'
1728 gone.add(a)
1728 gone.add(a)
1729 else:
1729 else:
1730 op = 'copy'
1730 op = 'copy'
1731 header.append('%s from %s\n' % (op, join(a)))
1731 header.append('%s from %s\n' % (op, join(a)))
1732 header.append('%s to %s\n' % (op, join(f)))
1732 header.append('%s to %s\n' % (op, join(f)))
1733 to = getfilectx(a, ctx1).data()
1733 to = getfilectx(a, ctx1).data()
1734 else:
1734 else:
1735 losedatafn(f)
1735 losedatafn(f)
1736 else:
1736 else:
1737 if opts.git:
1737 if opts.git:
1738 header.append('new file mode %s\n' % mode)
1738 header.append('new file mode %s\n' % mode)
1739 elif ctx2.flags(f):
1739 elif ctx2.flags(f):
1740 losedatafn(f)
1740 losedatafn(f)
1741 # In theory, if tn was copied or renamed we should check
1741 # In theory, if tn was copied or renamed we should check
1742 # if the source is binary too but the copy record already
1742 # if the source is binary too but the copy record already
1743 # forces git mode.
1743 # forces git mode.
1744 if util.binary(tn):
1744 if util.binary(tn):
1745 if opts.git:
1745 if opts.git:
1746 dodiff = 'binary'
1746 dodiff = 'binary'
1747 else:
1747 else:
1748 losedatafn(f)
1748 losedatafn(f)
1749 if not opts.git and not tn:
1749 if not opts.git and not tn:
1750 # regular diffs cannot represent new empty file
1750 # regular diffs cannot represent new empty file
1751 losedatafn(f)
1751 losedatafn(f)
1752 elif f in removed:
1752 elif f in removed:
1753 if opts.git:
1753 if opts.git:
1754 # have we already reported a copy above?
1754 # have we already reported a copy above?
1755 if ((f in copy and copy[f] in added
1755 if ((f in copy and copy[f] in added
1756 and copyto[copy[f]] == f) or
1756 and copyto[copy[f]] == f) or
1757 (f in copyto and copyto[f] in added
1757 (f in copyto and copyto[f] in added
1758 and copy[copyto[f]] == f)):
1758 and copy[copyto[f]] == f)):
1759 dodiff = False
1759 dodiff = False
1760 else:
1760 else:
1761 header.append('deleted file mode %s\n' %
1761 header.append('deleted file mode %s\n' %
1762 gitmode[man1.flags(f)])
1762 gitmode[man1.flags(f)])
1763 elif not to or util.binary(to):
1763 elif not to or util.binary(to):
1764 # regular diffs cannot represent empty file deletion
1764 # regular diffs cannot represent empty file deletion
1765 losedatafn(f)
1765 losedatafn(f)
1766 else:
1766 else:
1767 oflag = man1.flags(f)
1767 oflag = man1.flags(f)
1768 nflag = ctx2.flags(f)
1768 nflag = ctx2.flags(f)
1769 binary = util.binary(to) or util.binary(tn)
1769 binary = util.binary(to) or util.binary(tn)
1770 if opts.git:
1770 if opts.git:
1771 addmodehdr(header, gitmode[oflag], gitmode[nflag])
1771 addmodehdr(header, gitmode[oflag], gitmode[nflag])
1772 if binary:
1772 if binary:
1773 dodiff = 'binary'
1773 dodiff = 'binary'
1774 elif binary or nflag != oflag:
1774 elif binary or nflag != oflag:
1775 losedatafn(f)
1775 losedatafn(f)
1776
1776
1777 if dodiff:
1777 if dodiff:
1778 if opts.git or revs:
1778 if opts.git or revs:
1779 header.insert(0, diffline(join(a), join(b), revs))
1779 header.insert(0, diffline(join(a), join(b), revs))
1780 if dodiff == 'binary':
1780 if dodiff == 'binary':
1781 text = mdiff.b85diff(to, tn)
1781 text = mdiff.b85diff(to, tn)
1782 if text:
1782 if text:
1783 addindexmeta(header, [gitindex(to), gitindex(tn)])
1783 addindexmeta(header, [gitindex(to), gitindex(tn)])
1784 else:
1784 else:
1785 text = mdiff.unidiff(to, date1,
1785 text = mdiff.unidiff(to, date1,
1786 # ctx2 date may be dynamic
1786 # ctx2 date may be dynamic
1787 tn, util.datestr(ctx2.date()),
1787 tn, util.datestr(ctx2.date()),
1788 join(a), join(b), opts=opts)
1788 join(a), join(b), opts=opts)
1789 if header and (text or len(header) > 1):
1789 if header and (text or len(header) > 1):
1790 yield ''.join(header)
1790 yield ''.join(header)
1791 if text:
1791 if text:
1792 yield text
1792 yield text
1793
1793
1794 def diffstatsum(stats):
1794 def diffstatsum(stats):
1795 maxfile, maxtotal, addtotal, removetotal, binary = 0, 0, 0, 0, False
1795 maxfile, maxtotal, addtotal, removetotal, binary = 0, 0, 0, 0, False
1796 for f, a, r, b in stats:
1796 for f, a, r, b in stats:
1797 maxfile = max(maxfile, encoding.colwidth(f))
1797 maxfile = max(maxfile, encoding.colwidth(f))
1798 maxtotal = max(maxtotal, a + r)
1798 maxtotal = max(maxtotal, a + r)
1799 addtotal += a
1799 addtotal += a
1800 removetotal += r
1800 removetotal += r
1801 binary = binary or b
1801 binary = binary or b
1802
1802
1803 return maxfile, maxtotal, addtotal, removetotal, binary
1803 return maxfile, maxtotal, addtotal, removetotal, binary
1804
1804
1805 def diffstatdata(lines):
1805 def diffstatdata(lines):
1806 diffre = re.compile('^diff .*-r [a-z0-9]+\s(.*)$')
1806 diffre = re.compile('^diff .*-r [a-z0-9]+\s(.*)$')
1807
1807
1808 results = []
1808 results = []
1809 filename, adds, removes, isbinary = None, 0, 0, False
1809 filename, adds, removes, isbinary = None, 0, 0, False
1810
1810
1811 def addresult():
1811 def addresult():
1812 if filename:
1812 if filename:
1813 results.append((filename, adds, removes, isbinary))
1813 results.append((filename, adds, removes, isbinary))
1814
1814
1815 for line in lines:
1815 for line in lines:
1816 if line.startswith('diff'):
1816 if line.startswith('diff'):
1817 addresult()
1817 addresult()
1818 # set numbers to 0 anyway when starting new file
1818 # set numbers to 0 anyway when starting new file
1819 adds, removes, isbinary = 0, 0, False
1819 adds, removes, isbinary = 0, 0, False
1820 if line.startswith('diff --git'):
1820 if line.startswith('diff --git'):
1821 filename = gitre.search(line).group(1)
1821 filename = gitre.search(line).group(1)
1822 elif line.startswith('diff -r'):
1822 elif line.startswith('diff -r'):
1823 # format: "diff -r ... -r ... filename"
1823 # format: "diff -r ... -r ... filename"
1824 filename = diffre.search(line).group(1)
1824 filename = diffre.search(line).group(1)
1825 elif line.startswith('+') and not line.startswith('+++ '):
1825 elif line.startswith('+') and not line.startswith('+++ '):
1826 adds += 1
1826 adds += 1
1827 elif line.startswith('-') and not line.startswith('--- '):
1827 elif line.startswith('-') and not line.startswith('--- '):
1828 removes += 1
1828 removes += 1
1829 elif (line.startswith('GIT binary patch') or
1829 elif (line.startswith('GIT binary patch') or
1830 line.startswith('Binary file')):
1830 line.startswith('Binary file')):
1831 isbinary = True
1831 isbinary = True
1832 addresult()
1832 addresult()
1833 return results
1833 return results
1834
1834
1835 def diffstat(lines, width=80, git=False):
1835 def diffstat(lines, width=80, git=False):
1836 output = []
1836 output = []
1837 stats = diffstatdata(lines)
1837 stats = diffstatdata(lines)
1838 maxname, maxtotal, totaladds, totalremoves, hasbinary = diffstatsum(stats)
1838 maxname, maxtotal, totaladds, totalremoves, hasbinary = diffstatsum(stats)
1839
1839
1840 countwidth = len(str(maxtotal))
1840 countwidth = len(str(maxtotal))
1841 if hasbinary and countwidth < 3:
1841 if hasbinary and countwidth < 3:
1842 countwidth = 3
1842 countwidth = 3
1843 graphwidth = width - countwidth - maxname - 6
1843 graphwidth = width - countwidth - maxname - 6
1844 if graphwidth < 10:
1844 if graphwidth < 10:
1845 graphwidth = 10
1845 graphwidth = 10
1846
1846
1847 def scale(i):
1847 def scale(i):
1848 if maxtotal <= graphwidth:
1848 if maxtotal <= graphwidth:
1849 return i
1849 return i
1850 # If diffstat runs out of room it doesn't print anything,
1850 # If diffstat runs out of room it doesn't print anything,
1851 # which isn't very useful, so always print at least one + or -
1851 # which isn't very useful, so always print at least one + or -
1852 # if there were at least some changes.
1852 # if there were at least some changes.
1853 return max(i * graphwidth // maxtotal, int(bool(i)))
1853 return max(i * graphwidth // maxtotal, int(bool(i)))
1854
1854
1855 for filename, adds, removes, isbinary in stats:
1855 for filename, adds, removes, isbinary in stats:
1856 if isbinary:
1856 if isbinary:
1857 count = 'Bin'
1857 count = 'Bin'
1858 else:
1858 else:
1859 count = adds + removes
1859 count = adds + removes
1860 pluses = '+' * scale(adds)
1860 pluses = '+' * scale(adds)
1861 minuses = '-' * scale(removes)
1861 minuses = '-' * scale(removes)
1862 output.append(' %s%s | %*s %s%s\n' %
1862 output.append(' %s%s | %*s %s%s\n' %
1863 (filename, ' ' * (maxname - encoding.colwidth(filename)),
1863 (filename, ' ' * (maxname - encoding.colwidth(filename)),
1864 countwidth, count, pluses, minuses))
1864 countwidth, count, pluses, minuses))
1865
1865
1866 if stats:
1866 if stats:
1867 output.append(_(' %d files changed, %d insertions(+), '
1867 output.append(_(' %d files changed, %d insertions(+), '
1868 '%d deletions(-)\n')
1868 '%d deletions(-)\n')
1869 % (len(stats), totaladds, totalremoves))
1869 % (len(stats), totaladds, totalremoves))
1870
1870
1871 return ''.join(output)
1871 return ''.join(output)
1872
1872
1873 def diffstatui(*args, **kw):
1873 def diffstatui(*args, **kw):
1874 '''like diffstat(), but yields 2-tuples of (output, label) for
1874 '''like diffstat(), but yields 2-tuples of (output, label) for
1875 ui.write()
1875 ui.write()
1876 '''
1876 '''
1877
1877
1878 for line in diffstat(*args, **kw).splitlines():
1878 for line in diffstat(*args, **kw).splitlines():
1879 if line and line[-1] in '+-':
1879 if line and line[-1] in '+-':
1880 name, graph = line.rsplit(' ', 1)
1880 name, graph = line.rsplit(' ', 1)
1881 yield (name + ' ', '')
1881 yield (name + ' ', '')
1882 m = re.search(r'\++', graph)
1882 m = re.search(r'\++', graph)
1883 if m:
1883 if m:
1884 yield (m.group(0), 'diffstat.inserted')
1884 yield (m.group(0), 'diffstat.inserted')
1885 m = re.search(r'-+', graph)
1885 m = re.search(r'-+', graph)
1886 if m:
1886 if m:
1887 yield (m.group(0), 'diffstat.deleted')
1887 yield (m.group(0), 'diffstat.deleted')
1888 else:
1888 else:
1889 yield (line, '')
1889 yield (line, '')
1890 yield ('\n', '')
1890 yield ('\n', '')
@@ -1,538 +1,538 b''
1 # store.py - repository store handling for Mercurial
1 # store.py - repository store handling for Mercurial
2 #
2 #
3 # Copyright 2008 Matt Mackall <mpm@selenic.com>
3 # Copyright 2008 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 import scmutil, util, parsers
9 import scmutil, util, parsers
10 import os, stat, errno
10 import os, stat, errno
11
11
12 _sha = util.sha1
12 _sha = util.sha1
13
13
14 # This avoids a collision between a file named foo and a dir named
14 # This avoids a collision between a file named foo and a dir named
15 # foo.i or foo.d
15 # foo.i or foo.d
16 def _encodedir(path):
16 def _encodedir(path):
17 '''
17 '''
18 >>> _encodedir('data/foo.i')
18 >>> _encodedir('data/foo.i')
19 'data/foo.i'
19 'data/foo.i'
20 >>> _encodedir('data/foo.i/bla.i')
20 >>> _encodedir('data/foo.i/bla.i')
21 'data/foo.i.hg/bla.i'
21 'data/foo.i.hg/bla.i'
22 >>> _encodedir('data/foo.i.hg/bla.i')
22 >>> _encodedir('data/foo.i.hg/bla.i')
23 'data/foo.i.hg.hg/bla.i'
23 'data/foo.i.hg.hg/bla.i'
24 >>> _encodedir('data/foo.i\\ndata/foo.i/bla.i\\ndata/foo.i.hg/bla.i\\n')
24 >>> _encodedir('data/foo.i\\ndata/foo.i/bla.i\\ndata/foo.i.hg/bla.i\\n')
25 'data/foo.i\\ndata/foo.i.hg/bla.i\\ndata/foo.i.hg.hg/bla.i\\n'
25 'data/foo.i\\ndata/foo.i.hg/bla.i\\ndata/foo.i.hg.hg/bla.i\\n'
26 '''
26 '''
27 return (path
27 return (path
28 .replace(".hg/", ".hg.hg/")
28 .replace(".hg/", ".hg.hg/")
29 .replace(".i/", ".i.hg/")
29 .replace(".i/", ".i.hg/")
30 .replace(".d/", ".d.hg/"))
30 .replace(".d/", ".d.hg/"))
31
31
32 encodedir = getattr(parsers, 'encodedir', _encodedir)
32 encodedir = getattr(parsers, 'encodedir', _encodedir)
33
33
34 def decodedir(path):
34 def decodedir(path):
35 '''
35 '''
36 >>> decodedir('data/foo.i')
36 >>> decodedir('data/foo.i')
37 'data/foo.i'
37 'data/foo.i'
38 >>> decodedir('data/foo.i.hg/bla.i')
38 >>> decodedir('data/foo.i.hg/bla.i')
39 'data/foo.i/bla.i'
39 'data/foo.i/bla.i'
40 >>> decodedir('data/foo.i.hg.hg/bla.i')
40 >>> decodedir('data/foo.i.hg.hg/bla.i')
41 'data/foo.i.hg/bla.i'
41 'data/foo.i.hg/bla.i'
42 '''
42 '''
43 if ".hg/" not in path:
43 if ".hg/" not in path:
44 return path
44 return path
45 return (path
45 return (path
46 .replace(".d.hg/", ".d/")
46 .replace(".d.hg/", ".d/")
47 .replace(".i.hg/", ".i/")
47 .replace(".i.hg/", ".i/")
48 .replace(".hg.hg/", ".hg/"))
48 .replace(".hg.hg/", ".hg/"))
49
49
50 def _buildencodefun():
50 def _buildencodefun():
51 '''
51 '''
52 >>> enc, dec = _buildencodefun()
52 >>> enc, dec = _buildencodefun()
53
53
54 >>> enc('nothing/special.txt')
54 >>> enc('nothing/special.txt')
55 'nothing/special.txt'
55 'nothing/special.txt'
56 >>> dec('nothing/special.txt')
56 >>> dec('nothing/special.txt')
57 'nothing/special.txt'
57 'nothing/special.txt'
58
58
59 >>> enc('HELLO')
59 >>> enc('HELLO')
60 '_h_e_l_l_o'
60 '_h_e_l_l_o'
61 >>> dec('_h_e_l_l_o')
61 >>> dec('_h_e_l_l_o')
62 'HELLO'
62 'HELLO'
63
63
64 >>> enc('hello:world?')
64 >>> enc('hello:world?')
65 'hello~3aworld~3f'
65 'hello~3aworld~3f'
66 >>> dec('hello~3aworld~3f')
66 >>> dec('hello~3aworld~3f')
67 'hello:world?'
67 'hello:world?'
68
68
69 >>> enc('the\x07quick\xADshot')
69 >>> enc('the\x07quick\xADshot')
70 'the~07quick~adshot'
70 'the~07quick~adshot'
71 >>> dec('the~07quick~adshot')
71 >>> dec('the~07quick~adshot')
72 'the\\x07quick\\xadshot'
72 'the\\x07quick\\xadshot'
73 '''
73 '''
74 e = '_'
74 e = '_'
75 winreserved = [ord(x) for x in '\\:*?"<>|']
75 winreserved = [ord(x) for x in '\\:*?"<>|']
76 cmap = dict([(chr(x), chr(x)) for x in xrange(127)])
76 cmap = dict([(chr(x), chr(x)) for x in xrange(127)])
77 for x in (range(32) + range(126, 256) + winreserved):
77 for x in (range(32) + range(126, 256) + winreserved):
78 cmap[chr(x)] = "~%02x" % x
78 cmap[chr(x)] = "~%02x" % x
79 for x in range(ord("A"), ord("Z")+1) + [ord(e)]:
79 for x in range(ord("A"), ord("Z") + 1) + [ord(e)]:
80 cmap[chr(x)] = e + chr(x).lower()
80 cmap[chr(x)] = e + chr(x).lower()
81 dmap = {}
81 dmap = {}
82 for k, v in cmap.iteritems():
82 for k, v in cmap.iteritems():
83 dmap[v] = k
83 dmap[v] = k
84 def decode(s):
84 def decode(s):
85 i = 0
85 i = 0
86 while i < len(s):
86 while i < len(s):
87 for l in xrange(1, 4):
87 for l in xrange(1, 4):
88 try:
88 try:
89 yield dmap[s[i:i + l]]
89 yield dmap[s[i:i + l]]
90 i += l
90 i += l
91 break
91 break
92 except KeyError:
92 except KeyError:
93 pass
93 pass
94 else:
94 else:
95 raise KeyError
95 raise KeyError
96 return (lambda s: ''.join([cmap[c] for c in s]),
96 return (lambda s: ''.join([cmap[c] for c in s]),
97 lambda s: ''.join(list(decode(s))))
97 lambda s: ''.join(list(decode(s))))
98
98
99 _encodefname, _decodefname = _buildencodefun()
99 _encodefname, _decodefname = _buildencodefun()
100
100
101 def encodefilename(s):
101 def encodefilename(s):
102 '''
102 '''
103 >>> encodefilename('foo.i/bar.d/bla.hg/hi:world?/HELLO')
103 >>> encodefilename('foo.i/bar.d/bla.hg/hi:world?/HELLO')
104 'foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o'
104 'foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o'
105 '''
105 '''
106 return _encodefname(encodedir(s))
106 return _encodefname(encodedir(s))
107
107
108 def decodefilename(s):
108 def decodefilename(s):
109 '''
109 '''
110 >>> decodefilename('foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o')
110 >>> decodefilename('foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o')
111 'foo.i/bar.d/bla.hg/hi:world?/HELLO'
111 'foo.i/bar.d/bla.hg/hi:world?/HELLO'
112 '''
112 '''
113 return decodedir(_decodefname(s))
113 return decodedir(_decodefname(s))
114
114
115 def _buildlowerencodefun():
115 def _buildlowerencodefun():
116 '''
116 '''
117 >>> f = _buildlowerencodefun()
117 >>> f = _buildlowerencodefun()
118 >>> f('nothing/special.txt')
118 >>> f('nothing/special.txt')
119 'nothing/special.txt'
119 'nothing/special.txt'
120 >>> f('HELLO')
120 >>> f('HELLO')
121 'hello'
121 'hello'
122 >>> f('hello:world?')
122 >>> f('hello:world?')
123 'hello~3aworld~3f'
123 'hello~3aworld~3f'
124 >>> f('the\x07quick\xADshot')
124 >>> f('the\x07quick\xADshot')
125 'the~07quick~adshot'
125 'the~07quick~adshot'
126 '''
126 '''
127 winreserved = [ord(x) for x in '\\:*?"<>|']
127 winreserved = [ord(x) for x in '\\:*?"<>|']
128 cmap = dict([(chr(x), chr(x)) for x in xrange(127)])
128 cmap = dict([(chr(x), chr(x)) for x in xrange(127)])
129 for x in (range(32) + range(126, 256) + winreserved):
129 for x in (range(32) + range(126, 256) + winreserved):
130 cmap[chr(x)] = "~%02x" % x
130 cmap[chr(x)] = "~%02x" % x
131 for x in range(ord("A"), ord("Z")+1):
131 for x in range(ord("A"), ord("Z") + 1):
132 cmap[chr(x)] = chr(x).lower()
132 cmap[chr(x)] = chr(x).lower()
133 return lambda s: "".join([cmap[c] for c in s])
133 return lambda s: "".join([cmap[c] for c in s])
134
134
135 lowerencode = _buildlowerencodefun()
135 lowerencode = _buildlowerencodefun()
136
136
137 # Windows reserved names: con, prn, aux, nul, com1..com9, lpt1..lpt9
137 # Windows reserved names: con, prn, aux, nul, com1..com9, lpt1..lpt9
138 _winres3 = ('aux', 'con', 'prn', 'nul') # length 3
138 _winres3 = ('aux', 'con', 'prn', 'nul') # length 3
139 _winres4 = ('com', 'lpt') # length 4 (with trailing 1..9)
139 _winres4 = ('com', 'lpt') # length 4 (with trailing 1..9)
140 def _auxencode(path, dotencode):
140 def _auxencode(path, dotencode):
141 '''
141 '''
142 Encodes filenames containing names reserved by Windows or which end in
142 Encodes filenames containing names reserved by Windows or which end in
143 period or space. Does not touch other single reserved characters c.
143 period or space. Does not touch other single reserved characters c.
144 Specifically, c in '\\:*?"<>|' or ord(c) <= 31 are *not* encoded here.
144 Specifically, c in '\\:*?"<>|' or ord(c) <= 31 are *not* encoded here.
145 Additionally encodes space or period at the beginning, if dotencode is
145 Additionally encodes space or period at the beginning, if dotencode is
146 True. Parameter path is assumed to be all lowercase.
146 True. Parameter path is assumed to be all lowercase.
147 A segment only needs encoding if a reserved name appears as a
147 A segment only needs encoding if a reserved name appears as a
148 basename (e.g. "aux", "aux.foo"). A directory or file named "foo.aux"
148 basename (e.g. "aux", "aux.foo"). A directory or file named "foo.aux"
149 doesn't need encoding.
149 doesn't need encoding.
150
150
151 >>> s = '.foo/aux.txt/txt.aux/con/prn/nul/foo.'
151 >>> s = '.foo/aux.txt/txt.aux/con/prn/nul/foo.'
152 >>> _auxencode(s.split('/'), True)
152 >>> _auxencode(s.split('/'), True)
153 ['~2efoo', 'au~78.txt', 'txt.aux', 'co~6e', 'pr~6e', 'nu~6c', 'foo~2e']
153 ['~2efoo', 'au~78.txt', 'txt.aux', 'co~6e', 'pr~6e', 'nu~6c', 'foo~2e']
154 >>> s = '.com1com2/lpt9.lpt4.lpt1/conprn/com0/lpt0/foo.'
154 >>> s = '.com1com2/lpt9.lpt4.lpt1/conprn/com0/lpt0/foo.'
155 >>> _auxencode(s.split('/'), False)
155 >>> _auxencode(s.split('/'), False)
156 ['.com1com2', 'lp~749.lpt4.lpt1', 'conprn', 'com0', 'lpt0', 'foo~2e']
156 ['.com1com2', 'lp~749.lpt4.lpt1', 'conprn', 'com0', 'lpt0', 'foo~2e']
157 >>> _auxencode(['foo. '], True)
157 >>> _auxencode(['foo. '], True)
158 ['foo.~20']
158 ['foo.~20']
159 >>> _auxencode([' .foo'], True)
159 >>> _auxencode([' .foo'], True)
160 ['~20.foo']
160 ['~20.foo']
161 '''
161 '''
162 for i, n in enumerate(path):
162 for i, n in enumerate(path):
163 if not n:
163 if not n:
164 continue
164 continue
165 if dotencode and n[0] in '. ':
165 if dotencode and n[0] in '. ':
166 n = "~%02x" % ord(n[0]) + n[1:]
166 n = "~%02x" % ord(n[0]) + n[1:]
167 path[i] = n
167 path[i] = n
168 else:
168 else:
169 l = n.find('.')
169 l = n.find('.')
170 if l == -1:
170 if l == -1:
171 l = len(n)
171 l = len(n)
172 if ((l == 3 and n[:3] in _winres3) or
172 if ((l == 3 and n[:3] in _winres3) or
173 (l == 4 and n[3] <= '9' and n[3] >= '1'
173 (l == 4 and n[3] <= '9' and n[3] >= '1'
174 and n[:3] in _winres4)):
174 and n[:3] in _winres4)):
175 # encode third letter ('aux' -> 'au~78')
175 # encode third letter ('aux' -> 'au~78')
176 ec = "~%02x" % ord(n[2])
176 ec = "~%02x" % ord(n[2])
177 n = n[0:2] + ec + n[3:]
177 n = n[0:2] + ec + n[3:]
178 path[i] = n
178 path[i] = n
179 if n[-1] in '. ':
179 if n[-1] in '. ':
180 # encode last period or space ('foo...' -> 'foo..~2e')
180 # encode last period or space ('foo...' -> 'foo..~2e')
181 path[i] = n[:-1] + "~%02x" % ord(n[-1])
181 path[i] = n[:-1] + "~%02x" % ord(n[-1])
182 return path
182 return path
183
183
184 _maxstorepathlen = 120
184 _maxstorepathlen = 120
185 _dirprefixlen = 8
185 _dirprefixlen = 8
186 _maxshortdirslen = 8 * (_dirprefixlen + 1) - 4
186 _maxshortdirslen = 8 * (_dirprefixlen + 1) - 4
187
187
188 def _hashencode(path, dotencode):
188 def _hashencode(path, dotencode):
189 digest = _sha(path).hexdigest()
189 digest = _sha(path).hexdigest()
190 le = lowerencode(path).split('/')[1:]
190 le = lowerencode(path).split('/')[1:]
191 parts = _auxencode(le, dotencode)
191 parts = _auxencode(le, dotencode)
192 basename = parts[-1]
192 basename = parts[-1]
193 _root, ext = os.path.splitext(basename)
193 _root, ext = os.path.splitext(basename)
194 sdirs = []
194 sdirs = []
195 sdirslen = 0
195 sdirslen = 0
196 for p in parts[:-1]:
196 for p in parts[:-1]:
197 d = p[:_dirprefixlen]
197 d = p[:_dirprefixlen]
198 if d[-1] in '. ':
198 if d[-1] in '. ':
199 # Windows can't access dirs ending in period or space
199 # Windows can't access dirs ending in period or space
200 d = d[:-1] + '_'
200 d = d[:-1] + '_'
201 if sdirslen == 0:
201 if sdirslen == 0:
202 t = len(d)
202 t = len(d)
203 else:
203 else:
204 t = sdirslen + 1 + len(d)
204 t = sdirslen + 1 + len(d)
205 if t > _maxshortdirslen:
205 if t > _maxshortdirslen:
206 break
206 break
207 sdirs.append(d)
207 sdirs.append(d)
208 sdirslen = t
208 sdirslen = t
209 dirs = '/'.join(sdirs)
209 dirs = '/'.join(sdirs)
210 if len(dirs) > 0:
210 if len(dirs) > 0:
211 dirs += '/'
211 dirs += '/'
212 res = 'dh/' + dirs + digest + ext
212 res = 'dh/' + dirs + digest + ext
213 spaceleft = _maxstorepathlen - len(res)
213 spaceleft = _maxstorepathlen - len(res)
214 if spaceleft > 0:
214 if spaceleft > 0:
215 filler = basename[:spaceleft]
215 filler = basename[:spaceleft]
216 res = 'dh/' + dirs + filler + digest + ext
216 res = 'dh/' + dirs + filler + digest + ext
217 return res
217 return res
218
218
219 def _hybridencode(path, dotencode):
219 def _hybridencode(path, dotencode):
220 '''encodes path with a length limit
220 '''encodes path with a length limit
221
221
222 Encodes all paths that begin with 'data/', according to the following.
222 Encodes all paths that begin with 'data/', according to the following.
223
223
224 Default encoding (reversible):
224 Default encoding (reversible):
225
225
226 Encodes all uppercase letters 'X' as '_x'. All reserved or illegal
226 Encodes all uppercase letters 'X' as '_x'. All reserved or illegal
227 characters are encoded as '~xx', where xx is the two digit hex code
227 characters are encoded as '~xx', where xx is the two digit hex code
228 of the character (see encodefilename).
228 of the character (see encodefilename).
229 Relevant path components consisting of Windows reserved filenames are
229 Relevant path components consisting of Windows reserved filenames are
230 masked by encoding the third character ('aux' -> 'au~78', see _auxencode).
230 masked by encoding the third character ('aux' -> 'au~78', see _auxencode).
231
231
232 Hashed encoding (not reversible):
232 Hashed encoding (not reversible):
233
233
234 If the default-encoded path is longer than _maxstorepathlen, a
234 If the default-encoded path is longer than _maxstorepathlen, a
235 non-reversible hybrid hashing of the path is done instead.
235 non-reversible hybrid hashing of the path is done instead.
236 This encoding uses up to _dirprefixlen characters of all directory
236 This encoding uses up to _dirprefixlen characters of all directory
237 levels of the lowerencoded path, but not more levels than can fit into
237 levels of the lowerencoded path, but not more levels than can fit into
238 _maxshortdirslen.
238 _maxshortdirslen.
239 Then follows the filler followed by the sha digest of the full path.
239 Then follows the filler followed by the sha digest of the full path.
240 The filler is the beginning of the basename of the lowerencoded path
240 The filler is the beginning of the basename of the lowerencoded path
241 (the basename is everything after the last path separator). The filler
241 (the basename is everything after the last path separator). The filler
242 is as long as possible, filling in characters from the basename until
242 is as long as possible, filling in characters from the basename until
243 the encoded path has _maxstorepathlen characters (or all chars of the
243 the encoded path has _maxstorepathlen characters (or all chars of the
244 basename have been taken).
244 basename have been taken).
245 The extension (e.g. '.i' or '.d') is preserved.
245 The extension (e.g. '.i' or '.d') is preserved.
246
246
247 The string 'data/' at the beginning is replaced with 'dh/', if the hashed
247 The string 'data/' at the beginning is replaced with 'dh/', if the hashed
248 encoding was used.
248 encoding was used.
249 '''
249 '''
250 path = encodedir(path)
250 path = encodedir(path)
251 ef = _encodefname(path).split('/')
251 ef = _encodefname(path).split('/')
252 res = '/'.join(_auxencode(ef, dotencode))
252 res = '/'.join(_auxencode(ef, dotencode))
253 if len(res) > _maxstorepathlen:
253 if len(res) > _maxstorepathlen:
254 res = _hashencode(path, dotencode)
254 res = _hashencode(path, dotencode)
255 return res
255 return res
256
256
257 def _pathencode(path):
257 def _pathencode(path):
258 if len(path) > _maxstorepathlen:
258 if len(path) > _maxstorepathlen:
259 return None
259 return None
260 ef = _encodefname(encodedir(path)).split('/')
260 ef = _encodefname(encodedir(path)).split('/')
261 res = '/'.join(_auxencode(ef, True))
261 res = '/'.join(_auxencode(ef, True))
262 if len(res) > _maxstorepathlen:
262 if len(res) > _maxstorepathlen:
263 return None
263 return None
264 return res
264 return res
265
265
266 _pathencode = getattr(parsers, 'pathencode', _pathencode)
266 _pathencode = getattr(parsers, 'pathencode', _pathencode)
267
267
268 def _dothybridencode(f):
268 def _dothybridencode(f):
269 ef = _pathencode(f)
269 ef = _pathencode(f)
270 if ef is None:
270 if ef is None:
271 return _hashencode(encodedir(f), True)
271 return _hashencode(encodedir(f), True)
272 return ef
272 return ef
273
273
274 def _plainhybridencode(f):
274 def _plainhybridencode(f):
275 return _hybridencode(f, False)
275 return _hybridencode(f, False)
276
276
277 def _calcmode(vfs):
277 def _calcmode(vfs):
278 try:
278 try:
279 # files in .hg/ will be created using this mode
279 # files in .hg/ will be created using this mode
280 mode = vfs.stat().st_mode
280 mode = vfs.stat().st_mode
281 # avoid some useless chmods
281 # avoid some useless chmods
282 if (0777 & ~util.umask) == (0777 & mode):
282 if (0777 & ~util.umask) == (0777 & mode):
283 mode = None
283 mode = None
284 except OSError:
284 except OSError:
285 mode = None
285 mode = None
286 return mode
286 return mode
287
287
288 _data = ('data 00manifest.d 00manifest.i 00changelog.d 00changelog.i'
288 _data = ('data 00manifest.d 00manifest.i 00changelog.d 00changelog.i'
289 ' phaseroots obsstore')
289 ' phaseroots obsstore')
290
290
291 class basicstore(object):
291 class basicstore(object):
292 '''base class for local repository stores'''
292 '''base class for local repository stores'''
293 def __init__(self, path, vfstype):
293 def __init__(self, path, vfstype):
294 vfs = vfstype(path)
294 vfs = vfstype(path)
295 self.path = vfs.base
295 self.path = vfs.base
296 self.createmode = _calcmode(vfs)
296 self.createmode = _calcmode(vfs)
297 vfs.createmode = self.createmode
297 vfs.createmode = self.createmode
298 self.rawvfs = vfs
298 self.rawvfs = vfs
299 self.vfs = scmutil.filtervfs(vfs, encodedir)
299 self.vfs = scmutil.filtervfs(vfs, encodedir)
300 self.opener = self.vfs
300 self.opener = self.vfs
301
301
302 def join(self, f):
302 def join(self, f):
303 return self.path + '/' + encodedir(f)
303 return self.path + '/' + encodedir(f)
304
304
305 def _walk(self, relpath, recurse):
305 def _walk(self, relpath, recurse):
306 '''yields (unencoded, encoded, size)'''
306 '''yields (unencoded, encoded, size)'''
307 path = self.path
307 path = self.path
308 if relpath:
308 if relpath:
309 path += '/' + relpath
309 path += '/' + relpath
310 striplen = len(self.path) + 1
310 striplen = len(self.path) + 1
311 l = []
311 l = []
312 if self.rawvfs.isdir(path):
312 if self.rawvfs.isdir(path):
313 visit = [path]
313 visit = [path]
314 readdir = self.rawvfs.readdir
314 readdir = self.rawvfs.readdir
315 while visit:
315 while visit:
316 p = visit.pop()
316 p = visit.pop()
317 for f, kind, st in readdir(p, stat=True):
317 for f, kind, st in readdir(p, stat=True):
318 fp = p + '/' + f
318 fp = p + '/' + f
319 if kind == stat.S_IFREG and f[-2:] in ('.d', '.i'):
319 if kind == stat.S_IFREG and f[-2:] in ('.d', '.i'):
320 n = util.pconvert(fp[striplen:])
320 n = util.pconvert(fp[striplen:])
321 l.append((decodedir(n), n, st.st_size))
321 l.append((decodedir(n), n, st.st_size))
322 elif kind == stat.S_IFDIR and recurse:
322 elif kind == stat.S_IFDIR and recurse:
323 visit.append(fp)
323 visit.append(fp)
324 l.sort()
324 l.sort()
325 return l
325 return l
326
326
327 def datafiles(self):
327 def datafiles(self):
328 return self._walk('data', True)
328 return self._walk('data', True)
329
329
330 def walk(self):
330 def walk(self):
331 '''yields (unencoded, encoded, size)'''
331 '''yields (unencoded, encoded, size)'''
332 # yield data files first
332 # yield data files first
333 for x in self.datafiles():
333 for x in self.datafiles():
334 yield x
334 yield x
335 # yield manifest before changelog
335 # yield manifest before changelog
336 for x in reversed(self._walk('', False)):
336 for x in reversed(self._walk('', False)):
337 yield x
337 yield x
338
338
339 def copylist(self):
339 def copylist(self):
340 return ['requires'] + _data.split()
340 return ['requires'] + _data.split()
341
341
342 def write(self):
342 def write(self):
343 pass
343 pass
344
344
345 def __contains__(self, path):
345 def __contains__(self, path):
346 '''Checks if the store contains path'''
346 '''Checks if the store contains path'''
347 path = "/".join(("data", path))
347 path = "/".join(("data", path))
348 # file?
348 # file?
349 if os.path.exists(self.join(path + ".i")):
349 if os.path.exists(self.join(path + ".i")):
350 return True
350 return True
351 # dir?
351 # dir?
352 if not path.endswith("/"):
352 if not path.endswith("/"):
353 path = path + "/"
353 path = path + "/"
354 return os.path.exists(self.join(path))
354 return os.path.exists(self.join(path))
355
355
356 class encodedstore(basicstore):
356 class encodedstore(basicstore):
357 def __init__(self, path, vfstype):
357 def __init__(self, path, vfstype):
358 vfs = vfstype(path + '/store')
358 vfs = vfstype(path + '/store')
359 self.path = vfs.base
359 self.path = vfs.base
360 self.createmode = _calcmode(vfs)
360 self.createmode = _calcmode(vfs)
361 vfs.createmode = self.createmode
361 vfs.createmode = self.createmode
362 self.rawvfs = vfs
362 self.rawvfs = vfs
363 self.vfs = scmutil.filtervfs(vfs, encodefilename)
363 self.vfs = scmutil.filtervfs(vfs, encodefilename)
364 self.opener = self.vfs
364 self.opener = self.vfs
365
365
366 def datafiles(self):
366 def datafiles(self):
367 for a, b, size in self._walk('data', True):
367 for a, b, size in self._walk('data', True):
368 try:
368 try:
369 a = decodefilename(a)
369 a = decodefilename(a)
370 except KeyError:
370 except KeyError:
371 a = None
371 a = None
372 yield a, b, size
372 yield a, b, size
373
373
374 def join(self, f):
374 def join(self, f):
375 return self.path + '/' + encodefilename(f)
375 return self.path + '/' + encodefilename(f)
376
376
377 def copylist(self):
377 def copylist(self):
378 return (['requires', '00changelog.i'] +
378 return (['requires', '00changelog.i'] +
379 ['store/' + f for f in _data.split()])
379 ['store/' + f for f in _data.split()])
380
380
381 class fncache(object):
381 class fncache(object):
382 # the filename used to be partially encoded
382 # the filename used to be partially encoded
383 # hence the encodedir/decodedir dance
383 # hence the encodedir/decodedir dance
384 def __init__(self, vfs):
384 def __init__(self, vfs):
385 self.vfs = vfs
385 self.vfs = vfs
386 self.entries = None
386 self.entries = None
387 self._dirty = False
387 self._dirty = False
388
388
389 def _load(self):
389 def _load(self):
390 '''fill the entries from the fncache file'''
390 '''fill the entries from the fncache file'''
391 self._dirty = False
391 self._dirty = False
392 try:
392 try:
393 fp = self.vfs('fncache', mode='rb')
393 fp = self.vfs('fncache', mode='rb')
394 except IOError:
394 except IOError:
395 # skip nonexistent file
395 # skip nonexistent file
396 self.entries = set()
396 self.entries = set()
397 return
397 return
398 self.entries = set(decodedir(fp.read()).splitlines())
398 self.entries = set(decodedir(fp.read()).splitlines())
399 if '' in self.entries:
399 if '' in self.entries:
400 fp.seek(0)
400 fp.seek(0)
401 for n, line in enumerate(fp):
401 for n, line in enumerate(fp):
402 if not line.rstrip('\n'):
402 if not line.rstrip('\n'):
403 t = _('invalid entry in fncache, line %s') % (n + 1)
403 t = _('invalid entry in fncache, line %s') % (n + 1)
404 raise util.Abort(t)
404 raise util.Abort(t)
405 fp.close()
405 fp.close()
406
406
407 def _write(self, files, atomictemp):
407 def _write(self, files, atomictemp):
408 fp = self.vfs('fncache', mode='wb', atomictemp=atomictemp)
408 fp = self.vfs('fncache', mode='wb', atomictemp=atomictemp)
409 if files:
409 if files:
410 fp.write(encodedir('\n'.join(files) + '\n'))
410 fp.write(encodedir('\n'.join(files) + '\n'))
411 fp.close()
411 fp.close()
412 self._dirty = False
412 self._dirty = False
413
413
414 def rewrite(self, files):
414 def rewrite(self, files):
415 self._write(files, False)
415 self._write(files, False)
416 self.entries = set(files)
416 self.entries = set(files)
417
417
418 def write(self):
418 def write(self):
419 if self._dirty:
419 if self._dirty:
420 self._write(self.entries, True)
420 self._write(self.entries, True)
421
421
422 def add(self, fn):
422 def add(self, fn):
423 if self.entries is None:
423 if self.entries is None:
424 self._load()
424 self._load()
425 if fn not in self.entries:
425 if fn not in self.entries:
426 self._dirty = True
426 self._dirty = True
427 self.entries.add(fn)
427 self.entries.add(fn)
428
428
429 def __contains__(self, fn):
429 def __contains__(self, fn):
430 if self.entries is None:
430 if self.entries is None:
431 self._load()
431 self._load()
432 return fn in self.entries
432 return fn in self.entries
433
433
434 def __iter__(self):
434 def __iter__(self):
435 if self.entries is None:
435 if self.entries is None:
436 self._load()
436 self._load()
437 return iter(self.entries)
437 return iter(self.entries)
438
438
439 class _fncachevfs(scmutil.abstractvfs, scmutil.auditvfs):
439 class _fncachevfs(scmutil.abstractvfs, scmutil.auditvfs):
440 def __init__(self, vfs, fnc, encode):
440 def __init__(self, vfs, fnc, encode):
441 scmutil.auditvfs.__init__(self, vfs)
441 scmutil.auditvfs.__init__(self, vfs)
442 self.fncache = fnc
442 self.fncache = fnc
443 self.encode = encode
443 self.encode = encode
444
444
445 def __call__(self, path, mode='r', *args, **kw):
445 def __call__(self, path, mode='r', *args, **kw):
446 if mode not in ('r', 'rb') and path.startswith('data/'):
446 if mode not in ('r', 'rb') and path.startswith('data/'):
447 self.fncache.add(path)
447 self.fncache.add(path)
448 return self.vfs(self.encode(path), mode, *args, **kw)
448 return self.vfs(self.encode(path), mode, *args, **kw)
449
449
450 def join(self, path):
450 def join(self, path):
451 if path:
451 if path:
452 return self.vfs.join(self.encode(path))
452 return self.vfs.join(self.encode(path))
453 else:
453 else:
454 return self.vfs.join(path)
454 return self.vfs.join(path)
455
455
456 class fncachestore(basicstore):
456 class fncachestore(basicstore):
457 def __init__(self, path, vfstype, dotencode):
457 def __init__(self, path, vfstype, dotencode):
458 if dotencode:
458 if dotencode:
459 encode = _dothybridencode
459 encode = _dothybridencode
460 else:
460 else:
461 encode = _plainhybridencode
461 encode = _plainhybridencode
462 self.encode = encode
462 self.encode = encode
463 vfs = vfstype(path + '/store')
463 vfs = vfstype(path + '/store')
464 self.path = vfs.base
464 self.path = vfs.base
465 self.pathsep = self.path + '/'
465 self.pathsep = self.path + '/'
466 self.createmode = _calcmode(vfs)
466 self.createmode = _calcmode(vfs)
467 vfs.createmode = self.createmode
467 vfs.createmode = self.createmode
468 self.rawvfs = vfs
468 self.rawvfs = vfs
469 fnc = fncache(vfs)
469 fnc = fncache(vfs)
470 self.fncache = fnc
470 self.fncache = fnc
471 self.vfs = _fncachevfs(vfs, fnc, encode)
471 self.vfs = _fncachevfs(vfs, fnc, encode)
472 self.opener = self.vfs
472 self.opener = self.vfs
473
473
474 def join(self, f):
474 def join(self, f):
475 return self.pathsep + self.encode(f)
475 return self.pathsep + self.encode(f)
476
476
477 def getsize(self, path):
477 def getsize(self, path):
478 return self.rawvfs.stat(path).st_size
478 return self.rawvfs.stat(path).st_size
479
479
480 def datafiles(self):
480 def datafiles(self):
481 rewrite = False
481 rewrite = False
482 existing = []
482 existing = []
483 for f in sorted(self.fncache):
483 for f in sorted(self.fncache):
484 ef = self.encode(f)
484 ef = self.encode(f)
485 try:
485 try:
486 yield f, ef, self.getsize(ef)
486 yield f, ef, self.getsize(ef)
487 existing.append(f)
487 existing.append(f)
488 except OSError, err:
488 except OSError, err:
489 if err.errno != errno.ENOENT:
489 if err.errno != errno.ENOENT:
490 raise
490 raise
491 # nonexistent entry
491 # nonexistent entry
492 rewrite = True
492 rewrite = True
493 if rewrite:
493 if rewrite:
494 # rewrite fncache to remove nonexistent entries
494 # rewrite fncache to remove nonexistent entries
495 # (may be caused by rollback / strip)
495 # (may be caused by rollback / strip)
496 self.fncache.rewrite(existing)
496 self.fncache.rewrite(existing)
497
497
498 def copylist(self):
498 def copylist(self):
499 d = ('data dh fncache phaseroots obsstore'
499 d = ('data dh fncache phaseroots obsstore'
500 ' 00manifest.d 00manifest.i 00changelog.d 00changelog.i')
500 ' 00manifest.d 00manifest.i 00changelog.d 00changelog.i')
501 return (['requires', '00changelog.i'] +
501 return (['requires', '00changelog.i'] +
502 ['store/' + f for f in d.split()])
502 ['store/' + f for f in d.split()])
503
503
504 def write(self):
504 def write(self):
505 self.fncache.write()
505 self.fncache.write()
506
506
507 def _exists(self, f):
507 def _exists(self, f):
508 ef = self.encode(f)
508 ef = self.encode(f)
509 try:
509 try:
510 self.getsize(ef)
510 self.getsize(ef)
511 return True
511 return True
512 except OSError, err:
512 except OSError, err:
513 if err.errno != errno.ENOENT:
513 if err.errno != errno.ENOENT:
514 raise
514 raise
515 # nonexistent entry
515 # nonexistent entry
516 return False
516 return False
517
517
518 def __contains__(self, path):
518 def __contains__(self, path):
519 '''Checks if the store contains path'''
519 '''Checks if the store contains path'''
520 path = "/".join(("data", path))
520 path = "/".join(("data", path))
521 # check for files (exact match)
521 # check for files (exact match)
522 e = path + '.i'
522 e = path + '.i'
523 if e in self.fncache and self._exists(e):
523 if e in self.fncache and self._exists(e):
524 return True
524 return True
525 # now check for directories (prefix match)
525 # now check for directories (prefix match)
526 if not path.endswith('/'):
526 if not path.endswith('/'):
527 path += '/'
527 path += '/'
528 for e in self.fncache:
528 for e in self.fncache:
529 if e.startswith(path) and self._exists(e):
529 if e.startswith(path) and self._exists(e):
530 return True
530 return True
531 return False
531 return False
532
532
533 def store(requirements, path, vfstype):
533 def store(requirements, path, vfstype):
534 if 'store' in requirements:
534 if 'store' in requirements:
535 if 'fncache' in requirements:
535 if 'fncache' in requirements:
536 return fncachestore(path, vfstype, 'dotencode' in requirements)
536 return fncachestore(path, vfstype, 'dotencode' in requirements)
537 return encodedstore(path, vfstype)
537 return encodedstore(path, vfstype)
538 return basicstore(path, vfstype)
538 return basicstore(path, vfstype)
@@ -1,761 +1,761 b''
1 # ui.py - user interface bits for mercurial
1 # ui.py - user interface bits for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 import errno, getpass, os, socket, sys, tempfile, traceback
9 import errno, getpass, os, socket, sys, tempfile, traceback
10 import config, scmutil, util, error, formatter
10 import config, scmutil, util, error, formatter
11
11
12 class ui(object):
12 class ui(object):
13 def __init__(self, src=None):
13 def __init__(self, src=None):
14 self._buffers = []
14 self._buffers = []
15 self.quiet = self.verbose = self.debugflag = self.tracebackflag = False
15 self.quiet = self.verbose = self.debugflag = self.tracebackflag = False
16 self._reportuntrusted = True
16 self._reportuntrusted = True
17 self._ocfg = config.config() # overlay
17 self._ocfg = config.config() # overlay
18 self._tcfg = config.config() # trusted
18 self._tcfg = config.config() # trusted
19 self._ucfg = config.config() # untrusted
19 self._ucfg = config.config() # untrusted
20 self._trustusers = set()
20 self._trustusers = set()
21 self._trustgroups = set()
21 self._trustgroups = set()
22 self.callhooks = True
22 self.callhooks = True
23
23
24 if src:
24 if src:
25 self.fout = src.fout
25 self.fout = src.fout
26 self.ferr = src.ferr
26 self.ferr = src.ferr
27 self.fin = src.fin
27 self.fin = src.fin
28
28
29 self._tcfg = src._tcfg.copy()
29 self._tcfg = src._tcfg.copy()
30 self._ucfg = src._ucfg.copy()
30 self._ucfg = src._ucfg.copy()
31 self._ocfg = src._ocfg.copy()
31 self._ocfg = src._ocfg.copy()
32 self._trustusers = src._trustusers.copy()
32 self._trustusers = src._trustusers.copy()
33 self._trustgroups = src._trustgroups.copy()
33 self._trustgroups = src._trustgroups.copy()
34 self.environ = src.environ
34 self.environ = src.environ
35 self.callhooks = src.callhooks
35 self.callhooks = src.callhooks
36 self.fixconfig()
36 self.fixconfig()
37 else:
37 else:
38 self.fout = sys.stdout
38 self.fout = sys.stdout
39 self.ferr = sys.stderr
39 self.ferr = sys.stderr
40 self.fin = sys.stdin
40 self.fin = sys.stdin
41
41
42 # shared read-only environment
42 # shared read-only environment
43 self.environ = os.environ
43 self.environ = os.environ
44 # we always trust global config files
44 # we always trust global config files
45 for f in scmutil.rcpath():
45 for f in scmutil.rcpath():
46 self.readconfig(f, trust=True)
46 self.readconfig(f, trust=True)
47
47
48 def copy(self):
48 def copy(self):
49 return self.__class__(self)
49 return self.__class__(self)
50
50
51 def formatter(self, topic, opts):
51 def formatter(self, topic, opts):
52 return formatter.formatter(self, topic, opts)
52 return formatter.formatter(self, topic, opts)
53
53
54 def _trusted(self, fp, f):
54 def _trusted(self, fp, f):
55 st = util.fstat(fp)
55 st = util.fstat(fp)
56 if util.isowner(st):
56 if util.isowner(st):
57 return True
57 return True
58
58
59 tusers, tgroups = self._trustusers, self._trustgroups
59 tusers, tgroups = self._trustusers, self._trustgroups
60 if '*' in tusers or '*' in tgroups:
60 if '*' in tusers or '*' in tgroups:
61 return True
61 return True
62
62
63 user = util.username(st.st_uid)
63 user = util.username(st.st_uid)
64 group = util.groupname(st.st_gid)
64 group = util.groupname(st.st_gid)
65 if user in tusers or group in tgroups or user == util.username():
65 if user in tusers or group in tgroups or user == util.username():
66 return True
66 return True
67
67
68 if self._reportuntrusted:
68 if self._reportuntrusted:
69 self.warn(_('not trusting file %s from untrusted '
69 self.warn(_('not trusting file %s from untrusted '
70 'user %s, group %s\n') % (f, user, group))
70 'user %s, group %s\n') % (f, user, group))
71 return False
71 return False
72
72
73 def readconfig(self, filename, root=None, trust=False,
73 def readconfig(self, filename, root=None, trust=False,
74 sections=None, remap=None):
74 sections=None, remap=None):
75 try:
75 try:
76 fp = open(filename)
76 fp = open(filename)
77 except IOError:
77 except IOError:
78 if not sections: # ignore unless we were looking for something
78 if not sections: # ignore unless we were looking for something
79 return
79 return
80 raise
80 raise
81
81
82 cfg = config.config()
82 cfg = config.config()
83 trusted = sections or trust or self._trusted(fp, filename)
83 trusted = sections or trust or self._trusted(fp, filename)
84
84
85 try:
85 try:
86 cfg.read(filename, fp, sections=sections, remap=remap)
86 cfg.read(filename, fp, sections=sections, remap=remap)
87 fp.close()
87 fp.close()
88 except error.ConfigError, inst:
88 except error.ConfigError, inst:
89 if trusted:
89 if trusted:
90 raise
90 raise
91 self.warn(_("ignored: %s\n") % str(inst))
91 self.warn(_("ignored: %s\n") % str(inst))
92
92
93 if self.plain():
93 if self.plain():
94 for k in ('debug', 'fallbackencoding', 'quiet', 'slash',
94 for k in ('debug', 'fallbackencoding', 'quiet', 'slash',
95 'logtemplate', 'style',
95 'logtemplate', 'style',
96 'traceback', 'verbose'):
96 'traceback', 'verbose'):
97 if k in cfg['ui']:
97 if k in cfg['ui']:
98 del cfg['ui'][k]
98 del cfg['ui'][k]
99 for k, v in cfg.items('defaults'):
99 for k, v in cfg.items('defaults'):
100 del cfg['defaults'][k]
100 del cfg['defaults'][k]
101 # Don't remove aliases from the configuration if in the exceptionlist
101 # Don't remove aliases from the configuration if in the exceptionlist
102 if self.plain('alias'):
102 if self.plain('alias'):
103 for k, v in cfg.items('alias'):
103 for k, v in cfg.items('alias'):
104 del cfg['alias'][k]
104 del cfg['alias'][k]
105
105
106 if trusted:
106 if trusted:
107 self._tcfg.update(cfg)
107 self._tcfg.update(cfg)
108 self._tcfg.update(self._ocfg)
108 self._tcfg.update(self._ocfg)
109 self._ucfg.update(cfg)
109 self._ucfg.update(cfg)
110 self._ucfg.update(self._ocfg)
110 self._ucfg.update(self._ocfg)
111
111
112 if root is None:
112 if root is None:
113 root = os.path.expanduser('~')
113 root = os.path.expanduser('~')
114 self.fixconfig(root=root)
114 self.fixconfig(root=root)
115
115
116 def fixconfig(self, root=None, section=None):
116 def fixconfig(self, root=None, section=None):
117 if section in (None, 'paths'):
117 if section in (None, 'paths'):
118 # expand vars and ~
118 # expand vars and ~
119 # translate paths relative to root (or home) into absolute paths
119 # translate paths relative to root (or home) into absolute paths
120 root = root or os.getcwd()
120 root = root or os.getcwd()
121 for c in self._tcfg, self._ucfg, self._ocfg:
121 for c in self._tcfg, self._ucfg, self._ocfg:
122 for n, p in c.items('paths'):
122 for n, p in c.items('paths'):
123 if not p:
123 if not p:
124 continue
124 continue
125 if '%%' in p:
125 if '%%' in p:
126 self.warn(_("(deprecated '%%' in path %s=%s from %s)\n")
126 self.warn(_("(deprecated '%%' in path %s=%s from %s)\n")
127 % (n, p, self.configsource('paths', n)))
127 % (n, p, self.configsource('paths', n)))
128 p = p.replace('%%', '%')
128 p = p.replace('%%', '%')
129 p = util.expandpath(p)
129 p = util.expandpath(p)
130 if not util.hasscheme(p) and not os.path.isabs(p):
130 if not util.hasscheme(p) and not os.path.isabs(p):
131 p = os.path.normpath(os.path.join(root, p))
131 p = os.path.normpath(os.path.join(root, p))
132 c.set("paths", n, p)
132 c.set("paths", n, p)
133
133
134 if section in (None, 'ui'):
134 if section in (None, 'ui'):
135 # update ui options
135 # update ui options
136 self.debugflag = self.configbool('ui', 'debug')
136 self.debugflag = self.configbool('ui', 'debug')
137 self.verbose = self.debugflag or self.configbool('ui', 'verbose')
137 self.verbose = self.debugflag or self.configbool('ui', 'verbose')
138 self.quiet = not self.debugflag and self.configbool('ui', 'quiet')
138 self.quiet = not self.debugflag and self.configbool('ui', 'quiet')
139 if self.verbose and self.quiet:
139 if self.verbose and self.quiet:
140 self.quiet = self.verbose = False
140 self.quiet = self.verbose = False
141 self._reportuntrusted = self.debugflag or self.configbool("ui",
141 self._reportuntrusted = self.debugflag or self.configbool("ui",
142 "report_untrusted", True)
142 "report_untrusted", True)
143 self.tracebackflag = self.configbool('ui', 'traceback', False)
143 self.tracebackflag = self.configbool('ui', 'traceback', False)
144
144
145 if section in (None, 'trusted'):
145 if section in (None, 'trusted'):
146 # update trust information
146 # update trust information
147 self._trustusers.update(self.configlist('trusted', 'users'))
147 self._trustusers.update(self.configlist('trusted', 'users'))
148 self._trustgroups.update(self.configlist('trusted', 'groups'))
148 self._trustgroups.update(self.configlist('trusted', 'groups'))
149
149
150 def backupconfig(self, section, item):
150 def backupconfig(self, section, item):
151 return (self._ocfg.backup(section, item),
151 return (self._ocfg.backup(section, item),
152 self._tcfg.backup(section, item),
152 self._tcfg.backup(section, item),
153 self._ucfg.backup(section, item),)
153 self._ucfg.backup(section, item),)
154 def restoreconfig(self, data):
154 def restoreconfig(self, data):
155 self._ocfg.restore(data[0])
155 self._ocfg.restore(data[0])
156 self._tcfg.restore(data[1])
156 self._tcfg.restore(data[1])
157 self._ucfg.restore(data[2])
157 self._ucfg.restore(data[2])
158
158
159 def setconfig(self, section, name, value, overlay=True):
159 def setconfig(self, section, name, value, overlay=True):
160 if overlay:
160 if overlay:
161 self._ocfg.set(section, name, value)
161 self._ocfg.set(section, name, value)
162 self._tcfg.set(section, name, value)
162 self._tcfg.set(section, name, value)
163 self._ucfg.set(section, name, value)
163 self._ucfg.set(section, name, value)
164 self.fixconfig(section=section)
164 self.fixconfig(section=section)
165
165
166 def _data(self, untrusted):
166 def _data(self, untrusted):
167 return untrusted and self._ucfg or self._tcfg
167 return untrusted and self._ucfg or self._tcfg
168
168
169 def configsource(self, section, name, untrusted=False):
169 def configsource(self, section, name, untrusted=False):
170 return self._data(untrusted).source(section, name) or 'none'
170 return self._data(untrusted).source(section, name) or 'none'
171
171
172 def config(self, section, name, default=None, untrusted=False):
172 def config(self, section, name, default=None, untrusted=False):
173 if isinstance(name, list):
173 if isinstance(name, list):
174 alternates = name
174 alternates = name
175 else:
175 else:
176 alternates = [name]
176 alternates = [name]
177
177
178 for n in alternates:
178 for n in alternates:
179 value = self._data(untrusted).get(section, name, None)
179 value = self._data(untrusted).get(section, name, None)
180 if value is not None:
180 if value is not None:
181 name = n
181 name = n
182 break
182 break
183 else:
183 else:
184 value = default
184 value = default
185
185
186 if self.debugflag and not untrusted and self._reportuntrusted:
186 if self.debugflag and not untrusted and self._reportuntrusted:
187 uvalue = self._ucfg.get(section, name)
187 uvalue = self._ucfg.get(section, name)
188 if uvalue is not None and uvalue != value:
188 if uvalue is not None and uvalue != value:
189 self.debug("ignoring untrusted configuration option "
189 self.debug("ignoring untrusted configuration option "
190 "%s.%s = %s\n" % (section, name, uvalue))
190 "%s.%s = %s\n" % (section, name, uvalue))
191 return value
191 return value
192
192
193 def configpath(self, section, name, default=None, untrusted=False):
193 def configpath(self, section, name, default=None, untrusted=False):
194 'get a path config item, expanded relative to repo root or config file'
194 'get a path config item, expanded relative to repo root or config file'
195 v = self.config(section, name, default, untrusted)
195 v = self.config(section, name, default, untrusted)
196 if v is None:
196 if v is None:
197 return None
197 return None
198 if not os.path.isabs(v) or "://" not in v:
198 if not os.path.isabs(v) or "://" not in v:
199 src = self.configsource(section, name, untrusted)
199 src = self.configsource(section, name, untrusted)
200 if ':' in src:
200 if ':' in src:
201 base = os.path.dirname(src.rsplit(':')[0])
201 base = os.path.dirname(src.rsplit(':')[0])
202 v = os.path.join(base, os.path.expanduser(v))
202 v = os.path.join(base, os.path.expanduser(v))
203 return v
203 return v
204
204
205 def configbool(self, section, name, default=False, untrusted=False):
205 def configbool(self, section, name, default=False, untrusted=False):
206 """parse a configuration element as a boolean
206 """parse a configuration element as a boolean
207
207
208 >>> u = ui(); s = 'foo'
208 >>> u = ui(); s = 'foo'
209 >>> u.setconfig(s, 'true', 'yes')
209 >>> u.setconfig(s, 'true', 'yes')
210 >>> u.configbool(s, 'true')
210 >>> u.configbool(s, 'true')
211 True
211 True
212 >>> u.setconfig(s, 'false', 'no')
212 >>> u.setconfig(s, 'false', 'no')
213 >>> u.configbool(s, 'false')
213 >>> u.configbool(s, 'false')
214 False
214 False
215 >>> u.configbool(s, 'unknown')
215 >>> u.configbool(s, 'unknown')
216 False
216 False
217 >>> u.configbool(s, 'unknown', True)
217 >>> u.configbool(s, 'unknown', True)
218 True
218 True
219 >>> u.setconfig(s, 'invalid', 'somevalue')
219 >>> u.setconfig(s, 'invalid', 'somevalue')
220 >>> u.configbool(s, 'invalid')
220 >>> u.configbool(s, 'invalid')
221 Traceback (most recent call last):
221 Traceback (most recent call last):
222 ...
222 ...
223 ConfigError: foo.invalid is not a boolean ('somevalue')
223 ConfigError: foo.invalid is not a boolean ('somevalue')
224 """
224 """
225
225
226 v = self.config(section, name, None, untrusted)
226 v = self.config(section, name, None, untrusted)
227 if v is None:
227 if v is None:
228 return default
228 return default
229 if isinstance(v, bool):
229 if isinstance(v, bool):
230 return v
230 return v
231 b = util.parsebool(v)
231 b = util.parsebool(v)
232 if b is None:
232 if b is None:
233 raise error.ConfigError(_("%s.%s is not a boolean ('%s')")
233 raise error.ConfigError(_("%s.%s is not a boolean ('%s')")
234 % (section, name, v))
234 % (section, name, v))
235 return b
235 return b
236
236
237 def configint(self, section, name, default=None, untrusted=False):
237 def configint(self, section, name, default=None, untrusted=False):
238 """parse a configuration element as an integer
238 """parse a configuration element as an integer
239
239
240 >>> u = ui(); s = 'foo'
240 >>> u = ui(); s = 'foo'
241 >>> u.setconfig(s, 'int1', '42')
241 >>> u.setconfig(s, 'int1', '42')
242 >>> u.configint(s, 'int1')
242 >>> u.configint(s, 'int1')
243 42
243 42
244 >>> u.setconfig(s, 'int2', '-42')
244 >>> u.setconfig(s, 'int2', '-42')
245 >>> u.configint(s, 'int2')
245 >>> u.configint(s, 'int2')
246 -42
246 -42
247 >>> u.configint(s, 'unknown', 7)
247 >>> u.configint(s, 'unknown', 7)
248 7
248 7
249 >>> u.setconfig(s, 'invalid', 'somevalue')
249 >>> u.setconfig(s, 'invalid', 'somevalue')
250 >>> u.configint(s, 'invalid')
250 >>> u.configint(s, 'invalid')
251 Traceback (most recent call last):
251 Traceback (most recent call last):
252 ...
252 ...
253 ConfigError: foo.invalid is not an integer ('somevalue')
253 ConfigError: foo.invalid is not an integer ('somevalue')
254 """
254 """
255
255
256 v = self.config(section, name, None, untrusted)
256 v = self.config(section, name, None, untrusted)
257 if v is None:
257 if v is None:
258 return default
258 return default
259 try:
259 try:
260 return int(v)
260 return int(v)
261 except ValueError:
261 except ValueError:
262 raise error.ConfigError(_("%s.%s is not an integer ('%s')")
262 raise error.ConfigError(_("%s.%s is not an integer ('%s')")
263 % (section, name, v))
263 % (section, name, v))
264
264
265 def configlist(self, section, name, default=None, untrusted=False):
265 def configlist(self, section, name, default=None, untrusted=False):
266 """parse a configuration element as a list of comma/space separated
266 """parse a configuration element as a list of comma/space separated
267 strings
267 strings
268
268
269 >>> u = ui(); s = 'foo'
269 >>> u = ui(); s = 'foo'
270 >>> u.setconfig(s, 'list1', 'this,is "a small" ,test')
270 >>> u.setconfig(s, 'list1', 'this,is "a small" ,test')
271 >>> u.configlist(s, 'list1')
271 >>> u.configlist(s, 'list1')
272 ['this', 'is', 'a small', 'test']
272 ['this', 'is', 'a small', 'test']
273 """
273 """
274
274
275 def _parse_plain(parts, s, offset):
275 def _parse_plain(parts, s, offset):
276 whitespace = False
276 whitespace = False
277 while offset < len(s) and (s[offset].isspace() or s[offset] == ','):
277 while offset < len(s) and (s[offset].isspace() or s[offset] == ','):
278 whitespace = True
278 whitespace = True
279 offset += 1
279 offset += 1
280 if offset >= len(s):
280 if offset >= len(s):
281 return None, parts, offset
281 return None, parts, offset
282 if whitespace:
282 if whitespace:
283 parts.append('')
283 parts.append('')
284 if s[offset] == '"' and not parts[-1]:
284 if s[offset] == '"' and not parts[-1]:
285 return _parse_quote, parts, offset + 1
285 return _parse_quote, parts, offset + 1
286 elif s[offset] == '"' and parts[-1][-1] == '\\':
286 elif s[offset] == '"' and parts[-1][-1] == '\\':
287 parts[-1] = parts[-1][:-1] + s[offset]
287 parts[-1] = parts[-1][:-1] + s[offset]
288 return _parse_plain, parts, offset + 1
288 return _parse_plain, parts, offset + 1
289 parts[-1] += s[offset]
289 parts[-1] += s[offset]
290 return _parse_plain, parts, offset + 1
290 return _parse_plain, parts, offset + 1
291
291
def _parse_quote(parts, s, offset):
    # Parser state for the inside of a double-quoted element; *offset*
    # points just past the opening quote.  Returns (next_parser, parts,
    # new_offset), with next_parser None at end of input.
    if offset < len(s) and s[offset] == '"': # ""
        # An immediately repeated quote means an empty element; skip any
        # separators after it and resume plain parsing.
        parts.append('')
        offset += 1
        while offset < len(s) and (s[offset].isspace() or
                s[offset] == ','):
            offset += 1
        return _parse_plain, parts, offset

    # Accumulate characters up to the closing quote, honouring \" escapes.
    while offset < len(s) and s[offset] != '"':
        if (s[offset] == '\\' and offset + 1 < len(s)
                and s[offset + 1] == '"'):
            offset += 1
            parts[-1] += '"'
        else:
            parts[-1] += s[offset]
        offset += 1

    if offset >= len(s):
        # Unterminated quote: reinterpret what was collected as a plain
        # (unquoted) list, restoring the literal '"' at the front.
        real_parts = _configlist(parts[-1])
        if not real_parts:
            parts[-1] = '"'
        else:
            real_parts[0] = '"' + real_parts[0]
            parts = parts[:-1]
            parts.extend(real_parts)
        return None, parts, offset

    # Skip the closing quote and any separators that follow it.
    offset += 1
    while offset < len(s) and s[offset] in [' ', ',']:
        offset += 1

    if offset < len(s):
        if offset + 1 == len(s) and s[offset] == '"':
            # Trailing lone quote at the very end: keep it literally.
            parts[-1] += '"'
            offset += 1
        else:
            # More input follows: open a fresh element.
            parts.append('')
    else:
        return None, parts, offset

    return _parse_plain, parts, offset
334
334
def _configlist(s):
    """Split config value *s* into a list of strings by driving the
    parser state machine until it reports completion."""
    s = s.rstrip(' ,')
    if not s:
        return []
    state = (_parse_plain, [''], 0)
    while state[0] is not None:
        state = state[0](state[1], s, state[2])
    return state[1]
343
343
344 result = self.config(section, name, untrusted=untrusted)
344 result = self.config(section, name, untrusted=untrusted)
345 if result is None:
345 if result is None:
346 result = default or []
346 result = default or []
347 if isinstance(result, basestring):
347 if isinstance(result, basestring):
348 result = _configlist(result.lstrip(' ,\n'))
348 result = _configlist(result.lstrip(' ,\n'))
349 if result is None:
349 if result is None:
350 result = default or []
350 result = default or []
351 return result
351 return result
352
352
def has_section(self, section, untrusted=False):
    '''tell whether section exists in config.'''
    known = self._data(untrusted)
    return section in known
356
356
def configitems(self, section, untrusted=False):
    """Return the (name, value) pairs of *section*.

    In debug mode, additionally report untrusted values that were
    ignored in favour of trusted ones.
    """
    items = self._data(untrusted).items(section)
    report = self.debugflag and not untrusted and self._reportuntrusted
    if report:
        for key, value in self._ucfg.items(section):
            if self._tcfg.get(section, key) != value:
                self.debug("ignoring untrusted configuration option "
                           "%s.%s = %s\n" % (section, key, value))
    return items
365
365
def walkconfig(self, untrusted=False):
    """Generate a (section, name, value) triple for every config entry."""
    cfg = self._data(untrusted)
    for sec in cfg.sections():
        for key, val in self.configitems(sec, untrusted):
            yield sec, key, val
371
371
def plain(self, feature=None):
    '''is plain mode active?

    In plain mode every configuration variable that could change
    Mercurial's behaviour or output is ignored, so the output is
    stable, reproducible and fit for scripts and other applications.
    The only trigger is the environment: the `HGPLAIN' or
    `HGPLAINEXCEPT' variables.

    Returns False when HGPLAIN is unset, or when *feature* is listed
    in HGPLAINEXCEPT; True otherwise.
    '''
    env = os.environ
    if 'HGPLAIN' not in env and 'HGPLAINEXCEPT' not in env:
        return False
    exceptions = env.get('HGPLAINEXCEPT', '').strip().split(',')
    if feature and exceptions:
        return feature not in exceptions
    return True
393
393
def username(self):
    """Return default username to be used in commits.

    Searched in this order: $HGUSER, [ui] section of hgrcs, $EMAIL
    and stop searching if one of these is set.
    If not found and ui.askusername is True, ask the user, else use
    ($LOGNAME or $USER or $LNAME or $USERNAME) + "@full.hostname".
    """
    user = os.environ.get("HGUSER")
    if user is None:
        user = self.config("ui", "username")
        if user is not None:
            # hgrc values may embed environment variable references
            user = os.path.expandvars(user)
    if user is None:
        user = os.environ.get("EMAIL")
    if user is None and self.configbool("ui", "askusername"):
        user = self.prompt(_("enter a commit username:"), default=None)
    if user is None and not self.interactive():
        try:
            # last resort: synthesize login@fqdn
            user = '%s@%s' % (util.getuser(), socket.getfqdn())
            self.warn(_("no username found, using '%s' instead\n") % user)
        except KeyError:
            # KeyError presumably comes from util.getuser() when no
            # login name can be determined -- TODO confirm
            pass
    if not user:
        raise util.Abort(_('no username supplied (see "hg help config")'))
    if "\n" in user:
        # a newline in the username would corrupt the changelog entry
        raise util.Abort(_("username %s contains a newline\n") % repr(user))
    return user
422
422
def shortuser(self, user):
    """Return a short representation of a user name or email address."""
    if self.verbose:
        return user
    return util.shortuser(user)
428
428
def expandpath(self, loc, default=None):
    """Return repository location relative to cwd or from [paths]"""
    # URLs and directories that already contain a repository are
    # returned verbatim.
    if not (util.hasscheme(loc) or os.path.isdir(os.path.join(loc, '.hg'))):
        candidates = [loc]
        if default is not None:
            candidates.append(default)
        # first matching [paths] entry wins
        for name in candidates:
            path = self.config('paths', name)
            if path:
                return path
    return loc
438
438
def pushbuffer(self):
    """Start capturing subsequent output into a fresh buffer."""
    fresh = []
    self._buffers.append(fresh)
441
441
def popbuffer(self, labeled=False):
    '''pop the last buffer and return the buffered output

    When labeled is True an overriding implementation may style the
    buffered output using any labels recorded with it; this stock
    implementation ignores the flag and returns the raw concatenation.
    Leave labeled False when the result will be captured and parsed or
    otherwise post-processed.
    '''
    chunks = self._buffers.pop()
    return ''.join(chunks)
453
453
def write(self, *args, **opts):
    '''write args to output

    Appends to the innermost push/pop buffer when one is active,
    otherwise writes straight to self.fout.  Extensions and GUI tools
    may override this (with write_err(), popbuffer() and label()) to
    style output from various parts of hg.

    An optional "label" keyword argument carries space-separated label
    names of the form "topic.type", e.g. "ui.debug"; commands use
    "cmdname.type" labels such as "status.modified".
    '''
    if self._buffers:
        target = self._buffers[-1]
        target.extend(str(a) for a in args)
    else:
        for a in args:
            self.fout.write(str(a))
476
476
def write_err(self, *args, **opts):
    # Counterpart of write() for the error stream.  Best effort only:
    # a vanished pipe while reporting an error must not abort hg.
    try:
        if not getattr(self.fout, 'closed', False):
            # flush pending stdout first so error output stays ordered
            self.fout.flush()
        for a in args:
            self.ferr.write(str(a))
        # stderr may be buffered under win32 when redirected to files,
        # including stdout.
        if not getattr(self.ferr, 'closed', False):
            self.ferr.flush()
    except IOError, inst:
        # swallow only the "stream went away" class of errors
        if inst.errno not in (errno.EPIPE, errno.EIO, errno.EBADF):
            raise
490
490
def flush(self):
    # Best effort: a closed or broken stream must not abort the caller.
    for stream in (self.fout, self.ferr):
        try:
            stream.flush()
        except (IOError, ValueError):
            pass
496
496
497 def _isatty(self, fh):
497 def _isatty(self, fh):
498 if self.configbool('ui', 'nontty', False):
498 if self.configbool('ui', 'nontty', False):
499 return False
499 return False
500 return util.isatty(fh)
500 return util.isatty(fh)
501
501
def interactive(self):
    '''is interactive input allowed?

    Input is considered interactive when the `ui.interactive'
    configuration variable is set, or -- when it is unset -- when
    `sys.stdin' is attached to a terminal device.  If this returns
    false, any attempt to read from stdin should fail with an error
    unless a sensible default has been specified.

    This function refers to input only; for output, see
    `ui.formatted()'.
    '''
    configured = self.configbool("ui", "interactive", None)
    if configured is not None:
        return configured
    # unset: some environments replace stdin without implementing
    # isatty; those are usually non-interactive
    return self._isatty(self.fin)
523
523
def termwidth(self):
    '''how wide is the terminal in columns?
    '''
    # an explicit COLUMNS variable wins; fall back to probing otherwise
    try:
        return int(os.environ['COLUMNS'])
    except (KeyError, ValueError):
        return util.termwidth()
533
533
def formatted(self):
    '''should formatted output be used?

    It is often desirable to format output to suit the output medium,
    e.g. truncating long lines or colorizing messages -- but not when
    piping output into other utilities such as `grep'.

    Formatting is triggered by the `ui.formatted' configuration
    variable or -- if it is unset -- when `sys.stdout' points to a
    terminal device.  Note that `ui.formatted' should be considered an
    implementation detail; it is not intended for use outside
    Mercurial or its extensions.

    This function refers to output only; for input, see
    `ui.interactive()'.  It always returns false in plain mode, see
    `ui.plain()'.
    '''
    if self.plain():
        return False
    configured = self.configbool("ui", "formatted", None)
    if configured is not None:
        return configured
    # unset: some environments replace stdout without implementing
    # isatty; those are usually non-interactive
    return self._isatty(self.fout)
561
561
def _readline(self, prompt=''):
    # Read one line of user input, with line-editing support when stdin
    # is a terminal.  Returns the line without its trailing newline.
    if self._isatty(self.fin):
        try:
            # magically add command line editing support, where
            # available
            import readline
            # force demandimport to really load the module
            readline.read_history_file
            # windows sometimes raises something other than ImportError
        except Exception:
            pass

    # call write() so output goes through subclassed implementation
    # e.g. color extension on Windows
    self.write(prompt)

    # instead of trying to emulate raw_input, swap (self.fin,
    # self.fout) with (sys.stdin, sys.stdout)
    oldin = sys.stdin
    oldout = sys.stdout
    sys.stdin = self.fin
    sys.stdout = self.fout
    line = raw_input(' ')
    sys.stdin = oldin
    sys.stdout = oldout

    # When stdin is in binary mode on Windows, it can cause
    # raw_input() to emit an extra trailing carriage return
    if os.linesep == '\r\n' and line and line[-1] == '\r':
        line = line[:-1]
    return line
593
593
def prompt(self, msg, default="y"):
    """Prompt user with msg, read response.
    If ui is not interactive, the default is returned.
    """
    if not self.interactive():
        # echo the question and the assumed answer for the record
        self.write(msg, ' ', default, "\n")
        return default
    try:
        answer = self._readline(self.label(msg, 'ui.prompt'))
    except EOFError:
        raise util.Abort(_('response expected'))
    # an empty response falls back to the default
    return answer or default
608
608
def promptchoice(self, msg, choices, default=0):
    """Prompt user with msg, read response, and ensure it matches one
    of the provided choices; the index of the matching choice is
    returned.  choices is a sequence like ('&None', 'E&xec',
    'Sym&link') where the character after '&' is the case-insensitive
    response key.  If ui is not interactive, the default is returned.
    """
    keys = [c[c.index('&') + 1].lower() for c in choices]
    while True:
        answer = self.prompt(msg, keys[default]).lower()
        if answer in keys:
            return keys.index(answer)
        self.write(_("unrecognized response\n"))
622
622
def getpass(self, prompt=None, default=None):
    # Password prompts are pointless without a user present; fall back
    # to the default in non-interactive sessions.
    if self.interactive():
        try:
            return getpass.getpass(prompt or _('password: '))
        except EOFError:
            raise util.Abort(_('response expected'))
    return default
def status(self, *msg, **opts):
    '''write status message to output (if ui.quiet is False)

    This adds an output label of "ui.status".
    '''
    if self.quiet:
        return
    opts['label'] = opts.get('label', '') + ' ui.status'
    self.write(*msg, **opts)
def warn(self, *msg, **opts):
    '''write warning message to output (stderr)

    This adds an output label of "ui.warning".
    '''
    labels = opts.get('label', '') + ' ui.warning'
    opts['label'] = labels
    self.write_err(*msg, **opts)
def note(self, *msg, **opts):
    '''write note to output (if ui.verbose is True)

    This adds an output label of "ui.note".
    '''
    if not self.verbose:
        return
    opts['label'] = opts.get('label', '') + ' ui.note'
    self.write(*msg, **opts)
def debug(self, *msg, **opts):
    '''write debug message to output (if ui.debugflag is True)

    This adds an output label of "ui.debug".
    '''
    if not self.debugflag:
        return
    opts['label'] = opts.get('label', '') + ' ui.debug'
    self.write(*msg, **opts)
def edit(self, text, user):
    """Open *text* in the user's editor and return the edited result.

    HGUSER is set to *user* in the child's environment; the temporary
    file is always removed, even when the editor fails.
    """
    (fd, name) = tempfile.mkstemp(prefix="hg-editor-", suffix=".txt",
                                  text=True)
    try:
        fp = os.fdopen(fd, "w")
        fp.write(text)
        fp.close()

        editor = self.geteditor()

        util.system("%s \"%s\"" % (editor, name),
                    environ={'HGUSER': user},
                    onerr=util.Abort, errprefix=_("edit failed"),
                    out=self.fout)

        fp = open(name)
        edited = fp.read()
        fp.close()
    finally:
        os.unlink(name)

    return edited
683
683
def traceback(self, exc=None):
    '''print exception traceback if traceback printing enabled.
    only to call in exception handler. returns true if traceback
    printed.'''
    flag = self.tracebackflag
    if flag:
        if exc:
            # exc is presumably a (type, value, tb) triple as returned
            # by sys.exc_info() -- TODO confirm against callers
            traceback.print_exception(exc[0], exc[1], exc[2],
                                      file=self.ferr)
        else:
            traceback.print_exc(file=self.ferr)
    return flag
695
695
def geteditor(self):
    '''return editor to use'''
    # vi is the MIPS instruction simulator on Plan 9; default to E
    # there to plumb commit messages and avoid confusion.
    fallback = 'E' if sys.platform == 'plan9' else 'vi'
    return (os.environ.get("HGEDITOR") or
            self.config("ui", "editor") or
            os.environ.get("VISUAL") or
            os.environ.get("EDITOR", fallback))
709
709
def progress(self, topic, pos, item="", unit="", total=None):
    '''show a progress message

    With stock hg this is merely a debug message, hidden by default,
    but extensions or GUI tools may display it.  'topic' names the
    current operation, 'item' is a non-numeric marker of the current
    position (i.e. the currently in-process file), 'pos' the current
    numeric position (revision, bytes, ...), 'unit' a label for pos,
    and 'total' the highest expected pos.

    Multiple nested topics may be active at a time.  Every topic
    should be closed by a final call with pos set to None.
    '''
    if pos is None or not self.debugflag:
        return

    if unit:
        unit = ' %s' % unit
    if item:
        item = ' %s' % item

    if total:
        pct = 100.0 * pos / total
        self.debug('%s:%s %s/%s%s (%4.2f%%)\n'
                   % (topic, item, pos, total, unit, pct))
    else:
        self.debug('%s:%s %s%s\n' % (topic, item, pos, unit))
741
741
def log(self, service, message):
    '''hook for logging facility extensions

    service should be a readily-identifiable subsystem, which will
    allow filtering.
    message should be a newline-terminated string to log.
    '''
    # deliberately a no-op in core; logging extensions override this
    pass
750
750
def label(self, msg, label):
    '''style msg based on supplied label

    As with ui.write(), this implementation returns msg unchanged;
    extensions and GUI tools can override it to style output without
    writing it.  ui.write(s, 'label') is equivalent to
    ui.write(ui.label(s, 'label')).
    '''
    return msg
General Comments 0
You need to be logged in to leave comments. Login now