##// END OF EJS Templates
check-code: ignore naked excepts with a "re-raise" comment...
Brodie Rao -
r16705:c2d9ef43 default
parent child Browse files
Show More
@@ -1,439 +1,447 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2 #
2 #
3 # check-code - a style and portability checker for Mercurial
3 # check-code - a style and portability checker for Mercurial
4 #
4 #
5 # Copyright 2010 Matt Mackall <mpm@selenic.com>
5 # Copyright 2010 Matt Mackall <mpm@selenic.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 import re, glob, os, sys
10 import re, glob, os, sys
11 import keyword
11 import keyword
12 import optparse
12 import optparse
13
13
def repquote(m):
    """Blank out a quoted string while keeping its length and layout.

    Word characters become 'x', any other non-whitespace character
    becomes 'o', and whitespace is left alone, so later pattern checks
    don't fire on string contents.
    """
    quote = m.group('quote')
    body = re.sub(r"[^\s\nx]", "o", re.sub(r"\w", "x", m.group('text')))
    return quote + body + quote
18
18
def reppython(m):
    """Replace a Python comment or string literal with filler.

    A comment keeps its length as a run of '#'; anything else is a
    quoted string and is blanked via repquote().
    """
    comment = m.group('comment')
    if not comment:
        return repquote(m)
    return len(comment) * "#"
24
24
def repcomment(m):
    """Replace a shell comment with '#' filler of the same length,
    preserving the leading indentation captured in group 1."""
    indent, comment = m.group(1), m.group(2)
    return indent + len(comment) * "#"
27
27
def repccomment(m):
    """Blank out the body of a C comment, keeping its line structure.

    Non-space characters (and the single space after a newline) become
    'x'; the opening delimiter from group 1 and the closing '*/' are
    reattached around the blanked body.
    """
    blanked = re.sub(r"((?<=\n) )|\S", "x", m.group(2))
    return "%s%s*/" % (m.group(1), blanked)
31
31
def repcallspaces(m):
    """Strip leading whitespace from continuation lines of a call's
    argument list so wrapped calls don't trip the whitespace checks."""
    args = re.sub(r"\n\s+", "\n", m.group(2))
    return m.group(1) + args
35
35
def repinclude(m):
    """Replace the target of a C #include with a '<foo>' placeholder."""
    return "%s<foo>" % m.group(1)
38
38
def rephere(m):
    """Blank out a here-document body: every non-space character in
    group 2 becomes 'x', after the '<<' delimiter from group 1."""
    blanked = re.sub(r"\S", "x", m.group(2))
    return m.group(1) + blanked
42
42
43
43
# Patterns applied to shell test scripts (tests/test-*).  The first
# sublist holds error-level checks, the second warning-level checks
# (only reported when --warnings is given).
testpats = [
    [
        (r'pushd|popd', "don't use 'pushd' or 'popd', use 'cd'"),
        (r'\W\$?\(\([^\)\n]*\)\)', "don't use (()) or $(()), use 'expr'"),
        (r'grep.*-q', "don't use 'grep -q', redirect to /dev/null"),
        (r'sed.*-i', "don't use 'sed -i', use a temporary file"),
        (r'echo.*\\n', "don't use 'echo \\n', use printf"),
        (r'echo -n', "don't use 'echo -n', use printf"),
        (r'(^| )wc[^|]*$\n(?!.*\(re\))', "filter wc output"),
        (r'head -c', "don't use 'head -c', use 'dd'"),
        (r'sha1sum', "don't use sha1sum, use $TESTDIR/md5sum.py"),
        (r'ls.*-\w*R', "don't use 'ls -R', use 'find'"),
        (r'printf.*\\([1-9]|0\d)', "don't use 'printf \NNN', use Python"),
        (r'printf.*\\x', "don't use printf \\x, use Python"),
        (r'\$\(.*\)', "don't use $(expr), use `expr`"),
        (r'rm -rf \*', "don't use naked rm -rf, target a directory"),
        (r'(^|\|\s*)grep (-\w\s+)*[^|]*[(|]\w',
         "use egrep for extended grep syntax"),
        (r'/bin/', "don't use explicit paths for tools"),
        (r'[^\n]\Z', "no trailing newline"),
        (r'export.*=', "don't export and assign at once"),
        (r'^source\b', "don't use 'source', use '.'"),
        (r'touch -d', "don't use 'touch -d', use 'touch -t' instead"),
        (r'ls +[^|\n-]+ +-', "options to 'ls' must come before filenames"),
        (r'[^>\n]>\s*\$HGRCPATH', "don't overwrite $HGRCPATH, append to it"),
        (r'^stop\(\)', "don't use 'stop' as a shell function name"),
        (r'(\[|\btest\b).*-e ', "don't use 'test -e', use 'test -f'"),
        (r'^alias\b.*=', "don't use alias, use a function"),
        (r'if\s*!', "don't use '!' to negate exit status"),
        (r'/dev/u?random', "don't use entropy, use /dev/zero"),
        (r'do\s*true;\s*done', "don't use true as loop body, use sleep 0"),
        (r'^( *)\t', "don't use tabs to indent"),
    ],
    # warnings
    [
        (r'^function', "don't use 'function', use old style"),
        (r'^diff.*-\w*N', "don't use 'diff -N'"),
        (r'\$PWD', "don't use $PWD, use `pwd`"),
        (r'^([^"\'\n]|("[^"\n]*")|(\'[^\'\n]*\'))*\^', "^ must be quoted"),
    ]
]
85
85
# Filters applied to shell tests before matching: blank out comment
# bodies and here-document contents so they don't trigger checks.
testfilters = [
    (r"( *)(#([^\n]*\S)?)", repcomment),
    (r"<<(\S+)((.|\n)*?\n\1)", rephere),
]
90
90
# Prefix matching a command line in a unified (.t) test.
# NOTE(review): unified tests indent commands with two spaces; the
# original source uses r"^  \$ " (the rendering collapsed the run).
uprefix = r"^  \$ "
# Patterns applied only to unified tests; extended below with the
# shell-test patterns rewritten to match after the command prefix.
utestpats = [
    [
        (r'^(\S|  $ ).*(\S[ \t]+|^[ \t]+)\n',
         "trailing whitespace on non-output"),
        (uprefix + r'.*\|\s*sed[^|>\n]*\n',
         "use regex test output patterns instead of sed"),
        (uprefix + r'(true|exit 0)', "explicit zero exit unnecessary"),
        (uprefix + r'.*(?<!\[)\$\?', "explicit exit code checks unnecessary"),
        (uprefix + r'.*\|\| echo.*(fail|error)',
         "explicit exit code checks unnecessary"),
        (uprefix + r'set -e', "don't use set -e"),
        (uprefix + r'\s', "don't indent commands, use > for continued lines"),
    ],
    # warnings
    []
]
107
107
# Derive unified-test patterns from the shell-test ones: each shell
# pattern must match after the "  $ " / "  > " command prefix of a
# .t file, so anchored patterns are re-anchored and unanchored ones
# get a wildcard prefix.
for severity in (0, 1):
    for pat, message in testpats[severity]:
        if pat.startswith(r'^'):
            rewritten = r"^  [$>] (%s)" % pat[1:]
        else:
            rewritten = r"^  [$>] .*(%s)" % pat
        utestpats[severity].append((rewritten, message))
115
115
# Filters applied to unified tests: blank out comment bodies only.
utestfilters = [
    (r"( *)(#([^\n]*\S)?)", repcomment),
]
119
119
# Patterns applied to Python sources.  Each entry is (regex, message)
# or (regex, message, ignore-regex): when the third element is present,
# a hit whose original line also matches the ignore-regex is suppressed
# (used to allow naked excepts annotated with a "#re-raises" comment).
pypats = [
    [
        (r'^\s*def\s*\w+\s*\(.*,\s*\(',
         "tuple parameter unpacking not available in Python 3+"),
        (r'lambda\s*\(.*,.*\)',
         "tuple parameter unpacking not available in Python 3+"),
        (r'(?<!def)\s+(cmp)\(', "cmp is not available in Python 3+"),
        (r'\breduce\s*\(.*', "reduce is not available in Python 3+"),
        (r'\.has_key\b', "dict.has_key is not available in Python 3+"),
        (r'^\s*\t', "don't use tabs"),
        (r'\S;\s*\n', "semicolon"),
        (r'[^_]_\("[^"]+"\s*%', "don't use % inside _()"),
        (r"[^_]_\('[^']+'\s*%", "don't use % inside _()"),
        (r'\w,\w', "missing whitespace after ,"),
        (r'\w[+/*\-<>]\w', "missing whitespace in expression"),
        (r'^\s+\w+=\w+[^,)\n]$', "missing whitespace in assignment"),
        (r'(\s+)try:\n((?:\n|\1\s.*\n)+?)\1except.*?:\n'
         r'((?:\n|\1\s.*\n)+?)\1finally:', 'no try/except/finally in Py2.4'),
        (r'.{81}', "line too long"),
        (r' x+[xo][\'"]\n\s+[\'"]x',
         'string join across lines with no space'),
        (r'[^\n]\Z', "no trailing newline"),
        (r'(\S[ \t]+|^[ \t]+)\n', "trailing whitespace"),
#        (r'^\s+[^_ \n][^_. \n]+_[^_\n]+\s*=',
#         "don't use underbars in identifiers"),
        (r'^\s+(self\.)?[A-za-z][a-z0-9]+[A-Z]\w* = ',
         "don't use camelcase in identifiers"),
        (r'^\s*(if|while|def|class|except|try)\s[^[\n]*:\s*[^\\n]#\s]+',
         "linebreak after :"),
        (r'class\s[^( \n]+:', "old-style class, use class foo(object)"),
        (r'class\s[^( \n]+\(\):',
         "class foo() not available in Python 2.4, use class foo(object)"),
        (r'\b(%s)\(' % '|'.join(keyword.kwlist),
         "Python keyword is not a function"),
        (r',]', "unneeded trailing ',' in list"),
#        (r'class\s[A-Z][^\(]*\((?!Exception)',
#         "don't capitalize non-exception classes"),
#        (r'in range\(', "use xrange"),
#        (r'^\s*print\s+', "avoid using print in core and extensions"),
        (r'[\x80-\xff]', "non-ASCII character literal"),
        (r'("\')\.format\(', "str.format() not available in Python 2.4"),
        (r'^\s*with\s+', "with not available in Python 2.4"),
        (r'\.isdisjoint\(', "set.isdisjoint not available in Python 2.4"),
        (r'^\s*except.* as .*:', "except as not available in Python 2.4"),
        (r'^\s*os\.path\.relpath', "relpath not available in Python 2.4"),
        (r'(?<!def)\s+(any|all|format)\(',
         "any/all/format not available in Python 2.4"),
        (r'(?<!def)\s+(callable)\(',
         "callable not available in Python 3, use getattr(f, '__call__', None)"),
        (r'if\s.*\selse', "if ... else form not available in Python 2.4"),
        (r'^\s*(%s)\s\s' % '|'.join(keyword.kwlist),
         "gratuitous whitespace after Python keyword"),
        (r'([\(\[][ \t]\S)|(\S[ \t][\)\]])',
         "gratuitous whitespace in () or []"),
#        (r'\s\s=', "gratuitous whitespace before ="),
        (r'[^>< ](\+=|-=|!=|<>|<=|>=|<<=|>>=)\S',
         "missing whitespace around operator"),
        (r'[^>< ](\+=|-=|!=|<>|<=|>=|<<=|>>=)\s',
         "missing whitespace around operator"),
        (r'\s(\+=|-=|!=|<>|<=|>=|<<=|>>=)\S',
         "missing whitespace around operator"),
        (r'[^^+=*/!<>&| -](\s=|=\s)[^= ]',
         "wrong whitespace around ="),
        (r'raise Exception', "don't raise generic exceptions"),
        (r' is\s+(not\s+)?["\'0-9-]', "object comparison with literal"),
        (r' [=!]=\s+(True|False|None)',
         "comparison with singleton, use 'is' or 'is not' instead"),
        (r'^\s*(while|if) [01]:',
         "use True/False for constant Boolean expression"),
        (r'(?:(?<!def)\s+|\()hasattr',
         'hasattr(foo, bar) is broken, use util.safehasattr(foo, bar) instead'),
        (r'opener\([^)]*\).read\(',
         "use opener.read() instead"),
        (r'BaseException', 'not in Py2.4, use Exception'),
        (r'os\.path\.relpath', 'os.path.relpath is not in Py2.5'),
        (r'opener\([^)]*\).write\(',
         "use opener.write() instead"),
        (r'[\s\(](open|file)\([^)]*\)\.read\(',
         "use util.readfile() instead"),
        (r'[\s\(](open|file)\([^)]*\)\.write\(',
         "use util.readfile() instead"),
        (r'^[\s\(]*(open(er)?|file)\([^)]*\)',
         "always assign an opened file to a variable, and close it afterwards"),
        (r'[\s\(](open|file)\([^)]*\)\.',
         "always assign an opened file to a variable, and close it afterwards"),
        (r'(?i)descendent', "the proper spelling is descendAnt"),
        (r'\.debug\(\_', "don't mark debug messages for translation"),
        (r'\.strip\(\)\.split\(\)', "no need to strip before splitting"),
        # naked except is normally an error, but a "#re-raises" comment
        # on the offending line suppresses the hit (third tuple element)
        (r'^\s*except\s*:', "warning: naked except clause", r'#.*re-raises'),
    ],
    # warnings
    [
        (r'ui\.(status|progress|write|note|warn)\([\'\"]x',
         "warning: unwrapped ui message"),
    ]
]
214
214
# Filter that blanks out Python comments and string literals (via
# reppython) before pypats is applied, so checks don't fire on text
# inside strings or comments.  Verbose-mode regex: whitespace in the
# pattern itself is ignored.
pyfilters = [
    (r"""(?msx)(?P<comment>\#.*?$)|
         ((?P<quote>('''|\"\"\"|(?<!')'(?!')|(?<!")"(?!")))
          (?P<text>(([^\\]|\\.)*?))
          (?P=quote))""", reppython),
]
221
221
# Patterns applied to C sources (errors, then warnings).
cpats = [
    [
        (r'//', "don't use //-style comments"),
        (r'^ ', "don't use spaces to indent"),
        (r'\S\t', "don't use tabs except for indent"),
        (r'(\S[ \t]+|^[ \t]+)\n', "trailing whitespace"),
        (r'.{81}', "line too long"),
        (r'(while|if|do|for)\(', "use space after while/if/do/for"),
        (r'return\(', "return is not a function"),
        (r' ;', "no space before ;"),
        (r'\w+\* \w+', "use int *foo, not int* foo"),
        (r'\([^\)]+\) \w+', "use (int)foo, not (int) foo"),
        (r'\w+ (\+\+|--)', "use foo++, not foo ++"),
        (r'\w,\w', "missing whitespace after ,"),
        (r'^[^#]\w[+/*]\w', "missing whitespace in expression"),
        (r'^#\s+\w', "use #foo, not # foo"),
        (r'[^\n]\Z', "no trailing newline"),
        (r'^\s*#import\b', "use only #include in standard C code"),
    ],
    # warnings
    []
]
244
244
# Filters applied to C sources: blank out comments, string literals,
# include targets, and continuation whitespace in call argument lists.
cfilters = [
    (r'(/\*)(((\*(?!/))|[^*])*)\*/', repccomment),
    (r'''(?P<quote>(?<!")")(?P<text>([^"]|\\")+)"(?!")''', repquote),
    (r'''(#\s*include\s+<)([^>]+)>''', repinclude),
    (r'(\()([^)]+\))', repcallspaces),
]
251
251
# Layering check for mercurial/util.py: util must not depend on ui.
inutilpats = [
    [
        (r'\bui\.', "don't use ui in util"),
    ],
    # warnings
    []
]
259
259
# Layering check for mercurial/revlog.py: revlog must not depend on repo.
inrevlogpats = [
    [
        (r'\brepo\.', "don't use repo in revlog"),
    ],
    # warnings
    []
]
267
267
# (name, filename-regex, filters, pattern-table) tuples driving
# checkfile(): a file is checked by every entry whose regex matches it.
checks = [
    ('python', r'.*\.(py|cgi)$', pyfilters, pypats),
    ('test script', r'(.*/)?test-[^.~]*$', testfilters, testpats),
    ('c', r'.*\.c$', cfilters, cpats),
    ('unified test', r'.*\.t$', utestfilters, utestpats),
    ('layering violation repo in revlog', r'mercurial/revlog\.py', pyfilters,
     inrevlogpats),
    ('layering violation ui in util', r'mercurial/util\.py', pyfilters,
     inutilpats),
]
278
278
class norepeatlogger(object):
    """Logger that quotes each faulty line at most once.

    Consecutive errors for the same (file, line) repeat only the
    message, not the offending line itself.
    """
    def __init__(self):
        # last (fname, lineno, line) reported, used to suppress repeats
        self._lastseen = None

    def log(self, fname, lineno, line, msg, blame):
        """print an error related to a given line of a given file.

        The faulty line will also be printed but only once in the case
        of multiple errors.

        :fname: filename
        :lineno: line number
        :line: actual content of the line
        :msg: error message
        :blame: annotation info shown after the location ('' to omit)
        """
        msgid = fname, lineno, line
        if msgid != self._lastseen:
            if blame:
                print "%s:%d (%s):" % (fname, lineno, blame)
            else:
                print "%s:%d:" % (fname, lineno)
            print " > %s" % line
            self._lastseen = msgid
        print " " + msg
303
303
# Shared default logger: keeps duplicate-suppression state across all
# checkfile() calls that don't pass an explicit logfunc.
_defaultlogger = norepeatlogger()
305
305
def getblame(f):
    """Return [(line, user, rev), ...] annotation data for file f.

    Runs 'hg annotate -un' and parses each output line of the form
    'user rev: content', dropping the separating space and the
    trailing newline from the content.
    """
    annotated = []
    for raw in os.popen('hg annotate -un %s' % f):
        prefix, content = raw.split(':', 1)
        user, rev = prefix.split()
        annotated.append((content[1:-1], user, rev))
    return annotated
313
313
def checkfile(f, logfunc=_defaultlogger.log, maxerr=None, warnings=False,
              blame=False, debug=False, lineno=True):
    """checks style and portability of a given file

    :f: filepath
    :logfunc: function used to report error
              logfunc(filename, linenumber, linecontent, errormessage)
    :maxerr: number of errors to display before aborting.
             Set to false (default) to report all errors

    return True if no error is found, False otherwise.
    """
    blamecache = None
    result = True
    for name, match, filters, pats in checks:
        if debug:
            print name, f
        fc = 0
        if not re.match(match, f):
            if debug:
                print "Skipping %s for %s it doesn't match %s" % (
                       name, match, f)
            continue
        fp = open(f)
        # keep the raw text (pre) for reporting; post gets filtered
        pre = post = fp.read()
        fp.close()
        if "no-" + "check-code" in pre:
            if debug:
                print "Skipping %s for %s it has no- and check-code" % (
                       name, f)
            break
        # blank out comments/strings so patterns don't fire inside them
        for p, r in filters:
            post = re.sub(p, r, post)
        if warnings:
            # merge warning-level patterns into the error list
            pats = pats[0] + pats[1]
        else:
            pats = pats[0]
        # print post # uncomment to show filtered version

        if debug:
            print "Checking %s for %s" % (name, f)

        prelines = None
        errors = []
        for pat in pats:
            # entries are (regex, msg) or (regex, msg, ignore-regex);
            # the optional third element suppresses hits on lines that
            # also match it (e.g. "#re-raises" next to a naked except)
            if len(pat) == 3:
                p, msg, ignore = pat
            else:
                p, msg = pat
                ignore = None

            # fix-up regexes for multiline searches
            po = p
            # \s doesn't match \n
            p = re.sub(r'(?<!\\)\\s', r'[ \\t]', p)
            # [^...] doesn't match newline
            p = re.sub(r'(?<!\\)\[\^', r'[^\\n', p)

            #print po, '=>', p

            pos = 0
            n = 0
            for m in re.finditer(p, post, re.MULTILINE):
                if prelines is None:
                    # split lazily, only when the first hit occurs
                    prelines = pre.splitlines()
                    postlines = post.splitlines(True)

                # walk forward to the line containing the match offset;
                # n/pos persist across matches since finditer is ordered
                start = m.start()
                while n < len(postlines):
                    step = len(postlines[n])
                    if pos + step > start:
                        break
                    pos += step
                    n += 1
                l = prelines[n]

                if "check-code" + "-ignore" in l:
                    if debug:
                        print "Skipping %s for %s:%s (check-code -ignore)" % (
                            name, f, n)
                    continue
                elif ignore and re.search(ignore, l, re.MULTILINE):
                    # per-pattern ignore comment on the original line
                    continue
                bd = ""
                if blame:
                    bd = 'working directory'
                    if not blamecache:
                        blamecache = getblame(f)
                    if n < len(blamecache):
                        bl, bu, br = blamecache[n]
                        if bl == l:
                            bd = '%s@%s' % (bu, br)
                errors.append((f, lineno and n + 1, l, msg, bd))
                result = False

        errors.sort()
        for e in errors:
            logfunc(*e)
            fc += 1
            if maxerr and fc >= maxerr:
                print " (too many errors, giving up)"
                break

    return result
410
418
if __name__ == "__main__":
    # Command-line driver: check the named files (default: everything in
    # the current directory) and exit non-zero if any check fails.
    parser = optparse.OptionParser("%prog [options] [files]")
    parser.add_option("-w", "--warnings", action="store_true",
                      help="include warning-level checks")
    parser.add_option("-p", "--per-file", type="int",
                      help="max warnings per file")
    parser.add_option("-b", "--blame", action="store_true",
                      help="use annotate to generate blame info")
    parser.add_option("", "--debug", action="store_true",
                      help="show debug information")
    parser.add_option("", "--nolineno", action="store_false",
                      dest='lineno', help="don't show line numbers")

    parser.set_defaults(per_file=15, warnings=False, blame=False, debug=False,
                        lineno=True)
    (options, args) = parser.parse_args()

    if args:
        check = args
    else:
        check = glob.glob("*")

    ret = 0
    for f in check:
        ok = checkfile(f, maxerr=options.per_file, warnings=options.warnings,
                       blame=options.blame, debug=options.debug,
                       lineno=options.lineno)
        if not ok:
            ret = 1
    sys.exit(ret)
@@ -1,294 +1,294 b''
1 """reorder a revlog (the manifest by default) to save space
1 """reorder a revlog (the manifest by default) to save space
2
2
3 Specifically, this topologically sorts the revisions in the revlog so that
3 Specifically, this topologically sorts the revisions in the revlog so that
4 revisions on the same branch are adjacent as much as possible. This is a
4 revisions on the same branch are adjacent as much as possible. This is a
5 workaround for the fact that Mercurial computes deltas relative to the
5 workaround for the fact that Mercurial computes deltas relative to the
6 previous revision rather than relative to a parent revision.
6 previous revision rather than relative to a parent revision.
7
7
8 This is *not* safe to run on a changelog.
8 This is *not* safe to run on a changelog.
9 """
9 """
10
10
11 # Originally written by Benoit Boissinot <benoit.boissinot at ens-lyon.org>
11 # Originally written by Benoit Boissinot <benoit.boissinot at ens-lyon.org>
12 # as a patch to rewrite-log. Cleaned up, refactored, documented, and
12 # as a patch to rewrite-log. Cleaned up, refactored, documented, and
13 # renamed by Greg Ward <greg at gerg.ca>.
13 # renamed by Greg Ward <greg at gerg.ca>.
14
14
15 # XXX would be nice to have a way to verify the repository after shrinking,
15 # XXX would be nice to have a way to verify the repository after shrinking,
16 # e.g. by comparing "before" and "after" states of random changesets
16 # e.g. by comparing "before" and "after" states of random changesets
17 # (maybe: export before, shrink, export after, diff).
17 # (maybe: export before, shrink, export after, diff).
18
18
19 import os, errno
19 import os, errno
20 from mercurial import revlog, transaction, node, util, scmutil
20 from mercurial import revlog, transaction, node, util, scmutil
21 from mercurial import changegroup
21 from mercurial import changegroup
22 from mercurial.i18n import _
22 from mercurial.i18n import _
23
23
24
24
25 def postorder(start, edges):
25 def postorder(start, edges):
26 result = []
26 result = []
27 visit = list(start)
27 visit = list(start)
28 finished = set()
28 finished = set()
29
29
30 while visit:
30 while visit:
31 cur = visit[-1]
31 cur = visit[-1]
32 for p in edges[cur]:
32 for p in edges[cur]:
33 # defend against node.nullrev because it's occasionally
33 # defend against node.nullrev because it's occasionally
34 # possible for a node to have parents (null, something)
34 # possible for a node to have parents (null, something)
35 # rather than (something, null)
35 # rather than (something, null)
36 if p not in finished and p != node.nullrev:
36 if p not in finished and p != node.nullrev:
37 visit.append(p)
37 visit.append(p)
38 break
38 break
39 else:
39 else:
40 result.append(cur)
40 result.append(cur)
41 finished.add(cur)
41 finished.add(cur)
42 visit.pop()
42 visit.pop()
43
43
44 return result
44 return result
45
45
46 def toposort_reversepostorder(ui, rl):
46 def toposort_reversepostorder(ui, rl):
47 # postorder of the reverse directed graph
47 # postorder of the reverse directed graph
48
48
49 # map rev to list of parent revs (p2 first)
49 # map rev to list of parent revs (p2 first)
50 parents = {}
50 parents = {}
51 heads = set()
51 heads = set()
52 ui.status(_('reading revs\n'))
52 ui.status(_('reading revs\n'))
53 try:
53 try:
54 for rev in rl:
54 for rev in rl:
55 ui.progress(_('reading'), rev, total=len(rl))
55 ui.progress(_('reading'), rev, total=len(rl))
56 (p1, p2) = rl.parentrevs(rev)
56 (p1, p2) = rl.parentrevs(rev)
57 if p1 == p2 == node.nullrev:
57 if p1 == p2 == node.nullrev:
58 parents[rev] = () # root node
58 parents[rev] = () # root node
59 elif p1 == p2 or p2 == node.nullrev:
59 elif p1 == p2 or p2 == node.nullrev:
60 parents[rev] = (p1,) # normal node
60 parents[rev] = (p1,) # normal node
61 else:
61 else:
62 parents[rev] = (p2, p1) # merge node
62 parents[rev] = (p2, p1) # merge node
63 heads.add(rev)
63 heads.add(rev)
64 for p in parents[rev]:
64 for p in parents[rev]:
65 heads.discard(p)
65 heads.discard(p)
66 finally:
66 finally:
67 ui.progress(_('reading'), None)
67 ui.progress(_('reading'), None)
68
68
69 heads = list(heads)
69 heads = list(heads)
70 heads.sort(reverse=True)
70 heads.sort(reverse=True)
71
71
72 ui.status(_('sorting revs\n'))
72 ui.status(_('sorting revs\n'))
73 return postorder(heads, parents)
73 return postorder(heads, parents)
74
74
75 def toposort_postorderreverse(ui, rl):
75 def toposort_postorderreverse(ui, rl):
76 # reverse-postorder of the reverse directed graph
76 # reverse-postorder of the reverse directed graph
77
77
78 children = {}
78 children = {}
79 roots = set()
79 roots = set()
80 ui.status(_('reading revs\n'))
80 ui.status(_('reading revs\n'))
81 try:
81 try:
82 for rev in rl:
82 for rev in rl:
83 ui.progress(_('reading'), rev, total=len(rl))
83 ui.progress(_('reading'), rev, total=len(rl))
84 (p1, p2) = rl.parentrevs(rev)
84 (p1, p2) = rl.parentrevs(rev)
85 if p1 == p2 == node.nullrev:
85 if p1 == p2 == node.nullrev:
86 roots.add(rev)
86 roots.add(rev)
87 children[rev] = []
87 children[rev] = []
88 if p1 != node.nullrev:
88 if p1 != node.nullrev:
89 children[p1].append(rev)
89 children[p1].append(rev)
90 if p2 != node.nullrev:
90 if p2 != node.nullrev:
91 children[p2].append(rev)
91 children[p2].append(rev)
92 finally:
92 finally:
93 ui.progress(_('reading'), None)
93 ui.progress(_('reading'), None)
94
94
95 roots = list(roots)
95 roots = list(roots)
96 roots.sort()
96 roots.sort()
97
97
98 ui.status(_('sorting revs\n'))
98 ui.status(_('sorting revs\n'))
99 result = postorder(roots, children)
99 result = postorder(roots, children)
100 result.reverse()
100 result.reverse()
101 return result
101 return result
102
102
103 def writerevs(ui, r1, r2, order, tr):
103 def writerevs(ui, r1, r2, order, tr):
104
104
105 ui.status(_('writing revs\n'))
105 ui.status(_('writing revs\n'))
106
106
107
107
108 order = [r1.node(r) for r in order]
108 order = [r1.node(r) for r in order]
109
109
110 # this is a bit ugly, but it works
110 # this is a bit ugly, but it works
111 count = [0]
111 count = [0]
112 def lookup(revl, x):
112 def lookup(revl, x):
113 count[0] += 1
113 count[0] += 1
114 ui.progress(_('writing'), count[0], total=len(order))
114 ui.progress(_('writing'), count[0], total=len(order))
115 return "%020d" % revl.linkrev(revl.rev(x))
115 return "%020d" % revl.linkrev(revl.rev(x))
116
116
117 unlookup = lambda x: int(x, 10)
117 unlookup = lambda x: int(x, 10)
118
118
119 try:
119 try:
120 bundler = changegroup.bundle10(lookup)
120 bundler = changegroup.bundle10(lookup)
121 group = util.chunkbuffer(r1.group(order, bundler))
121 group = util.chunkbuffer(r1.group(order, bundler))
122 group = changegroup.unbundle10(group, "UN")
122 group = changegroup.unbundle10(group, "UN")
123 r2.addgroup(group, unlookup, tr)
123 r2.addgroup(group, unlookup, tr)
124 finally:
124 finally:
125 ui.progress(_('writing'), None)
125 ui.progress(_('writing'), None)
126
126
127 def report(ui, r1, r2):
127 def report(ui, r1, r2):
128 def getsize(r):
128 def getsize(r):
129 s = 0
129 s = 0
130 for fn in (r.indexfile, r.datafile):
130 for fn in (r.indexfile, r.datafile):
131 try:
131 try:
132 s += os.stat(fn).st_size
132 s += os.stat(fn).st_size
133 except OSError, inst:
133 except OSError, inst:
134 if inst.errno != errno.ENOENT:
134 if inst.errno != errno.ENOENT:
135 raise
135 raise
136 return s
136 return s
137
137
138 oldsize = float(getsize(r1))
138 oldsize = float(getsize(r1))
139 newsize = float(getsize(r2))
139 newsize = float(getsize(r2))
140
140
141 # argh: have to pass an int to %d, because a float >= 2^32
141 # argh: have to pass an int to %d, because a float >= 2^32
142 # blows up under Python 2.5 or earlier
142 # blows up under Python 2.5 or earlier
143 ui.write(_('old file size: %12d bytes (%6.1f MiB)\n')
143 ui.write(_('old file size: %12d bytes (%6.1f MiB)\n')
144 % (int(oldsize), oldsize / 1024 / 1024))
144 % (int(oldsize), oldsize / 1024 / 1024))
145 ui.write(_('new file size: %12d bytes (%6.1f MiB)\n')
145 ui.write(_('new file size: %12d bytes (%6.1f MiB)\n')
146 % (int(newsize), newsize / 1024 / 1024))
146 % (int(newsize), newsize / 1024 / 1024))
147
147
148 shrink_percent = (oldsize - newsize) / oldsize * 100
148 shrink_percent = (oldsize - newsize) / oldsize * 100
149 shrink_factor = oldsize / newsize
149 shrink_factor = oldsize / newsize
150 ui.write(_('shrinkage: %.1f%% (%.1fx)\n')
150 ui.write(_('shrinkage: %.1f%% (%.1fx)\n')
151 % (shrink_percent, shrink_factor))
151 % (shrink_percent, shrink_factor))
152
152
153 def shrink(ui, repo, **opts):
153 def shrink(ui, repo, **opts):
154 """shrink a revlog by reordering revisions
154 """shrink a revlog by reordering revisions
155
155
156 Rewrites all the entries in some revlog of the current repository
156 Rewrites all the entries in some revlog of the current repository
157 (by default, the manifest log) to save space.
157 (by default, the manifest log) to save space.
158
158
159 Different sort algorithms have different performance
159 Different sort algorithms have different performance
160 characteristics. Use ``--sort`` to select a sort algorithm so you
160 characteristics. Use ``--sort`` to select a sort algorithm so you
161 can determine which works best for your data.
161 can determine which works best for your data.
162 """
162 """
163
163
164 if not repo.local():
164 if not repo.local():
165 raise util.Abort(_('not a local repository: %s') % repo.root)
165 raise util.Abort(_('not a local repository: %s') % repo.root)
166
166
167 fn = opts.get('revlog')
167 fn = opts.get('revlog')
168 if not fn:
168 if not fn:
169 indexfn = repo.sjoin('00manifest.i')
169 indexfn = repo.sjoin('00manifest.i')
170 else:
170 else:
171 if not fn.endswith('.i'):
171 if not fn.endswith('.i'):
172 raise util.Abort(_('--revlog option must specify the revlog index '
172 raise util.Abort(_('--revlog option must specify the revlog index '
173 'file (*.i), not %s') % opts.get('revlog'))
173 'file (*.i), not %s') % opts.get('revlog'))
174
174
175 indexfn = os.path.realpath(fn)
175 indexfn = os.path.realpath(fn)
176 store = repo.sjoin('')
176 store = repo.sjoin('')
177 if not indexfn.startswith(store):
177 if not indexfn.startswith(store):
178 raise util.Abort(_('--revlog option must specify a revlog in %s, '
178 raise util.Abort(_('--revlog option must specify a revlog in %s, '
179 'not %s') % (store, indexfn))
179 'not %s') % (store, indexfn))
180
180
181 sortname = opts['sort']
181 sortname = opts['sort']
182 try:
182 try:
183 toposort = globals()['toposort_' + sortname]
183 toposort = globals()['toposort_' + sortname]
184 except KeyError:
184 except KeyError:
185 raise util.Abort(_('no such toposort algorithm: %s') % sortname)
185 raise util.Abort(_('no such toposort algorithm: %s') % sortname)
186
186
187 if not os.path.exists(indexfn):
187 if not os.path.exists(indexfn):
188 raise util.Abort(_('no such file: %s') % indexfn)
188 raise util.Abort(_('no such file: %s') % indexfn)
189 if '00changelog' in indexfn:
189 if '00changelog' in indexfn:
190 raise util.Abort(_('shrinking the changelog '
190 raise util.Abort(_('shrinking the changelog '
191 'will corrupt your repository'))
191 'will corrupt your repository'))
192
192
193 ui.write(_('shrinking %s\n') % indexfn)
193 ui.write(_('shrinking %s\n') % indexfn)
194 tmpindexfn = util.mktempcopy(indexfn, emptyok=True)
194 tmpindexfn = util.mktempcopy(indexfn, emptyok=True)
195
195
196 r1 = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), indexfn)
196 r1 = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), indexfn)
197 r2 = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), tmpindexfn)
197 r2 = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), tmpindexfn)
198
198
199 datafn, tmpdatafn = r1.datafile, r2.datafile
199 datafn, tmpdatafn = r1.datafile, r2.datafile
200
200
201 oldindexfn = indexfn + '.old'
201 oldindexfn = indexfn + '.old'
202 olddatafn = datafn + '.old'
202 olddatafn = datafn + '.old'
203 if os.path.exists(oldindexfn) or os.path.exists(olddatafn):
203 if os.path.exists(oldindexfn) or os.path.exists(olddatafn):
204 raise util.Abort(_('one or both of\n'
204 raise util.Abort(_('one or both of\n'
205 ' %s\n'
205 ' %s\n'
206 ' %s\n'
206 ' %s\n'
207 'exists from a previous run; please clean up '
207 'exists from a previous run; please clean up '
208 'before running again') % (oldindexfn, olddatafn))
208 'before running again') % (oldindexfn, olddatafn))
209
209
210 # Don't use repo.transaction(), because then things get hairy with
210 # Don't use repo.transaction(), because then things get hairy with
211 # paths: some need to be relative to .hg, and some need to be
211 # paths: some need to be relative to .hg, and some need to be
212 # absolute. Doing it this way keeps things simple: everything is an
212 # absolute. Doing it this way keeps things simple: everything is an
213 # absolute path.
213 # absolute path.
214 lock = repo.lock(wait=False)
214 lock = repo.lock(wait=False)
215 tr = transaction.transaction(ui.warn,
215 tr = transaction.transaction(ui.warn,
216 open,
216 open,
217 repo.sjoin('journal'))
217 repo.sjoin('journal'))
218
218
219 def ignoremissing(func):
219 def ignoremissing(func):
220 def f(*args, **kw):
220 def f(*args, **kw):
221 try:
221 try:
222 return func(*args, **kw)
222 return func(*args, **kw)
223 except OSError, inst:
223 except OSError, inst:
224 if inst.errno != errno.ENOENT:
224 if inst.errno != errno.ENOENT:
225 raise
225 raise
226 return f
226 return f
227
227
228 try:
228 try:
229 try:
229 try:
230 order = toposort(ui, r1)
230 order = toposort(ui, r1)
231
231
232 suboptimal = 0
232 suboptimal = 0
233 for i in xrange(1, len(order)):
233 for i in xrange(1, len(order)):
234 parents = [p for p in r1.parentrevs(order[i])
234 parents = [p for p in r1.parentrevs(order[i])
235 if p != node.nullrev]
235 if p != node.nullrev]
236 if parents and order[i - 1] not in parents:
236 if parents and order[i - 1] not in parents:
237 suboptimal += 1
237 suboptimal += 1
238 ui.note(_('%d suboptimal nodes\n') % suboptimal)
238 ui.note(_('%d suboptimal nodes\n') % suboptimal)
239
239
240 writerevs(ui, r1, r2, order, tr)
240 writerevs(ui, r1, r2, order, tr)
241 report(ui, r1, r2)
241 report(ui, r1, r2)
242 tr.close()
242 tr.close()
243 except:
243 except: # re-raises
244 # Abort transaction first, so we truncate the files before
244 # Abort transaction first, so we truncate the files before
245 # deleting them.
245 # deleting them.
246 tr.abort()
246 tr.abort()
247 for fn in (tmpindexfn, tmpdatafn):
247 for fn in (tmpindexfn, tmpdatafn):
248 ignoremissing(os.unlink)(fn)
248 ignoremissing(os.unlink)(fn)
249 raise
249 raise
250 if not opts.get('dry_run'):
250 if not opts.get('dry_run'):
251 # racy, both files cannot be renamed atomically
251 # racy, both files cannot be renamed atomically
252 # copy files
252 # copy files
253 util.oslink(indexfn, oldindexfn)
253 util.oslink(indexfn, oldindexfn)
254 ignoremissing(util.oslink)(datafn, olddatafn)
254 ignoremissing(util.oslink)(datafn, olddatafn)
255
255
256 # rename
256 # rename
257 util.rename(tmpindexfn, indexfn)
257 util.rename(tmpindexfn, indexfn)
258 try:
258 try:
259 os.chmod(tmpdatafn, os.stat(datafn).st_mode)
259 os.chmod(tmpdatafn, os.stat(datafn).st_mode)
260 util.rename(tmpdatafn, datafn)
260 util.rename(tmpdatafn, datafn)
261 except OSError, inst:
261 except OSError, inst:
262 if inst.errno != errno.ENOENT:
262 if inst.errno != errno.ENOENT:
263 raise
263 raise
264 ignoremissing(os.unlink)(datafn)
264 ignoremissing(os.unlink)(datafn)
265 else:
265 else:
266 for fn in (tmpindexfn, tmpdatafn):
266 for fn in (tmpindexfn, tmpdatafn):
267 ignoremissing(os.unlink)(fn)
267 ignoremissing(os.unlink)(fn)
268 finally:
268 finally:
269 lock.release()
269 lock.release()
270
270
271 if not opts.get('dry_run'):
271 if not opts.get('dry_run'):
272 ui.write(
272 ui.write(
273 _('note: old revlog saved in:\n'
273 _('note: old revlog saved in:\n'
274 ' %s\n'
274 ' %s\n'
275 ' %s\n'
275 ' %s\n'
276 '(You can delete those files when you are satisfied that your\n'
276 '(You can delete those files when you are satisfied that your\n'
277 'repository is still sane. '
277 'repository is still sane. '
278 'Running \'hg verify\' is strongly recommended.)\n')
278 'Running \'hg verify\' is strongly recommended.)\n')
279 % (oldindexfn, olddatafn))
279 % (oldindexfn, olddatafn))
280
280
281 cmdtable = {
281 cmdtable = {
282 'shrink': (shrink,
282 'shrink': (shrink,
283 [('', 'revlog', '',
283 [('', 'revlog', '',
284 _('the revlog to shrink (.i)')),
284 _('the revlog to shrink (.i)')),
285 ('n', 'dry-run', None,
285 ('n', 'dry-run', None,
286 _('do not shrink, simulate only')),
286 _('do not shrink, simulate only')),
287 ('', 'sort', 'reversepostorder',
287 ('', 'sort', 'reversepostorder',
288 _('name of sort algorithm to use')),
288 _('name of sort algorithm to use')),
289 ],
289 ],
290 _('hg shrink [--revlog PATH]'))
290 _('hg shrink [--revlog PATH]'))
291 }
291 }
292
292
293 if __name__ == "__main__":
293 if __name__ == "__main__":
294 print "shrink-revlog.py is now an extension (see hg help extensions)"
294 print "shrink-revlog.py is now an extension (see hg help extensions)"
@@ -1,3532 +1,3532 b''
1 # mq.py - patch queues for mercurial
1 # mq.py - patch queues for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''manage a stack of patches
8 '''manage a stack of patches
9
9
10 This extension lets you work with a stack of patches in a Mercurial
10 This extension lets you work with a stack of patches in a Mercurial
11 repository. It manages two stacks of patches - all known patches, and
11 repository. It manages two stacks of patches - all known patches, and
12 applied patches (subset of known patches).
12 applied patches (subset of known patches).
13
13
14 Known patches are represented as patch files in the .hg/patches
14 Known patches are represented as patch files in the .hg/patches
15 directory. Applied patches are both patch files and changesets.
15 directory. Applied patches are both patch files and changesets.
16
16
17 Common tasks (use :hg:`help command` for more details)::
17 Common tasks (use :hg:`help command` for more details)::
18
18
19 create new patch qnew
19 create new patch qnew
20 import existing patch qimport
20 import existing patch qimport
21
21
22 print patch series qseries
22 print patch series qseries
23 print applied patches qapplied
23 print applied patches qapplied
24
24
25 add known patch to applied stack qpush
25 add known patch to applied stack qpush
26 remove patch from applied stack qpop
26 remove patch from applied stack qpop
27 refresh contents of top applied patch qrefresh
27 refresh contents of top applied patch qrefresh
28
28
29 By default, mq will automatically use git patches when required to
29 By default, mq will automatically use git patches when required to
30 avoid losing file mode changes, copy records, binary files or empty
30 avoid losing file mode changes, copy records, binary files or empty
31 files creations or deletions. This behaviour can be configured with::
31 files creations or deletions. This behaviour can be configured with::
32
32
33 [mq]
33 [mq]
34 git = auto/keep/yes/no
34 git = auto/keep/yes/no
35
35
36 If set to 'keep', mq will obey the [diff] section configuration while
36 If set to 'keep', mq will obey the [diff] section configuration while
37 preserving existing git patches upon qrefresh. If set to 'yes' or
37 preserving existing git patches upon qrefresh. If set to 'yes' or
38 'no', mq will override the [diff] section and always generate git or
38 'no', mq will override the [diff] section and always generate git or
39 regular patches, possibly losing data in the second case.
39 regular patches, possibly losing data in the second case.
40
40
41 It may be desirable for mq changesets to be kept in the secret phase (see
41 It may be desirable for mq changesets to be kept in the secret phase (see
42 :hg:`help phases`), which can be enabled with the following setting::
42 :hg:`help phases`), which can be enabled with the following setting::
43
43
44 [mq]
44 [mq]
45 secret = True
45 secret = True
46
46
47 You will by default be managing a patch queue named "patches". You can
47 You will by default be managing a patch queue named "patches". You can
48 create other, independent patch queues with the :hg:`qqueue` command.
48 create other, independent patch queues with the :hg:`qqueue` command.
49
49
50 If the working directory contains uncommitted files, qpush, qpop and
50 If the working directory contains uncommitted files, qpush, qpop and
51 qgoto abort immediately. If -f/--force is used, the changes are
51 qgoto abort immediately. If -f/--force is used, the changes are
52 discarded. Setting:
52 discarded. Setting:
53
53
54 [mq]
54 [mq]
55 check = True
55 check = True
56
56
57 make them behave as if -c/--check were passed, and non-conflicting
57 make them behave as if -c/--check were passed, and non-conflicting
58 local changes will be tolerated and preserved. If incompatible options
58 local changes will be tolerated and preserved. If incompatible options
59 such as -f/--force or --exact are passed, this setting is ignored.
59 such as -f/--force or --exact are passed, this setting is ignored.
60 '''
60 '''
61
61
62 from mercurial.i18n import _
62 from mercurial.i18n import _
63 from mercurial.node import bin, hex, short, nullid, nullrev
63 from mercurial.node import bin, hex, short, nullid, nullrev
64 from mercurial.lock import release
64 from mercurial.lock import release
65 from mercurial import commands, cmdutil, hg, scmutil, util, revset
65 from mercurial import commands, cmdutil, hg, scmutil, util, revset
66 from mercurial import repair, extensions, url, error, phases
66 from mercurial import repair, extensions, url, error, phases
67 from mercurial import patch as patchmod
67 from mercurial import patch as patchmod
68 import os, re, errno, shutil
68 import os, re, errno, shutil
69
69
70 commands.norepo += " qclone"
70 commands.norepo += " qclone"
71
71
72 seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
72 seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
73
73
74 cmdtable = {}
74 cmdtable = {}
75 command = cmdutil.command(cmdtable)
75 command = cmdutil.command(cmdtable)
76
76
77 # Patch names looks like unix-file names.
77 # Patch names looks like unix-file names.
78 # They must be joinable with queue directory and result in the patch path.
78 # They must be joinable with queue directory and result in the patch path.
79 normname = util.normpath
79 normname = util.normpath
80
80
81 class statusentry(object):
81 class statusentry(object):
82 def __init__(self, node, name):
82 def __init__(self, node, name):
83 self.node, self.name = node, name
83 self.node, self.name = node, name
84 def __repr__(self):
84 def __repr__(self):
85 return hex(self.node) + ':' + self.name
85 return hex(self.node) + ':' + self.name
86
86
87 class patchheader(object):
87 class patchheader(object):
88 def __init__(self, pf, plainmode=False):
88 def __init__(self, pf, plainmode=False):
89 def eatdiff(lines):
89 def eatdiff(lines):
90 while lines:
90 while lines:
91 l = lines[-1]
91 l = lines[-1]
92 if (l.startswith("diff -") or
92 if (l.startswith("diff -") or
93 l.startswith("Index:") or
93 l.startswith("Index:") or
94 l.startswith("===========")):
94 l.startswith("===========")):
95 del lines[-1]
95 del lines[-1]
96 else:
96 else:
97 break
97 break
98 def eatempty(lines):
98 def eatempty(lines):
99 while lines:
99 while lines:
100 if not lines[-1].strip():
100 if not lines[-1].strip():
101 del lines[-1]
101 del lines[-1]
102 else:
102 else:
103 break
103 break
104
104
105 message = []
105 message = []
106 comments = []
106 comments = []
107 user = None
107 user = None
108 date = None
108 date = None
109 parent = None
109 parent = None
110 format = None
110 format = None
111 subject = None
111 subject = None
112 branch = None
112 branch = None
113 nodeid = None
113 nodeid = None
114 diffstart = 0
114 diffstart = 0
115
115
116 for line in file(pf):
116 for line in file(pf):
117 line = line.rstrip()
117 line = line.rstrip()
118 if (line.startswith('diff --git')
118 if (line.startswith('diff --git')
119 or (diffstart and line.startswith('+++ '))):
119 or (diffstart and line.startswith('+++ '))):
120 diffstart = 2
120 diffstart = 2
121 break
121 break
122 diffstart = 0 # reset
122 diffstart = 0 # reset
123 if line.startswith("--- "):
123 if line.startswith("--- "):
124 diffstart = 1
124 diffstart = 1
125 continue
125 continue
126 elif format == "hgpatch":
126 elif format == "hgpatch":
127 # parse values when importing the result of an hg export
127 # parse values when importing the result of an hg export
128 if line.startswith("# User "):
128 if line.startswith("# User "):
129 user = line[7:]
129 user = line[7:]
130 elif line.startswith("# Date "):
130 elif line.startswith("# Date "):
131 date = line[7:]
131 date = line[7:]
132 elif line.startswith("# Parent "):
132 elif line.startswith("# Parent "):
133 parent = line[9:].lstrip()
133 parent = line[9:].lstrip()
134 elif line.startswith("# Branch "):
134 elif line.startswith("# Branch "):
135 branch = line[9:]
135 branch = line[9:]
136 elif line.startswith("# Node ID "):
136 elif line.startswith("# Node ID "):
137 nodeid = line[10:]
137 nodeid = line[10:]
138 elif not line.startswith("# ") and line:
138 elif not line.startswith("# ") and line:
139 message.append(line)
139 message.append(line)
140 format = None
140 format = None
141 elif line == '# HG changeset patch':
141 elif line == '# HG changeset patch':
142 message = []
142 message = []
143 format = "hgpatch"
143 format = "hgpatch"
144 elif (format != "tagdone" and (line.startswith("Subject: ") or
144 elif (format != "tagdone" and (line.startswith("Subject: ") or
145 line.startswith("subject: "))):
145 line.startswith("subject: "))):
146 subject = line[9:]
146 subject = line[9:]
147 format = "tag"
147 format = "tag"
148 elif (format != "tagdone" and (line.startswith("From: ") or
148 elif (format != "tagdone" and (line.startswith("From: ") or
149 line.startswith("from: "))):
149 line.startswith("from: "))):
150 user = line[6:]
150 user = line[6:]
151 format = "tag"
151 format = "tag"
152 elif (format != "tagdone" and (line.startswith("Date: ") or
152 elif (format != "tagdone" and (line.startswith("Date: ") or
153 line.startswith("date: "))):
153 line.startswith("date: "))):
154 date = line[6:]
154 date = line[6:]
155 format = "tag"
155 format = "tag"
156 elif format == "tag" and line == "":
156 elif format == "tag" and line == "":
157 # when looking for tags (subject: from: etc) they
157 # when looking for tags (subject: from: etc) they
158 # end once you find a blank line in the source
158 # end once you find a blank line in the source
159 format = "tagdone"
159 format = "tagdone"
160 elif message or line:
160 elif message or line:
161 message.append(line)
161 message.append(line)
162 comments.append(line)
162 comments.append(line)
163
163
164 eatdiff(message)
164 eatdiff(message)
165 eatdiff(comments)
165 eatdiff(comments)
166 # Remember the exact starting line of the patch diffs before consuming
166 # Remember the exact starting line of the patch diffs before consuming
167 # empty lines, for external use by TortoiseHg and others
167 # empty lines, for external use by TortoiseHg and others
168 self.diffstartline = len(comments)
168 self.diffstartline = len(comments)
169 eatempty(message)
169 eatempty(message)
170 eatempty(comments)
170 eatempty(comments)
171
171
172 # make sure message isn't empty
172 # make sure message isn't empty
173 if format and format.startswith("tag") and subject:
173 if format and format.startswith("tag") and subject:
174 message.insert(0, "")
174 message.insert(0, "")
175 message.insert(0, subject)
175 message.insert(0, subject)
176
176
177 self.message = message
177 self.message = message
178 self.comments = comments
178 self.comments = comments
179 self.user = user
179 self.user = user
180 self.date = date
180 self.date = date
181 self.parent = parent
181 self.parent = parent
182 # nodeid and branch are for external use by TortoiseHg and others
182 # nodeid and branch are for external use by TortoiseHg and others
183 self.nodeid = nodeid
183 self.nodeid = nodeid
184 self.branch = branch
184 self.branch = branch
185 self.haspatch = diffstart > 1
185 self.haspatch = diffstart > 1
186 self.plainmode = plainmode
186 self.plainmode = plainmode
187
187
188 def setuser(self, user):
188 def setuser(self, user):
189 if not self.updateheader(['From: ', '# User '], user):
189 if not self.updateheader(['From: ', '# User '], user):
190 try:
190 try:
191 patchheaderat = self.comments.index('# HG changeset patch')
191 patchheaderat = self.comments.index('# HG changeset patch')
192 self.comments.insert(patchheaderat + 1, '# User ' + user)
192 self.comments.insert(patchheaderat + 1, '# User ' + user)
193 except ValueError:
193 except ValueError:
194 if self.plainmode or self._hasheader(['Date: ']):
194 if self.plainmode or self._hasheader(['Date: ']):
195 self.comments = ['From: ' + user] + self.comments
195 self.comments = ['From: ' + user] + self.comments
196 else:
196 else:
197 tmp = ['# HG changeset patch', '# User ' + user, '']
197 tmp = ['# HG changeset patch', '# User ' + user, '']
198 self.comments = tmp + self.comments
198 self.comments = tmp + self.comments
199 self.user = user
199 self.user = user
200
200
201 def setdate(self, date):
201 def setdate(self, date):
202 if not self.updateheader(['Date: ', '# Date '], date):
202 if not self.updateheader(['Date: ', '# Date '], date):
203 try:
203 try:
204 patchheaderat = self.comments.index('# HG changeset patch')
204 patchheaderat = self.comments.index('# HG changeset patch')
205 self.comments.insert(patchheaderat + 1, '# Date ' + date)
205 self.comments.insert(patchheaderat + 1, '# Date ' + date)
206 except ValueError:
206 except ValueError:
207 if self.plainmode or self._hasheader(['From: ']):
207 if self.plainmode or self._hasheader(['From: ']):
208 self.comments = ['Date: ' + date] + self.comments
208 self.comments = ['Date: ' + date] + self.comments
209 else:
209 else:
210 tmp = ['# HG changeset patch', '# Date ' + date, '']
210 tmp = ['# HG changeset patch', '# Date ' + date, '']
211 self.comments = tmp + self.comments
211 self.comments = tmp + self.comments
212 self.date = date
212 self.date = date
213
213
214 def setparent(self, parent):
214 def setparent(self, parent):
215 if not self.updateheader(['# Parent '], parent):
215 if not self.updateheader(['# Parent '], parent):
216 try:
216 try:
217 patchheaderat = self.comments.index('# HG changeset patch')
217 patchheaderat = self.comments.index('# HG changeset patch')
218 self.comments.insert(patchheaderat + 1, '# Parent ' + parent)
218 self.comments.insert(patchheaderat + 1, '# Parent ' + parent)
219 except ValueError:
219 except ValueError:
220 pass
220 pass
221 self.parent = parent
221 self.parent = parent
222
222
223 def setmessage(self, message):
223 def setmessage(self, message):
224 if self.comments:
224 if self.comments:
225 self._delmsg()
225 self._delmsg()
226 self.message = [message]
226 self.message = [message]
227 self.comments += self.message
227 self.comments += self.message
228
228
229 def updateheader(self, prefixes, new):
229 def updateheader(self, prefixes, new):
230 '''Update all references to a field in the patch header.
230 '''Update all references to a field in the patch header.
231 Return whether the field is present.'''
231 Return whether the field is present.'''
232 res = False
232 res = False
233 for prefix in prefixes:
233 for prefix in prefixes:
234 for i in xrange(len(self.comments)):
234 for i in xrange(len(self.comments)):
235 if self.comments[i].startswith(prefix):
235 if self.comments[i].startswith(prefix):
236 self.comments[i] = prefix + new
236 self.comments[i] = prefix + new
237 res = True
237 res = True
238 break
238 break
239 return res
239 return res
240
240
241 def _hasheader(self, prefixes):
241 def _hasheader(self, prefixes):
242 '''Check if a header starts with any of the given prefixes.'''
242 '''Check if a header starts with any of the given prefixes.'''
243 for prefix in prefixes:
243 for prefix in prefixes:
244 for comment in self.comments:
244 for comment in self.comments:
245 if comment.startswith(prefix):
245 if comment.startswith(prefix):
246 return True
246 return True
247 return False
247 return False
248
248
249 def __str__(self):
249 def __str__(self):
250 if not self.comments:
250 if not self.comments:
251 return ''
251 return ''
252 return '\n'.join(self.comments) + '\n\n'
252 return '\n'.join(self.comments) + '\n\n'
253
253
254 def _delmsg(self):
254 def _delmsg(self):
255 '''Remove existing message, keeping the rest of the comments fields.
255 '''Remove existing message, keeping the rest of the comments fields.
256 If comments contains 'subject: ', message will prepend
256 If comments contains 'subject: ', message will prepend
257 the field and a blank line.'''
257 the field and a blank line.'''
258 if self.message:
258 if self.message:
259 subj = 'subject: ' + self.message[0].lower()
259 subj = 'subject: ' + self.message[0].lower()
260 for i in xrange(len(self.comments)):
260 for i in xrange(len(self.comments)):
261 if subj == self.comments[i].lower():
261 if subj == self.comments[i].lower():
262 del self.comments[i]
262 del self.comments[i]
263 self.message = self.message[2:]
263 self.message = self.message[2:]
264 break
264 break
265 ci = 0
265 ci = 0
266 for mi in self.message:
266 for mi in self.message:
267 while mi != self.comments[ci]:
267 while mi != self.comments[ci]:
268 ci += 1
268 ci += 1
269 del self.comments[ci]
269 del self.comments[ci]
270
270
def newcommit(repo, phase, *args, **kwargs):
    """helper dedicated to ensure a commit respect mq.secret setting

    It should be used instead of repo.commit inside the mq source for operation
    creating new changeset.
    """
    if phase is None and repo.ui.configbool('mq', 'secret', False):
        phase = phases.secret
    if phase is not None:
        backup = repo.ui.backupconfig('phases', 'new-commit')
    # Marking the repository as committing an mq patch can be used
    # to optimize operations like _branchtags().
    repo._committingpatch = True
    try:
        if phase is not None:
            repo.ui.setconfig('phases', 'new-commit', phase)
        return repo.commit(*args, **kwargs)
    finally:
        repo._committingpatch = False
        if phase is not None:
            repo.ui.restoreconfig(backup)
293
293
class AbortNoCleanup(error.Abort):
    """Abort, but keep (commit) the work done so far instead of rolling
    back; apply() catches this, closes the transaction and saves state."""
    pass
296
296
class queue(object):
    def __init__(self, ui, path, patchdir=None):
        """Bind a patch queue rooted at *path*.

        The active queue name is read from 'patches.queue'; when that file
        is missing or empty the default 'patches' directory is used.
        *patchdir*, when given, overrides the computed patch directory.
        """
        self.basepath = path
        try:
            fp = open(os.path.join(path, 'patches.queue'))
            curname = fp.read().rstrip()
            fp.close()
            if curname:
                curpath = os.path.join(path, 'patches-' + curname)
            else:
                curpath = os.path.join(path, 'patches')
        except IOError:
            curpath = os.path.join(path, 'patches')
        self.path = patchdir or curpath
        self.opener = scmutil.opener(self.path)
        self.ui = ui
        self.applieddirty = False
        self.seriesdirty = False
        self.guardsdirty = False
        self.added = []
        self.seriespath = "series"
        self.statuspath = "status"
        self.guardspath = "guards"
        self.activeguards = None
        # Handle mq.git as a bool with extended values auto/keep/yes/no
        try:
            gitmode = ui.configbool('mq', 'git', None)
            if gitmode is None:
                raise error.ConfigError
            self.gitmode = gitmode and 'yes' or 'no'
        except error.ConfigError:
            self.gitmode = ui.config('mq', 'git', 'auto').lower()
        self.plainmode = ui.configbool('mq', 'plain', False)
330
330
331 @util.propertycache
331 @util.propertycache
332 def applied(self):
332 def applied(self):
333 def parselines(lines):
333 def parselines(lines):
334 for l in lines:
334 for l in lines:
335 entry = l.split(':', 1)
335 entry = l.split(':', 1)
336 if len(entry) > 1:
336 if len(entry) > 1:
337 n, name = entry
337 n, name = entry
338 yield statusentry(bin(n), name)
338 yield statusentry(bin(n), name)
339 elif l.strip():
339 elif l.strip():
340 self.ui.warn(_('malformated mq status line: %s\n') % entry)
340 self.ui.warn(_('malformated mq status line: %s\n') % entry)
341 # else we ignore empty lines
341 # else we ignore empty lines
342 try:
342 try:
343 lines = self.opener.read(self.statuspath).splitlines()
343 lines = self.opener.read(self.statuspath).splitlines()
344 return list(parselines(lines))
344 return list(parselines(lines))
345 except IOError, e:
345 except IOError, e:
346 if e.errno == errno.ENOENT:
346 if e.errno == errno.ENOENT:
347 return []
347 return []
348 raise
348 raise
349
349
350 @util.propertycache
350 @util.propertycache
351 def fullseries(self):
351 def fullseries(self):
352 try:
352 try:
353 return self.opener.read(self.seriespath).splitlines()
353 return self.opener.read(self.seriespath).splitlines()
354 except IOError, e:
354 except IOError, e:
355 if e.errno == errno.ENOENT:
355 if e.errno == errno.ENOENT:
356 return []
356 return []
357 raise
357 raise
358
358
359 @util.propertycache
359 @util.propertycache
360 def series(self):
360 def series(self):
361 self.parseseries()
361 self.parseseries()
362 return self.series
362 return self.series
363
363
364 @util.propertycache
364 @util.propertycache
365 def seriesguards(self):
365 def seriesguards(self):
366 self.parseseries()
366 self.parseseries()
367 return self.seriesguards
367 return self.seriesguards
368
368
369 def invalidate(self):
369 def invalidate(self):
370 for a in 'applied fullseries series seriesguards'.split():
370 for a in 'applied fullseries series seriesguards'.split():
371 if a in self.__dict__:
371 if a in self.__dict__:
372 delattr(self, a)
372 delattr(self, a)
373 self.applieddirty = False
373 self.applieddirty = False
374 self.seriesdirty = False
374 self.seriesdirty = False
375 self.guardsdirty = False
375 self.guardsdirty = False
376 self.activeguards = None
376 self.activeguards = None
377
377
378 def diffopts(self, opts={}, patchfn=None):
378 def diffopts(self, opts={}, patchfn=None):
379 diffopts = patchmod.diffopts(self.ui, opts)
379 diffopts = patchmod.diffopts(self.ui, opts)
380 if self.gitmode == 'auto':
380 if self.gitmode == 'auto':
381 diffopts.upgrade = True
381 diffopts.upgrade = True
382 elif self.gitmode == 'keep':
382 elif self.gitmode == 'keep':
383 pass
383 pass
384 elif self.gitmode in ('yes', 'no'):
384 elif self.gitmode in ('yes', 'no'):
385 diffopts.git = self.gitmode == 'yes'
385 diffopts.git = self.gitmode == 'yes'
386 else:
386 else:
387 raise util.Abort(_('mq.git option can be auto/keep/yes/no'
387 raise util.Abort(_('mq.git option can be auto/keep/yes/no'
388 ' got %s') % self.gitmode)
388 ' got %s') % self.gitmode)
389 if patchfn:
389 if patchfn:
390 diffopts = self.patchopts(diffopts, patchfn)
390 diffopts = self.patchopts(diffopts, patchfn)
391 return diffopts
391 return diffopts
392
392
393 def patchopts(self, diffopts, *patches):
393 def patchopts(self, diffopts, *patches):
394 """Return a copy of input diff options with git set to true if
394 """Return a copy of input diff options with git set to true if
395 referenced patch is a git patch and should be preserved as such.
395 referenced patch is a git patch and should be preserved as such.
396 """
396 """
397 diffopts = diffopts.copy()
397 diffopts = diffopts.copy()
398 if not diffopts.git and self.gitmode == 'keep':
398 if not diffopts.git and self.gitmode == 'keep':
399 for patchfn in patches:
399 for patchfn in patches:
400 patchf = self.opener(patchfn, 'r')
400 patchf = self.opener(patchfn, 'r')
401 # if the patch was a git patch, refresh it as a git patch
401 # if the patch was a git patch, refresh it as a git patch
402 for line in patchf:
402 for line in patchf:
403 if line.startswith('diff --git'):
403 if line.startswith('diff --git'):
404 diffopts.git = True
404 diffopts.git = True
405 break
405 break
406 patchf.close()
406 patchf.close()
407 return diffopts
407 return diffopts
408
408
409 def join(self, *p):
409 def join(self, *p):
410 return os.path.join(self.path, *p)
410 return os.path.join(self.path, *p)
411
411
412 def findseries(self, patch):
412 def findseries(self, patch):
413 def matchpatch(l):
413 def matchpatch(l):
414 l = l.split('#', 1)[0]
414 l = l.split('#', 1)[0]
415 return l.strip() == patch
415 return l.strip() == patch
416 for index, l in enumerate(self.fullseries):
416 for index, l in enumerate(self.fullseries):
417 if matchpatch(l):
417 if matchpatch(l):
418 return index
418 return index
419 return None
419 return None
420
420
421 guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
421 guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
422
422
423 def parseseries(self):
423 def parseseries(self):
424 self.series = []
424 self.series = []
425 self.seriesguards = []
425 self.seriesguards = []
426 for l in self.fullseries:
426 for l in self.fullseries:
427 h = l.find('#')
427 h = l.find('#')
428 if h == -1:
428 if h == -1:
429 patch = l
429 patch = l
430 comment = ''
430 comment = ''
431 elif h == 0:
431 elif h == 0:
432 continue
432 continue
433 else:
433 else:
434 patch = l[:h]
434 patch = l[:h]
435 comment = l[h:]
435 comment = l[h:]
436 patch = patch.strip()
436 patch = patch.strip()
437 if patch:
437 if patch:
438 if patch in self.series:
438 if patch in self.series:
439 raise util.Abort(_('%s appears more than once in %s') %
439 raise util.Abort(_('%s appears more than once in %s') %
440 (patch, self.join(self.seriespath)))
440 (patch, self.join(self.seriespath)))
441 self.series.append(patch)
441 self.series.append(patch)
442 self.seriesguards.append(self.guard_re.findall(comment))
442 self.seriesguards.append(self.guard_re.findall(comment))
443
443
444 def checkguard(self, guard):
444 def checkguard(self, guard):
445 if not guard:
445 if not guard:
446 return _('guard cannot be an empty string')
446 return _('guard cannot be an empty string')
447 bad_chars = '# \t\r\n\f'
447 bad_chars = '# \t\r\n\f'
448 first = guard[0]
448 first = guard[0]
449 if first in '-+':
449 if first in '-+':
450 return (_('guard %r starts with invalid character: %r') %
450 return (_('guard %r starts with invalid character: %r') %
451 (guard, first))
451 (guard, first))
452 for c in bad_chars:
452 for c in bad_chars:
453 if c in guard:
453 if c in guard:
454 return _('invalid character in guard %r: %r') % (guard, c)
454 return _('invalid character in guard %r: %r') % (guard, c)
455
455
456 def setactive(self, guards):
456 def setactive(self, guards):
457 for guard in guards:
457 for guard in guards:
458 bad = self.checkguard(guard)
458 bad = self.checkguard(guard)
459 if bad:
459 if bad:
460 raise util.Abort(bad)
460 raise util.Abort(bad)
461 guards = sorted(set(guards))
461 guards = sorted(set(guards))
462 self.ui.debug('active guards: %s\n' % ' '.join(guards))
462 self.ui.debug('active guards: %s\n' % ' '.join(guards))
463 self.activeguards = guards
463 self.activeguards = guards
464 self.guardsdirty = True
464 self.guardsdirty = True
465
465
466 def active(self):
466 def active(self):
467 if self.activeguards is None:
467 if self.activeguards is None:
468 self.activeguards = []
468 self.activeguards = []
469 try:
469 try:
470 guards = self.opener.read(self.guardspath).split()
470 guards = self.opener.read(self.guardspath).split()
471 except IOError, err:
471 except IOError, err:
472 if err.errno != errno.ENOENT:
472 if err.errno != errno.ENOENT:
473 raise
473 raise
474 guards = []
474 guards = []
475 for i, guard in enumerate(guards):
475 for i, guard in enumerate(guards):
476 bad = self.checkguard(guard)
476 bad = self.checkguard(guard)
477 if bad:
477 if bad:
478 self.ui.warn('%s:%d: %s\n' %
478 self.ui.warn('%s:%d: %s\n' %
479 (self.join(self.guardspath), i + 1, bad))
479 (self.join(self.guardspath), i + 1, bad))
480 else:
480 else:
481 self.activeguards.append(guard)
481 self.activeguards.append(guard)
482 return self.activeguards
482 return self.activeguards
483
483
484 def setguards(self, idx, guards):
484 def setguards(self, idx, guards):
485 for g in guards:
485 for g in guards:
486 if len(g) < 2:
486 if len(g) < 2:
487 raise util.Abort(_('guard %r too short') % g)
487 raise util.Abort(_('guard %r too short') % g)
488 if g[0] not in '-+':
488 if g[0] not in '-+':
489 raise util.Abort(_('guard %r starts with invalid char') % g)
489 raise util.Abort(_('guard %r starts with invalid char') % g)
490 bad = self.checkguard(g[1:])
490 bad = self.checkguard(g[1:])
491 if bad:
491 if bad:
492 raise util.Abort(bad)
492 raise util.Abort(bad)
493 drop = self.guard_re.sub('', self.fullseries[idx])
493 drop = self.guard_re.sub('', self.fullseries[idx])
494 self.fullseries[idx] = drop + ''.join([' #' + g for g in guards])
494 self.fullseries[idx] = drop + ''.join([' #' + g for g in guards])
495 self.parseseries()
495 self.parseseries()
496 self.seriesdirty = True
496 self.seriesdirty = True
497
497
498 def pushable(self, idx):
498 def pushable(self, idx):
499 if isinstance(idx, str):
499 if isinstance(idx, str):
500 idx = self.series.index(idx)
500 idx = self.series.index(idx)
501 patchguards = self.seriesguards[idx]
501 patchguards = self.seriesguards[idx]
502 if not patchguards:
502 if not patchguards:
503 return True, None
503 return True, None
504 guards = self.active()
504 guards = self.active()
505 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
505 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
506 if exactneg:
506 if exactneg:
507 return False, repr(exactneg[0])
507 return False, repr(exactneg[0])
508 pos = [g for g in patchguards if g[0] == '+']
508 pos = [g for g in patchguards if g[0] == '+']
509 exactpos = [g for g in pos if g[1:] in guards]
509 exactpos = [g for g in pos if g[1:] in guards]
510 if pos:
510 if pos:
511 if exactpos:
511 if exactpos:
512 return True, repr(exactpos[0])
512 return True, repr(exactpos[0])
513 return False, ' '.join(map(repr, pos))
513 return False, ' '.join(map(repr, pos))
514 return True, ''
514 return True, ''
515
515
516 def explainpushable(self, idx, all_patches=False):
516 def explainpushable(self, idx, all_patches=False):
517 write = all_patches and self.ui.write or self.ui.warn
517 write = all_patches and self.ui.write or self.ui.warn
518 if all_patches or self.ui.verbose:
518 if all_patches or self.ui.verbose:
519 if isinstance(idx, str):
519 if isinstance(idx, str):
520 idx = self.series.index(idx)
520 idx = self.series.index(idx)
521 pushable, why = self.pushable(idx)
521 pushable, why = self.pushable(idx)
522 if all_patches and pushable:
522 if all_patches and pushable:
523 if why is None:
523 if why is None:
524 write(_('allowing %s - no guards in effect\n') %
524 write(_('allowing %s - no guards in effect\n') %
525 self.series[idx])
525 self.series[idx])
526 else:
526 else:
527 if not why:
527 if not why:
528 write(_('allowing %s - no matching negative guards\n') %
528 write(_('allowing %s - no matching negative guards\n') %
529 self.series[idx])
529 self.series[idx])
530 else:
530 else:
531 write(_('allowing %s - guarded by %s\n') %
531 write(_('allowing %s - guarded by %s\n') %
532 (self.series[idx], why))
532 (self.series[idx], why))
533 if not pushable:
533 if not pushable:
534 if why:
534 if why:
535 write(_('skipping %s - guarded by %s\n') %
535 write(_('skipping %s - guarded by %s\n') %
536 (self.series[idx], why))
536 (self.series[idx], why))
537 else:
537 else:
538 write(_('skipping %s - no matching guards\n') %
538 write(_('skipping %s - no matching guards\n') %
539 self.series[idx])
539 self.series[idx])
540
540
541 def savedirty(self):
541 def savedirty(self):
542 def writelist(items, path):
542 def writelist(items, path):
543 fp = self.opener(path, 'w')
543 fp = self.opener(path, 'w')
544 for i in items:
544 for i in items:
545 fp.write("%s\n" % i)
545 fp.write("%s\n" % i)
546 fp.close()
546 fp.close()
547 if self.applieddirty:
547 if self.applieddirty:
548 writelist(map(str, self.applied), self.statuspath)
548 writelist(map(str, self.applied), self.statuspath)
549 self.applieddirty = False
549 self.applieddirty = False
550 if self.seriesdirty:
550 if self.seriesdirty:
551 writelist(self.fullseries, self.seriespath)
551 writelist(self.fullseries, self.seriespath)
552 self.seriesdirty = False
552 self.seriesdirty = False
553 if self.guardsdirty:
553 if self.guardsdirty:
554 writelist(self.activeguards, self.guardspath)
554 writelist(self.activeguards, self.guardspath)
555 self.guardsdirty = False
555 self.guardsdirty = False
556 if self.added:
556 if self.added:
557 qrepo = self.qrepo()
557 qrepo = self.qrepo()
558 if qrepo:
558 if qrepo:
559 qrepo[None].add(f for f in self.added if f not in qrepo[None])
559 qrepo[None].add(f for f in self.added if f not in qrepo[None])
560 self.added = []
560 self.added = []
561
561
562 def removeundo(self, repo):
562 def removeundo(self, repo):
563 undo = repo.sjoin('undo')
563 undo = repo.sjoin('undo')
564 if not os.path.exists(undo):
564 if not os.path.exists(undo):
565 return
565 return
566 try:
566 try:
567 os.unlink(undo)
567 os.unlink(undo)
568 except OSError, inst:
568 except OSError, inst:
569 self.ui.warn(_('error removing undo: %s\n') % str(inst))
569 self.ui.warn(_('error removing undo: %s\n') % str(inst))
570
570
571 def backup(self, repo, files, copy=False):
571 def backup(self, repo, files, copy=False):
572 # backup local changes in --force case
572 # backup local changes in --force case
573 for f in sorted(files):
573 for f in sorted(files):
574 absf = repo.wjoin(f)
574 absf = repo.wjoin(f)
575 if os.path.lexists(absf):
575 if os.path.lexists(absf):
576 self.ui.note(_('saving current version of %s as %s\n') %
576 self.ui.note(_('saving current version of %s as %s\n') %
577 (f, f + '.orig'))
577 (f, f + '.orig'))
578 if copy:
578 if copy:
579 util.copyfile(absf, absf + '.orig')
579 util.copyfile(absf, absf + '.orig')
580 else:
580 else:
581 util.rename(absf, absf + '.orig')
581 util.rename(absf, absf + '.orig')
582
582
583 def printdiff(self, repo, diffopts, node1, node2=None, files=None,
583 def printdiff(self, repo, diffopts, node1, node2=None, files=None,
584 fp=None, changes=None, opts={}):
584 fp=None, changes=None, opts={}):
585 stat = opts.get('stat')
585 stat = opts.get('stat')
586 m = scmutil.match(repo[node1], files, opts)
586 m = scmutil.match(repo[node1], files, opts)
587 cmdutil.diffordiffstat(self.ui, repo, diffopts, node1, node2, m,
587 cmdutil.diffordiffstat(self.ui, repo, diffopts, node1, node2, m,
588 changes, stat, fp)
588 changes, stat, fp)
589
589
590 def mergeone(self, repo, mergeq, head, patch, rev, diffopts):
590 def mergeone(self, repo, mergeq, head, patch, rev, diffopts):
591 # first try just applying the patch
591 # first try just applying the patch
592 (err, n) = self.apply(repo, [patch], update_status=False,
592 (err, n) = self.apply(repo, [patch], update_status=False,
593 strict=True, merge=rev)
593 strict=True, merge=rev)
594
594
595 if err == 0:
595 if err == 0:
596 return (err, n)
596 return (err, n)
597
597
598 if n is None:
598 if n is None:
599 raise util.Abort(_("apply failed for patch %s") % patch)
599 raise util.Abort(_("apply failed for patch %s") % patch)
600
600
601 self.ui.warn(_("patch didn't work out, merging %s\n") % patch)
601 self.ui.warn(_("patch didn't work out, merging %s\n") % patch)
602
602
603 # apply failed, strip away that rev and merge.
603 # apply failed, strip away that rev and merge.
604 hg.clean(repo, head)
604 hg.clean(repo, head)
605 self.strip(repo, [n], update=False, backup='strip')
605 self.strip(repo, [n], update=False, backup='strip')
606
606
607 ctx = repo[rev]
607 ctx = repo[rev]
608 ret = hg.merge(repo, rev)
608 ret = hg.merge(repo, rev)
609 if ret:
609 if ret:
610 raise util.Abort(_("update returned %d") % ret)
610 raise util.Abort(_("update returned %d") % ret)
611 n = newcommit(repo, None, ctx.description(), ctx.user(), force=True)
611 n = newcommit(repo, None, ctx.description(), ctx.user(), force=True)
612 if n is None:
612 if n is None:
613 raise util.Abort(_("repo commit failed"))
613 raise util.Abort(_("repo commit failed"))
614 try:
614 try:
615 ph = patchheader(mergeq.join(patch), self.plainmode)
615 ph = patchheader(mergeq.join(patch), self.plainmode)
616 except Exception:
616 except Exception:
617 raise util.Abort(_("unable to read %s") % patch)
617 raise util.Abort(_("unable to read %s") % patch)
618
618
619 diffopts = self.patchopts(diffopts, patch)
619 diffopts = self.patchopts(diffopts, patch)
620 patchf = self.opener(patch, "w")
620 patchf = self.opener(patch, "w")
621 comments = str(ph)
621 comments = str(ph)
622 if comments:
622 if comments:
623 patchf.write(comments)
623 patchf.write(comments)
624 self.printdiff(repo, diffopts, head, n, fp=patchf)
624 self.printdiff(repo, diffopts, head, n, fp=patchf)
625 patchf.close()
625 patchf.close()
626 self.removeundo(repo)
626 self.removeundo(repo)
627 return (0, n)
627 return (0, n)
628
628
629 def qparents(self, repo, rev=None):
629 def qparents(self, repo, rev=None):
630 if rev is None:
630 if rev is None:
631 (p1, p2) = repo.dirstate.parents()
631 (p1, p2) = repo.dirstate.parents()
632 if p2 == nullid:
632 if p2 == nullid:
633 return p1
633 return p1
634 if not self.applied:
634 if not self.applied:
635 return None
635 return None
636 return self.applied[-1].node
636 return self.applied[-1].node
637 p1, p2 = repo.changelog.parents(rev)
637 p1, p2 = repo.changelog.parents(rev)
638 if p2 != nullid and p2 in [x.node for x in self.applied]:
638 if p2 != nullid and p2 in [x.node for x in self.applied]:
639 return p2
639 return p2
640 return p1
640 return p1
641
641
642 def mergepatch(self, repo, mergeq, series, diffopts):
642 def mergepatch(self, repo, mergeq, series, diffopts):
643 if not self.applied:
643 if not self.applied:
644 # each of the patches merged in will have two parents. This
644 # each of the patches merged in will have two parents. This
645 # can confuse the qrefresh, qdiff, and strip code because it
645 # can confuse the qrefresh, qdiff, and strip code because it
646 # needs to know which parent is actually in the patch queue.
646 # needs to know which parent is actually in the patch queue.
647 # so, we insert a merge marker with only one parent. This way
647 # so, we insert a merge marker with only one parent. This way
648 # the first patch in the queue is never a merge patch
648 # the first patch in the queue is never a merge patch
649 #
649 #
650 pname = ".hg.patches.merge.marker"
650 pname = ".hg.patches.merge.marker"
651 n = newcommit(repo, None, '[mq]: merge marker', force=True)
651 n = newcommit(repo, None, '[mq]: merge marker', force=True)
652 self.removeundo(repo)
652 self.removeundo(repo)
653 self.applied.append(statusentry(n, pname))
653 self.applied.append(statusentry(n, pname))
654 self.applieddirty = True
654 self.applieddirty = True
655
655
656 head = self.qparents(repo)
656 head = self.qparents(repo)
657
657
658 for patch in series:
658 for patch in series:
659 patch = mergeq.lookup(patch, strict=True)
659 patch = mergeq.lookup(patch, strict=True)
660 if not patch:
660 if not patch:
661 self.ui.warn(_("patch %s does not exist\n") % patch)
661 self.ui.warn(_("patch %s does not exist\n") % patch)
662 return (1, None)
662 return (1, None)
663 pushable, reason = self.pushable(patch)
663 pushable, reason = self.pushable(patch)
664 if not pushable:
664 if not pushable:
665 self.explainpushable(patch, all_patches=True)
665 self.explainpushable(patch, all_patches=True)
666 continue
666 continue
667 info = mergeq.isapplied(patch)
667 info = mergeq.isapplied(patch)
668 if not info:
668 if not info:
669 self.ui.warn(_("patch %s is not applied\n") % patch)
669 self.ui.warn(_("patch %s is not applied\n") % patch)
670 return (1, None)
670 return (1, None)
671 rev = info[1]
671 rev = info[1]
672 err, head = self.mergeone(repo, mergeq, head, patch, rev, diffopts)
672 err, head = self.mergeone(repo, mergeq, head, patch, rev, diffopts)
673 if head:
673 if head:
674 self.applied.append(statusentry(head, patch))
674 self.applied.append(statusentry(head, patch))
675 self.applieddirty = True
675 self.applieddirty = True
676 if err:
676 if err:
677 return (err, head)
677 return (err, head)
678 self.savedirty()
678 self.savedirty()
679 return (0, head)
679 return (0, head)
680
680
681 def patch(self, repo, patchfile):
681 def patch(self, repo, patchfile):
682 '''Apply patchfile to the working directory.
682 '''Apply patchfile to the working directory.
683 patchfile: name of patch file'''
683 patchfile: name of patch file'''
684 files = set()
684 files = set()
685 try:
685 try:
686 fuzz = patchmod.patch(self.ui, repo, patchfile, strip=1,
686 fuzz = patchmod.patch(self.ui, repo, patchfile, strip=1,
687 files=files, eolmode=None)
687 files=files, eolmode=None)
688 return (True, list(files), fuzz)
688 return (True, list(files), fuzz)
689 except Exception, inst:
689 except Exception, inst:
690 self.ui.note(str(inst) + '\n')
690 self.ui.note(str(inst) + '\n')
691 if not self.ui.verbose:
691 if not self.ui.verbose:
692 self.ui.warn(_("patch failed, unable to continue (try -v)\n"))
692 self.ui.warn(_("patch failed, unable to continue (try -v)\n"))
693 self.ui.traceback()
693 self.ui.traceback()
694 return (False, list(files), False)
694 return (False, list(files), False)
695
695
696 def apply(self, repo, series, list=False, update_status=True,
696 def apply(self, repo, series, list=False, update_status=True,
697 strict=False, patchdir=None, merge=None, all_files=None,
697 strict=False, patchdir=None, merge=None, all_files=None,
698 tobackup=None, check=False):
698 tobackup=None, check=False):
699 wlock = lock = tr = None
699 wlock = lock = tr = None
700 try:
700 try:
701 wlock = repo.wlock()
701 wlock = repo.wlock()
702 lock = repo.lock()
702 lock = repo.lock()
703 tr = repo.transaction("qpush")
703 tr = repo.transaction("qpush")
704 try:
704 try:
705 ret = self._apply(repo, series, list, update_status,
705 ret = self._apply(repo, series, list, update_status,
706 strict, patchdir, merge, all_files=all_files,
706 strict, patchdir, merge, all_files=all_files,
707 tobackup=tobackup, check=check)
707 tobackup=tobackup, check=check)
708 tr.close()
708 tr.close()
709 self.savedirty()
709 self.savedirty()
710 return ret
710 return ret
711 except AbortNoCleanup:
711 except AbortNoCleanup:
712 tr.close()
712 tr.close()
713 self.savedirty()
713 self.savedirty()
714 return 2, repo.dirstate.p1()
714 return 2, repo.dirstate.p1()
715 except:
715 except: # re-raises
716 try:
716 try:
717 tr.abort()
717 tr.abort()
718 finally:
718 finally:
719 repo.invalidate()
719 repo.invalidate()
720 repo.dirstate.invalidate()
720 repo.dirstate.invalidate()
721 self.invalidate()
721 self.invalidate()
722 raise
722 raise
723 finally:
723 finally:
724 release(tr, lock, wlock)
724 release(tr, lock, wlock)
725 self.removeundo(repo)
725 self.removeundo(repo)
726
726
727 def _apply(self, repo, series, list=False, update_status=True,
727 def _apply(self, repo, series, list=False, update_status=True,
728 strict=False, patchdir=None, merge=None, all_files=None,
728 strict=False, patchdir=None, merge=None, all_files=None,
729 tobackup=None, check=False):
729 tobackup=None, check=False):
730 """returns (error, hash)
730 """returns (error, hash)
731
731
732 error = 1 for unable to read, 2 for patch failed, 3 for patch
732 error = 1 for unable to read, 2 for patch failed, 3 for patch
733 fuzz. tobackup is None or a set of files to backup before they
733 fuzz. tobackup is None or a set of files to backup before they
734 are modified by a patch.
734 are modified by a patch.
735 """
735 """
736 # TODO unify with commands.py
736 # TODO unify with commands.py
737 if not patchdir:
737 if not patchdir:
738 patchdir = self.path
738 patchdir = self.path
739 err = 0
739 err = 0
740 n = None
740 n = None
741 for patchname in series:
741 for patchname in series:
742 pushable, reason = self.pushable(patchname)
742 pushable, reason = self.pushable(patchname)
743 if not pushable:
743 if not pushable:
744 self.explainpushable(patchname, all_patches=True)
744 self.explainpushable(patchname, all_patches=True)
745 continue
745 continue
746 self.ui.status(_("applying %s\n") % patchname)
746 self.ui.status(_("applying %s\n") % patchname)
747 pf = os.path.join(patchdir, patchname)
747 pf = os.path.join(patchdir, patchname)
748
748
749 try:
749 try:
750 ph = patchheader(self.join(patchname), self.plainmode)
750 ph = patchheader(self.join(patchname), self.plainmode)
751 except IOError:
751 except IOError:
752 self.ui.warn(_("unable to read %s\n") % patchname)
752 self.ui.warn(_("unable to read %s\n") % patchname)
753 err = 1
753 err = 1
754 break
754 break
755
755
756 message = ph.message
756 message = ph.message
757 if not message:
757 if not message:
758 # The commit message should not be translated
758 # The commit message should not be translated
759 message = "imported patch %s\n" % patchname
759 message = "imported patch %s\n" % patchname
760 else:
760 else:
761 if list:
761 if list:
762 # The commit message should not be translated
762 # The commit message should not be translated
763 message.append("\nimported patch %s" % patchname)
763 message.append("\nimported patch %s" % patchname)
764 message = '\n'.join(message)
764 message = '\n'.join(message)
765
765
766 if ph.haspatch:
766 if ph.haspatch:
767 if tobackup:
767 if tobackup:
768 touched = patchmod.changedfiles(self.ui, repo, pf)
768 touched = patchmod.changedfiles(self.ui, repo, pf)
769 touched = set(touched) & tobackup
769 touched = set(touched) & tobackup
770 if touched and check:
770 if touched and check:
771 raise AbortNoCleanup(
771 raise AbortNoCleanup(
772 _("local changes found, refresh first"))
772 _("local changes found, refresh first"))
773 self.backup(repo, touched, copy=True)
773 self.backup(repo, touched, copy=True)
774 tobackup = tobackup - touched
774 tobackup = tobackup - touched
775 (patcherr, files, fuzz) = self.patch(repo, pf)
775 (patcherr, files, fuzz) = self.patch(repo, pf)
776 if all_files is not None:
776 if all_files is not None:
777 all_files.update(files)
777 all_files.update(files)
778 patcherr = not patcherr
778 patcherr = not patcherr
779 else:
779 else:
780 self.ui.warn(_("patch %s is empty\n") % patchname)
780 self.ui.warn(_("patch %s is empty\n") % patchname)
781 patcherr, files, fuzz = 0, [], 0
781 patcherr, files, fuzz = 0, [], 0
782
782
783 if merge and files:
783 if merge and files:
784 # Mark as removed/merged and update dirstate parent info
784 # Mark as removed/merged and update dirstate parent info
785 removed = []
785 removed = []
786 merged = []
786 merged = []
787 for f in files:
787 for f in files:
788 if os.path.lexists(repo.wjoin(f)):
788 if os.path.lexists(repo.wjoin(f)):
789 merged.append(f)
789 merged.append(f)
790 else:
790 else:
791 removed.append(f)
791 removed.append(f)
792 for f in removed:
792 for f in removed:
793 repo.dirstate.remove(f)
793 repo.dirstate.remove(f)
794 for f in merged:
794 for f in merged:
795 repo.dirstate.merge(f)
795 repo.dirstate.merge(f)
796 p1, p2 = repo.dirstate.parents()
796 p1, p2 = repo.dirstate.parents()
797 repo.setparents(p1, merge)
797 repo.setparents(p1, merge)
798
798
799 match = scmutil.matchfiles(repo, files or [])
799 match = scmutil.matchfiles(repo, files or [])
800 oldtip = repo['tip']
800 oldtip = repo['tip']
801 n = newcommit(repo, None, message, ph.user, ph.date, match=match,
801 n = newcommit(repo, None, message, ph.user, ph.date, match=match,
802 force=True)
802 force=True)
803 if repo['tip'] == oldtip:
803 if repo['tip'] == oldtip:
804 raise util.Abort(_("qpush exactly duplicates child changeset"))
804 raise util.Abort(_("qpush exactly duplicates child changeset"))
805 if n is None:
805 if n is None:
806 raise util.Abort(_("repository commit failed"))
806 raise util.Abort(_("repository commit failed"))
807
807
808 if update_status:
808 if update_status:
809 self.applied.append(statusentry(n, patchname))
809 self.applied.append(statusentry(n, patchname))
810
810
811 if patcherr:
811 if patcherr:
812 self.ui.warn(_("patch failed, rejects left in working dir\n"))
812 self.ui.warn(_("patch failed, rejects left in working dir\n"))
813 err = 2
813 err = 2
814 break
814 break
815
815
816 if fuzz and strict:
816 if fuzz and strict:
817 self.ui.warn(_("fuzz found when applying patch, stopping\n"))
817 self.ui.warn(_("fuzz found when applying patch, stopping\n"))
818 err = 3
818 err = 3
819 break
819 break
820 return (err, n)
820 return (err, n)
821
821
822 def _cleanup(self, patches, numrevs, keep=False):
822 def _cleanup(self, patches, numrevs, keep=False):
823 if not keep:
823 if not keep:
824 r = self.qrepo()
824 r = self.qrepo()
825 if r:
825 if r:
826 r[None].forget(patches)
826 r[None].forget(patches)
827 for p in patches:
827 for p in patches:
828 os.unlink(self.join(p))
828 os.unlink(self.join(p))
829
829
830 qfinished = []
830 qfinished = []
831 if numrevs:
831 if numrevs:
832 qfinished = self.applied[:numrevs]
832 qfinished = self.applied[:numrevs]
833 del self.applied[:numrevs]
833 del self.applied[:numrevs]
834 self.applieddirty = True
834 self.applieddirty = True
835
835
836 unknown = []
836 unknown = []
837
837
838 for (i, p) in sorted([(self.findseries(p), p) for p in patches],
838 for (i, p) in sorted([(self.findseries(p), p) for p in patches],
839 reverse=True):
839 reverse=True):
840 if i is not None:
840 if i is not None:
841 del self.fullseries[i]
841 del self.fullseries[i]
842 else:
842 else:
843 unknown.append(p)
843 unknown.append(p)
844
844
845 if unknown:
845 if unknown:
846 if numrevs:
846 if numrevs:
847 rev = dict((entry.name, entry.node) for entry in qfinished)
847 rev = dict((entry.name, entry.node) for entry in qfinished)
848 for p in unknown:
848 for p in unknown:
849 msg = _('revision %s refers to unknown patches: %s\n')
849 msg = _('revision %s refers to unknown patches: %s\n')
850 self.ui.warn(msg % (short(rev[p]), p))
850 self.ui.warn(msg % (short(rev[p]), p))
851 else:
851 else:
852 msg = _('unknown patches: %s\n')
852 msg = _('unknown patches: %s\n')
853 raise util.Abort(''.join(msg % p for p in unknown))
853 raise util.Abort(''.join(msg % p for p in unknown))
854
854
855 self.parseseries()
855 self.parseseries()
856 self.seriesdirty = True
856 self.seriesdirty = True
857 return [entry.node for entry in qfinished]
857 return [entry.node for entry in qfinished]
858
858
859 def _revpatches(self, repo, revs):
859 def _revpatches(self, repo, revs):
860 firstrev = repo[self.applied[0].node].rev()
860 firstrev = repo[self.applied[0].node].rev()
861 patches = []
861 patches = []
862 for i, rev in enumerate(revs):
862 for i, rev in enumerate(revs):
863
863
864 if rev < firstrev:
864 if rev < firstrev:
865 raise util.Abort(_('revision %d is not managed') % rev)
865 raise util.Abort(_('revision %d is not managed') % rev)
866
866
867 ctx = repo[rev]
867 ctx = repo[rev]
868 base = self.applied[i].node
868 base = self.applied[i].node
869 if ctx.node() != base:
869 if ctx.node() != base:
870 msg = _('cannot delete revision %d above applied patches')
870 msg = _('cannot delete revision %d above applied patches')
871 raise util.Abort(msg % rev)
871 raise util.Abort(msg % rev)
872
872
873 patch = self.applied[i].name
873 patch = self.applied[i].name
874 for fmt in ('[mq]: %s', 'imported patch %s'):
874 for fmt in ('[mq]: %s', 'imported patch %s'):
875 if ctx.description() == fmt % patch:
875 if ctx.description() == fmt % patch:
876 msg = _('patch %s finalized without changeset message\n')
876 msg = _('patch %s finalized without changeset message\n')
877 repo.ui.status(msg % patch)
877 repo.ui.status(msg % patch)
878 break
878 break
879
879
880 patches.append(patch)
880 patches.append(patch)
881 return patches
881 return patches
882
882
883 def finish(self, repo, revs):
883 def finish(self, repo, revs):
884 # Manually trigger phase computation to ensure phasedefaults is
884 # Manually trigger phase computation to ensure phasedefaults is
885 # executed before we remove the patches.
885 # executed before we remove the patches.
886 repo._phasecache
886 repo._phasecache
887 patches = self._revpatches(repo, sorted(revs))
887 patches = self._revpatches(repo, sorted(revs))
888 qfinished = self._cleanup(patches, len(patches))
888 qfinished = self._cleanup(patches, len(patches))
889 if qfinished and repo.ui.configbool('mq', 'secret', False):
889 if qfinished and repo.ui.configbool('mq', 'secret', False):
890 # only use this logic when the secret option is added
890 # only use this logic when the secret option is added
891 oldqbase = repo[qfinished[0]]
891 oldqbase = repo[qfinished[0]]
892 tphase = repo.ui.config('phases', 'new-commit', phases.draft)
892 tphase = repo.ui.config('phases', 'new-commit', phases.draft)
893 if oldqbase.phase() > tphase and oldqbase.p1().phase() <= tphase:
893 if oldqbase.phase() > tphase and oldqbase.p1().phase() <= tphase:
894 phases.advanceboundary(repo, tphase, qfinished)
894 phases.advanceboundary(repo, tphase, qfinished)
895
895
896 def delete(self, repo, patches, opts):
896 def delete(self, repo, patches, opts):
897 if not patches and not opts.get('rev'):
897 if not patches and not opts.get('rev'):
898 raise util.Abort(_('qdelete requires at least one revision or '
898 raise util.Abort(_('qdelete requires at least one revision or '
899 'patch name'))
899 'patch name'))
900
900
901 realpatches = []
901 realpatches = []
902 for patch in patches:
902 for patch in patches:
903 patch = self.lookup(patch, strict=True)
903 patch = self.lookup(patch, strict=True)
904 info = self.isapplied(patch)
904 info = self.isapplied(patch)
905 if info:
905 if info:
906 raise util.Abort(_("cannot delete applied patch %s") % patch)
906 raise util.Abort(_("cannot delete applied patch %s") % patch)
907 if patch not in self.series:
907 if patch not in self.series:
908 raise util.Abort(_("patch %s not in series file") % patch)
908 raise util.Abort(_("patch %s not in series file") % patch)
909 if patch not in realpatches:
909 if patch not in realpatches:
910 realpatches.append(patch)
910 realpatches.append(patch)
911
911
912 numrevs = 0
912 numrevs = 0
913 if opts.get('rev'):
913 if opts.get('rev'):
914 if not self.applied:
914 if not self.applied:
915 raise util.Abort(_('no patches applied'))
915 raise util.Abort(_('no patches applied'))
916 revs = scmutil.revrange(repo, opts.get('rev'))
916 revs = scmutil.revrange(repo, opts.get('rev'))
917 if len(revs) > 1 and revs[0] > revs[1]:
917 if len(revs) > 1 and revs[0] > revs[1]:
918 revs.reverse()
918 revs.reverse()
919 revpatches = self._revpatches(repo, revs)
919 revpatches = self._revpatches(repo, revs)
920 realpatches += revpatches
920 realpatches += revpatches
921 numrevs = len(revpatches)
921 numrevs = len(revpatches)
922
922
923 self._cleanup(realpatches, numrevs, opts.get('keep'))
923 self._cleanup(realpatches, numrevs, opts.get('keep'))
924
924
925 def checktoppatch(self, repo):
925 def checktoppatch(self, repo):
926 if self.applied:
926 if self.applied:
927 top = self.applied[-1].node
927 top = self.applied[-1].node
928 patch = self.applied[-1].name
928 patch = self.applied[-1].name
929 pp = repo.dirstate.parents()
929 pp = repo.dirstate.parents()
930 if top not in pp:
930 if top not in pp:
931 raise util.Abort(_("working directory revision is not qtip"))
931 raise util.Abort(_("working directory revision is not qtip"))
932 return top, patch
932 return top, patch
933 return None, None
933 return None, None
934
934
935 def checksubstate(self, repo):
935 def checksubstate(self, repo):
936 '''return list of subrepos at a different revision than substate.
936 '''return list of subrepos at a different revision than substate.
937 Abort if any subrepos have uncommitted changes.'''
937 Abort if any subrepos have uncommitted changes.'''
938 inclsubs = []
938 inclsubs = []
939 wctx = repo[None]
939 wctx = repo[None]
940 for s in wctx.substate:
940 for s in wctx.substate:
941 if wctx.sub(s).dirty(True):
941 if wctx.sub(s).dirty(True):
942 raise util.Abort(
942 raise util.Abort(
943 _("uncommitted changes in subrepository %s") % s)
943 _("uncommitted changes in subrepository %s") % s)
944 elif wctx.sub(s).dirty():
944 elif wctx.sub(s).dirty():
945 inclsubs.append(s)
945 inclsubs.append(s)
946 return inclsubs
946 return inclsubs
947
947
948 def localchangesfound(self, refresh=True):
948 def localchangesfound(self, refresh=True):
949 if refresh:
949 if refresh:
950 raise util.Abort(_("local changes found, refresh first"))
950 raise util.Abort(_("local changes found, refresh first"))
951 else:
951 else:
952 raise util.Abort(_("local changes found"))
952 raise util.Abort(_("local changes found"))
953
953
954 def checklocalchanges(self, repo, force=False, refresh=True):
954 def checklocalchanges(self, repo, force=False, refresh=True):
955 m, a, r, d = repo.status()[:4]
955 m, a, r, d = repo.status()[:4]
956 if (m or a or r or d) and not force:
956 if (m or a or r or d) and not force:
957 self.localchangesfound(refresh)
957 self.localchangesfound(refresh)
958 return m, a, r, d
958 return m, a, r, d
959
959
960 _reserved = ('series', 'status', 'guards', '.', '..')
960 _reserved = ('series', 'status', 'guards', '.', '..')
961 def checkreservedname(self, name):
961 def checkreservedname(self, name):
962 if name in self._reserved:
962 if name in self._reserved:
963 raise util.Abort(_('"%s" cannot be used as the name of a patch')
963 raise util.Abort(_('"%s" cannot be used as the name of a patch')
964 % name)
964 % name)
965 for prefix in ('.hg', '.mq'):
965 for prefix in ('.hg', '.mq'):
966 if name.startswith(prefix):
966 if name.startswith(prefix):
967 raise util.Abort(_('patch name cannot begin with "%s"')
967 raise util.Abort(_('patch name cannot begin with "%s"')
968 % prefix)
968 % prefix)
969 for c in ('#', ':'):
969 for c in ('#', ':'):
970 if c in name:
970 if c in name:
971 raise util.Abort(_('"%s" cannot be used in the name of a patch')
971 raise util.Abort(_('"%s" cannot be used in the name of a patch')
972 % c)
972 % c)
973
973
974 def checkpatchname(self, name, force=False):
974 def checkpatchname(self, name, force=False):
975 self.checkreservedname(name)
975 self.checkreservedname(name)
976 if not force and os.path.exists(self.join(name)):
976 if not force and os.path.exists(self.join(name)):
977 if os.path.isdir(self.join(name)):
977 if os.path.isdir(self.join(name)):
978 raise util.Abort(_('"%s" already exists as a directory')
978 raise util.Abort(_('"%s" already exists as a directory')
979 % name)
979 % name)
980 else:
980 else:
981 raise util.Abort(_('patch "%s" already exists') % name)
981 raise util.Abort(_('patch "%s" already exists') % name)
982
982
983 def checkforcecheck(self, check, force):
983 def checkforcecheck(self, check, force):
984 if force and check:
984 if force and check:
985 raise util.Abort(_('cannot use both --force and --check'))
985 raise util.Abort(_('cannot use both --force and --check'))
986
986
987 def new(self, repo, patchfn, *pats, **opts):
987 def new(self, repo, patchfn, *pats, **opts):
988 """options:
988 """options:
989 msg: a string or a no-argument function returning a string
989 msg: a string or a no-argument function returning a string
990 """
990 """
991 msg = opts.get('msg')
991 msg = opts.get('msg')
992 user = opts.get('user')
992 user = opts.get('user')
993 date = opts.get('date')
993 date = opts.get('date')
994 if date:
994 if date:
995 date = util.parsedate(date)
995 date = util.parsedate(date)
996 diffopts = self.diffopts({'git': opts.get('git')})
996 diffopts = self.diffopts({'git': opts.get('git')})
997 if opts.get('checkname', True):
997 if opts.get('checkname', True):
998 self.checkpatchname(patchfn)
998 self.checkpatchname(patchfn)
999 inclsubs = self.checksubstate(repo)
999 inclsubs = self.checksubstate(repo)
1000 if inclsubs:
1000 if inclsubs:
1001 inclsubs.append('.hgsubstate')
1001 inclsubs.append('.hgsubstate')
1002 substatestate = repo.dirstate['.hgsubstate']
1002 substatestate = repo.dirstate['.hgsubstate']
1003 if opts.get('include') or opts.get('exclude') or pats:
1003 if opts.get('include') or opts.get('exclude') or pats:
1004 if inclsubs:
1004 if inclsubs:
1005 pats = list(pats or []) + inclsubs
1005 pats = list(pats or []) + inclsubs
1006 match = scmutil.match(repo[None], pats, opts)
1006 match = scmutil.match(repo[None], pats, opts)
1007 # detect missing files in pats
1007 # detect missing files in pats
1008 def badfn(f, msg):
1008 def badfn(f, msg):
1009 if f != '.hgsubstate': # .hgsubstate is auto-created
1009 if f != '.hgsubstate': # .hgsubstate is auto-created
1010 raise util.Abort('%s: %s' % (f, msg))
1010 raise util.Abort('%s: %s' % (f, msg))
1011 match.bad = badfn
1011 match.bad = badfn
1012 changes = repo.status(match=match)
1012 changes = repo.status(match=match)
1013 m, a, r, d = changes[:4]
1013 m, a, r, d = changes[:4]
1014 else:
1014 else:
1015 changes = self.checklocalchanges(repo, force=True)
1015 changes = self.checklocalchanges(repo, force=True)
1016 m, a, r, d = changes
1016 m, a, r, d = changes
1017 match = scmutil.matchfiles(repo, m + a + r + inclsubs)
1017 match = scmutil.matchfiles(repo, m + a + r + inclsubs)
1018 if len(repo[None].parents()) > 1:
1018 if len(repo[None].parents()) > 1:
1019 raise util.Abort(_('cannot manage merge changesets'))
1019 raise util.Abort(_('cannot manage merge changesets'))
1020 commitfiles = m + a + r
1020 commitfiles = m + a + r
1021 self.checktoppatch(repo)
1021 self.checktoppatch(repo)
1022 insert = self.fullseriesend()
1022 insert = self.fullseriesend()
1023 wlock = repo.wlock()
1023 wlock = repo.wlock()
1024 try:
1024 try:
1025 try:
1025 try:
1026 # if patch file write fails, abort early
1026 # if patch file write fails, abort early
1027 p = self.opener(patchfn, "w")
1027 p = self.opener(patchfn, "w")
1028 except IOError, e:
1028 except IOError, e:
1029 raise util.Abort(_('cannot write patch "%s": %s')
1029 raise util.Abort(_('cannot write patch "%s": %s')
1030 % (patchfn, e.strerror))
1030 % (patchfn, e.strerror))
1031 try:
1031 try:
1032 if self.plainmode:
1032 if self.plainmode:
1033 if user:
1033 if user:
1034 p.write("From: " + user + "\n")
1034 p.write("From: " + user + "\n")
1035 if not date:
1035 if not date:
1036 p.write("\n")
1036 p.write("\n")
1037 if date:
1037 if date:
1038 p.write("Date: %d %d\n\n" % date)
1038 p.write("Date: %d %d\n\n" % date)
1039 else:
1039 else:
1040 p.write("# HG changeset patch\n")
1040 p.write("# HG changeset patch\n")
1041 p.write("# Parent "
1041 p.write("# Parent "
1042 + hex(repo[None].p1().node()) + "\n")
1042 + hex(repo[None].p1().node()) + "\n")
1043 if user:
1043 if user:
1044 p.write("# User " + user + "\n")
1044 p.write("# User " + user + "\n")
1045 if date:
1045 if date:
1046 p.write("# Date %s %s\n\n" % date)
1046 p.write("# Date %s %s\n\n" % date)
1047 if util.safehasattr(msg, '__call__'):
1047 if util.safehasattr(msg, '__call__'):
1048 msg = msg()
1048 msg = msg()
1049 commitmsg = msg and msg or ("[mq]: %s" % patchfn)
1049 commitmsg = msg and msg or ("[mq]: %s" % patchfn)
1050 n = newcommit(repo, None, commitmsg, user, date, match=match,
1050 n = newcommit(repo, None, commitmsg, user, date, match=match,
1051 force=True)
1051 force=True)
1052 if n is None:
1052 if n is None:
1053 raise util.Abort(_("repo commit failed"))
1053 raise util.Abort(_("repo commit failed"))
1054 try:
1054 try:
1055 self.fullseries[insert:insert] = [patchfn]
1055 self.fullseries[insert:insert] = [patchfn]
1056 self.applied.append(statusentry(n, patchfn))
1056 self.applied.append(statusentry(n, patchfn))
1057 self.parseseries()
1057 self.parseseries()
1058 self.seriesdirty = True
1058 self.seriesdirty = True
1059 self.applieddirty = True
1059 self.applieddirty = True
1060 if msg:
1060 if msg:
1061 msg = msg + "\n\n"
1061 msg = msg + "\n\n"
1062 p.write(msg)
1062 p.write(msg)
1063 if commitfiles:
1063 if commitfiles:
1064 parent = self.qparents(repo, n)
1064 parent = self.qparents(repo, n)
1065 if inclsubs:
1065 if inclsubs:
1066 if substatestate in 'a?':
1066 if substatestate in 'a?':
1067 changes[1].append('.hgsubstate')
1067 changes[1].append('.hgsubstate')
1068 elif substatestate in 'r':
1068 elif substatestate in 'r':
1069 changes[2].append('.hgsubstate')
1069 changes[2].append('.hgsubstate')
1070 else: # modified
1070 else: # modified
1071 changes[0].append('.hgsubstate')
1071 changes[0].append('.hgsubstate')
1072 chunks = patchmod.diff(repo, node1=parent, node2=n,
1072 chunks = patchmod.diff(repo, node1=parent, node2=n,
1073 changes=changes, opts=diffopts)
1073 changes=changes, opts=diffopts)
1074 for chunk in chunks:
1074 for chunk in chunks:
1075 p.write(chunk)
1075 p.write(chunk)
1076 p.close()
1076 p.close()
1077 r = self.qrepo()
1077 r = self.qrepo()
1078 if r:
1078 if r:
1079 r[None].add([patchfn])
1079 r[None].add([patchfn])
1080 except:
1080 except: # re-raises
1081 repo.rollback()
1081 repo.rollback()
1082 raise
1082 raise
1083 except Exception:
1083 except Exception:
1084 patchpath = self.join(patchfn)
1084 patchpath = self.join(patchfn)
1085 try:
1085 try:
1086 os.unlink(patchpath)
1086 os.unlink(patchpath)
1087 except OSError:
1087 except OSError:
1088 self.ui.warn(_('error unlinking %s\n') % patchpath)
1088 self.ui.warn(_('error unlinking %s\n') % patchpath)
1089 raise
1089 raise
1090 self.removeundo(repo)
1090 self.removeundo(repo)
1091 finally:
1091 finally:
1092 release(wlock)
1092 release(wlock)
1093
1093
1094 def strip(self, repo, revs, update=True, backup="all", force=None):
1094 def strip(self, repo, revs, update=True, backup="all", force=None):
1095 wlock = lock = None
1095 wlock = lock = None
1096 try:
1096 try:
1097 wlock = repo.wlock()
1097 wlock = repo.wlock()
1098 lock = repo.lock()
1098 lock = repo.lock()
1099
1099
1100 if update:
1100 if update:
1101 self.checklocalchanges(repo, force=force, refresh=False)
1101 self.checklocalchanges(repo, force=force, refresh=False)
1102 urev = self.qparents(repo, revs[0])
1102 urev = self.qparents(repo, revs[0])
1103 hg.clean(repo, urev)
1103 hg.clean(repo, urev)
1104 repo.dirstate.write()
1104 repo.dirstate.write()
1105
1105
1106 repair.strip(self.ui, repo, revs, backup)
1106 repair.strip(self.ui, repo, revs, backup)
1107 finally:
1107 finally:
1108 release(lock, wlock)
1108 release(lock, wlock)
1109
1109
1110 def isapplied(self, patch):
1110 def isapplied(self, patch):
1111 """returns (index, rev, patch)"""
1111 """returns (index, rev, patch)"""
1112 for i, a in enumerate(self.applied):
1112 for i, a in enumerate(self.applied):
1113 if a.name == patch:
1113 if a.name == patch:
1114 return (i, a.node, a.name)
1114 return (i, a.node, a.name)
1115 return None
1115 return None
1116
1116
1117 # if the exact patch name does not exist, we try a few
1117 # if the exact patch name does not exist, we try a few
1118 # variations. If strict is passed, we try only #1
1118 # variations. If strict is passed, we try only #1
1119 #
1119 #
1120 # 1) a number (as string) to indicate an offset in the series file
1120 # 1) a number (as string) to indicate an offset in the series file
1121 # 2) a unique substring of the patch name was given
1121 # 2) a unique substring of the patch name was given
1122 # 3) patchname[-+]num to indicate an offset in the series file
1122 # 3) patchname[-+]num to indicate an offset in the series file
1123 def lookup(self, patch, strict=False):
1123 def lookup(self, patch, strict=False):
1124 def partialname(s):
1124 def partialname(s):
1125 if s in self.series:
1125 if s in self.series:
1126 return s
1126 return s
1127 matches = [x for x in self.series if s in x]
1127 matches = [x for x in self.series if s in x]
1128 if len(matches) > 1:
1128 if len(matches) > 1:
1129 self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
1129 self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
1130 for m in matches:
1130 for m in matches:
1131 self.ui.warn(' %s\n' % m)
1131 self.ui.warn(' %s\n' % m)
1132 return None
1132 return None
1133 if matches:
1133 if matches:
1134 return matches[0]
1134 return matches[0]
1135 if self.series and self.applied:
1135 if self.series and self.applied:
1136 if s == 'qtip':
1136 if s == 'qtip':
1137 return self.series[self.seriesend(True)-1]
1137 return self.series[self.seriesend(True)-1]
1138 if s == 'qbase':
1138 if s == 'qbase':
1139 return self.series[0]
1139 return self.series[0]
1140 return None
1140 return None
1141
1141
1142 if patch in self.series:
1142 if patch in self.series:
1143 return patch
1143 return patch
1144
1144
1145 if not os.path.isfile(self.join(patch)):
1145 if not os.path.isfile(self.join(patch)):
1146 try:
1146 try:
1147 sno = int(patch)
1147 sno = int(patch)
1148 except (ValueError, OverflowError):
1148 except (ValueError, OverflowError):
1149 pass
1149 pass
1150 else:
1150 else:
1151 if -len(self.series) <= sno < len(self.series):
1151 if -len(self.series) <= sno < len(self.series):
1152 return self.series[sno]
1152 return self.series[sno]
1153
1153
1154 if not strict:
1154 if not strict:
1155 res = partialname(patch)
1155 res = partialname(patch)
1156 if res:
1156 if res:
1157 return res
1157 return res
1158 minus = patch.rfind('-')
1158 minus = patch.rfind('-')
1159 if minus >= 0:
1159 if minus >= 0:
1160 res = partialname(patch[:minus])
1160 res = partialname(patch[:minus])
1161 if res:
1161 if res:
1162 i = self.series.index(res)
1162 i = self.series.index(res)
1163 try:
1163 try:
1164 off = int(patch[minus + 1:] or 1)
1164 off = int(patch[minus + 1:] or 1)
1165 except (ValueError, OverflowError):
1165 except (ValueError, OverflowError):
1166 pass
1166 pass
1167 else:
1167 else:
1168 if i - off >= 0:
1168 if i - off >= 0:
1169 return self.series[i - off]
1169 return self.series[i - off]
1170 plus = patch.rfind('+')
1170 plus = patch.rfind('+')
1171 if plus >= 0:
1171 if plus >= 0:
1172 res = partialname(patch[:plus])
1172 res = partialname(patch[:plus])
1173 if res:
1173 if res:
1174 i = self.series.index(res)
1174 i = self.series.index(res)
1175 try:
1175 try:
1176 off = int(patch[plus + 1:] or 1)
1176 off = int(patch[plus + 1:] or 1)
1177 except (ValueError, OverflowError):
1177 except (ValueError, OverflowError):
1178 pass
1178 pass
1179 else:
1179 else:
1180 if i + off < len(self.series):
1180 if i + off < len(self.series):
1181 return self.series[i + off]
1181 return self.series[i + off]
1182 raise util.Abort(_("patch %s not in series") % patch)
1182 raise util.Abort(_("patch %s not in series") % patch)
1183
1183
1184 def push(self, repo, patch=None, force=False, list=False, mergeq=None,
1184 def push(self, repo, patch=None, force=False, list=False, mergeq=None,
1185 all=False, move=False, exact=False, nobackup=False, check=False):
1185 all=False, move=False, exact=False, nobackup=False, check=False):
1186 self.checkforcecheck(check, force)
1186 self.checkforcecheck(check, force)
1187 diffopts = self.diffopts()
1187 diffopts = self.diffopts()
1188 wlock = repo.wlock()
1188 wlock = repo.wlock()
1189 try:
1189 try:
1190 heads = []
1190 heads = []
1191 for b, ls in repo.branchmap().iteritems():
1191 for b, ls in repo.branchmap().iteritems():
1192 heads += ls
1192 heads += ls
1193 if not heads:
1193 if not heads:
1194 heads = [nullid]
1194 heads = [nullid]
1195 if repo.dirstate.p1() not in heads and not exact:
1195 if repo.dirstate.p1() not in heads and not exact:
1196 self.ui.status(_("(working directory not at a head)\n"))
1196 self.ui.status(_("(working directory not at a head)\n"))
1197
1197
1198 if not self.series:
1198 if not self.series:
1199 self.ui.warn(_('no patches in series\n'))
1199 self.ui.warn(_('no patches in series\n'))
1200 return 0
1200 return 0
1201
1201
1202 # Suppose our series file is: A B C and the current 'top'
1202 # Suppose our series file is: A B C and the current 'top'
1203 # patch is B. qpush C should be performed (moving forward)
1203 # patch is B. qpush C should be performed (moving forward)
1204 # qpush B is a NOP (no change) qpush A is an error (can't
1204 # qpush B is a NOP (no change) qpush A is an error (can't
1205 # go backwards with qpush)
1205 # go backwards with qpush)
1206 if patch:
1206 if patch:
1207 patch = self.lookup(patch)
1207 patch = self.lookup(patch)
1208 info = self.isapplied(patch)
1208 info = self.isapplied(patch)
1209 if info and info[0] >= len(self.applied) - 1:
1209 if info and info[0] >= len(self.applied) - 1:
1210 self.ui.warn(
1210 self.ui.warn(
1211 _('qpush: %s is already at the top\n') % patch)
1211 _('qpush: %s is already at the top\n') % patch)
1212 return 0
1212 return 0
1213
1213
1214 pushable, reason = self.pushable(patch)
1214 pushable, reason = self.pushable(patch)
1215 if pushable:
1215 if pushable:
1216 if self.series.index(patch) < self.seriesend():
1216 if self.series.index(patch) < self.seriesend():
1217 raise util.Abort(
1217 raise util.Abort(
1218 _("cannot push to a previous patch: %s") % patch)
1218 _("cannot push to a previous patch: %s") % patch)
1219 else:
1219 else:
1220 if reason:
1220 if reason:
1221 reason = _('guarded by %s') % reason
1221 reason = _('guarded by %s') % reason
1222 else:
1222 else:
1223 reason = _('no matching guards')
1223 reason = _('no matching guards')
1224 self.ui.warn(_("cannot push '%s' - %s\n") % (patch, reason))
1224 self.ui.warn(_("cannot push '%s' - %s\n") % (patch, reason))
1225 return 1
1225 return 1
1226 elif all:
1226 elif all:
1227 patch = self.series[-1]
1227 patch = self.series[-1]
1228 if self.isapplied(patch):
1228 if self.isapplied(patch):
1229 self.ui.warn(_('all patches are currently applied\n'))
1229 self.ui.warn(_('all patches are currently applied\n'))
1230 return 0
1230 return 0
1231
1231
1232 # Following the above example, starting at 'top' of B:
1232 # Following the above example, starting at 'top' of B:
1233 # qpush should be performed (pushes C), but a subsequent
1233 # qpush should be performed (pushes C), but a subsequent
1234 # qpush without an argument is an error (nothing to
1234 # qpush without an argument is an error (nothing to
1235 # apply). This allows a loop of "...while hg qpush..." to
1235 # apply). This allows a loop of "...while hg qpush..." to
1236 # work as it detects an error when done
1236 # work as it detects an error when done
1237 start = self.seriesend()
1237 start = self.seriesend()
1238 if start == len(self.series):
1238 if start == len(self.series):
1239 self.ui.warn(_('patch series already fully applied\n'))
1239 self.ui.warn(_('patch series already fully applied\n'))
1240 return 1
1240 return 1
1241 if not force and not check:
1241 if not force and not check:
1242 self.checklocalchanges(repo, refresh=self.applied)
1242 self.checklocalchanges(repo, refresh=self.applied)
1243
1243
1244 if exact:
1244 if exact:
1245 if check:
1245 if check:
1246 raise util.Abort(
1246 raise util.Abort(
1247 _("cannot use --exact and --check together"))
1247 _("cannot use --exact and --check together"))
1248 if move:
1248 if move:
1249 raise util.Abort(_('cannot use --exact and --move '
1249 raise util.Abort(_('cannot use --exact and --move '
1250 'together'))
1250 'together'))
1251 if self.applied:
1251 if self.applied:
1252 raise util.Abort(_('cannot push --exact with applied '
1252 raise util.Abort(_('cannot push --exact with applied '
1253 'patches'))
1253 'patches'))
1254 root = self.series[start]
1254 root = self.series[start]
1255 target = patchheader(self.join(root), self.plainmode).parent
1255 target = patchheader(self.join(root), self.plainmode).parent
1256 if not target:
1256 if not target:
1257 raise util.Abort(
1257 raise util.Abort(
1258 _("%s does not have a parent recorded") % root)
1258 _("%s does not have a parent recorded") % root)
1259 if not repo[target] == repo['.']:
1259 if not repo[target] == repo['.']:
1260 hg.update(repo, target)
1260 hg.update(repo, target)
1261
1261
1262 if move:
1262 if move:
1263 if not patch:
1263 if not patch:
1264 raise util.Abort(_("please specify the patch to move"))
1264 raise util.Abort(_("please specify the patch to move"))
1265 for fullstart, rpn in enumerate(self.fullseries):
1265 for fullstart, rpn in enumerate(self.fullseries):
1266 # strip markers for patch guards
1266 # strip markers for patch guards
1267 if self.guard_re.split(rpn, 1)[0] == self.series[start]:
1267 if self.guard_re.split(rpn, 1)[0] == self.series[start]:
1268 break
1268 break
1269 for i, rpn in enumerate(self.fullseries[fullstart:]):
1269 for i, rpn in enumerate(self.fullseries[fullstart:]):
1270 # strip markers for patch guards
1270 # strip markers for patch guards
1271 if self.guard_re.split(rpn, 1)[0] == patch:
1271 if self.guard_re.split(rpn, 1)[0] == patch:
1272 break
1272 break
1273 index = fullstart + i
1273 index = fullstart + i
1274 assert index < len(self.fullseries)
1274 assert index < len(self.fullseries)
1275 fullpatch = self.fullseries[index]
1275 fullpatch = self.fullseries[index]
1276 del self.fullseries[index]
1276 del self.fullseries[index]
1277 self.fullseries.insert(fullstart, fullpatch)
1277 self.fullseries.insert(fullstart, fullpatch)
1278 self.parseseries()
1278 self.parseseries()
1279 self.seriesdirty = True
1279 self.seriesdirty = True
1280
1280
1281 self.applieddirty = True
1281 self.applieddirty = True
1282 if start > 0:
1282 if start > 0:
1283 self.checktoppatch(repo)
1283 self.checktoppatch(repo)
1284 if not patch:
1284 if not patch:
1285 patch = self.series[start]
1285 patch = self.series[start]
1286 end = start + 1
1286 end = start + 1
1287 else:
1287 else:
1288 end = self.series.index(patch, start) + 1
1288 end = self.series.index(patch, start) + 1
1289
1289
1290 tobackup = set()
1290 tobackup = set()
1291 if (not nobackup and force) or check:
1291 if (not nobackup and force) or check:
1292 m, a, r, d = self.checklocalchanges(repo, force=True)
1292 m, a, r, d = self.checklocalchanges(repo, force=True)
1293 if check:
1293 if check:
1294 tobackup.update(m + a + r + d)
1294 tobackup.update(m + a + r + d)
1295 else:
1295 else:
1296 tobackup.update(m + a)
1296 tobackup.update(m + a)
1297
1297
1298 s = self.series[start:end]
1298 s = self.series[start:end]
1299 all_files = set()
1299 all_files = set()
1300 try:
1300 try:
1301 if mergeq:
1301 if mergeq:
1302 ret = self.mergepatch(repo, mergeq, s, diffopts)
1302 ret = self.mergepatch(repo, mergeq, s, diffopts)
1303 else:
1303 else:
1304 ret = self.apply(repo, s, list, all_files=all_files,
1304 ret = self.apply(repo, s, list, all_files=all_files,
1305 tobackup=tobackup, check=check)
1305 tobackup=tobackup, check=check)
1306 except:
1306 except: # re-raises
1307 self.ui.warn(_('cleaning up working directory...'))
1307 self.ui.warn(_('cleaning up working directory...'))
1308 node = repo.dirstate.p1()
1308 node = repo.dirstate.p1()
1309 hg.revert(repo, node, None)
1309 hg.revert(repo, node, None)
1310 # only remove unknown files that we know we touched or
1310 # only remove unknown files that we know we touched or
1311 # created while patching
1311 # created while patching
1312 for f in all_files:
1312 for f in all_files:
1313 if f not in repo.dirstate:
1313 if f not in repo.dirstate:
1314 try:
1314 try:
1315 util.unlinkpath(repo.wjoin(f))
1315 util.unlinkpath(repo.wjoin(f))
1316 except OSError, inst:
1316 except OSError, inst:
1317 if inst.errno != errno.ENOENT:
1317 if inst.errno != errno.ENOENT:
1318 raise
1318 raise
1319 self.ui.warn(_('done\n'))
1319 self.ui.warn(_('done\n'))
1320 raise
1320 raise
1321
1321
1322 if not self.applied:
1322 if not self.applied:
1323 return ret[0]
1323 return ret[0]
1324 top = self.applied[-1].name
1324 top = self.applied[-1].name
1325 if ret[0] and ret[0] > 1:
1325 if ret[0] and ret[0] > 1:
1326 msg = _("errors during apply, please fix and refresh %s\n")
1326 msg = _("errors during apply, please fix and refresh %s\n")
1327 self.ui.write(msg % top)
1327 self.ui.write(msg % top)
1328 else:
1328 else:
1329 self.ui.write(_("now at: %s\n") % top)
1329 self.ui.write(_("now at: %s\n") % top)
1330 return ret[0]
1330 return ret[0]
1331
1331
1332 finally:
1332 finally:
1333 wlock.release()
1333 wlock.release()
1334
1334
1335 def pop(self, repo, patch=None, force=False, update=True, all=False,
1335 def pop(self, repo, patch=None, force=False, update=True, all=False,
1336 nobackup=False, check=False):
1336 nobackup=False, check=False):
1337 self.checkforcecheck(check, force)
1337 self.checkforcecheck(check, force)
1338 wlock = repo.wlock()
1338 wlock = repo.wlock()
1339 try:
1339 try:
1340 if patch:
1340 if patch:
1341 # index, rev, patch
1341 # index, rev, patch
1342 info = self.isapplied(patch)
1342 info = self.isapplied(patch)
1343 if not info:
1343 if not info:
1344 patch = self.lookup(patch)
1344 patch = self.lookup(patch)
1345 info = self.isapplied(patch)
1345 info = self.isapplied(patch)
1346 if not info:
1346 if not info:
1347 raise util.Abort(_("patch %s is not applied") % patch)
1347 raise util.Abort(_("patch %s is not applied") % patch)
1348
1348
1349 if not self.applied:
1349 if not self.applied:
1350 # Allow qpop -a to work repeatedly,
1350 # Allow qpop -a to work repeatedly,
1351 # but not qpop without an argument
1351 # but not qpop without an argument
1352 self.ui.warn(_("no patches applied\n"))
1352 self.ui.warn(_("no patches applied\n"))
1353 return not all
1353 return not all
1354
1354
1355 if all:
1355 if all:
1356 start = 0
1356 start = 0
1357 elif patch:
1357 elif patch:
1358 start = info[0] + 1
1358 start = info[0] + 1
1359 else:
1359 else:
1360 start = len(self.applied) - 1
1360 start = len(self.applied) - 1
1361
1361
1362 if start >= len(self.applied):
1362 if start >= len(self.applied):
1363 self.ui.warn(_("qpop: %s is already at the top\n") % patch)
1363 self.ui.warn(_("qpop: %s is already at the top\n") % patch)
1364 return
1364 return
1365
1365
1366 if not update:
1366 if not update:
1367 parents = repo.dirstate.parents()
1367 parents = repo.dirstate.parents()
1368 rr = [x.node for x in self.applied]
1368 rr = [x.node for x in self.applied]
1369 for p in parents:
1369 for p in parents:
1370 if p in rr:
1370 if p in rr:
1371 self.ui.warn(_("qpop: forcing dirstate update\n"))
1371 self.ui.warn(_("qpop: forcing dirstate update\n"))
1372 update = True
1372 update = True
1373 else:
1373 else:
1374 parents = [p.node() for p in repo[None].parents()]
1374 parents = [p.node() for p in repo[None].parents()]
1375 needupdate = False
1375 needupdate = False
1376 for entry in self.applied[start:]:
1376 for entry in self.applied[start:]:
1377 if entry.node in parents:
1377 if entry.node in parents:
1378 needupdate = True
1378 needupdate = True
1379 break
1379 break
1380 update = needupdate
1380 update = needupdate
1381
1381
1382 tobackup = set()
1382 tobackup = set()
1383 if update:
1383 if update:
1384 m, a, r, d = self.checklocalchanges(repo, force=force or check)
1384 m, a, r, d = self.checklocalchanges(repo, force=force or check)
1385 if force:
1385 if force:
1386 if not nobackup:
1386 if not nobackup:
1387 tobackup.update(m + a)
1387 tobackup.update(m + a)
1388 elif check:
1388 elif check:
1389 tobackup.update(m + a + r + d)
1389 tobackup.update(m + a + r + d)
1390
1390
1391 self.applieddirty = True
1391 self.applieddirty = True
1392 end = len(self.applied)
1392 end = len(self.applied)
1393 rev = self.applied[start].node
1393 rev = self.applied[start].node
1394 if update:
1394 if update:
1395 top = self.checktoppatch(repo)[0]
1395 top = self.checktoppatch(repo)[0]
1396
1396
1397 try:
1397 try:
1398 heads = repo.changelog.heads(rev)
1398 heads = repo.changelog.heads(rev)
1399 except error.LookupError:
1399 except error.LookupError:
1400 node = short(rev)
1400 node = short(rev)
1401 raise util.Abort(_('trying to pop unknown node %s') % node)
1401 raise util.Abort(_('trying to pop unknown node %s') % node)
1402
1402
1403 if heads != [self.applied[-1].node]:
1403 if heads != [self.applied[-1].node]:
1404 raise util.Abort(_("popping would remove a revision not "
1404 raise util.Abort(_("popping would remove a revision not "
1405 "managed by this patch queue"))
1405 "managed by this patch queue"))
1406 if not repo[self.applied[-1].node].mutable():
1406 if not repo[self.applied[-1].node].mutable():
1407 raise util.Abort(
1407 raise util.Abort(
1408 _("popping would remove an immutable revision"),
1408 _("popping would remove an immutable revision"),
1409 hint=_('see "hg help phases" for details'))
1409 hint=_('see "hg help phases" for details'))
1410
1410
1411 # we know there are no local changes, so we can make a simplified
1411 # we know there are no local changes, so we can make a simplified
1412 # form of hg.update.
1412 # form of hg.update.
1413 if update:
1413 if update:
1414 qp = self.qparents(repo, rev)
1414 qp = self.qparents(repo, rev)
1415 ctx = repo[qp]
1415 ctx = repo[qp]
1416 m, a, r, d = repo.status(qp, top)[:4]
1416 m, a, r, d = repo.status(qp, top)[:4]
1417 if d:
1417 if d:
1418 raise util.Abort(_("deletions found between repo revs"))
1418 raise util.Abort(_("deletions found between repo revs"))
1419
1419
1420 tobackup = set(a + m + r) & tobackup
1420 tobackup = set(a + m + r) & tobackup
1421 if check and tobackup:
1421 if check and tobackup:
1422 self.localchangesfound()
1422 self.localchangesfound()
1423 self.backup(repo, tobackup)
1423 self.backup(repo, tobackup)
1424
1424
1425 for f in a:
1425 for f in a:
1426 try:
1426 try:
1427 util.unlinkpath(repo.wjoin(f))
1427 util.unlinkpath(repo.wjoin(f))
1428 except OSError, e:
1428 except OSError, e:
1429 if e.errno != errno.ENOENT:
1429 if e.errno != errno.ENOENT:
1430 raise
1430 raise
1431 repo.dirstate.drop(f)
1431 repo.dirstate.drop(f)
1432 for f in m + r:
1432 for f in m + r:
1433 fctx = ctx[f]
1433 fctx = ctx[f]
1434 repo.wwrite(f, fctx.data(), fctx.flags())
1434 repo.wwrite(f, fctx.data(), fctx.flags())
1435 repo.dirstate.normal(f)
1435 repo.dirstate.normal(f)
1436 repo.setparents(qp, nullid)
1436 repo.setparents(qp, nullid)
1437 for patch in reversed(self.applied[start:end]):
1437 for patch in reversed(self.applied[start:end]):
1438 self.ui.status(_("popping %s\n") % patch.name)
1438 self.ui.status(_("popping %s\n") % patch.name)
1439 del self.applied[start:end]
1439 del self.applied[start:end]
1440 self.strip(repo, [rev], update=False, backup='strip')
1440 self.strip(repo, [rev], update=False, backup='strip')
1441 if self.applied:
1441 if self.applied:
1442 self.ui.write(_("now at: %s\n") % self.applied[-1].name)
1442 self.ui.write(_("now at: %s\n") % self.applied[-1].name)
1443 else:
1443 else:
1444 self.ui.write(_("patch queue now empty\n"))
1444 self.ui.write(_("patch queue now empty\n"))
1445 finally:
1445 finally:
1446 wlock.release()
1446 wlock.release()
1447
1447
1448 def diff(self, repo, pats, opts):
1448 def diff(self, repo, pats, opts):
1449 top, patch = self.checktoppatch(repo)
1449 top, patch = self.checktoppatch(repo)
1450 if not top:
1450 if not top:
1451 self.ui.write(_("no patches applied\n"))
1451 self.ui.write(_("no patches applied\n"))
1452 return
1452 return
1453 qp = self.qparents(repo, top)
1453 qp = self.qparents(repo, top)
1454 if opts.get('reverse'):
1454 if opts.get('reverse'):
1455 node1, node2 = None, qp
1455 node1, node2 = None, qp
1456 else:
1456 else:
1457 node1, node2 = qp, None
1457 node1, node2 = qp, None
1458 diffopts = self.diffopts(opts, patch)
1458 diffopts = self.diffopts(opts, patch)
1459 self.printdiff(repo, diffopts, node1, node2, files=pats, opts=opts)
1459 self.printdiff(repo, diffopts, node1, node2, files=pats, opts=opts)
1460
1460
1461 def refresh(self, repo, pats=None, **opts):
1461 def refresh(self, repo, pats=None, **opts):
1462 if not self.applied:
1462 if not self.applied:
1463 self.ui.write(_("no patches applied\n"))
1463 self.ui.write(_("no patches applied\n"))
1464 return 1
1464 return 1
1465 msg = opts.get('msg', '').rstrip()
1465 msg = opts.get('msg', '').rstrip()
1466 newuser = opts.get('user')
1466 newuser = opts.get('user')
1467 newdate = opts.get('date')
1467 newdate = opts.get('date')
1468 if newdate:
1468 if newdate:
1469 newdate = '%d %d' % util.parsedate(newdate)
1469 newdate = '%d %d' % util.parsedate(newdate)
1470 wlock = repo.wlock()
1470 wlock = repo.wlock()
1471
1471
1472 try:
1472 try:
1473 self.checktoppatch(repo)
1473 self.checktoppatch(repo)
1474 (top, patchfn) = (self.applied[-1].node, self.applied[-1].name)
1474 (top, patchfn) = (self.applied[-1].node, self.applied[-1].name)
1475 if repo.changelog.heads(top) != [top]:
1475 if repo.changelog.heads(top) != [top]:
1476 raise util.Abort(_("cannot refresh a revision with children"))
1476 raise util.Abort(_("cannot refresh a revision with children"))
1477 if not repo[top].mutable():
1477 if not repo[top].mutable():
1478 raise util.Abort(_("cannot refresh immutable revision"),
1478 raise util.Abort(_("cannot refresh immutable revision"),
1479 hint=_('see "hg help phases" for details'))
1479 hint=_('see "hg help phases" for details'))
1480
1480
1481 inclsubs = self.checksubstate(repo)
1481 inclsubs = self.checksubstate(repo)
1482
1482
1483 cparents = repo.changelog.parents(top)
1483 cparents = repo.changelog.parents(top)
1484 patchparent = self.qparents(repo, top)
1484 patchparent = self.qparents(repo, top)
1485 ph = patchheader(self.join(patchfn), self.plainmode)
1485 ph = patchheader(self.join(patchfn), self.plainmode)
1486 diffopts = self.diffopts({'git': opts.get('git')}, patchfn)
1486 diffopts = self.diffopts({'git': opts.get('git')}, patchfn)
1487 if msg:
1487 if msg:
1488 ph.setmessage(msg)
1488 ph.setmessage(msg)
1489 if newuser:
1489 if newuser:
1490 ph.setuser(newuser)
1490 ph.setuser(newuser)
1491 if newdate:
1491 if newdate:
1492 ph.setdate(newdate)
1492 ph.setdate(newdate)
1493 ph.setparent(hex(patchparent))
1493 ph.setparent(hex(patchparent))
1494
1494
1495 # only commit new patch when write is complete
1495 # only commit new patch when write is complete
1496 patchf = self.opener(patchfn, 'w', atomictemp=True)
1496 patchf = self.opener(patchfn, 'w', atomictemp=True)
1497
1497
1498 comments = str(ph)
1498 comments = str(ph)
1499 if comments:
1499 if comments:
1500 patchf.write(comments)
1500 patchf.write(comments)
1501
1501
1502 # update the dirstate in place, strip off the qtip commit
1502 # update the dirstate in place, strip off the qtip commit
1503 # and then commit.
1503 # and then commit.
1504 #
1504 #
1505 # this should really read:
1505 # this should really read:
1506 # mm, dd, aa = repo.status(top, patchparent)[:3]
1506 # mm, dd, aa = repo.status(top, patchparent)[:3]
1507 # but we do it backwards to take advantage of manifest/chlog
1507 # but we do it backwards to take advantage of manifest/chlog
1508 # caching against the next repo.status call
1508 # caching against the next repo.status call
1509 mm, aa, dd = repo.status(patchparent, top)[:3]
1509 mm, aa, dd = repo.status(patchparent, top)[:3]
1510 changes = repo.changelog.read(top)
1510 changes = repo.changelog.read(top)
1511 man = repo.manifest.read(changes[0])
1511 man = repo.manifest.read(changes[0])
1512 aaa = aa[:]
1512 aaa = aa[:]
1513 matchfn = scmutil.match(repo[None], pats, opts)
1513 matchfn = scmutil.match(repo[None], pats, opts)
1514 # in short mode, we only diff the files included in the
1514 # in short mode, we only diff the files included in the
1515 # patch already plus specified files
1515 # patch already plus specified files
1516 if opts.get('short'):
1516 if opts.get('short'):
1517 # if amending a patch, we start with existing
1517 # if amending a patch, we start with existing
1518 # files plus specified files - unfiltered
1518 # files plus specified files - unfiltered
1519 match = scmutil.matchfiles(repo, mm + aa + dd + matchfn.files())
1519 match = scmutil.matchfiles(repo, mm + aa + dd + matchfn.files())
1520 # filter with inc/exl options
1520 # filter with inc/exl options
1521 matchfn = scmutil.match(repo[None], opts=opts)
1521 matchfn = scmutil.match(repo[None], opts=opts)
1522 else:
1522 else:
1523 match = scmutil.matchall(repo)
1523 match = scmutil.matchall(repo)
1524 m, a, r, d = repo.status(match=match)[:4]
1524 m, a, r, d = repo.status(match=match)[:4]
1525 mm = set(mm)
1525 mm = set(mm)
1526 aa = set(aa)
1526 aa = set(aa)
1527 dd = set(dd)
1527 dd = set(dd)
1528
1528
1529 # we might end up with files that were added between
1529 # we might end up with files that were added between
1530 # qtip and the dirstate parent, but then changed in the
1530 # qtip and the dirstate parent, but then changed in the
1531 # local dirstate. in this case, we want them to only
1531 # local dirstate. in this case, we want them to only
1532 # show up in the added section
1532 # show up in the added section
1533 for x in m:
1533 for x in m:
1534 if x not in aa:
1534 if x not in aa:
1535 mm.add(x)
1535 mm.add(x)
1536 # we might end up with files added by the local dirstate that
1536 # we might end up with files added by the local dirstate that
1537 # were deleted by the patch. In this case, they should only
1537 # were deleted by the patch. In this case, they should only
1538 # show up in the changed section.
1538 # show up in the changed section.
1539 for x in a:
1539 for x in a:
1540 if x in dd:
1540 if x in dd:
1541 dd.remove(x)
1541 dd.remove(x)
1542 mm.add(x)
1542 mm.add(x)
1543 else:
1543 else:
1544 aa.add(x)
1544 aa.add(x)
1545 # make sure any files deleted in the local dirstate
1545 # make sure any files deleted in the local dirstate
1546 # are not in the add or change column of the patch
1546 # are not in the add or change column of the patch
1547 forget = []
1547 forget = []
1548 for x in d + r:
1548 for x in d + r:
1549 if x in aa:
1549 if x in aa:
1550 aa.remove(x)
1550 aa.remove(x)
1551 forget.append(x)
1551 forget.append(x)
1552 continue
1552 continue
1553 else:
1553 else:
1554 mm.discard(x)
1554 mm.discard(x)
1555 dd.add(x)
1555 dd.add(x)
1556
1556
1557 m = list(mm)
1557 m = list(mm)
1558 r = list(dd)
1558 r = list(dd)
1559 a = list(aa)
1559 a = list(aa)
1560 c = [filter(matchfn, l) for l in (m, a, r)]
1560 c = [filter(matchfn, l) for l in (m, a, r)]
1561 match = scmutil.matchfiles(repo, set(c[0] + c[1] + c[2] + inclsubs))
1561 match = scmutil.matchfiles(repo, set(c[0] + c[1] + c[2] + inclsubs))
1562 chunks = patchmod.diff(repo, patchparent, match=match,
1562 chunks = patchmod.diff(repo, patchparent, match=match,
1563 changes=c, opts=diffopts)
1563 changes=c, opts=diffopts)
1564 for chunk in chunks:
1564 for chunk in chunks:
1565 patchf.write(chunk)
1565 patchf.write(chunk)
1566
1566
1567 try:
1567 try:
1568 if diffopts.git or diffopts.upgrade:
1568 if diffopts.git or diffopts.upgrade:
1569 copies = {}
1569 copies = {}
1570 for dst in a:
1570 for dst in a:
1571 src = repo.dirstate.copied(dst)
1571 src = repo.dirstate.copied(dst)
1572 # during qfold, the source file for copies may
1572 # during qfold, the source file for copies may
1573 # be removed. Treat this as a simple add.
1573 # be removed. Treat this as a simple add.
1574 if src is not None and src in repo.dirstate:
1574 if src is not None and src in repo.dirstate:
1575 copies.setdefault(src, []).append(dst)
1575 copies.setdefault(src, []).append(dst)
1576 repo.dirstate.add(dst)
1576 repo.dirstate.add(dst)
1577 # remember the copies between patchparent and qtip
1577 # remember the copies between patchparent and qtip
1578 for dst in aaa:
1578 for dst in aaa:
1579 f = repo.file(dst)
1579 f = repo.file(dst)
1580 src = f.renamed(man[dst])
1580 src = f.renamed(man[dst])
1581 if src:
1581 if src:
1582 copies.setdefault(src[0], []).extend(
1582 copies.setdefault(src[0], []).extend(
1583 copies.get(dst, []))
1583 copies.get(dst, []))
1584 if dst in a:
1584 if dst in a:
1585 copies[src[0]].append(dst)
1585 copies[src[0]].append(dst)
1586 # we can't copy a file created by the patch itself
1586 # we can't copy a file created by the patch itself
1587 if dst in copies:
1587 if dst in copies:
1588 del copies[dst]
1588 del copies[dst]
1589 for src, dsts in copies.iteritems():
1589 for src, dsts in copies.iteritems():
1590 for dst in dsts:
1590 for dst in dsts:
1591 repo.dirstate.copy(src, dst)
1591 repo.dirstate.copy(src, dst)
1592 else:
1592 else:
1593 for dst in a:
1593 for dst in a:
1594 repo.dirstate.add(dst)
1594 repo.dirstate.add(dst)
1595 # Drop useless copy information
1595 # Drop useless copy information
1596 for f in list(repo.dirstate.copies()):
1596 for f in list(repo.dirstate.copies()):
1597 repo.dirstate.copy(None, f)
1597 repo.dirstate.copy(None, f)
1598 for f in r:
1598 for f in r:
1599 repo.dirstate.remove(f)
1599 repo.dirstate.remove(f)
1600 # if the patch excludes a modified file, mark that
1600 # if the patch excludes a modified file, mark that
1601 # file with mtime=0 so status can see it.
1601 # file with mtime=0 so status can see it.
1602 mm = []
1602 mm = []
1603 for i in xrange(len(m)-1, -1, -1):
1603 for i in xrange(len(m)-1, -1, -1):
1604 if not matchfn(m[i]):
1604 if not matchfn(m[i]):
1605 mm.append(m[i])
1605 mm.append(m[i])
1606 del m[i]
1606 del m[i]
1607 for f in m:
1607 for f in m:
1608 repo.dirstate.normal(f)
1608 repo.dirstate.normal(f)
1609 for f in mm:
1609 for f in mm:
1610 repo.dirstate.normallookup(f)
1610 repo.dirstate.normallookup(f)
1611 for f in forget:
1611 for f in forget:
1612 repo.dirstate.drop(f)
1612 repo.dirstate.drop(f)
1613
1613
1614 if not msg:
1614 if not msg:
1615 if not ph.message:
1615 if not ph.message:
1616 message = "[mq]: %s\n" % patchfn
1616 message = "[mq]: %s\n" % patchfn
1617 else:
1617 else:
1618 message = "\n".join(ph.message)
1618 message = "\n".join(ph.message)
1619 else:
1619 else:
1620 message = msg
1620 message = msg
1621
1621
1622 user = ph.user or changes[1]
1622 user = ph.user or changes[1]
1623
1623
1624 oldphase = repo[top].phase()
1624 oldphase = repo[top].phase()
1625
1625
1626 # assumes strip can roll itself back if interrupted
1626 # assumes strip can roll itself back if interrupted
1627 repo.setparents(*cparents)
1627 repo.setparents(*cparents)
1628 self.applied.pop()
1628 self.applied.pop()
1629 self.applieddirty = True
1629 self.applieddirty = True
1630 self.strip(repo, [top], update=False,
1630 self.strip(repo, [top], update=False,
1631 backup='strip')
1631 backup='strip')
1632 except:
1632 except: # re-raises
1633 repo.dirstate.invalidate()
1633 repo.dirstate.invalidate()
1634 raise
1634 raise
1635
1635
1636 try:
1636 try:
1637 # might be nice to attempt to roll back strip after this
1637 # might be nice to attempt to roll back strip after this
1638
1638
1639 # Ensure we create a new changeset in the same phase than
1639 # Ensure we create a new changeset in the same phase than
1640 # the old one.
1640 # the old one.
1641 n = newcommit(repo, oldphase, message, user, ph.date,
1641 n = newcommit(repo, oldphase, message, user, ph.date,
1642 match=match, force=True)
1642 match=match, force=True)
1643 # only write patch after a successful commit
1643 # only write patch after a successful commit
1644 patchf.close()
1644 patchf.close()
1645 self.applied.append(statusentry(n, patchfn))
1645 self.applied.append(statusentry(n, patchfn))
1646 except:
1646 except: # re-raises
1647 ctx = repo[cparents[0]]
1647 ctx = repo[cparents[0]]
1648 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
1648 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
1649 self.savedirty()
1649 self.savedirty()
1650 self.ui.warn(_('refresh interrupted while patch was popped! '
1650 self.ui.warn(_('refresh interrupted while patch was popped! '
1651 '(revert --all, qpush to recover)\n'))
1651 '(revert --all, qpush to recover)\n'))
1652 raise
1652 raise
1653 finally:
1653 finally:
1654 wlock.release()
1654 wlock.release()
1655 self.removeundo(repo)
1655 self.removeundo(repo)
1656
1656
1657 def init(self, repo, create=False):
1657 def init(self, repo, create=False):
1658 if not create and os.path.isdir(self.path):
1658 if not create and os.path.isdir(self.path):
1659 raise util.Abort(_("patch queue directory already exists"))
1659 raise util.Abort(_("patch queue directory already exists"))
1660 try:
1660 try:
1661 os.mkdir(self.path)
1661 os.mkdir(self.path)
1662 except OSError, inst:
1662 except OSError, inst:
1663 if inst.errno != errno.EEXIST or not create:
1663 if inst.errno != errno.EEXIST or not create:
1664 raise
1664 raise
1665 if create:
1665 if create:
1666 return self.qrepo(create=True)
1666 return self.qrepo(create=True)
1667
1667
1668 def unapplied(self, repo, patch=None):
1668 def unapplied(self, repo, patch=None):
1669 if patch and patch not in self.series:
1669 if patch and patch not in self.series:
1670 raise util.Abort(_("patch %s is not in series file") % patch)
1670 raise util.Abort(_("patch %s is not in series file") % patch)
1671 if not patch:
1671 if not patch:
1672 start = self.seriesend()
1672 start = self.seriesend()
1673 else:
1673 else:
1674 start = self.series.index(patch) + 1
1674 start = self.series.index(patch) + 1
1675 unapplied = []
1675 unapplied = []
1676 for i in xrange(start, len(self.series)):
1676 for i in xrange(start, len(self.series)):
1677 pushable, reason = self.pushable(i)
1677 pushable, reason = self.pushable(i)
1678 if pushable:
1678 if pushable:
1679 unapplied.append((i, self.series[i]))
1679 unapplied.append((i, self.series[i]))
1680 self.explainpushable(i)
1680 self.explainpushable(i)
1681 return unapplied
1681 return unapplied
1682
1682
1683 def qseries(self, repo, missing=None, start=0, length=None, status=None,
1683 def qseries(self, repo, missing=None, start=0, length=None, status=None,
1684 summary=False):
1684 summary=False):
1685 def displayname(pfx, patchname, state):
1685 def displayname(pfx, patchname, state):
1686 if pfx:
1686 if pfx:
1687 self.ui.write(pfx)
1687 self.ui.write(pfx)
1688 if summary:
1688 if summary:
1689 ph = patchheader(self.join(patchname), self.plainmode)
1689 ph = patchheader(self.join(patchname), self.plainmode)
1690 msg = ph.message and ph.message[0] or ''
1690 msg = ph.message and ph.message[0] or ''
1691 if self.ui.formatted():
1691 if self.ui.formatted():
1692 width = self.ui.termwidth() - len(pfx) - len(patchname) - 2
1692 width = self.ui.termwidth() - len(pfx) - len(patchname) - 2
1693 if width > 0:
1693 if width > 0:
1694 msg = util.ellipsis(msg, width)
1694 msg = util.ellipsis(msg, width)
1695 else:
1695 else:
1696 msg = ''
1696 msg = ''
1697 self.ui.write(patchname, label='qseries.' + state)
1697 self.ui.write(patchname, label='qseries.' + state)
1698 self.ui.write(': ')
1698 self.ui.write(': ')
1699 self.ui.write(msg, label='qseries.message.' + state)
1699 self.ui.write(msg, label='qseries.message.' + state)
1700 else:
1700 else:
1701 self.ui.write(patchname, label='qseries.' + state)
1701 self.ui.write(patchname, label='qseries.' + state)
1702 self.ui.write('\n')
1702 self.ui.write('\n')
1703
1703
1704 applied = set([p.name for p in self.applied])
1704 applied = set([p.name for p in self.applied])
1705 if length is None:
1705 if length is None:
1706 length = len(self.series) - start
1706 length = len(self.series) - start
1707 if not missing:
1707 if not missing:
1708 if self.ui.verbose:
1708 if self.ui.verbose:
1709 idxwidth = len(str(start + length - 1))
1709 idxwidth = len(str(start + length - 1))
1710 for i in xrange(start, start + length):
1710 for i in xrange(start, start + length):
1711 patch = self.series[i]
1711 patch = self.series[i]
1712 if patch in applied:
1712 if patch in applied:
1713 char, state = 'A', 'applied'
1713 char, state = 'A', 'applied'
1714 elif self.pushable(i)[0]:
1714 elif self.pushable(i)[0]:
1715 char, state = 'U', 'unapplied'
1715 char, state = 'U', 'unapplied'
1716 else:
1716 else:
1717 char, state = 'G', 'guarded'
1717 char, state = 'G', 'guarded'
1718 pfx = ''
1718 pfx = ''
1719 if self.ui.verbose:
1719 if self.ui.verbose:
1720 pfx = '%*d %s ' % (idxwidth, i, char)
1720 pfx = '%*d %s ' % (idxwidth, i, char)
1721 elif status and status != char:
1721 elif status and status != char:
1722 continue
1722 continue
1723 displayname(pfx, patch, state)
1723 displayname(pfx, patch, state)
1724 else:
1724 else:
1725 msng_list = []
1725 msng_list = []
1726 for root, dirs, files in os.walk(self.path):
1726 for root, dirs, files in os.walk(self.path):
1727 d = root[len(self.path) + 1:]
1727 d = root[len(self.path) + 1:]
1728 for f in files:
1728 for f in files:
1729 fl = os.path.join(d, f)
1729 fl = os.path.join(d, f)
1730 if (fl not in self.series and
1730 if (fl not in self.series and
1731 fl not in (self.statuspath, self.seriespath,
1731 fl not in (self.statuspath, self.seriespath,
1732 self.guardspath)
1732 self.guardspath)
1733 and not fl.startswith('.')):
1733 and not fl.startswith('.')):
1734 msng_list.append(fl)
1734 msng_list.append(fl)
1735 for x in sorted(msng_list):
1735 for x in sorted(msng_list):
1736 pfx = self.ui.verbose and ('D ') or ''
1736 pfx = self.ui.verbose and ('D ') or ''
1737 displayname(pfx, x, 'missing')
1737 displayname(pfx, x, 'missing')
1738
1738
1739 def issaveline(self, l):
1739 def issaveline(self, l):
1740 if l.name == '.hg.patches.save.line':
1740 if l.name == '.hg.patches.save.line':
1741 return True
1741 return True
1742
1742
1743 def qrepo(self, create=False):
1743 def qrepo(self, create=False):
1744 ui = self.ui.copy()
1744 ui = self.ui.copy()
1745 ui.setconfig('paths', 'default', '', overlay=False)
1745 ui.setconfig('paths', 'default', '', overlay=False)
1746 ui.setconfig('paths', 'default-push', '', overlay=False)
1746 ui.setconfig('paths', 'default-push', '', overlay=False)
1747 if create or os.path.isdir(self.join(".hg")):
1747 if create or os.path.isdir(self.join(".hg")):
1748 return hg.repository(ui, path=self.path, create=create)
1748 return hg.repository(ui, path=self.path, create=create)
1749
1749
1750 def restore(self, repo, rev, delete=None, qupdate=None):
1750 def restore(self, repo, rev, delete=None, qupdate=None):
1751 desc = repo[rev].description().strip()
1751 desc = repo[rev].description().strip()
1752 lines = desc.splitlines()
1752 lines = desc.splitlines()
1753 i = 0
1753 i = 0
1754 datastart = None
1754 datastart = None
1755 series = []
1755 series = []
1756 applied = []
1756 applied = []
1757 qpp = None
1757 qpp = None
1758 for i, line in enumerate(lines):
1758 for i, line in enumerate(lines):
1759 if line == 'Patch Data:':
1759 if line == 'Patch Data:':
1760 datastart = i + 1
1760 datastart = i + 1
1761 elif line.startswith('Dirstate:'):
1761 elif line.startswith('Dirstate:'):
1762 l = line.rstrip()
1762 l = line.rstrip()
1763 l = l[10:].split(' ')
1763 l = l[10:].split(' ')
1764 qpp = [bin(x) for x in l]
1764 qpp = [bin(x) for x in l]
1765 elif datastart is not None:
1765 elif datastart is not None:
1766 l = line.rstrip()
1766 l = line.rstrip()
1767 n, name = l.split(':', 1)
1767 n, name = l.split(':', 1)
1768 if n:
1768 if n:
1769 applied.append(statusentry(bin(n), name))
1769 applied.append(statusentry(bin(n), name))
1770 else:
1770 else:
1771 series.append(l)
1771 series.append(l)
1772 if datastart is None:
1772 if datastart is None:
1773 self.ui.warn(_("No saved patch data found\n"))
1773 self.ui.warn(_("No saved patch data found\n"))
1774 return 1
1774 return 1
1775 self.ui.warn(_("restoring status: %s\n") % lines[0])
1775 self.ui.warn(_("restoring status: %s\n") % lines[0])
1776 self.fullseries = series
1776 self.fullseries = series
1777 self.applied = applied
1777 self.applied = applied
1778 self.parseseries()
1778 self.parseseries()
1779 self.seriesdirty = True
1779 self.seriesdirty = True
1780 self.applieddirty = True
1780 self.applieddirty = True
1781 heads = repo.changelog.heads()
1781 heads = repo.changelog.heads()
1782 if delete:
1782 if delete:
1783 if rev not in heads:
1783 if rev not in heads:
1784 self.ui.warn(_("save entry has children, leaving it alone\n"))
1784 self.ui.warn(_("save entry has children, leaving it alone\n"))
1785 else:
1785 else:
1786 self.ui.warn(_("removing save entry %s\n") % short(rev))
1786 self.ui.warn(_("removing save entry %s\n") % short(rev))
1787 pp = repo.dirstate.parents()
1787 pp = repo.dirstate.parents()
1788 if rev in pp:
1788 if rev in pp:
1789 update = True
1789 update = True
1790 else:
1790 else:
1791 update = False
1791 update = False
1792 self.strip(repo, [rev], update=update, backup='strip')
1792 self.strip(repo, [rev], update=update, backup='strip')
1793 if qpp:
1793 if qpp:
1794 self.ui.warn(_("saved queue repository parents: %s %s\n") %
1794 self.ui.warn(_("saved queue repository parents: %s %s\n") %
1795 (short(qpp[0]), short(qpp[1])))
1795 (short(qpp[0]), short(qpp[1])))
1796 if qupdate:
1796 if qupdate:
1797 self.ui.status(_("updating queue directory\n"))
1797 self.ui.status(_("updating queue directory\n"))
1798 r = self.qrepo()
1798 r = self.qrepo()
1799 if not r:
1799 if not r:
1800 self.ui.warn(_("Unable to load queue repository\n"))
1800 self.ui.warn(_("Unable to load queue repository\n"))
1801 return 1
1801 return 1
1802 hg.clean(r, qpp[0])
1802 hg.clean(r, qpp[0])
1803
1803
1804 def save(self, repo, msg=None):
1804 def save(self, repo, msg=None):
1805 if not self.applied:
1805 if not self.applied:
1806 self.ui.warn(_("save: no patches applied, exiting\n"))
1806 self.ui.warn(_("save: no patches applied, exiting\n"))
1807 return 1
1807 return 1
1808 if self.issaveline(self.applied[-1]):
1808 if self.issaveline(self.applied[-1]):
1809 self.ui.warn(_("status is already saved\n"))
1809 self.ui.warn(_("status is already saved\n"))
1810 return 1
1810 return 1
1811
1811
1812 if not msg:
1812 if not msg:
1813 msg = _("hg patches saved state")
1813 msg = _("hg patches saved state")
1814 else:
1814 else:
1815 msg = "hg patches: " + msg.rstrip('\r\n')
1815 msg = "hg patches: " + msg.rstrip('\r\n')
1816 r = self.qrepo()
1816 r = self.qrepo()
1817 if r:
1817 if r:
1818 pp = r.dirstate.parents()
1818 pp = r.dirstate.parents()
1819 msg += "\nDirstate: %s %s" % (hex(pp[0]), hex(pp[1]))
1819 msg += "\nDirstate: %s %s" % (hex(pp[0]), hex(pp[1]))
1820 msg += "\n\nPatch Data:\n"
1820 msg += "\n\nPatch Data:\n"
1821 msg += ''.join('%s\n' % x for x in self.applied)
1821 msg += ''.join('%s\n' % x for x in self.applied)
1822 msg += ''.join(':%s\n' % x for x in self.fullseries)
1822 msg += ''.join(':%s\n' % x for x in self.fullseries)
1823 n = repo.commit(msg, force=True)
1823 n = repo.commit(msg, force=True)
1824 if not n:
1824 if not n:
1825 self.ui.warn(_("repo commit failed\n"))
1825 self.ui.warn(_("repo commit failed\n"))
1826 return 1
1826 return 1
1827 self.applied.append(statusentry(n, '.hg.patches.save.line'))
1827 self.applied.append(statusentry(n, '.hg.patches.save.line'))
1828 self.applieddirty = True
1828 self.applieddirty = True
1829 self.removeundo(repo)
1829 self.removeundo(repo)
1830
1830
1831 def fullseriesend(self):
1831 def fullseriesend(self):
1832 if self.applied:
1832 if self.applied:
1833 p = self.applied[-1].name
1833 p = self.applied[-1].name
1834 end = self.findseries(p)
1834 end = self.findseries(p)
1835 if end is None:
1835 if end is None:
1836 return len(self.fullseries)
1836 return len(self.fullseries)
1837 return end + 1
1837 return end + 1
1838 return 0
1838 return 0
1839
1839
1840 def seriesend(self, all_patches=False):
1840 def seriesend(self, all_patches=False):
1841 """If all_patches is False, return the index of the next pushable patch
1841 """If all_patches is False, return the index of the next pushable patch
1842 in the series, or the series length. If all_patches is True, return the
1842 in the series, or the series length. If all_patches is True, return the
1843 index of the first patch past the last applied one.
1843 index of the first patch past the last applied one.
1844 """
1844 """
1845 end = 0
1845 end = 0
1846 def next(start):
1846 def next(start):
1847 if all_patches or start >= len(self.series):
1847 if all_patches or start >= len(self.series):
1848 return start
1848 return start
1849 for i in xrange(start, len(self.series)):
1849 for i in xrange(start, len(self.series)):
1850 p, reason = self.pushable(i)
1850 p, reason = self.pushable(i)
1851 if p:
1851 if p:
1852 return i
1852 return i
1853 self.explainpushable(i)
1853 self.explainpushable(i)
1854 return len(self.series)
1854 return len(self.series)
1855 if self.applied:
1855 if self.applied:
1856 p = self.applied[-1].name
1856 p = self.applied[-1].name
1857 try:
1857 try:
1858 end = self.series.index(p)
1858 end = self.series.index(p)
1859 except ValueError:
1859 except ValueError:
1860 return 0
1860 return 0
1861 return next(end + 1)
1861 return next(end + 1)
1862 return next(end)
1862 return next(end)
1863
1863
1864 def appliedname(self, index):
1864 def appliedname(self, index):
1865 pname = self.applied[index].name
1865 pname = self.applied[index].name
1866 if not self.ui.verbose:
1866 if not self.ui.verbose:
1867 p = pname
1867 p = pname
1868 else:
1868 else:
1869 p = str(self.series.index(pname)) + " " + pname
1869 p = str(self.series.index(pname)) + " " + pname
1870 return p
1870 return p
1871
1871
    def qimport(self, repo, files, patchname=None, rev=None, existing=None,
                force=None, git=False):
        """Import patches into the queue.

        Two sources are supported: *rev* places existing repository
        revisions under mq control, and *files* imports patch files
        (with ``-`` meaning stdin).  Returns the list of imported patch
        names.  Raises util.Abort on any validation failure.
        """
        def checkseries(patchname):
            # Reject a name already present in the series file.
            if patchname in self.series:
                raise util.Abort(_('patch %s is already in the series file')
                                 % patchname)

        if rev:
            if files:
                raise util.Abort(_('option "-r" not valid when importing '
                                   'files'))
            rev = scmutil.revrange(repo, rev)
            # Process revisions newest-first so each one's parent check
            # can chain to the previous.
            rev.sort(reverse=True)
        if (len(files) > 1 or len(rev) > 1) and patchname:
            raise util.Abort(_('option "-n" not valid when importing multiple '
                               'patches'))
        imported = []
        if rev:
            # If mq patches are applied, we can only import revisions
            # that form a linear path to qbase.
            # Otherwise, they should form a linear path to a head.
            heads = repo.changelog.heads(repo.changelog.node(rev[-1]))
            if len(heads) > 1:
                raise util.Abort(_('revision %d is the root of more than one '
                                   'branch') % rev[-1])
            if self.applied:
                base = repo.changelog.node(rev[0])
                if base in [n.node for n in self.applied]:
                    raise util.Abort(_('revision %d is already managed')
                                     % rev[0])
                if heads != [self.applied[-1].node]:
                    raise util.Abort(_('revision %d is not the parent of '
                                       'the queue') % rev[0])
                base = repo.changelog.rev(self.applied[0].node)
                lastparent = repo.changelog.parentrevs(base)[0]
            else:
                if heads != [repo.changelog.node(rev[0])]:
                    raise util.Abort(_('revision %d has unmanaged children')
                                     % rev[0])
                lastparent = None

            diffopts = self.diffopts({'git': git})
            for r in rev:
                # Only mutable (draft/secret) changesets may be imported.
                if not repo[r].mutable():
                    raise util.Abort(_('revision %d is not mutable') % r,
                                     hint=_('see "hg help phases" '
                                            'for details'))
                p1, p2 = repo.changelog.parentrevs(r)
                n = repo.changelog.node(r)
                if p2 != nullrev:
                    raise util.Abort(_('cannot import merge revision %d') % r)
                if lastparent and lastparent != r:
                    raise util.Abort(_('revision %d is not the parent of %d')
                                     % (r, lastparent))
                lastparent = p1

                if not patchname:
                    patchname = normname('%d.diff' % r)
                checkseries(patchname)
                self.checkpatchname(patchname, force)
                # New entries go to the front: imported revisions become
                # the bottom of the applied stack.
                self.fullseries.insert(0, patchname)

                patchf = self.opener(patchname, "w")
                cmdutil.export(repo, [n], fp=patchf, opts=diffopts)
                patchf.close()

                se = statusentry(n, patchname)
                self.applied.insert(0, se)

                self.added.append(patchname)
                imported.append(patchname)
                # Only the first import may use an explicit --name.
                patchname = None
            if rev and repo.ui.configbool('mq', 'secret', False):
                # if we added anything with --rev, we must move the secret root
                phases.retractboundary(repo, phases.secret, [n])
            self.parseseries()
            self.applieddirty = True
            self.seriesdirty = True

        for i, filename in enumerate(files):
            if existing:
                # --existing: register a file already in the patch dir,
                # optionally renaming it to --name.
                if filename == '-':
                    raise util.Abort(_('-e is incompatible with import '
                                       'from -'))
                filename = normname(filename)
                self.checkreservedname(filename)
                originpath = self.join(filename)
                if not os.path.isfile(originpath):
                    raise util.Abort(_("patch %s does not exist") % filename)

                if patchname:
                    self.checkpatchname(patchname, force)

                    self.ui.write(_('renaming %s to %s\n')
                                  % (filename, patchname))
                    util.rename(originpath, self.join(patchname))
                else:
                    patchname = filename

            else:
                # Import from stdin ('-') or from a local/remote path.
                if filename == '-' and not patchname:
                    raise util.Abort(_('need --name to import a patch '
                                       'from -'))
                elif not patchname:
                    patchname = normname(os.path.basename(
                        filename.rstrip('/')))
                self.checkpatchname(patchname, force)
                try:
                    if filename == '-':
                        text = self.ui.fin.read()
                    else:
                        fp = url.open(self.ui, filename)
                        text = fp.read()
                        fp.close()
                except (OSError, IOError):
                    raise util.Abort(_("unable to read file %s") % filename)
                patchf = self.opener(patchname, "w")
                patchf.write(text)
                patchf.close()
            if not force:
                checkseries(patchname)
            if patchname not in self.series:
                # Insert after the last applied patch, offset by how many
                # files we have already inserted in this call.
                index = self.fullseriesend() + i
                self.fullseries[index:index] = [patchname]
                self.parseseries()
                self.seriesdirty = True
            self.ui.warn(_("adding %s to series file\n") % patchname)
            self.added.append(patchname)
            imported.append(patchname)
            patchname = None

        self.removeundo(repo)
        return imported
2001
2001
def fixcheckopts(ui, opts):
    """Turn --check on by default when the mq.check config option is set.

    Returns *opts* unchanged when the option is off or when --force or
    --exact was given; otherwise returns a copy with 'check' enabled.
    """
    enabled = ui.configbool('mq', 'check')
    if not enabled or opts.get('force') or opts.get('exact'):
        return opts
    newopts = dict(opts)
    newopts['check'] = True
    return newopts
2009
2009
@command("qdelete|qremove|qrm",
         [('k', 'keep', None, _('keep patch file')),
          ('r', 'rev', [],
           _('stop managing a revision (DEPRECATED)'), _('REV'))],
         _('hg qdelete [-k] [PATCH]...'))
def delete(ui, repo, *patches, **opts):
    """remove patches from queue

    The patches must not be applied, and at least one patch is required. Exact
    patch identifiers must be given. With -k/--keep, the patch files are
    preserved in the patch directory.

    To stop managing a patch and move it into permanent history,
    use the :hg:`qfinish` command."""
    # Delegate to the queue object, then persist its updated state.
    mq = repo.mq
    mq.delete(repo, patches, opts)
    mq.savedirty()
    return 0
2028
2028
@command("qapplied",
         [('1', 'last', None, _('show only the preceding applied patch'))
          ] + seriesopts,
         _('hg qapplied [-1] [-s] [PATCH]'))
def applied(ui, repo, patch=None, **opts):
    """print the patches already applied

    Returns 0 on success."""

    q = repo.mq

    # End of the range to print: either just past the named patch, or
    # just past the last applied one.
    if patch:
        if patch not in q.series:
            raise util.Abort(_("patch %s is not in series file") % patch)
        end = q.series.index(patch) + 1
    else:
        end = q.seriesend(True)

    start = 0
    if opts.get('last'):
        # --last: show only the patch preceding the current top.
        if not end:
            ui.write(_("no patches applied\n"))
            return 1
        if end == 1:
            ui.write(_("only one patch applied\n"))
            return 1
        start, end = end - 2, 1

    q.qseries(repo, length=end, start=start, status='A',
              summary=opts.get('summary'))
2061
2061
2062
2062
@command("qunapplied",
         [('1', 'first', None, _('show only the first patch'))] + seriesopts,
         _('hg qunapplied [-1] [-s] [PATCH]'))
def unapplied(ui, repo, patch=None, **opts):
    """print the patches not yet applied

    Returns 0 on success."""

    q = repo.mq
    # Start of the range to print: just past the named patch, or the
    # first patch past the applied stack.
    if patch:
        if patch not in q.series:
            raise util.Abort(_("patch %s is not in series file") % patch)
        start = q.series.index(patch) + 1
    else:
        start = q.seriesend(True)

    if start == len(q.series) and opts.get('first'):
        ui.write(_("all patches applied\n"))
        return 1

    length = 1 if opts.get('first') else None
    q.qseries(repo, start=start, length=length, status='U',
              summary=opts.get('summary'))
2086
2086
@command("qimport",
         [('e', 'existing', None, _('import file in patch directory')),
          ('n', 'name', '',
           _('name of patch file'), _('NAME')),
          ('f', 'force', None, _('overwrite existing files')),
          ('r', 'rev', [],
           _('place existing revisions under mq control'), _('REV')),
          ('g', 'git', None, _('use git extended diff format')),
          ('P', 'push', None, _('qpush after importing'))],
         _('hg qimport [-e] [-n NAME] [-f] [-g] [-P] [-r REV]... FILE...'))
def qimport(ui, repo, *filename, **opts):
    """import a patch or existing changeset

    The patch is inserted into the series after the last applied
    patch. If no patches have been applied, qimport prepends the patch
    to the series.

    The patch will have the same name as its source file unless you
    give it a new one with -n/--name.

    You can register an existing patch inside the patch directory with
    the -e/--existing flag.

    With -f/--force, an existing patch of the same name will be
    overwritten.

    An existing changeset may be placed under mq control with -r/--rev
    (e.g. qimport --rev tip -n patch will place tip under mq control).
    With -g/--git, patches imported with --rev will use the git diff
    format. See the diffs help topic for information on why this is
    important for preserving rename/copy information and permission
    changes. Use :hg:`qfinish` to remove changesets from mq control.

    To import a patch from standard input, pass - as the patch file.
    When importing from standard input, a patch name must be specified
    using the --name flag.

    To import an existing patch while renaming it::

      hg qimport -e existing-patch -n new-name

    Returns 0 if import succeeded.
    """
    lock = repo.lock() # cause this may move phase
    try:
        q = repo.mq
        try:
            imported = q.qimport(
                repo, filename, patchname=opts.get('name'),
                existing=opts.get('existing'), force=opts.get('force'),
                rev=opts.get('rev'), git=opts.get('git'))
        finally:
            # Persist queue state even if the import failed partway.
            q.savedirty()
    finally:
        lock.release()

    # --push: push the last imported patch; not meaningful with --rev
    # because those changesets are already applied.
    if imported and opts.get('push') and not opts.get('rev'):
        return q.push(repo, imported[-1])
    return 0
2146
2146
def qinit(ui, repo, create):
    """initialize a new queue repository

    This command also creates a series file for ordering patches, and
    an mq-specific .hgignore file in the queue repository, to exclude
    the status and guards files (these contain mostly transient state).

    Returns 0 if initialization succeeded."""
    q = repo.mq
    qrepo = q.init(repo, create)
    q.savedirty()
    if not qrepo:
        return 0
    # Seed the versioned queue repository with an .hgignore that hides
    # transient mq state, plus an empty series file.
    if not os.path.exists(qrepo.wjoin('.hgignore')):
        fp = qrepo.wopener('.hgignore', 'w')
        fp.write('^\\.hg\n'
                 '^\\.mq\n'
                 'syntax: glob\n'
                 'status\n'
                 'guards\n')
        fp.close()
    if not os.path.exists(qrepo.wjoin('series')):
        qrepo.wopener('series', 'w').close()
    qrepo[None].add(['.hgignore', 'series'])
    commands.add(ui, qrepo)
    return 0
2172
2172
@command("^qinit",
         [('c', 'create-repo', None, _('create queue repository'))],
         _('hg qinit [-c]'))
def init(ui, repo, **opts):
    """init a new queue repository (DEPRECATED)

    The queue repository is unversioned by default. If
    -c/--create-repo is specified, qinit will create a separate nested
    repository for patches (qinit -c may also be run later to convert
    an unversioned patch repository into a versioned one). You can use
    qcommit to commit changes to this queue repository.

    This command is deprecated. Without -c, it's implied by other relevant
    commands. With -c, use :hg:`init --mq` instead."""
    # Thin wrapper around the shared qinit helper.
    create_repo = opts.get('create_repo')
    return qinit(ui, repo, create=create_repo)
2188
2188
@command("qclone",
         [('', 'pull', None, _('use pull protocol to copy metadata')),
          ('U', 'noupdate', None,
           _('do not update the new working directories')),
          ('', 'uncompressed', None,
           _('use uncompressed transfer (fast over LAN)')),
          ('p', 'patches', '',
           _('location of source patch repository'), _('REPO')),
          ] + commands.remoteopts,
         _('hg qclone [OPTION]... SOURCE [DEST]'))
def clone(ui, source, dest=None, **opts):
    '''clone main and patch repository at same time

    If source is local, destination will have no patches applied. If
    source is remote, this command can not check if patches are
    applied in source, so cannot guarantee that patches are not
    applied in destination. If you clone remote repository, be sure
    before that it has no patches applied.

    Source patch repository is looked for in <src>/.hg/patches by
    default. Use -p <url> to change.

    The patch directory must be a nested Mercurial repository, as
    would be created by :hg:`init --mq`.

    Return 0 on success.
    '''
    def patchdir(repo):
        """compute a patch repo url from a repo object"""
        url = repo.url()
        if url.endswith('/'):
            url = url[:-1]
        return url + '/.hg/patches'

    # main repo (destination and sources)
    if dest is None:
        dest = hg.defaultdest(source)
    sr = hg.repository(hg.remoteui(ui, opts), ui.expandpath(source))

    # patches repo (source only)
    if opts.get('patches'):
        patchespath = ui.expandpath(opts.get('patches'))
    else:
        patchespath = patchdir(sr)
    # Fail early if the source has no versioned patch repository.
    try:
        hg.repository(ui, patchespath)
    except error.RepoError:
        raise util.Abort(_('versioned patch repository not found'
                           ' (see init --mq)'))
    qbase, destrev = None, None
    if sr.local():
        # NOTE(review): qbase is still None here, so sr[qbase] resolves
        # the working-directory context rather than the first applied
        # patch -- confirm this phase check is intended.
        if sr.mq.applied and sr[qbase].phase() != phases.secret:
            qbase = sr.mq.applied[0].node
            if not hg.islocal(dest):
                # Remote destination: clone only revisions outside the
                # applied patch stack, plus qbase's parent.
                heads = set(sr.heads())
                destrev = list(heads.difference(sr.heads(qbase)))
                destrev.append(sr.changelog.parents(qbase)[0])
    elif sr.capable('lookup'):
        # Remote source: ask it for its qbase node, if any.
        try:
            qbase = sr.lookup('qbase')
        except error.RepoError:
            pass

    ui.note(_('cloning main repository\n'))
    sr, dr = hg.clone(ui, opts, sr.url(), dest,
                      pull=opts.get('pull'),
                      rev=destrev,
                      update=False,
                      stream=opts.get('uncompressed'))

    ui.note(_('cloning patch repository\n'))
    hg.clone(ui, opts, opts.get('patches') or patchdir(sr), patchdir(dr),
             pull=opts.get('pull'), update=not opts.get('noupdate'),
             stream=opts.get('uncompressed'))

    if dr.local():
        if qbase:
            # Applied patches must not remain as regular changesets in
            # the destination; strip them so qpush can reapply them.
            ui.note(_('stripping applied patches from destination '
                      'repository\n'))
            dr.mq.strip(dr, [qbase], update=False, backup=None)
        if not opts.get('noupdate'):
            ui.note(_('updating destination repository\n'))
            hg.update(dr, dr.changelog.tip())
2272
2272
@command("qcommit|qci",
         commands.table["^commit|ci"][1],
         _('hg qcommit [OPTION]... [FILE]...'))
def commit(ui, repo, *pats, **opts):
    """commit changes in the queue repository (DEPRECATED)

    This command is deprecated; use :hg:`commit --mq` instead."""
    # Forward the commit to the nested queue repository, if it exists.
    qrepo = repo.mq.qrepo()
    if not qrepo:
        raise util.Abort('no queue repository')
    commands.commit(qrepo.ui, qrepo, *pats, **opts)
2285
2285
@command("qseries",
         [('m', 'missing', None, _('print patches not in series')),
          ] + seriesopts,
         _('hg qseries [-ms]'))
def series(ui, repo, **opts):
    """print the entire series file

    Returns 0 on success."""
    q = repo.mq
    q.qseries(repo, missing=opts.get('missing'),
              summary=opts.get('summary'))
    return 0
2297
2297
@command("qtop", seriesopts, _('hg qtop [-s]'))
def top(ui, repo, **opts):
    """print the name of the current patch

    Returns 0 on success."""
    q = repo.mq
    # index just past the last applied patch, or 0 if none applied
    t = q.applied and q.seriesend(True) or 0
    if t:
        q.qseries(repo, start=t - 1, length=1, status='A',
                  summary=opts.get('summary'))
    else:
        ui.write(_("no patches applied\n"))
        return 1
2311
2311
@command("qnext", seriesopts, _('hg qnext [-s]'))
def next(ui, repo, **opts):
    """print the name of the next pushable patch

    Returns 0 on success."""
    q = repo.mq
    end = q.seriesend()
    if end == len(q.series):
        ui.write(_("all patches applied\n"))
        return 1
    q.qseries(repo, start=end, length=1, summary=opts.get('summary'))
2323
2323
@command("qprev", seriesopts, _('hg qprev [-s]'))
def prev(ui, repo, **opts):
    """print the name of the preceding applied patch

    Returns 0 on success."""
    q = repo.mq
    l = len(q.applied)
    if l == 1:
        ui.write(_("only one patch applied\n"))
        return 1
    if not l:
        ui.write(_("no patches applied\n"))
        return 1
    # second-to-last applied patch is the one preceding the top
    idx = q.series.index(q.applied[-2].name)
    q.qseries(repo, start=idx, length=1, status='A',
              summary=opts.get('summary'))
2340
2340
def setupheaderopts(ui, opts):
    """Fill in 'user'/'date' in opts from -U/--currentuser and
    -D/--currentdate when no explicit value was given."""
    if not opts.get('user') and opts.get('currentuser'):
        opts['user'] = ui.username()
    if not opts.get('date') and opts.get('currentdate'):
        opts['date'] = "%d %d" % util.makedate()
2346
2346
@command("^qnew",
         [('e', 'edit', None, _('edit commit message')),
          ('f', 'force', None, _('import uncommitted changes (DEPRECATED)')),
          ('g', 'git', None, _('use git extended diff format')),
          ('U', 'currentuser', None, _('add "From: <current user>" to patch')),
          ('u', 'user', '',
           _('add "From: <USER>" to patch'), _('USER')),
          ('D', 'currentdate', None, _('add "Date: <current date>" to patch')),
          ('d', 'date', '',
           _('add "Date: <DATE>" to patch'), _('DATE'))
          ] + commands.walkopts + commands.commitopts,
         _('hg qnew [-e] [-m TEXT] [-l FILE] PATCH [FILE]...'))
def new(ui, repo, patch, *args, **opts):
    """create a new patch

    qnew creates a new patch on top of the currently-applied patch (if
    any). The patch will be initialized with any outstanding changes
    in the working directory. You may also use -I/--include,
    -X/--exclude, and/or a list of files after the patch name to add
    only changes to matching files to the new patch, leaving the rest
    as uncommitted modifications.

    -u/--user and -d/--date can be used to set the (given) user and
    date, respectively. -U/--currentuser and -D/--currentdate set user
    to current user and date to current date.

    -e/--edit, -m/--message or -l/--logfile set the patch header as
    well as the commit message. If none is specified, the header is
    empty and the commit message is '[mq]: PATCH'.

    Use the -g/--git option to keep the patch in the git extended diff
    format. Read the diffs help topic for more information on why this
    is important for preserving permission changes and copy/rename
    information.

    Returns 0 on successful creation of a new patch.
    """
    msg = cmdutil.logmessage(ui, opts)
    def getmsg():
        # deferred so the editor only launches when -e was given
        return ui.edit(msg, opts.get('user') or ui.username())
    q = repo.mq
    opts['msg'] = msg
    if opts.get('edit'):
        opts['msg'] = getmsg
    else:
        opts['msg'] = msg
    setupheaderopts(ui, opts)
    q.new(repo, patch, *args, **opts)
    q.savedirty()
    return 0
2397
2397
@command("^qrefresh",
         [('e', 'edit', None, _('edit commit message')),
          ('g', 'git', None, _('use git extended diff format')),
          ('s', 'short', None,
           _('refresh only files already in the patch and specified files')),
          ('U', 'currentuser', None,
           _('add/update author field in patch with current user')),
          ('u', 'user', '',
           _('add/update author field in patch with given user'), _('USER')),
          ('D', 'currentdate', None,
           _('add/update date field in patch with current date')),
          ('d', 'date', '',
           _('add/update date field in patch with given date'), _('DATE'))
          ] + commands.walkopts + commands.commitopts,
         _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...'))
def refresh(ui, repo, *pats, **opts):
    """update the current patch

    If any file patterns are provided, the refreshed patch will
    contain only the modifications that match those patterns; the
    remaining modifications will remain in the working directory.

    If -s/--short is specified, files currently included in the patch
    will be refreshed just like matched files and remain in the patch.

    If -e/--edit is specified, Mercurial will start your configured editor for
    you to enter a message. In case qrefresh fails, you will find a backup of
    your message in ``.hg/last-message.txt``.

    hg add/remove/copy/rename work as usual, though you might want to
    use git-style patches (-g/--git or [diff] git=1) to track copies
    and renames. See the diffs help topic for more information on the
    git diff format.

    Returns 0 on success.
    """
    q = repo.mq
    message = cmdutil.logmessage(ui, opts)
    if opts.get('edit'):
        if not q.applied:
            ui.write(_("no patches applied\n"))
            return 1
        if message:
            raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
        patch = q.applied[-1].name
        ph = patchheader(q.join(patch), q.plainmode)
        message = ui.edit('\n'.join(ph.message), ph.user or ui.username())
        # We don't want to lose the patch message if qrefresh fails (issue2062)
        repo.savecommitmessage(message)
    setupheaderopts(ui, opts)
    wlock = repo.wlock()
    try:
        ret = q.refresh(repo, pats, msg=message, **opts)
        q.savedirty()
        return ret
    finally:
        wlock.release()
2455
2455
@command("^qdiff",
         commands.diffopts + commands.diffopts2 + commands.walkopts,
         _('hg qdiff [OPTION]... [FILE]...'))
def diff(ui, repo, *pats, **opts):
    """diff of the current patch and subsequent modifications

    Shows a diff which includes the current patch as well as any
    changes which have been made in the working directory since the
    last refresh (thus showing what the current patch would become
    after a qrefresh).

    Use :hg:`diff` if you only want to see the changes made since the
    last qrefresh, or :hg:`export qtip` if you want to see changes
    made by the current patch without including changes made since the
    qrefresh.

    Returns 0 on success.
    """
    repo.mq.diff(repo, pats, opts)
    return 0
2476
2476
@command('qfold',
         [('e', 'edit', None, _('edit patch header')),
          ('k', 'keep', None, _('keep folded patch files')),
          ] + commands.commitopts,
         _('hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...'))
def fold(ui, repo, *files, **opts):
    """fold the named patches into the current patch

    Patches must not yet be applied. Each patch will be successively
    applied to the current patch in the order given. If all the
    patches apply successfully, the current patch will be refreshed
    with the new cumulative patch, and the folded patches will be
    deleted. With -k/--keep, the folded patch files will not be
    removed afterwards.

    The header for each folded patch will be concatenated with the
    current patch header, separated by a line of ``* * *``.

    Returns 0 on success."""
    q = repo.mq
    if not files:
        raise util.Abort(_('qfold requires at least one patch name'))
    if not q.checktoppatch(repo)[0]:
        raise util.Abort(_('no patches applied'))
    q.checklocalchanges(repo)

    message = cmdutil.logmessage(ui, opts)
    if opts.get('edit'):
        if message:
            raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))

    parent = q.lookup('qtip')
    patches = []
    messages = []
    for f in files:
        p = q.lookup(f)
        if p in patches or p == parent:
            ui.warn(_('Skipping already folded patch %s\n') % p)
        if q.isapplied(p):
            raise util.Abort(_('qfold cannot fold already applied patch %s')
                             % p)
        patches.append(p)

    for p in patches:
        if not message:
            # collect each folded patch's header for concatenation below
            ph = patchheader(q.join(p), q.plainmode)
            if ph.message:
                messages.append(ph.message)
        pf = q.join(p)
        (patchsuccess, files, fuzz) = q.patch(repo, pf)
        if not patchsuccess:
            raise util.Abort(_('error folding patch %s') % p)

    if not message:
        ph = patchheader(q.join(parent), q.plainmode)
        message, user = ph.message, ph.user
        for msg in messages:
            message.append('* * *')
            message.extend(msg)
        message = '\n'.join(message)

    if opts.get('edit'):
        message = ui.edit(message, user or ui.username())

    diffopts = q.patchopts(q.diffopts(), *patches)
    wlock = repo.wlock()
    try:
        q.refresh(repo, msg=message, git=diffopts.git)
        q.delete(repo, patches, opts)
        q.savedirty()
    finally:
        wlock.release()
2549
2549
@command("qgoto",
         [('c', 'check', None, _('tolerate non-conflicting local changes')),
          ('f', 'force', None, _('overwrite any local changes')),
          ('', 'no-backup', None, _('do not save backup copies of files'))],
         _('hg qgoto [OPTION]... PATCH'))
def goto(ui, repo, patch, **opts):
    '''push or pop patches until named patch is at top of stack

    Returns 0 on success.'''
    opts = fixcheckopts(ui, opts)
    q = repo.mq
    patch = q.lookup(patch)
    nobackup = opts.get('no_backup')
    check = opts.get('check')
    # pop if the target is already applied, otherwise push up to it
    if q.isapplied(patch):
        ret = q.pop(repo, patch, force=opts.get('force'), nobackup=nobackup,
                    check=check)
    else:
        ret = q.push(repo, patch, force=opts.get('force'), nobackup=nobackup,
                     check=check)
    q.savedirty()
    return ret
2572
2572
@command("qguard",
         [('l', 'list', None, _('list all patches and guards')),
          ('n', 'none', None, _('drop all guards'))],
         _('hg qguard [-l] [-n] [PATCH] [-- [+GUARD]... [-GUARD]...]'))
def guard(ui, repo, *args, **opts):
    '''set or print guards for a patch

    Guards control whether a patch can be pushed. A patch with no
    guards is always pushed. A patch with a positive guard ("+foo") is
    pushed only if the :hg:`qselect` command has activated it. A patch with
    a negative guard ("-foo") is never pushed if the :hg:`qselect` command
    has activated it.

    With no arguments, print the currently active guards.
    With arguments, set guards for the named patch.

    .. note::
       Specifying negative guards now requires '--'.

    To set guards on another patch::

      hg qguard other.patch -- +2.6.17 -stable

    Returns 0 on success.
    '''
    def status(idx):
        # print one series entry with its guards, colorized by state
        guards = q.seriesguards[idx] or ['unguarded']
        if q.series[idx] in applied:
            state = 'applied'
        elif q.pushable(idx)[0]:
            state = 'unapplied'
        else:
            state = 'guarded'
        label = 'qguard.patch qguard.%s qseries.%s' % (state, state)
        ui.write('%s: ' % ui.label(q.series[idx], label))

        for i, guard in enumerate(guards):
            if guard.startswith('+'):
                ui.write(guard, label='qguard.positive')
            elif guard.startswith('-'):
                ui.write(guard, label='qguard.negative')
            else:
                ui.write(guard, label='qguard.unguarded')
            if i != len(guards) - 1:
                ui.write(' ')
        ui.write('\n')
    q = repo.mq
    applied = set(p.name for p in q.applied)
    patch = None
    args = list(args)
    if opts.get('list'):
        if args or opts.get('none'):
            raise util.Abort(_('cannot mix -l/--list with options or '
                               'arguments'))
        for i in xrange(len(q.series)):
            status(i)
        return
    # no explicit patch name: default to the topmost applied patch
    if not args or args[0][0:1] in '-+':
        if not q.applied:
            raise util.Abort(_('no patches applied'))
        patch = q.applied[-1].name
    if patch is None and args[0][0:1] not in '-+':
        patch = args.pop(0)
    if patch is None:
        raise util.Abort(_('no patch to work with'))
    if args or opts.get('none'):
        idx = q.findseries(patch)
        if idx is None:
            raise util.Abort(_('no patch named %s') % patch)
        q.setguards(idx, args)
        q.savedirty()
    else:
        status(q.series.index(q.lookup(patch)))
2646
2646
@command("qheader", [], _('hg qheader [PATCH]'))
def header(ui, repo, patch=None):
    """print the header of the topmost or specified patch

    Returns 0 on success."""
    q = repo.mq

    if patch:
        patch = q.lookup(patch)
    else:
        if not q.applied:
            ui.write(_('no patches applied\n'))
            return 1
        patch = q.lookup('qtip')
    ph = patchheader(q.join(patch), q.plainmode)

    ui.write('\n'.join(ph.message) + '\n')
2664
2664
def lastsavename(path):
    """Return (filename, index) for the highest-numbered save file
    matching "<path>.N" in path's directory, or (None, None) if no
    save file exists."""
    (directory, base) = os.path.split(path)
    names = os.listdir(directory)
    namere = re.compile("%s.([0-9]+)" % base)
    maxindex = None
    maxname = None
    for f in names:
        m = namere.match(f)
        if m:
            index = int(m.group(1))
            if maxindex is None or index > maxindex:
                maxindex = index
                maxname = f
    if maxname:
        return (os.path.join(directory, maxname), maxindex)
    return (None, None)
2681
2681
def savename(path):
    """Return the next unused save-file name "<path>.N", one past the
    highest existing index found by lastsavename."""
    (last, index) = lastsavename(path)
    if last is None:
        index = 0
    newpath = path + ".%d" % (index + 1)
    return newpath
2688
2688
@command("^qpush",
         [('c', 'check', None, _('tolerate non-conflicting local changes')),
          ('f', 'force', None, _('apply on top of local changes')),
          ('e', 'exact', None,
           _('apply the target patch to its recorded parent')),
          ('l', 'list', None, _('list patch name in commit text')),
          ('a', 'all', None, _('apply all patches')),
          ('m', 'merge', None, _('merge from another queue (DEPRECATED)')),
          ('n', 'name', '',
           _('merge queue name (DEPRECATED)'), _('NAME')),
          ('', 'move', None,
           _('reorder patch series and apply only the patch')),
          ('', 'no-backup', None, _('do not save backup copies of files'))],
         _('hg qpush [-f] [-l] [-a] [--move] [PATCH | INDEX]'))
def push(ui, repo, patch=None, **opts):
    """push the next patch onto the stack

    By default, abort if the working directory contains uncommitted
    changes. With -c/--check, abort only if the uncommitted files
    overlap with patched files. With -f/--force, backup and patch over
    uncommitted changes.

    Return 0 on success.
    """
    q = repo.mq
    mergeq = None

    opts = fixcheckopts(ui, opts)
    if opts.get('merge'):
        # deprecated merge-queue support: locate (or name) the queue
        # to merge from before pushing
        if opts.get('name'):
            newpath = repo.join(opts.get('name'))
        else:
            newpath, i = lastsavename(q.path)
        if not newpath:
            ui.warn(_("no saved queues found, please use -n\n"))
            return 1
        mergeq = queue(ui, repo.path, newpath)
        ui.warn(_("merging with queue at: %s\n") % mergeq.path)
    ret = q.push(repo, patch, force=opts.get('force'), list=opts.get('list'),
                 mergeq=mergeq, all=opts.get('all'), move=opts.get('move'),
                 exact=opts.get('exact'), nobackup=opts.get('no_backup'),
                 check=opts.get('check'))
    return ret
2732
2732
@command("^qpop",
         [('a', 'all', None, _('pop all patches')),
          ('n', 'name', '',
           _('queue name to pop (DEPRECATED)'), _('NAME')),
          ('c', 'check', None, _('tolerate non-conflicting local changes')),
          ('f', 'force', None, _('forget any local changes to patched files')),
          ('', 'no-backup', None, _('do not save backup copies of files'))],
         _('hg qpop [-a] [-f] [PATCH | INDEX]'))
def pop(ui, repo, patch=None, **opts):
    """pop the current patch off the stack

    Without argument, pops off the top of the patch stack. If given a
    patch name, keeps popping off patches until the named patch is at
    the top of the stack.

    By default, abort if the working directory contains uncommitted
    changes. With -c/--check, abort only if the uncommitted files
    overlap with patched files. With -f/--force, backup and discard
    changes made to such files.

    Return 0 on success.
    """
    opts = fixcheckopts(ui, opts)
    localupdate = True
    if opts.get('name'):
        # deprecated: pop from a named (saved) queue; skip updating
        # the working directory since the queue is not the active one
        q = queue(ui, repo.path, repo.join(opts.get('name')))
        ui.warn(_('using patch queue: %s\n') % q.path)
        localupdate = False
    else:
        q = repo.mq
    ret = q.pop(repo, patch, force=opts.get('force'), update=localupdate,
                all=opts.get('all'), nobackup=opts.get('no_backup'),
                check=opts.get('check'))
    q.savedirty()
    return ret
2768
2768
@command("qrename|qmv", [], _('hg qrename PATCH1 [PATCH2]'))
def rename(ui, repo, patch, name=None, **opts):
    """rename a patch

    With one argument, renames the current patch to PATCH1.
    With two arguments, renames PATCH1 to PATCH2.

    Returns 0 on success."""
    q = repo.mq
    # One-argument form: the given name is the destination and the
    # source defaults to the currently applied (top) patch.
    if not name:
        name = patch
        patch = None

    if patch:
        patch = q.lookup(patch)
    else:
        if not q.applied:
            ui.write(_('no patches applied\n'))
            return
        patch = q.lookup('qtip')
    absdest = q.join(name)
    if os.path.isdir(absdest):
        # Renaming into an existing directory: keep the original
        # basename inside that directory (like "mv file dir/").
        name = normname(os.path.join(name, os.path.basename(patch)))
        absdest = q.join(name)
    # Reject invalid/colliding destination names before touching state.
    q.checkpatchname(name)

    ui.note(_('renaming %s to %s\n') % (patch, name))
    # Rewrite the series entry, preserving any '#guard' annotations
    # that were attached to the old name.
    i = q.findseries(patch)
    guards = q.guard_re.findall(q.fullseries[i])
    q.fullseries[i] = name + ''.join([' #' + g for g in guards])
    q.parseseries()
    q.seriesdirty = True

    # If the patch is applied, update the status entry to the new name
    # (the associated node is unchanged).
    info = q.isapplied(patch)
    if info:
        q.applied[info[0]] = statusentry(info[1], name)
        q.applieddirty = True

    destdir = os.path.dirname(absdest)
    if not os.path.isdir(destdir):
        os.makedirs(destdir)
    util.rename(q.join(patch), absdest)
    # If the patch queue is itself a versioned repository, record the
    # rename there as well (add/drop for untracked-added files, copy +
    # forget otherwise).
    r = q.qrepo()
    if r and patch in r.dirstate:
        wctx = r[None]
        wlock = r.wlock()
        try:
            if r.dirstate[patch] == 'a':
                r.dirstate.drop(patch)
                r.dirstate.add(name)
            else:
                wctx.copy(patch, name)
                wctx.forget([patch])
        finally:
            wlock.release()

    q.savedirty()
2826
2826
@command("qrestore",
         [('d', 'delete', None, _('delete save entry')),
          ('u', 'update', None, _('update queue working directory'))],
         _('hg qrestore [-d] [-u] REV'))
def restore(ui, repo, rev, **opts):
    """restore the queue state saved by a revision (DEPRECATED)

    This command is deprecated, use :hg:`rebase` instead."""
    # Resolve the revision argument to a node before delegating to the
    # queue's restore machinery.
    node = repo.lookup(rev)
    mq = repo.mq
    mq.restore(repo, node, delete=opts.get('delete'),
               qupdate=opts.get('update'))
    mq.savedirty()
    return 0
2841
2841
@command("qsave",
         [('c', 'copy', None, _('copy patch directory')),
          ('n', 'name', '',
           _('copy directory name'), _('NAME')),
          ('e', 'empty', None, _('clear queue status file')),
          ('f', 'force', None, _('force copy'))] + commands.commitopts,
         _('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]'))
def save(ui, repo, **opts):
    """save current queue state (DEPRECATED)

    This command is deprecated, use :hg:`rebase` instead."""
    mq = repo.mq
    message = cmdutil.logmessage(ui, opts)
    ret = mq.save(repo, msg=message)
    if ret:
        return ret
    mq.savedirty() # save to .hg/patches before copying
    if opts.get('copy'):
        srcpath = mq.path
        if opts.get('name'):
            # Explicit destination: refuse to clobber a non-directory,
            # and require --force to reuse an existing directory.
            destpath = os.path.join(mq.basepath, opts.get('name'))
            destexists = os.path.exists(destpath)
            if destexists and not os.path.isdir(destpath):
                raise util.Abort(_('destination %s exists and is not '
                                   'a directory') % destpath)
            if destexists and not opts.get('force'):
                raise util.Abort(_('destination %s exists, '
                                   'use -f to force') % destpath)
        else:
            # No name given: pick the next free "saved" directory name.
            destpath = savename(srcpath)
        ui.warn(_("copy %s to %s\n") % (srcpath, destpath))
        util.copyfiles(srcpath, destpath)
    if opts.get('empty'):
        # Clear the applied-patch status and persist the empty state.
        del mq.applied[:]
        mq.applieddirty = True
        mq.savedirty()
    return 0
2879
2879
@command("strip",
         [
          ('r', 'rev', [], _('strip specified revision (optional, '
                               'can specify revisions without this '
                               'option)'), _('REV')),
          ('f', 'force', None, _('force removal of changesets, discard '
                                 'uncommitted changes (no backup)')),
          ('b', 'backup', None, _('bundle only changesets with local revision'
                                  ' number greater than REV which are not'
                                  ' descendants of REV (DEPRECATED)')),
          ('', 'no-backup', None, _('no backups')),
          ('', 'nobackup', None, _('no backups (DEPRECATED)')),
          ('n', '', None, _('ignored (DEPRECATED)')),
          ('k', 'keep', None, _("do not modify working copy during strip"))],
          _('hg strip [-k] [-f] [-n] REV...'))
def strip(ui, repo, *revs, **opts):
    """strip changesets and all their descendants from the repository

    The strip command removes the specified changesets and all their
    descendants. If the working directory has uncommitted changes, the
    operation is aborted unless the --force flag is supplied, in which
    case changes will be discarded.

    If a parent of the working directory is stripped, then the working
    directory will automatically be updated to the most recent
    available ancestor of the stripped parent after the operation
    completes.

    Any stripped changesets are stored in ``.hg/strip-backup`` as a
    bundle (see :hg:`help bundle` and :hg:`help unbundle`). They can
    be restored by running :hg:`unbundle .hg/strip-backup/BUNDLE`,
    where BUNDLE is the bundle file created by the strip. Note that
    the local revision numbers will in general be different after the
    restore.

    Use the --no-backup option to discard the backup bundle once the
    operation completes.

    Return 0 on success.
    """
    # Map the backup-related flags to the mode string understood by
    # queue.strip: 'all' (default), 'strip' (-b) or 'none' (--no-backup).
    backup = 'all'
    if opts.get('backup'):
        backup = 'strip'
    elif opts.get('no_backup') or opts.get('nobackup'):
        backup = 'none'

    cl = repo.changelog
    # Positional REV arguments and -r/--rev values are equivalent.
    revs = list(revs) + opts.get('rev')
    revs = set(scmutil.revrange(repo, revs))
    if not revs:
        raise util.Abort(_('empty revision set'))

    # 'roots' are the requested revisions that are not themselves
    # descendants of another requested revision; stripping the roots
    # removes the whole 'strippedrevs' set.
    descendants = set(cl.descendants(*revs))
    strippedrevs = revs.union(descendants)
    roots = revs.difference(descendants)

    update = False
    # if one of the wdir parent is stripped we'll need
    # to update away to an earlier revision
    for p in repo.dirstate.parents():
        if p != nullid and cl.rev(p) in strippedrevs:
            update = True
            break

    rootnodes = set(cl.node(r) for r in roots)

    q = repo.mq
    if q.applied:
        # refresh queue state if we're about to strip
        # applied patches
        if cl.rev(repo.lookup('qtip')) in strippedrevs:
            q.applieddirty = True
            start = 0
            end = len(q.applied)
            for i, statusentry in enumerate(q.applied):
                if statusentry.node in rootnodes:
                    # if one of the stripped roots is an applied
                    # patch, only part of the queue is stripped
                    start = i
                    break
            # Drop the stripped tail of the applied-patch list and
            # persist the shortened queue state before stripping.
            del q.applied[start:end]
            q.savedirty()

    revs = list(rootnodes)
    if update and opts.get('keep'):
        # --keep: instead of updating the working copy, rebuild the
        # dirstate against the queue parent of the first stripped root
        # so the working-copy files are left untouched.
        wlock = repo.wlock()
        try:
            urev = repo.mq.qparents(repo, revs[0])
            repo.dirstate.rebuild(urev, repo[urev].manifest())
            repo.dirstate.write()
            update = False
        finally:
            wlock.release()

    repo.mq.strip(repo, revs, backup=backup, update=update,
                  force=opts.get('force'))
    return 0
2977
2977
@command("qselect",
         [('n', 'none', None, _('disable all guards')),
          ('s', 'series', None, _('list all guards in series file')),
          ('', 'pop', None, _('pop to before first guarded applied patch')),
          ('', 'reapply', None, _('pop, then reapply patches'))],
         _('hg qselect [OPTION]... [GUARD]...'))
def select(ui, repo, *args, **opts):
    '''set or print guarded patches to push

    Use the :hg:`qguard` command to set or print guards on patch, then use
    qselect to tell mq which guards to use. A patch will be pushed if
    it has no guards or any positive guards match the currently
    selected guard, but will not be pushed if any negative guards
    match the current guard. For example::

        qguard foo.patch -- -stable    (negative guard)
        qguard bar.patch    +stable    (positive guard)
        qselect stable

    This activates the "stable" guard. mq will skip foo.patch (because
    it has a negative match) but push bar.patch (because it has a
    positive match).

    With no arguments, prints the currently active guards.
    With one argument, sets the active guard.

    Use -n/--none to deactivate guards (no other arguments needed).
    When no guards are active, patches with positive guards are
    skipped and patches with negative guards are pushed.

    qselect can change the guards on applied patches. It does not pop
    guarded patches by default. Use --pop to pop back to the last
    applied patch that is not guarded. Use --reapply (which implies
    --pop) to push back to the current patch afterwards, but skip
    guarded patches.

    Use -s/--series to print a list of all guards in the series file
    (no other arguments needed). Use -v for more information.

    Returns 0 on success.'''

    q = repo.mq
    guards = q.active()
    if args or opts.get('none'):
        # Setting a new guard selection (possibly empty with --none).
        # Snapshot how many patches are unapplied/guarded beforehand so
        # we can report how the selection changed pushability.
        old_unapplied = q.unapplied(repo)
        old_guarded = [i for i in xrange(len(q.applied)) if
                       not q.pushable(i)[0]]
        q.setactive(args)
        q.savedirty()
        if not args:
            ui.status(_('guards deactivated\n'))
        if not opts.get('pop') and not opts.get('reapply'):
            unapplied = q.unapplied(repo)
            guarded = [i for i in xrange(len(q.applied))
                       if not q.pushable(i)[0]]
            if len(unapplied) != len(old_unapplied):
                ui.status(_('number of unguarded, unapplied patches has '
                            'changed from %d to %d\n') %
                          (len(old_unapplied), len(unapplied)))
            if len(guarded) != len(old_guarded):
                ui.status(_('number of guarded, applied patches has changed '
                            'from %d to %d\n') %
                          (len(old_guarded), len(guarded)))
    elif opts.get('series'):
        # -s/--series: tally how many series entries use each guard.
        guards = {}
        noguards = 0
        for gs in q.seriesguards:
            if not gs:
                noguards += 1
            for g in gs:
                guards.setdefault(g, 0)
                guards[g] += 1
        if ui.verbose:
            guards['NONE'] = noguards
        guards = guards.items()
        # Sort by guard name, skipping the leading +/- sign.
        guards.sort(key=lambda x: x[0][1:])
        if guards:
            ui.note(_('guards in series file:\n'))
            for guard, count in guards:
                ui.note('%2d ' % count)
                ui.write(guard, '\n')
        else:
            ui.note(_('no guards in series file\n'))
    else:
        # No arguments: just print the currently active guards.
        if guards:
            ui.note(_('active guards:\n'))
            for g in guards:
                ui.write(g, '\n')
        else:
            ui.write(_('no active guards\n'))
    # Remember the current top patch name now, so --reapply can push
    # back to it after popping guarded patches below.
    reapply = opts.get('reapply') and q.applied and q.appliedname(-1)
    popped = False
    if opts.get('pop') or opts.get('reapply'):
        # Pop down to just before the first applied patch that is no
        # longer pushable under the new guard selection.
        for i in xrange(len(q.applied)):
            pushable, reason = q.pushable(i)
            if not pushable:
                ui.status(_('popping guarded patches\n'))
                popped = True
                if i == 0:
                    q.pop(repo, all=True)
                else:
                    q.pop(repo, str(i - 1))
                break
    if popped:
        try:
            if reapply:
                ui.status(_('reapplying unguarded patches\n'))
                q.push(repo, reapply)
        finally:
            # Persist queue state even if the push above fails.
            q.savedirty()
3088
3088
@command("qfinish",
         [('a', 'applied', None, _('finish all applied changesets'))],
         _('hg qfinish [-a] [REV]...'))
def finish(ui, repo, *revrange, **opts):
    """move applied patches into repository history

    Finishes the specified revisions (corresponding to applied
    patches) by moving them out of mq control into regular repository
    history.

    Accepts a revision range or the -a/--applied option. If --applied
    is specified, all applied mq revisions are removed from mq
    control. Otherwise, the given revisions must be at the base of the
    stack of applied patches.

    This can be especially useful if your changes have been applied to
    an upstream repository, or if you are about to push your changes
    to upstream.

    Returns 0 on success.
    """
    applied = opts.get('applied')
    if not applied and not revrange:
        raise util.Abort(_('no revisions specified'))
    elif applied:
        # --applied finishes everything between qbase and qtip, in
        # addition to any explicitly given revisions.
        revrange = ('qbase::qtip',) + revrange

    mq = repo.mq
    if not mq.applied:
        ui.status(_('no patches applied\n'))
        return 0

    revs = scmutil.revrange(repo, revrange)
    if repo['.'].rev() in revs and repo[None].files():
        ui.warn(_('warning: uncommitted changes in the working directory\n'))
    # queue.finish may change phases but leaves it to the caller to take
    # the repo lock (to avoid a deadlock with wlock), so acquire it here.
    lock = repo.lock()
    try:
        mq.finish(repo, revs)
        mq.savedirty()
    finally:
        lock.release()
    return 0
3133
3133
3134 @command("qqueue",
3134 @command("qqueue",
3135 [('l', 'list', False, _('list all available queues')),
3135 [('l', 'list', False, _('list all available queues')),
3136 ('', 'active', False, _('print name of active queue')),
3136 ('', 'active', False, _('print name of active queue')),
3137 ('c', 'create', False, _('create new queue')),
3137 ('c', 'create', False, _('create new queue')),
3138 ('', 'rename', False, _('rename active queue')),
3138 ('', 'rename', False, _('rename active queue')),
3139 ('', 'delete', False, _('delete reference to queue')),
3139 ('', 'delete', False, _('delete reference to queue')),
3140 ('', 'purge', False, _('delete queue, and remove patch dir')),
3140 ('', 'purge', False, _('delete queue, and remove patch dir')),
3141 ],
3141 ],
3142 _('[OPTION] [QUEUE]'))
3142 _('[OPTION] [QUEUE]'))
3143 def qqueue(ui, repo, name=None, **opts):
3143 def qqueue(ui, repo, name=None, **opts):
3144 '''manage multiple patch queues
3144 '''manage multiple patch queues
3145
3145
3146 Supports switching between different patch queues, as well as creating
3146 Supports switching between different patch queues, as well as creating
3147 new patch queues and deleting existing ones.
3147 new patch queues and deleting existing ones.
3148
3148
3149 Omitting a queue name or specifying -l/--list will show you the registered
3149 Omitting a queue name or specifying -l/--list will show you the registered
3150 queues - by default the "normal" patches queue is registered. The currently
3150 queues - by default the "normal" patches queue is registered. The currently
3151 active queue will be marked with "(active)". Specifying --active will print
3151 active queue will be marked with "(active)". Specifying --active will print
3152 only the name of the active queue.
3152 only the name of the active queue.
3153
3153
3154 To create a new queue, use -c/--create. The queue is automatically made
3154 To create a new queue, use -c/--create. The queue is automatically made
3155 active, except in the case where there are applied patches from the
3155 active, except in the case where there are applied patches from the
3156 currently active queue in the repository. Then the queue will only be
3156 currently active queue in the repository. Then the queue will only be
3157 created and switching will fail.
3157 created and switching will fail.
3158
3158
3159 To delete an existing queue, use --delete. You cannot delete the currently
3159 To delete an existing queue, use --delete. You cannot delete the currently
3160 active queue.
3160 active queue.
3161
3161
3162 Returns 0 on success.
3162 Returns 0 on success.
3163 '''
3163 '''
3164 q = repo.mq
3164 q = repo.mq
3165 _defaultqueue = 'patches'
3165 _defaultqueue = 'patches'
3166 _allqueues = 'patches.queues'
3166 _allqueues = 'patches.queues'
3167 _activequeue = 'patches.queue'
3167 _activequeue = 'patches.queue'
3168
3168
3169 def _getcurrent():
3169 def _getcurrent():
3170 cur = os.path.basename(q.path)
3170 cur = os.path.basename(q.path)
3171 if cur.startswith('patches-'):
3171 if cur.startswith('patches-'):
3172 cur = cur[8:]
3172 cur = cur[8:]
3173 return cur
3173 return cur
3174
3174
3175 def _noqueues():
3175 def _noqueues():
3176 try:
3176 try:
3177 fh = repo.opener(_allqueues, 'r')
3177 fh = repo.opener(_allqueues, 'r')
3178 fh.close()
3178 fh.close()
3179 except IOError:
3179 except IOError:
3180 return True
3180 return True
3181
3181
3182 return False
3182 return False
3183
3183
3184 def _getqueues():
3184 def _getqueues():
3185 current = _getcurrent()
3185 current = _getcurrent()
3186
3186
3187 try:
3187 try:
3188 fh = repo.opener(_allqueues, 'r')
3188 fh = repo.opener(_allqueues, 'r')
3189 queues = [queue.strip() for queue in fh if queue.strip()]
3189 queues = [queue.strip() for queue in fh if queue.strip()]
3190 fh.close()
3190 fh.close()
3191 if current not in queues:
3191 if current not in queues:
3192 queues.append(current)
3192 queues.append(current)
3193 except IOError:
3193 except IOError:
3194 queues = [_defaultqueue]
3194 queues = [_defaultqueue]
3195
3195
3196 return sorted(queues)
3196 return sorted(queues)
3197
3197
3198 def _setactive(name):
3198 def _setactive(name):
3199 if q.applied:
3199 if q.applied:
3200 raise util.Abort(_('patches applied - cannot set new queue active'))
3200 raise util.Abort(_('patches applied - cannot set new queue active'))
3201 _setactivenocheck(name)
3201 _setactivenocheck(name)
3202
3202
3203 def _setactivenocheck(name):
3203 def _setactivenocheck(name):
3204 fh = repo.opener(_activequeue, 'w')
3204 fh = repo.opener(_activequeue, 'w')
3205 if name != 'patches':
3205 if name != 'patches':
3206 fh.write(name)
3206 fh.write(name)
3207 fh.close()
3207 fh.close()
3208
3208
3209 def _addqueue(name):
3209 def _addqueue(name):
3210 fh = repo.opener(_allqueues, 'a')
3210 fh = repo.opener(_allqueues, 'a')
3211 fh.write('%s\n' % (name,))
3211 fh.write('%s\n' % (name,))
3212 fh.close()
3212 fh.close()
3213
3213
3214 def _queuedir(name):
3214 def _queuedir(name):
3215 if name == 'patches':
3215 if name == 'patches':
3216 return repo.join('patches')
3216 return repo.join('patches')
3217 else:
3217 else:
3218 return repo.join('patches-' + name)
3218 return repo.join('patches-' + name)
3219
3219
3220 def _validname(name):
3220 def _validname(name):
3221 for n in name:
3221 for n in name:
3222 if n in ':\\/.':
3222 if n in ':\\/.':
3223 return False
3223 return False
3224 return True
3224 return True
3225
3225
3226 def _delete(name):
3226 def _delete(name):
3227 if name not in existing:
3227 if name not in existing:
3228 raise util.Abort(_('cannot delete queue that does not exist'))
3228 raise util.Abort(_('cannot delete queue that does not exist'))
3229
3229
3230 current = _getcurrent()
3230 current = _getcurrent()
3231
3231
3232 if name == current:
3232 if name == current:
3233 raise util.Abort(_('cannot delete currently active queue'))
3233 raise util.Abort(_('cannot delete currently active queue'))
3234
3234
3235 fh = repo.opener('patches.queues.new', 'w')
3235 fh = repo.opener('patches.queues.new', 'w')
3236 for queue in existing:
3236 for queue in existing:
3237 if queue == name:
3237 if queue == name:
3238 continue
3238 continue
3239 fh.write('%s\n' % (queue,))
3239 fh.write('%s\n' % (queue,))
3240 fh.close()
3240 fh.close()
3241 util.rename(repo.join('patches.queues.new'), repo.join(_allqueues))
3241 util.rename(repo.join('patches.queues.new'), repo.join(_allqueues))
3242
3242
3243 if not name or opts.get('list') or opts.get('active'):
3243 if not name or opts.get('list') or opts.get('active'):
3244 current = _getcurrent()
3244 current = _getcurrent()
3245 if opts.get('active'):
3245 if opts.get('active'):
3246 ui.write('%s\n' % (current,))
3246 ui.write('%s\n' % (current,))
3247 return
3247 return
3248 for queue in _getqueues():
3248 for queue in _getqueues():
3249 ui.write('%s' % (queue,))
3249 ui.write('%s' % (queue,))
3250 if queue == current and not ui.quiet:
3250 if queue == current and not ui.quiet:
3251 ui.write(_(' (active)\n'))
3251 ui.write(_(' (active)\n'))
3252 else:
3252 else:
3253 ui.write('\n')
3253 ui.write('\n')
3254 return
3254 return
3255
3255
3256 if not _validname(name):
3256 if not _validname(name):
3257 raise util.Abort(
3257 raise util.Abort(
3258 _('invalid queue name, may not contain the characters ":\\/."'))
3258 _('invalid queue name, may not contain the characters ":\\/."'))
3259
3259
3260 existing = _getqueues()
3260 existing = _getqueues()
3261
3261
3262 if opts.get('create'):
3262 if opts.get('create'):
3263 if name in existing:
3263 if name in existing:
3264 raise util.Abort(_('queue "%s" already exists') % name)
3264 raise util.Abort(_('queue "%s" already exists') % name)
3265 if _noqueues():
3265 if _noqueues():
3266 _addqueue(_defaultqueue)
3266 _addqueue(_defaultqueue)
3267 _addqueue(name)
3267 _addqueue(name)
3268 _setactive(name)
3268 _setactive(name)
3269 elif opts.get('rename'):
3269 elif opts.get('rename'):
3270 current = _getcurrent()
3270 current = _getcurrent()
3271 if name == current:
3271 if name == current:
3272 raise util.Abort(_('can\'t rename "%s" to its current name') % name)
3272 raise util.Abort(_('can\'t rename "%s" to its current name') % name)
3273 if name in existing:
3273 if name in existing:
3274 raise util.Abort(_('queue "%s" already exists') % name)
3274 raise util.Abort(_('queue "%s" already exists') % name)
3275
3275
3276 olddir = _queuedir(current)
3276 olddir = _queuedir(current)
3277 newdir = _queuedir(name)
3277 newdir = _queuedir(name)
3278
3278
3279 if os.path.exists(newdir):
3279 if os.path.exists(newdir):
3280 raise util.Abort(_('non-queue directory "%s" already exists') %
3280 raise util.Abort(_('non-queue directory "%s" already exists') %
3281 newdir)
3281 newdir)
3282
3282
3283 fh = repo.opener('patches.queues.new', 'w')
3283 fh = repo.opener('patches.queues.new', 'w')
3284 for queue in existing:
3284 for queue in existing:
3285 if queue == current:
3285 if queue == current:
3286 fh.write('%s\n' % (name,))
3286 fh.write('%s\n' % (name,))
3287 if os.path.exists(olddir):
3287 if os.path.exists(olddir):
3288 util.rename(olddir, newdir)
3288 util.rename(olddir, newdir)
3289 else:
3289 else:
3290 fh.write('%s\n' % (queue,))
3290 fh.write('%s\n' % (queue,))
3291 fh.close()
3291 fh.close()
3292 util.rename(repo.join('patches.queues.new'), repo.join(_allqueues))
3292 util.rename(repo.join('patches.queues.new'), repo.join(_allqueues))
3293 _setactivenocheck(name)
3293 _setactivenocheck(name)
3294 elif opts.get('delete'):
3294 elif opts.get('delete'):
3295 _delete(name)
3295 _delete(name)
3296 elif opts.get('purge'):
3296 elif opts.get('purge'):
3297 if name in existing:
3297 if name in existing:
3298 _delete(name)
3298 _delete(name)
3299 qdir = _queuedir(name)
3299 qdir = _queuedir(name)
3300 if os.path.exists(qdir):
3300 if os.path.exists(qdir):
3301 shutil.rmtree(qdir)
3301 shutil.rmtree(qdir)
3302 else:
3302 else:
3303 if name not in existing:
3303 if name not in existing:
3304 raise util.Abort(_('use --create to create a new queue'))
3304 raise util.Abort(_('use --create to create a new queue'))
3305 _setactive(name)
3305 _setactive(name)
3306
3306
3307 def mqphasedefaults(repo, roots):
3307 def mqphasedefaults(repo, roots):
3308 """callback used to set mq changeset as secret when no phase data exists"""
3308 """callback used to set mq changeset as secret when no phase data exists"""
3309 if repo.mq.applied:
3309 if repo.mq.applied:
3310 if repo.ui.configbool('mq', 'secret', False):
3310 if repo.ui.configbool('mq', 'secret', False):
3311 mqphase = phases.secret
3311 mqphase = phases.secret
3312 else:
3312 else:
3313 mqphase = phases.draft
3313 mqphase = phases.draft
3314 qbase = repo[repo.mq.applied[0].node]
3314 qbase = repo[repo.mq.applied[0].node]
3315 roots[mqphase].add(qbase.node())
3315 roots[mqphase].add(qbase.node())
3316 return roots
3316 return roots
3317
3317
3318 def reposetup(ui, repo):
3318 def reposetup(ui, repo):
3319 class mqrepo(repo.__class__):
3319 class mqrepo(repo.__class__):
3320 @util.propertycache
3320 @util.propertycache
3321 def mq(self):
3321 def mq(self):
3322 return queue(self.ui, self.path)
3322 return queue(self.ui, self.path)
3323
3323
3324 def abortifwdirpatched(self, errmsg, force=False):
3324 def abortifwdirpatched(self, errmsg, force=False):
3325 if self.mq.applied and not force:
3325 if self.mq.applied and not force:
3326 parents = self.dirstate.parents()
3326 parents = self.dirstate.parents()
3327 patches = [s.node for s in self.mq.applied]
3327 patches = [s.node for s in self.mq.applied]
3328 if parents[0] in patches or parents[1] in patches:
3328 if parents[0] in patches or parents[1] in patches:
3329 raise util.Abort(errmsg)
3329 raise util.Abort(errmsg)
3330
3330
3331 def commit(self, text="", user=None, date=None, match=None,
3331 def commit(self, text="", user=None, date=None, match=None,
3332 force=False, editor=False, extra={}):
3332 force=False, editor=False, extra={}):
3333 self.abortifwdirpatched(
3333 self.abortifwdirpatched(
3334 _('cannot commit over an applied mq patch'),
3334 _('cannot commit over an applied mq patch'),
3335 force)
3335 force)
3336
3336
3337 return super(mqrepo, self).commit(text, user, date, match, force,
3337 return super(mqrepo, self).commit(text, user, date, match, force,
3338 editor, extra)
3338 editor, extra)
3339
3339
3340 def checkpush(self, force, revs):
3340 def checkpush(self, force, revs):
3341 if self.mq.applied and not force:
3341 if self.mq.applied and not force:
3342 outapplied = [e.node for e in self.mq.applied]
3342 outapplied = [e.node for e in self.mq.applied]
3343 if revs:
3343 if revs:
3344 # Assume applied patches have no non-patch descendants and
3344 # Assume applied patches have no non-patch descendants and
3345 # are not on remote already. Filtering any changeset not
3345 # are not on remote already. Filtering any changeset not
3346 # pushed.
3346 # pushed.
3347 heads = set(revs)
3347 heads = set(revs)
3348 for node in reversed(outapplied):
3348 for node in reversed(outapplied):
3349 if node in heads:
3349 if node in heads:
3350 break
3350 break
3351 else:
3351 else:
3352 outapplied.pop()
3352 outapplied.pop()
3353 # looking for pushed and shared changeset
3353 # looking for pushed and shared changeset
3354 for node in outapplied:
3354 for node in outapplied:
3355 if repo[node].phase() < phases.secret:
3355 if repo[node].phase() < phases.secret:
3356 raise util.Abort(_('source has mq patches applied'))
3356 raise util.Abort(_('source has mq patches applied'))
3357 # no non-secret patches pushed
3357 # no non-secret patches pushed
3358 super(mqrepo, self).checkpush(force, revs)
3358 super(mqrepo, self).checkpush(force, revs)
3359
3359
3360 def _findtags(self):
3360 def _findtags(self):
3361 '''augment tags from base class with patch tags'''
3361 '''augment tags from base class with patch tags'''
3362 result = super(mqrepo, self)._findtags()
3362 result = super(mqrepo, self)._findtags()
3363
3363
3364 q = self.mq
3364 q = self.mq
3365 if not q.applied:
3365 if not q.applied:
3366 return result
3366 return result
3367
3367
3368 mqtags = [(patch.node, patch.name) for patch in q.applied]
3368 mqtags = [(patch.node, patch.name) for patch in q.applied]
3369
3369
3370 try:
3370 try:
3371 self.changelog.rev(mqtags[-1][0])
3371 self.changelog.rev(mqtags[-1][0])
3372 except error.LookupError:
3372 except error.LookupError:
3373 self.ui.warn(_('mq status file refers to unknown node %s\n')
3373 self.ui.warn(_('mq status file refers to unknown node %s\n')
3374 % short(mqtags[-1][0]))
3374 % short(mqtags[-1][0]))
3375 return result
3375 return result
3376
3376
3377 mqtags.append((mqtags[-1][0], 'qtip'))
3377 mqtags.append((mqtags[-1][0], 'qtip'))
3378 mqtags.append((mqtags[0][0], 'qbase'))
3378 mqtags.append((mqtags[0][0], 'qbase'))
3379 mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
3379 mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
3380 tags = result[0]
3380 tags = result[0]
3381 for patch in mqtags:
3381 for patch in mqtags:
3382 if patch[1] in tags:
3382 if patch[1] in tags:
3383 self.ui.warn(_('Tag %s overrides mq patch of the same '
3383 self.ui.warn(_('Tag %s overrides mq patch of the same '
3384 'name\n') % patch[1])
3384 'name\n') % patch[1])
3385 else:
3385 else:
3386 tags[patch[1]] = patch[0]
3386 tags[patch[1]] = patch[0]
3387
3387
3388 return result
3388 return result
3389
3389
3390 def _branchtags(self, partial, lrev):
3390 def _branchtags(self, partial, lrev):
3391 q = self.mq
3391 q = self.mq
3392 cl = self.changelog
3392 cl = self.changelog
3393 qbase = None
3393 qbase = None
3394 if not q.applied:
3394 if not q.applied:
3395 if getattr(self, '_committingpatch', False):
3395 if getattr(self, '_committingpatch', False):
3396 # Committing a new patch, must be tip
3396 # Committing a new patch, must be tip
3397 qbase = len(cl) - 1
3397 qbase = len(cl) - 1
3398 else:
3398 else:
3399 qbasenode = q.applied[0].node
3399 qbasenode = q.applied[0].node
3400 try:
3400 try:
3401 qbase = cl.rev(qbasenode)
3401 qbase = cl.rev(qbasenode)
3402 except error.LookupError:
3402 except error.LookupError:
3403 self.ui.warn(_('mq status file refers to unknown node %s\n')
3403 self.ui.warn(_('mq status file refers to unknown node %s\n')
3404 % short(qbasenode))
3404 % short(qbasenode))
3405 if qbase is None:
3405 if qbase is None:
3406 return super(mqrepo, self)._branchtags(partial, lrev)
3406 return super(mqrepo, self)._branchtags(partial, lrev)
3407
3407
3408 start = lrev + 1
3408 start = lrev + 1
3409 if start < qbase:
3409 if start < qbase:
3410 # update the cache (excluding the patches) and save it
3410 # update the cache (excluding the patches) and save it
3411 ctxgen = (self[r] for r in xrange(lrev + 1, qbase))
3411 ctxgen = (self[r] for r in xrange(lrev + 1, qbase))
3412 self._updatebranchcache(partial, ctxgen)
3412 self._updatebranchcache(partial, ctxgen)
3413 self._writebranchcache(partial, cl.node(qbase - 1), qbase - 1)
3413 self._writebranchcache(partial, cl.node(qbase - 1), qbase - 1)
3414 start = qbase
3414 start = qbase
3415 # if start = qbase, the cache is as updated as it should be.
3415 # if start = qbase, the cache is as updated as it should be.
3416 # if start > qbase, the cache includes (part of) the patches.
3416 # if start > qbase, the cache includes (part of) the patches.
3417 # we might as well use it, but we won't save it.
3417 # we might as well use it, but we won't save it.
3418
3418
3419 # update the cache up to the tip
3419 # update the cache up to the tip
3420 ctxgen = (self[r] for r in xrange(start, len(cl)))
3420 ctxgen = (self[r] for r in xrange(start, len(cl)))
3421 self._updatebranchcache(partial, ctxgen)
3421 self._updatebranchcache(partial, ctxgen)
3422
3422
3423 return partial
3423 return partial
3424
3424
3425 if repo.local():
3425 if repo.local():
3426 repo.__class__ = mqrepo
3426 repo.__class__ = mqrepo
3427
3427
3428 repo._phasedefaults.append(mqphasedefaults)
3428 repo._phasedefaults.append(mqphasedefaults)
3429
3429
3430 def mqimport(orig, ui, repo, *args, **kwargs):
3430 def mqimport(orig, ui, repo, *args, **kwargs):
3431 if (util.safehasattr(repo, 'abortifwdirpatched')
3431 if (util.safehasattr(repo, 'abortifwdirpatched')
3432 and not kwargs.get('no_commit', False)):
3432 and not kwargs.get('no_commit', False)):
3433 repo.abortifwdirpatched(_('cannot import over an applied patch'),
3433 repo.abortifwdirpatched(_('cannot import over an applied patch'),
3434 kwargs.get('force'))
3434 kwargs.get('force'))
3435 return orig(ui, repo, *args, **kwargs)
3435 return orig(ui, repo, *args, **kwargs)
3436
3436
3437 def mqinit(orig, ui, *args, **kwargs):
3437 def mqinit(orig, ui, *args, **kwargs):
3438 mq = kwargs.pop('mq', None)
3438 mq = kwargs.pop('mq', None)
3439
3439
3440 if not mq:
3440 if not mq:
3441 return orig(ui, *args, **kwargs)
3441 return orig(ui, *args, **kwargs)
3442
3442
3443 if args:
3443 if args:
3444 repopath = args[0]
3444 repopath = args[0]
3445 if not hg.islocal(repopath):
3445 if not hg.islocal(repopath):
3446 raise util.Abort(_('only a local queue repository '
3446 raise util.Abort(_('only a local queue repository '
3447 'may be initialized'))
3447 'may be initialized'))
3448 else:
3448 else:
3449 repopath = cmdutil.findrepo(os.getcwd())
3449 repopath = cmdutil.findrepo(os.getcwd())
3450 if not repopath:
3450 if not repopath:
3451 raise util.Abort(_('there is no Mercurial repository here '
3451 raise util.Abort(_('there is no Mercurial repository here '
3452 '(.hg not found)'))
3452 '(.hg not found)'))
3453 repo = hg.repository(ui, repopath)
3453 repo = hg.repository(ui, repopath)
3454 return qinit(ui, repo, True)
3454 return qinit(ui, repo, True)
3455
3455
3456 def mqcommand(orig, ui, repo, *args, **kwargs):
3456 def mqcommand(orig, ui, repo, *args, **kwargs):
3457 """Add --mq option to operate on patch repository instead of main"""
3457 """Add --mq option to operate on patch repository instead of main"""
3458
3458
3459 # some commands do not like getting unknown options
3459 # some commands do not like getting unknown options
3460 mq = kwargs.pop('mq', None)
3460 mq = kwargs.pop('mq', None)
3461
3461
3462 if not mq:
3462 if not mq:
3463 return orig(ui, repo, *args, **kwargs)
3463 return orig(ui, repo, *args, **kwargs)
3464
3464
3465 q = repo.mq
3465 q = repo.mq
3466 r = q.qrepo()
3466 r = q.qrepo()
3467 if not r:
3467 if not r:
3468 raise util.Abort(_('no queue repository'))
3468 raise util.Abort(_('no queue repository'))
3469 return orig(r.ui, r, *args, **kwargs)
3469 return orig(r.ui, r, *args, **kwargs)
3470
3470
3471 def summary(orig, ui, repo, *args, **kwargs):
3471 def summary(orig, ui, repo, *args, **kwargs):
3472 r = orig(ui, repo, *args, **kwargs)
3472 r = orig(ui, repo, *args, **kwargs)
3473 q = repo.mq
3473 q = repo.mq
3474 m = []
3474 m = []
3475 a, u = len(q.applied), len(q.unapplied(repo))
3475 a, u = len(q.applied), len(q.unapplied(repo))
3476 if a:
3476 if a:
3477 m.append(ui.label(_("%d applied"), 'qseries.applied') % a)
3477 m.append(ui.label(_("%d applied"), 'qseries.applied') % a)
3478 if u:
3478 if u:
3479 m.append(ui.label(_("%d unapplied"), 'qseries.unapplied') % u)
3479 m.append(ui.label(_("%d unapplied"), 'qseries.unapplied') % u)
3480 if m:
3480 if m:
3481 ui.write("mq: %s\n" % ', '.join(m))
3481 ui.write("mq: %s\n" % ', '.join(m))
3482 else:
3482 else:
3483 ui.note(_("mq: (empty queue)\n"))
3483 ui.note(_("mq: (empty queue)\n"))
3484 return r
3484 return r
3485
3485
3486 def revsetmq(repo, subset, x):
3486 def revsetmq(repo, subset, x):
3487 """``mq()``
3487 """``mq()``
3488 Changesets managed by MQ.
3488 Changesets managed by MQ.
3489 """
3489 """
3490 revset.getargs(x, 0, 0, _("mq takes no arguments"))
3490 revset.getargs(x, 0, 0, _("mq takes no arguments"))
3491 applied = set([repo[r.node].rev() for r in repo.mq.applied])
3491 applied = set([repo[r.node].rev() for r in repo.mq.applied])
3492 return [r for r in subset if r in applied]
3492 return [r for r in subset if r in applied]
3493
3493
3494 def extsetup(ui):
3494 def extsetup(ui):
3495 revset.symbols['mq'] = revsetmq
3495 revset.symbols['mq'] = revsetmq
3496
3496
3497 # tell hggettext to extract docstrings from these functions:
3497 # tell hggettext to extract docstrings from these functions:
3498 i18nfunctions = [revsetmq]
3498 i18nfunctions = [revsetmq]
3499
3499
3500 def uisetup(ui):
3500 def uisetup(ui):
3501 mqopt = [('', 'mq', None, _("operate on patch repository"))]
3501 mqopt = [('', 'mq', None, _("operate on patch repository"))]
3502
3502
3503 extensions.wrapcommand(commands.table, 'import', mqimport)
3503 extensions.wrapcommand(commands.table, 'import', mqimport)
3504 extensions.wrapcommand(commands.table, 'summary', summary)
3504 extensions.wrapcommand(commands.table, 'summary', summary)
3505
3505
3506 entry = extensions.wrapcommand(commands.table, 'init', mqinit)
3506 entry = extensions.wrapcommand(commands.table, 'init', mqinit)
3507 entry[1].extend(mqopt)
3507 entry[1].extend(mqopt)
3508
3508
3509 nowrap = set(commands.norepo.split(" "))
3509 nowrap = set(commands.norepo.split(" "))
3510
3510
3511 def dotable(cmdtable):
3511 def dotable(cmdtable):
3512 for cmd in cmdtable.keys():
3512 for cmd in cmdtable.keys():
3513 cmd = cmdutil.parsealiases(cmd)[0]
3513 cmd = cmdutil.parsealiases(cmd)[0]
3514 if cmd in nowrap:
3514 if cmd in nowrap:
3515 continue
3515 continue
3516 entry = extensions.wrapcommand(cmdtable, cmd, mqcommand)
3516 entry = extensions.wrapcommand(cmdtable, cmd, mqcommand)
3517 entry[1].extend(mqopt)
3517 entry[1].extend(mqopt)
3518
3518
3519 dotable(commands.table)
3519 dotable(commands.table)
3520
3520
3521 for extname, extmodule in extensions.extensions():
3521 for extname, extmodule in extensions.extensions():
3522 if extmodule.__file__ != __file__:
3522 if extmodule.__file__ != __file__:
3523 dotable(getattr(extmodule, 'cmdtable', {}))
3523 dotable(getattr(extmodule, 'cmdtable', {}))
3524
3524
3525
3525
3526 colortable = {'qguard.negative': 'red',
3526 colortable = {'qguard.negative': 'red',
3527 'qguard.positive': 'yellow',
3527 'qguard.positive': 'yellow',
3528 'qguard.unguarded': 'green',
3528 'qguard.unguarded': 'green',
3529 'qseries.applied': 'blue bold underline',
3529 'qseries.applied': 'blue bold underline',
3530 'qseries.guarded': 'black bold',
3530 'qseries.guarded': 'black bold',
3531 'qseries.missing': 'red bold',
3531 'qseries.missing': 'red bold',
3532 'qseries.unapplied': 'black bold'}
3532 'qseries.unapplied': 'black bold'}
@@ -1,5772 +1,5772 b''
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import hex, bin, nullid, nullrev, short
8 from node import hex, bin, nullid, nullrev, short
9 from lock import release
9 from lock import release
10 from i18n import _, gettext
10 from i18n import _, gettext
11 import os, re, difflib, time, tempfile, errno
11 import os, re, difflib, time, tempfile, errno
12 import hg, scmutil, util, revlog, extensions, copies, error, bookmarks
12 import hg, scmutil, util, revlog, extensions, copies, error, bookmarks
13 import patch, help, url, encoding, templatekw, discovery
13 import patch, help, url, encoding, templatekw, discovery
14 import archival, changegroup, cmdutil, hbisect
14 import archival, changegroup, cmdutil, hbisect
15 import sshserver, hgweb, hgweb.server, commandserver
15 import sshserver, hgweb, hgweb.server, commandserver
16 import merge as mergemod
16 import merge as mergemod
17 import minirst, revset, fileset
17 import minirst, revset, fileset
18 import dagparser, context, simplemerge
18 import dagparser, context, simplemerge
19 import random, setdiscovery, treediscovery, dagutil, pvec
19 import random, setdiscovery, treediscovery, dagutil, pvec
20 import phases
20 import phases
21
21
22 table = {}
22 table = {}
23
23
24 command = cmdutil.command(table)
24 command = cmdutil.command(table)
25
25
26 # common command options
26 # common command options
27
27
28 globalopts = [
28 globalopts = [
29 ('R', 'repository', '',
29 ('R', 'repository', '',
30 _('repository root directory or name of overlay bundle file'),
30 _('repository root directory or name of overlay bundle file'),
31 _('REPO')),
31 _('REPO')),
32 ('', 'cwd', '',
32 ('', 'cwd', '',
33 _('change working directory'), _('DIR')),
33 _('change working directory'), _('DIR')),
34 ('y', 'noninteractive', None,
34 ('y', 'noninteractive', None,
35 _('do not prompt, automatically pick the first choice for all prompts')),
35 _('do not prompt, automatically pick the first choice for all prompts')),
36 ('q', 'quiet', None, _('suppress output')),
36 ('q', 'quiet', None, _('suppress output')),
37 ('v', 'verbose', None, _('enable additional output')),
37 ('v', 'verbose', None, _('enable additional output')),
38 ('', 'config', [],
38 ('', 'config', [],
39 _('set/override config option (use \'section.name=value\')'),
39 _('set/override config option (use \'section.name=value\')'),
40 _('CONFIG')),
40 _('CONFIG')),
41 ('', 'debug', None, _('enable debugging output')),
41 ('', 'debug', None, _('enable debugging output')),
42 ('', 'debugger', None, _('start debugger')),
42 ('', 'debugger', None, _('start debugger')),
43 ('', 'encoding', encoding.encoding, _('set the charset encoding'),
43 ('', 'encoding', encoding.encoding, _('set the charset encoding'),
44 _('ENCODE')),
44 _('ENCODE')),
45 ('', 'encodingmode', encoding.encodingmode,
45 ('', 'encodingmode', encoding.encodingmode,
46 _('set the charset encoding mode'), _('MODE')),
46 _('set the charset encoding mode'), _('MODE')),
47 ('', 'traceback', None, _('always print a traceback on exception')),
47 ('', 'traceback', None, _('always print a traceback on exception')),
48 ('', 'time', None, _('time how long the command takes')),
48 ('', 'time', None, _('time how long the command takes')),
49 ('', 'profile', None, _('print command execution profile')),
49 ('', 'profile', None, _('print command execution profile')),
50 ('', 'version', None, _('output version information and exit')),
50 ('', 'version', None, _('output version information and exit')),
51 ('h', 'help', None, _('display help and exit')),
51 ('h', 'help', None, _('display help and exit')),
52 ]
52 ]
53
53
54 dryrunopts = [('n', 'dry-run', None,
54 dryrunopts = [('n', 'dry-run', None,
55 _('do not perform actions, just print output'))]
55 _('do not perform actions, just print output'))]
56
56
57 remoteopts = [
57 remoteopts = [
58 ('e', 'ssh', '',
58 ('e', 'ssh', '',
59 _('specify ssh command to use'), _('CMD')),
59 _('specify ssh command to use'), _('CMD')),
60 ('', 'remotecmd', '',
60 ('', 'remotecmd', '',
61 _('specify hg command to run on the remote side'), _('CMD')),
61 _('specify hg command to run on the remote side'), _('CMD')),
62 ('', 'insecure', None,
62 ('', 'insecure', None,
63 _('do not verify server certificate (ignoring web.cacerts config)')),
63 _('do not verify server certificate (ignoring web.cacerts config)')),
64 ]
64 ]
65
65
66 walkopts = [
66 walkopts = [
67 ('I', 'include', [],
67 ('I', 'include', [],
68 _('include names matching the given patterns'), _('PATTERN')),
68 _('include names matching the given patterns'), _('PATTERN')),
69 ('X', 'exclude', [],
69 ('X', 'exclude', [],
70 _('exclude names matching the given patterns'), _('PATTERN')),
70 _('exclude names matching the given patterns'), _('PATTERN')),
71 ]
71 ]
72
72
73 commitopts = [
73 commitopts = [
74 ('m', 'message', '',
74 ('m', 'message', '',
75 _('use text as commit message'), _('TEXT')),
75 _('use text as commit message'), _('TEXT')),
76 ('l', 'logfile', '',
76 ('l', 'logfile', '',
77 _('read commit message from file'), _('FILE')),
77 _('read commit message from file'), _('FILE')),
78 ]
78 ]
79
79
80 commitopts2 = [
80 commitopts2 = [
81 ('d', 'date', '',
81 ('d', 'date', '',
82 _('record the specified date as commit date'), _('DATE')),
82 _('record the specified date as commit date'), _('DATE')),
83 ('u', 'user', '',
83 ('u', 'user', '',
84 _('record the specified user as committer'), _('USER')),
84 _('record the specified user as committer'), _('USER')),
85 ]
85 ]
86
86
87 templateopts = [
87 templateopts = [
88 ('', 'style', '',
88 ('', 'style', '',
89 _('display using template map file'), _('STYLE')),
89 _('display using template map file'), _('STYLE')),
90 ('', 'template', '',
90 ('', 'template', '',
91 _('display with template'), _('TEMPLATE')),
91 _('display with template'), _('TEMPLATE')),
92 ]
92 ]
93
93
94 logopts = [
94 logopts = [
95 ('p', 'patch', None, _('show patch')),
95 ('p', 'patch', None, _('show patch')),
96 ('g', 'git', None, _('use git extended diff format')),
96 ('g', 'git', None, _('use git extended diff format')),
97 ('l', 'limit', '',
97 ('l', 'limit', '',
98 _('limit number of changes displayed'), _('NUM')),
98 _('limit number of changes displayed'), _('NUM')),
99 ('M', 'no-merges', None, _('do not show merges')),
99 ('M', 'no-merges', None, _('do not show merges')),
100 ('', 'stat', None, _('output diffstat-style summary of changes')),
100 ('', 'stat', None, _('output diffstat-style summary of changes')),
101 ] + templateopts
101 ] + templateopts
102
102
103 diffopts = [
103 diffopts = [
104 ('a', 'text', None, _('treat all files as text')),
104 ('a', 'text', None, _('treat all files as text')),
105 ('g', 'git', None, _('use git extended diff format')),
105 ('g', 'git', None, _('use git extended diff format')),
106 ('', 'nodates', None, _('omit dates from diff headers'))
106 ('', 'nodates', None, _('omit dates from diff headers'))
107 ]
107 ]
108
108
109 diffwsopts = [
109 diffwsopts = [
110 ('w', 'ignore-all-space', None,
110 ('w', 'ignore-all-space', None,
111 _('ignore white space when comparing lines')),
111 _('ignore white space when comparing lines')),
112 ('b', 'ignore-space-change', None,
112 ('b', 'ignore-space-change', None,
113 _('ignore changes in the amount of white space')),
113 _('ignore changes in the amount of white space')),
114 ('B', 'ignore-blank-lines', None,
114 ('B', 'ignore-blank-lines', None,
115 _('ignore changes whose lines are all blank')),
115 _('ignore changes whose lines are all blank')),
116 ]
116 ]
117
117
118 diffopts2 = [
118 diffopts2 = [
119 ('p', 'show-function', None, _('show which function each change is in')),
119 ('p', 'show-function', None, _('show which function each change is in')),
120 ('', 'reverse', None, _('produce a diff that undoes the changes')),
120 ('', 'reverse', None, _('produce a diff that undoes the changes')),
121 ] + diffwsopts + [
121 ] + diffwsopts + [
122 ('U', 'unified', '',
122 ('U', 'unified', '',
123 _('number of lines of context to show'), _('NUM')),
123 _('number of lines of context to show'), _('NUM')),
124 ('', 'stat', None, _('output diffstat-style summary of changes')),
124 ('', 'stat', None, _('output diffstat-style summary of changes')),
125 ]
125 ]
126
126
127 mergetoolopts = [
127 mergetoolopts = [
128 ('t', 'tool', '', _('specify merge tool')),
128 ('t', 'tool', '', _('specify merge tool')),
129 ]
129 ]
130
130
131 similarityopts = [
131 similarityopts = [
132 ('s', 'similarity', '',
132 ('s', 'similarity', '',
133 _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
133 _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
134 ]
134 ]
135
135
136 subrepoopts = [
136 subrepoopts = [
137 ('S', 'subrepos', None,
137 ('S', 'subrepos', None,
138 _('recurse into subrepositories'))
138 _('recurse into subrepositories'))
139 ]
139 ]
140
140
141 # Commands start here, listed alphabetically
141 # Commands start here, listed alphabetically
142
142
143 @command('^add',
143 @command('^add',
144 walkopts + subrepoopts + dryrunopts,
144 walkopts + subrepoopts + dryrunopts,
145 _('[OPTION]... [FILE]...'))
145 _('[OPTION]... [FILE]...'))
146 def add(ui, repo, *pats, **opts):
146 def add(ui, repo, *pats, **opts):
147 """add the specified files on the next commit
147 """add the specified files on the next commit
148
148
149 Schedule files to be version controlled and added to the
149 Schedule files to be version controlled and added to the
150 repository.
150 repository.
151
151
152 The files will be added to the repository at the next commit. To
152 The files will be added to the repository at the next commit. To
153 undo an add before that, see :hg:`forget`.
153 undo an add before that, see :hg:`forget`.
154
154
155 If no names are given, add all files to the repository.
155 If no names are given, add all files to the repository.
156
156
157 .. container:: verbose
157 .. container:: verbose
158
158
159 An example showing how new (unknown) files are added
159 An example showing how new (unknown) files are added
160 automatically by :hg:`add`::
160 automatically by :hg:`add`::
161
161
162 $ ls
162 $ ls
163 foo.c
163 foo.c
164 $ hg status
164 $ hg status
165 ? foo.c
165 ? foo.c
166 $ hg add
166 $ hg add
167 adding foo.c
167 adding foo.c
168 $ hg status
168 $ hg status
169 A foo.c
169 A foo.c
170
170
171 Returns 0 if all files are successfully added.
171 Returns 0 if all files are successfully added.
172 """
172 """
173
173
174 m = scmutil.match(repo[None], pats, opts)
174 m = scmutil.match(repo[None], pats, opts)
175 rejected = cmdutil.add(ui, repo, m, opts.get('dry_run'),
175 rejected = cmdutil.add(ui, repo, m, opts.get('dry_run'),
176 opts.get('subrepos'), prefix="", explicitonly=False)
176 opts.get('subrepos'), prefix="", explicitonly=False)
177 return rejected and 1 or 0
177 return rejected and 1 or 0
178
178
@command('addremove',
    similarityopts + walkopts + dryrunopts,
    _('[OPTION]... [FILE]...'))
def addremove(ui, repo, *pats, **opts):
    """add all new files, delete all missing files

    Add all new files and remove all missing files from the
    repository.

    New files are ignored if they match any of the patterns in
    ``.hgignore``. As with add, these changes take effect at the next
    commit.

    Use the -s/--similarity option to detect renamed files. With a
    parameter greater than 0, this compares every removed file with
    every added file and records those similar enough as renames. This
    option takes a percentage between 0 (disabled) and 100 (files must
    be identical) as its parameter. Detecting renamed files this way
    can be expensive. After using this option, :hg:`status -C` can be
    used to check which files were identified as moved or renamed.
    If this option is not specified, only renames of identical files
    are detected.

    Returns 0 if all files are successfully added.
    """
    # --similarity defaults to 100 (identical files only) when unset or
    # given as an empty string; a non-numeric value is a usage error.
    try:
        sim = float(opts.get('similarity') or 100)
    except ValueError:
        raise util.Abort(_('similarity must be a number'))
    if sim < 0 or sim > 100:
        raise util.Abort(_('similarity must be between 0 and 100'))
    # scmutil.addremove expects the similarity as a 0.0-1.0 ratio.
    return scmutil.addremove(repo, pats, opts, similarity=sim / 100.0)
211
211
@command('^annotate|blame',
    [('r', 'rev', '', _('annotate the specified revision'), _('REV')),
    ('', 'follow', None,
     _('follow copies/renames and list the filename (DEPRECATED)')),
    ('', 'no-follow', None, _("don't follow copies and renames")),
    ('a', 'text', None, _('treat all files as text')),
    ('u', 'user', None, _('list the author (long with -v)')),
    ('f', 'file', None, _('list the filename')),
    ('d', 'date', None, _('list the date (short with -q)')),
    ('n', 'number', None, _('list the revision number (default)')),
    ('c', 'changeset', None, _('list the changeset')),
    ('l', 'line-number', None, _('show line number at the first appearance'))
    ] + diffwsopts + walkopts,
    _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...'))
def annotate(ui, repo, *pats, **opts):
    """show changeset information by line for each file

    List changes in files, showing the revision id responsible for
    each line

    This command is useful for discovering when a change was made and
    by whom.

    Without the -a/--text option, annotate will avoid processing files
    it detects as binary. With -a, annotate will annotate the file
    anyway, although the results will probably be neither useful
    nor desirable.

    Returns 0 on success.
    """
    if opts.get('follow'):
        # --follow is deprecated and now just an alias for -f/--file
        # to mimic the behavior of Mercurial before version 1.5
        opts['file'] = True

    # short date format with -q/--quiet, full format otherwise
    datefunc = ui.quiet and util.shortdate or util.datestr
    getdate = util.cachefunc(lambda x: datefunc(x[0].date()))

    if not pats:
        raise util.Abort(_('at least one filename or pattern is required'))

    hexfn = ui.debugflag and hex or short

    # (option name, column separator, formatter taking an
    # (fctx, linenumber) pair) for every possible output column
    opmap = [('user', ' ', lambda x: ui.shortuser(x[0].user())),
             ('number', ' ', lambda x: str(x[0].rev())),
             ('changeset', ' ', lambda x: hexfn(x[0].node())),
             ('date', ' ', getdate),
             ('file', ' ', lambda x: x[0].path()),
             ('line_number', ':', lambda x: str(x[1])),
            ]

    # with no column selected, fall back to showing revision numbers
    if (not opts.get('user') and not opts.get('changeset')
        and not opts.get('date') and not opts.get('file')):
        opts['number'] = True

    linenumber = opts.get('line_number') is not None
    if linenumber and (not opts.get('changeset')) and (not opts.get('number')):
        raise util.Abort(_('at least one of -n/-c is required for -l'))

    funcmap = [(func, sep) for op, sep, func in opmap if opts.get(op)]
    funcmap[0] = (funcmap[0][0], '') # no separator in front of first column

    def bad(x, y):
        # abort on files that don't match instead of warning
        raise util.Abort("%s: %s" % (x, y))

    ctx = scmutil.revsingle(repo, opts.get('rev'))
    m = scmutil.match(ctx, pats, opts)
    m.bad = bad
    follow = not opts.get('no_follow')
    diffopts = patch.diffopts(ui, opts, section='annotate')
    for abs in ctx.walk(m):
        fctx = ctx[abs]
        if not opts.get('text') and util.binary(fctx.data()):
            ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
            continue

        lines = fctx.annotate(follow=follow, linenumber=linenumber,
                              diffopts=diffopts)
        pieces = []

        # render each selected column, right-aligned to the widest cell
        # (colwidth accounts for double-width characters)
        for f, sep in funcmap:
            l = [f(n) for n, dummy in lines]
            if l:
                sized = [(x, encoding.colwidth(x)) for x in l]
                ml = max([w for x, w in sized])
                pieces.append(["%s%s%s" % (sep, ' ' * (ml - w), x)
                               for x, w in sized])

        if pieces:
            # zip the columns back together, one output line per source line
            for p, l in zip(zip(*pieces), lines):
                ui.write("%s: %s" % ("".join(p), l[1]))

        # keep output well-formed when the file lacks a trailing newline
        if lines and not lines[-1][1].endswith('\n'):
            ui.write('\n')
306
306
@command('archive',
    [('', 'no-decode', None, _('do not pass files through decoders')),
    ('p', 'prefix', '', _('directory prefix for files in archive'),
     _('PREFIX')),
    ('r', 'rev', '', _('revision to distribute'), _('REV')),
    ('t', 'type', '', _('type of distribution to create'), _('TYPE')),
    ] + subrepoopts + walkopts,
    _('[OPTION]... DEST'))
def archive(ui, repo, dest, **opts):
    '''create an unversioned archive of a repository revision

    By default, the revision used is the parent of the working
    directory; use -r/--rev to specify a different revision.

    The archive type is automatically detected based on file
    extension (or override using -t/--type).

    .. container:: verbose

      Examples:

      - create a zip file containing the 1.0 release::

          hg archive -r 1.0 project-1.0.zip

      - create a tarball excluding .hg files::

          hg archive project.tar.gz -X ".hg*"

    Valid types are:

    :``files``: a directory full of files (default)
    :``tar``:   tar archive, uncompressed
    :``tbz2``:  tar archive, compressed using bzip2
    :``tgz``:   tar archive, compressed using gzip
    :``uzip``:  zip archive, uncompressed
    :``zip``:   zip archive, compressed using deflate

    The exact name of the destination archive or directory is given
    using a format string; see :hg:`help export` for details.

    Each member added to an archive file has a directory prefix
    prepended. Use -p/--prefix to specify a format string for the
    prefix. The default is the basename of the archive, with suffixes
    removed.

    Returns 0 on success.
    '''

    ctx = scmutil.revsingle(repo, opts.get('rev'))
    if not ctx:
        raise util.Abort(_('no working directory: please specify a revision'))
    node = ctx.node()
    # expand format-string escapes (e.g. %h) in the destination name
    dest = cmdutil.makefilename(repo, dest, node)
    if os.path.realpath(dest) == repo.root:
        raise util.Abort(_('repository root cannot be destination'))

    kind = opts.get('type') or archival.guesskind(dest) or 'files'
    prefix = opts.get('prefix')

    if dest == '-':
        # '-' means stdout, which only makes sense for single-file formats
        if kind == 'files':
            raise util.Abort(_('cannot archive plain files to stdout'))
        dest = cmdutil.makefileobj(repo, dest)
        if not prefix:
            prefix = os.path.basename(repo.root) + '-%h'

    prefix = cmdutil.makefilename(repo, prefix, node)
    matchfn = scmutil.match(ctx, [], opts)
    archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
                     matchfn, prefix, subrepos=opts.get('subrepos'))
378
378
@command('backout',
    [('', 'merge', None, _('merge with old dirstate parent after backout')),
    ('', 'parent', '',
     _('parent to choose when backing out merge (DEPRECATED)'), _('REV')),
    ('r', 'rev', '', _('revision to backout'), _('REV')),
    ] + mergetoolopts + walkopts + commitopts + commitopts2,
    _('[OPTION]... [-r] REV'))
def backout(ui, repo, node=None, rev=None, **opts):
    '''reverse effect of earlier changeset

    Prepare a new changeset with the effect of REV undone in the
    current working directory.

    If REV is the parent of the working directory, then this new changeset
    is committed automatically. Otherwise, hg needs to merge the
    changes and the merged result is left uncommitted.

    .. note::
      backout cannot be used to fix either an unwanted or
      incorrect merge.

    .. container:: verbose

      By default, the pending changeset will have one parent,
      maintaining a linear history. With --merge, the pending
      changeset will instead have two parents: the old parent of the
      working directory and a new child of REV that simply undoes REV.

      Before version 1.7, the behavior without --merge was equivalent
      to specifying --merge followed by :hg:`update --clean .` to
      cancel the merge and leave the child of REV as a head to be
      merged separately.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    Returns 0 on success.
    '''
    # REV may arrive positionally (node) or via -r/--rev; never both
    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    if not rev:
        rev = node

    if not rev:
        raise util.Abort(_("please specify a revision to backout"))

    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)

    cmdutil.bailifchanged(repo)
    node = scmutil.revsingle(repo, rev).node()

    op1, op2 = repo.dirstate.parents()
    a = repo.changelog.ancestor(op1, node)
    if a != node:
        raise util.Abort(_('cannot backout change on a different branch'))

    p1, p2 = repo.changelog.parents(node)
    if p1 == nullid:
        raise util.Abort(_('cannot backout a change with no parents'))
    if p2 != nullid:
        # backing out a merge: --parent must name one of its two parents
        if not opts.get('parent'):
            raise util.Abort(_('cannot backout a merge changeset'))
        p = repo.lookup(opts['parent'])
        if p not in (p1, p2):
            raise util.Abort(_('%s is not a parent of %s') %
                             (short(p), short(node)))
        parent = p
    else:
        if opts.get('parent'):
            raise util.Abort(_('cannot use --parent on non-merge changeset'))
        parent = p1

    # the backout should appear on the same branch
    wlock = repo.wlock()
    try:
        branch = repo.dirstate.branch()
        # check out REV, then revert everything to its parent: the
        # resulting dirstate holds exactly the inverse of REV
        hg.clean(repo, node, show_stats=False)
        repo.dirstate.setbranch(branch)
        revert_opts = opts.copy()
        revert_opts['date'] = None
        revert_opts['all'] = True
        revert_opts['rev'] = hex(parent)
        revert_opts['no_backup'] = None
        revert(ui, repo, **revert_opts)
        if not opts.get('merge') and op1 != node:
            # linear case: go back to the old working-directory parent,
            # merging the backout into it
            try:
                ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
                return hg.update(repo, op1)
            finally:
                ui.setconfig('ui', 'forcemerge', '')

        commit_opts = opts.copy()
        commit_opts['addremove'] = False
        if not commit_opts['message'] and not commit_opts['logfile']:
            # we don't translate commit messages
            commit_opts['message'] = "Backed out changeset %s" % short(node)
            commit_opts['force_editor'] = True
        commit(ui, repo, **commit_opts)
        def nice(node):
            return '%d:%s' % (repo.changelog.rev(node), short(node))
        ui.status(_('changeset %s backs out changeset %s\n') %
                  (nice(repo.changelog.tip()), nice(node)))
        if opts.get('merge') and op1 != node:
            # --merge: leave a two-parent merge of the backout and the
            # old working-directory parent uncommitted
            hg.clean(repo, op1, show_stats=False)
            ui.status(_('merging with changeset %s\n')
                      % nice(repo.changelog.tip()))
            try:
                ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
                return hg.merge(repo, hex(repo.changelog.tip()))
            finally:
                ui.setconfig('ui', 'forcemerge', '')
    finally:
        wlock.release()
    return 0
495
495
496 @command('bisect',
496 @command('bisect',
497 [('r', 'reset', False, _('reset bisect state')),
497 [('r', 'reset', False, _('reset bisect state')),
498 ('g', 'good', False, _('mark changeset good')),
498 ('g', 'good', False, _('mark changeset good')),
499 ('b', 'bad', False, _('mark changeset bad')),
499 ('b', 'bad', False, _('mark changeset bad')),
500 ('s', 'skip', False, _('skip testing changeset')),
500 ('s', 'skip', False, _('skip testing changeset')),
501 ('e', 'extend', False, _('extend the bisect range')),
501 ('e', 'extend', False, _('extend the bisect range')),
502 ('c', 'command', '', _('use command to check changeset state'), _('CMD')),
502 ('c', 'command', '', _('use command to check changeset state'), _('CMD')),
503 ('U', 'noupdate', False, _('do not update to target'))],
503 ('U', 'noupdate', False, _('do not update to target'))],
504 _("[-gbsr] [-U] [-c CMD] [REV]"))
504 _("[-gbsr] [-U] [-c CMD] [REV]"))
505 def bisect(ui, repo, rev=None, extra=None, command=None,
505 def bisect(ui, repo, rev=None, extra=None, command=None,
506 reset=None, good=None, bad=None, skip=None, extend=None,
506 reset=None, good=None, bad=None, skip=None, extend=None,
507 noupdate=None):
507 noupdate=None):
508 """subdivision search of changesets
508 """subdivision search of changesets
509
509
510 This command helps to find changesets which introduce problems. To
510 This command helps to find changesets which introduce problems. To
511 use, mark the earliest changeset you know exhibits the problem as
511 use, mark the earliest changeset you know exhibits the problem as
512 bad, then mark the latest changeset which is free from the problem
512 bad, then mark the latest changeset which is free from the problem
513 as good. Bisect will update your working directory to a revision
513 as good. Bisect will update your working directory to a revision
514 for testing (unless the -U/--noupdate option is specified). Once
514 for testing (unless the -U/--noupdate option is specified). Once
515 you have performed tests, mark the working directory as good or
515 you have performed tests, mark the working directory as good or
516 bad, and bisect will either update to another candidate changeset
516 bad, and bisect will either update to another candidate changeset
517 or announce that it has found the bad revision.
517 or announce that it has found the bad revision.
518
518
519 As a shortcut, you can also use the revision argument to mark a
519 As a shortcut, you can also use the revision argument to mark a
520 revision as good or bad without checking it out first.
520 revision as good or bad without checking it out first.
521
521
522 If you supply a command, it will be used for automatic bisection.
522 If you supply a command, it will be used for automatic bisection.
523 The environment variable HG_NODE will contain the ID of the
523 The environment variable HG_NODE will contain the ID of the
524 changeset being tested. The exit status of the command will be
524 changeset being tested. The exit status of the command will be
525 used to mark revisions as good or bad: status 0 means good, 125
525 used to mark revisions as good or bad: status 0 means good, 125
526 means to skip the revision, 127 (command not found) will abort the
526 means to skip the revision, 127 (command not found) will abort the
527 bisection, and any other non-zero exit status means the revision
527 bisection, and any other non-zero exit status means the revision
528 is bad.
528 is bad.
529
529
530 .. container:: verbose
530 .. container:: verbose
531
531
532 Some examples:
532 Some examples:
533
533
534 - start a bisection with known bad revision 12, and good revision 34::
534 - start a bisection with known bad revision 12, and good revision 34::
535
535
536 hg bisect --bad 34
536 hg bisect --bad 34
537 hg bisect --good 12
537 hg bisect --good 12
538
538
539 - advance the current bisection by marking current revision as good or
539 - advance the current bisection by marking current revision as good or
540 bad::
540 bad::
541
541
542 hg bisect --good
542 hg bisect --good
543 hg bisect --bad
543 hg bisect --bad
544
544
545 - mark the current revision, or a known revision, to be skipped (eg. if
545 - mark the current revision, or a known revision, to be skipped (eg. if
546 that revision is not usable because of another issue)::
546 that revision is not usable because of another issue)::
547
547
548 hg bisect --skip
548 hg bisect --skip
549 hg bisect --skip 23
549 hg bisect --skip 23
550
550
551 - forget the current bisection::
551 - forget the current bisection::
552
552
553 hg bisect --reset
553 hg bisect --reset
554
554
555 - use 'make && make tests' to automatically find the first broken
555 - use 'make && make tests' to automatically find the first broken
556 revision::
556 revision::
557
557
558 hg bisect --reset
558 hg bisect --reset
559 hg bisect --bad 34
559 hg bisect --bad 34
560 hg bisect --good 12
560 hg bisect --good 12
561 hg bisect --command 'make && make tests'
561 hg bisect --command 'make && make tests'
562
562
563 - see all changesets whose states are already known in the current
563 - see all changesets whose states are already known in the current
564 bisection::
564 bisection::
565
565
566 hg log -r "bisect(pruned)"
566 hg log -r "bisect(pruned)"
567
567
568 - see the changeset currently being bisected (especially useful
568 - see the changeset currently being bisected (especially useful
569 if running with -U/--noupdate)::
569 if running with -U/--noupdate)::
570
570
571 hg log -r "bisect(current)"
571 hg log -r "bisect(current)"
572
572
573 - see all changesets that took part in the current bisection::
573 - see all changesets that took part in the current bisection::
574
574
575 hg log -r "bisect(range)"
575 hg log -r "bisect(range)"
576
576
577 - with the graphlog extension, you can even get a nice graph::
577 - with the graphlog extension, you can even get a nice graph::
578
578
579 hg log --graph -r "bisect(range)"
579 hg log --graph -r "bisect(range)"
580
580
581 See :hg:`help revsets` for more about the `bisect()` keyword.
581 See :hg:`help revsets` for more about the `bisect()` keyword.
582
582
583 Returns 0 on success.
583 Returns 0 on success.
584 """
584 """
585 def extendbisectrange(nodes, good):
585 def extendbisectrange(nodes, good):
586 # bisect is incomplete when it ends on a merge node and
586 # bisect is incomplete when it ends on a merge node and
587 # one of the parent was not checked.
587 # one of the parent was not checked.
588 parents = repo[nodes[0]].parents()
588 parents = repo[nodes[0]].parents()
589 if len(parents) > 1:
589 if len(parents) > 1:
590 side = good and state['bad'] or state['good']
590 side = good and state['bad'] or state['good']
591 num = len(set(i.node() for i in parents) & set(side))
591 num = len(set(i.node() for i in parents) & set(side))
592 if num == 1:
592 if num == 1:
593 return parents[0].ancestor(parents[1])
593 return parents[0].ancestor(parents[1])
594 return None
594 return None
595
595
596 def print_result(nodes, good):
596 def print_result(nodes, good):
597 displayer = cmdutil.show_changeset(ui, repo, {})
597 displayer = cmdutil.show_changeset(ui, repo, {})
598 if len(nodes) == 1:
598 if len(nodes) == 1:
599 # narrowed it down to a single revision
599 # narrowed it down to a single revision
600 if good:
600 if good:
601 ui.write(_("The first good revision is:\n"))
601 ui.write(_("The first good revision is:\n"))
602 else:
602 else:
603 ui.write(_("The first bad revision is:\n"))
603 ui.write(_("The first bad revision is:\n"))
604 displayer.show(repo[nodes[0]])
604 displayer.show(repo[nodes[0]])
605 extendnode = extendbisectrange(nodes, good)
605 extendnode = extendbisectrange(nodes, good)
606 if extendnode is not None:
606 if extendnode is not None:
607 ui.write(_('Not all ancestors of this changeset have been'
607 ui.write(_('Not all ancestors of this changeset have been'
608 ' checked.\nUse bisect --extend to continue the '
608 ' checked.\nUse bisect --extend to continue the '
609 'bisection from\nthe common ancestor, %s.\n')
609 'bisection from\nthe common ancestor, %s.\n')
610 % extendnode)
610 % extendnode)
611 else:
611 else:
612 # multiple possible revisions
612 # multiple possible revisions
613 if good:
613 if good:
614 ui.write(_("Due to skipped revisions, the first "
614 ui.write(_("Due to skipped revisions, the first "
615 "good revision could be any of:\n"))
615 "good revision could be any of:\n"))
616 else:
616 else:
617 ui.write(_("Due to skipped revisions, the first "
617 ui.write(_("Due to skipped revisions, the first "
618 "bad revision could be any of:\n"))
618 "bad revision could be any of:\n"))
619 for n in nodes:
619 for n in nodes:
620 displayer.show(repo[n])
620 displayer.show(repo[n])
621 displayer.close()
621 displayer.close()
622
622
623 def check_state(state, interactive=True):
623 def check_state(state, interactive=True):
624 if not state['good'] or not state['bad']:
624 if not state['good'] or not state['bad']:
625 if (good or bad or skip or reset) and interactive:
625 if (good or bad or skip or reset) and interactive:
626 return
626 return
627 if not state['good']:
627 if not state['good']:
628 raise util.Abort(_('cannot bisect (no known good revisions)'))
628 raise util.Abort(_('cannot bisect (no known good revisions)'))
629 else:
629 else:
630 raise util.Abort(_('cannot bisect (no known bad revisions)'))
630 raise util.Abort(_('cannot bisect (no known bad revisions)'))
631 return True
631 return True
632
632
633 # backward compatibility
633 # backward compatibility
634 if rev in "good bad reset init".split():
634 if rev in "good bad reset init".split():
635 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
635 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
636 cmd, rev, extra = rev, extra, None
636 cmd, rev, extra = rev, extra, None
637 if cmd == "good":
637 if cmd == "good":
638 good = True
638 good = True
639 elif cmd == "bad":
639 elif cmd == "bad":
640 bad = True
640 bad = True
641 else:
641 else:
642 reset = True
642 reset = True
643 elif extra or good + bad + skip + reset + extend + bool(command) > 1:
643 elif extra or good + bad + skip + reset + extend + bool(command) > 1:
644 raise util.Abort(_('incompatible arguments'))
644 raise util.Abort(_('incompatible arguments'))
645
645
646 if reset:
646 if reset:
647 p = repo.join("bisect.state")
647 p = repo.join("bisect.state")
648 if os.path.exists(p):
648 if os.path.exists(p):
649 os.unlink(p)
649 os.unlink(p)
650 return
650 return
651
651
652 state = hbisect.load_state(repo)
652 state = hbisect.load_state(repo)
653
653
654 if command:
654 if command:
655 changesets = 1
655 changesets = 1
656 try:
656 try:
657 node = state['current'][0]
657 node = state['current'][0]
658 except LookupError:
658 except LookupError:
659 if noupdate:
659 if noupdate:
660 raise util.Abort(_('current bisect revision is unknown - '
660 raise util.Abort(_('current bisect revision is unknown - '
661 'start a new bisect to fix'))
661 'start a new bisect to fix'))
662 node, p2 = repo.dirstate.parents()
662 node, p2 = repo.dirstate.parents()
663 if p2 != nullid:
663 if p2 != nullid:
664 raise util.Abort(_('current bisect revision is a merge'))
664 raise util.Abort(_('current bisect revision is a merge'))
665 try:
665 try:
666 while changesets:
666 while changesets:
667 # update state
667 # update state
668 state['current'] = [node]
668 state['current'] = [node]
669 hbisect.save_state(repo, state)
669 hbisect.save_state(repo, state)
670 status = util.system(command,
670 status = util.system(command,
671 environ={'HG_NODE': hex(node)},
671 environ={'HG_NODE': hex(node)},
672 out=ui.fout)
672 out=ui.fout)
673 if status == 125:
673 if status == 125:
674 transition = "skip"
674 transition = "skip"
675 elif status == 0:
675 elif status == 0:
676 transition = "good"
676 transition = "good"
677 # status < 0 means process was killed
677 # status < 0 means process was killed
678 elif status == 127:
678 elif status == 127:
679 raise util.Abort(_("failed to execute %s") % command)
679 raise util.Abort(_("failed to execute %s") % command)
680 elif status < 0:
680 elif status < 0:
681 raise util.Abort(_("%s killed") % command)
681 raise util.Abort(_("%s killed") % command)
682 else:
682 else:
683 transition = "bad"
683 transition = "bad"
684 ctx = scmutil.revsingle(repo, rev, node)
684 ctx = scmutil.revsingle(repo, rev, node)
685 rev = None # clear for future iterations
685 rev = None # clear for future iterations
686 state[transition].append(ctx.node())
686 state[transition].append(ctx.node())
687 ui.status(_('Changeset %d:%s: %s\n') % (ctx, ctx, transition))
687 ui.status(_('Changeset %d:%s: %s\n') % (ctx, ctx, transition))
688 check_state(state, interactive=False)
688 check_state(state, interactive=False)
689 # bisect
689 # bisect
690 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
690 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
691 # update to next check
691 # update to next check
692 node = nodes[0]
692 node = nodes[0]
693 if not noupdate:
693 if not noupdate:
694 cmdutil.bailifchanged(repo)
694 cmdutil.bailifchanged(repo)
695 hg.clean(repo, node, show_stats=False)
695 hg.clean(repo, node, show_stats=False)
696 finally:
696 finally:
697 state['current'] = [node]
697 state['current'] = [node]
698 hbisect.save_state(repo, state)
698 hbisect.save_state(repo, state)
699 print_result(nodes, good)
699 print_result(nodes, good)
700 return
700 return
701
701
702 # update state
702 # update state
703
703
704 if rev:
704 if rev:
705 nodes = [repo.lookup(i) for i in scmutil.revrange(repo, [rev])]
705 nodes = [repo.lookup(i) for i in scmutil.revrange(repo, [rev])]
706 else:
706 else:
707 nodes = [repo.lookup('.')]
707 nodes = [repo.lookup('.')]
708
708
709 if good or bad or skip:
709 if good or bad or skip:
710 if good:
710 if good:
711 state['good'] += nodes
711 state['good'] += nodes
712 elif bad:
712 elif bad:
713 state['bad'] += nodes
713 state['bad'] += nodes
714 elif skip:
714 elif skip:
715 state['skip'] += nodes
715 state['skip'] += nodes
716 hbisect.save_state(repo, state)
716 hbisect.save_state(repo, state)
717
717
718 if not check_state(state):
718 if not check_state(state):
719 return
719 return
720
720
721 # actually bisect
721 # actually bisect
722 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
722 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
723 if extend:
723 if extend:
724 if not changesets:
724 if not changesets:
725 extendnode = extendbisectrange(nodes, good)
725 extendnode = extendbisectrange(nodes, good)
726 if extendnode is not None:
726 if extendnode is not None:
727 ui.write(_("Extending search to changeset %d:%s\n"
727 ui.write(_("Extending search to changeset %d:%s\n"
728 % (extendnode.rev(), extendnode)))
728 % (extendnode.rev(), extendnode)))
729 state['current'] = [extendnode.node()]
729 state['current'] = [extendnode.node()]
730 hbisect.save_state(repo, state)
730 hbisect.save_state(repo, state)
731 if noupdate:
731 if noupdate:
732 return
732 return
733 cmdutil.bailifchanged(repo)
733 cmdutil.bailifchanged(repo)
734 return hg.clean(repo, extendnode.node())
734 return hg.clean(repo, extendnode.node())
735 raise util.Abort(_("nothing to extend"))
735 raise util.Abort(_("nothing to extend"))
736
736
737 if changesets == 0:
737 if changesets == 0:
738 print_result(nodes, good)
738 print_result(nodes, good)
739 else:
739 else:
740 assert len(nodes) == 1 # only a single node can be tested next
740 assert len(nodes) == 1 # only a single node can be tested next
741 node = nodes[0]
741 node = nodes[0]
742 # compute the approximate number of remaining tests
742 # compute the approximate number of remaining tests
743 tests, size = 0, 2
743 tests, size = 0, 2
744 while size <= changesets:
744 while size <= changesets:
745 tests, size = tests + 1, size * 2
745 tests, size = tests + 1, size * 2
746 rev = repo.changelog.rev(node)
746 rev = repo.changelog.rev(node)
747 ui.write(_("Testing changeset %d:%s "
747 ui.write(_("Testing changeset %d:%s "
748 "(%d changesets remaining, ~%d tests)\n")
748 "(%d changesets remaining, ~%d tests)\n")
749 % (rev, short(node), changesets, tests))
749 % (rev, short(node), changesets, tests))
750 state['current'] = [node]
750 state['current'] = [node]
751 hbisect.save_state(repo, state)
751 hbisect.save_state(repo, state)
752 if not noupdate:
752 if not noupdate:
753 cmdutil.bailifchanged(repo)
753 cmdutil.bailifchanged(repo)
754 return hg.clean(repo, node)
754 return hg.clean(repo, node)
755
755
@command('bookmarks',
    [('f', 'force', False, _('force')),
    ('r', 'rev', '', _('revision'), _('REV')),
    ('d', 'delete', False, _('delete a given bookmark')),
    ('m', 'rename', '', _('rename a given bookmark'), _('NAME')),
    ('i', 'inactive', False, _('mark a bookmark inactive'))],
    _('hg bookmarks [-f] [-d] [-i] [-m NAME] [-r REV] [NAME]'))
def bookmark(ui, repo, mark=None, rev=None, force=False, delete=False,
             rename=None, inactive=False):
    '''track a line of development with movable markers

    Bookmarks are pointers to certain commits that move when committing.
    Bookmarks are local. They can be renamed, copied and deleted. It is
    possible to use :hg:`merge NAME` to merge from a given bookmark, and
    :hg:`update NAME` to update to a given bookmark.

    You can use :hg:`bookmark NAME` to set a bookmark on the working
    directory's parent revision with the given name. If you specify
    a revision using -r REV (where REV may be an existing bookmark),
    the bookmark is assigned to that revision.

    Bookmarks can be pushed and pulled between repositories (see :hg:`help
    push` and :hg:`help pull`). This requires both the local and remote
    repositories to support bookmarks. For versions prior to 1.8, this means
    the bookmarks extension must be enabled.

    With -i/--inactive, the new bookmark will not be made the active
    bookmark. If -r/--rev is given, the new bookmark will not be made
    active even if -i/--inactive is not given. If no NAME is given, the
    current active bookmark will be marked inactive.
    '''
    # pre-2.5-style conditional expression: full node hashes under
    # --debug, abbreviated hashes otherwise
    hexfn = ui.debugflag and hex or short
    marks = repo._bookmarks
    # node of the working directory's first parent; used both to resolve
    # a nameless --rev-less bookmark and to detect the active one below
    cur = repo.changectx('.').node()

    # The four modes below (--delete, --rename, set, list) are mutually
    # exclusive; each one returns before the next is considered.
    if delete:
        # --delete: remove the named bookmark, deactivating it first if
        # it happens to be the current one
        if mark is None:
            raise util.Abort(_("bookmark name required"))
        if mark not in marks:
            raise util.Abort(_("bookmark '%s' does not exist") % mark)
        if mark == repo._bookmarkcurrent:
            bookmarks.setcurrent(repo, None)
        del marks[mark]
        bookmarks.write(repo)
        return

    if rename:
        # --rename OLD NEW: copy the node under the new name, transfer
        # "current" status if needed, then drop the old name
        if rename not in marks:
            raise util.Abort(_("bookmark '%s' does not exist") % rename)
        if mark in marks and not force:
            raise util.Abort(_("bookmark '%s' already exists "
                               "(use -f to force)") % mark)
        if mark is None:
            raise util.Abort(_("new bookmark name required"))
        marks[mark] = marks[rename]
        if repo._bookmarkcurrent == rename and not inactive:
            bookmarks.setcurrent(repo, mark)
        del marks[rename]
        bookmarks.write(repo)
        return

    if mark is not None:
        # set (or move) a bookmark; validate the name first
        if "\n" in mark:
            raise util.Abort(_("bookmark name cannot contain newlines"))
        mark = mark.strip()
        if not mark:
            raise util.Abort(_("bookmark names cannot consist entirely of "
                               "whitespace"))
        # -i on the currently active bookmark just deactivates it
        if inactive and mark == repo._bookmarkcurrent:
            bookmarks.setcurrent(repo, None)
            return
        if mark in marks and not force:
            raise util.Abort(_("bookmark '%s' already exists "
                               "(use -f to force)") % mark)
        # refuse names that collide with branch names unless forced
        if ((mark in repo.branchtags() or mark == repo.dirstate.branch())
            and not force):
            raise util.Abort(
                _("a bookmark cannot have the name of an existing branch"))
        if rev:
            marks[mark] = repo.lookup(rev)
        else:
            marks[mark] = cur
        # only activate when the bookmark lands on the working directory
        # parent and -i was not given (so -r elsewhere never activates)
        if not inactive and cur == marks[mark]:
            bookmarks.setcurrent(repo, mark)
        bookmarks.write(repo)
        return

    if mark is None:
        # no NAME: list all bookmarks (a bare --rev is an error)
        if rev:
            raise util.Abort(_("bookmark name required"))
        if len(marks) == 0:
            ui.status(_("no bookmarks set\n"))
        else:
            for bmark, n in sorted(marks.iteritems()):
                current = repo._bookmarkcurrent
                # '*' marks the active bookmark, but only while it still
                # points at the working directory parent
                if bmark == current and n == cur:
                    prefix, label = '*', 'bookmarks.current'
                else:
                    prefix, label = ' ', ''

                if ui.quiet:
                    ui.write("%s\n" % bmark, label=label)
                else:
                    ui.write(" %s %-25s %d:%s\n" % (
                        prefix, bmark, repo.changelog.rev(n), hexfn(n)),
                        label=label)
        return
863
863
@command('branch',
    [('f', 'force', None,
    _('set branch name even if it shadows an existing branch')),
    ('C', 'clean', None, _('reset branch name to parent branch name'))],
    _('[-fC] [NAME]'))
def branch(ui, repo, label=None, **opts):
    """set or show the current branch name

    .. note::
       Branch names are permanent and global. Use :hg:`bookmark` to create a
       light-weight bookmark instead. See :hg:`help glossary` for more
       information about named branches and bookmarks.

    With no argument, show the current branch name. With one argument,
    set the working directory branch name (the branch will not exist
    in the repository until the next commit). Standard practice
    recommends that primary development take place on the 'default'
    branch.

    Unless -f/--force is specified, branch will not let you set a
    branch name that already exists, even if it's inactive.

    Use -C/--clean to reset the working directory branch to that of
    the parent of the working directory, negating a previous branch
    change.

    Use the command :hg:`update` to switch to an existing branch. Use
    :hg:`commit --close-branch` to mark this branch as closed.

    Returns 0 on success.
    """
    clean = opts.get('clean')
    # with neither a name nor --clean, just report the current branch
    if not clean and not label:
        ui.write("%s\n" % repo.dirstate.branch())
        return

    wlock = repo.wlock()
    try:
        if clean:
            # reset to the branch of the working directory's first parent
            label = repo[None].p1().branch()
            repo.dirstate.setbranch(label)
            ui.status(_('reset working directory to branch %s\n') % label)
        elif label:
            # refuse a name that shadows an existing branch, unless it is
            # already the branch of a working-directory parent or --force
            shadows = (not opts.get('force')
                       and label in repo.branchtags()
                       and label not in [p.branch() for p in repo.parents()])
            if shadows:
                raise util.Abort(_('a branch of the same name already'
                                   ' exists'),
                                 # i18n: "it" refers to an existing branch
                                 hint=_("use 'hg update' to switch to it"))
            repo.dirstate.setbranch(label)
            ui.status(_('marked working directory as branch %s\n') % label)
            ui.status(_('(branches are permanent and global, '
                        'did you want a bookmark?)\n'))
    finally:
        wlock.release()
918
918
@command('branches',
    [('a', 'active', False, _('show only branches that have unmerged heads')),
    ('c', 'closed', False, _('show normal and closed branches'))],
    _('[-ac]'))
def branches(ui, repo, active=False, closed=False):
    """list repository named branches

    List the repository's named branches, indicating which ones are
    inactive. If -c/--closed is specified, also list branches which have
    been marked closed (see :hg:`commit --close-branch`).

    If -a/--active is specified, only show active branches. A branch
    is considered active if it contains repository heads.

    Use the command :hg:`update` to switch to an existing branch.

    Returns 0.
    """

    hexfunc = ui.debugflag and hex or short
    activebranches = [repo[n].branch() for n in repo.heads()]
    def testactive(tag, node):
        # a branch is "active" when it is the branch of some repository
        # head and that head has not been closed
        realhead = tag in activebranches
        # renamed from 'open': don't shadow the builtin
        isopen = node in repo.branchheads(tag, closed=False)
        return realhead and isopen
    # sort (active, rev, name) descending so active/newer branches come
    # first; feed sorted() a generator instead of a throwaway list
    branches = sorted(((testactive(tag, node), repo.changelog.rev(node), tag)
                       for tag, node in repo.branchtags().items()),
                      reverse=True)

    for isactive, node, tag in branches:
        if (not active) or isactive:
            hn = repo.lookup(node)
            if isactive:
                label = 'branches.active'
                notice = ''
            elif hn not in repo.branchheads(tag, closed=False):
                # branch head is closed; only shown with -c/--closed
                if not closed:
                    continue
                label = 'branches.closed'
                notice = _(' (closed)')
            else:
                label = 'branches.inactive'
                notice = _(' (inactive)')
            if tag == repo.dirstate.branch():
                label = 'branches.current'
            # pad so the rev:hash column lines up at width 31
            rev = str(node).rjust(31 - encoding.colwidth(tag))
            rev = ui.label('%s:%s' % (rev, hexfunc(hn)), 'log.changeset')
            tag = ui.label(tag, label)
            if ui.quiet:
                ui.write("%s\n" % tag)
            else:
                ui.write("%s %s%s\n" % (tag, rev, notice))
971
971
@command('bundle',
    [('f', 'force', None, _('run even when the destination is unrelated')),
    ('r', 'rev', [], _('a changeset intended to be added to the destination'),
     _('REV')),
    ('b', 'branch', [], _('a specific branch you would like to bundle'),
     _('BRANCH')),
    ('', 'base', [],
     _('a base changeset assumed to be available at the destination'),
     _('REV')),
    ('a', 'all', None, _('bundle all changesets in the repository')),
    ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE')),
    ] + remoteopts,
    _('[-f] [-t TYPE] [-a] [-r REV]... [--base REV]... FILE [DEST]'))
def bundle(ui, repo, fname, dest=None, **opts):
    """create a changegroup file

    Generate a compressed changegroup file collecting changesets not
    known to be in another repository.

    If you omit the destination repository, then hg assumes the
    destination will have all the nodes you specify with --base
    parameters. To create a bundle containing all changesets, use
    -a/--all (or --base null).

    You can change compression method with the -t/--type option.
    The available compression methods are: none, bzip2, and
    gzip (by default, bundles are compressed using bzip2).

    The bundle file can then be transferred using conventional means
    and applied to another repository with the unbundle or pull
    command. This is useful when direct push and pull are not
    available or when exporting an entire repository is undesirable.

    Applying bundles preserves all changeset contents including
    permissions, copy/rename information, and revision history.

    Returns 0 on success, 1 if no changes found.
    """
    revs = None
    # NOTE(review): the command framework presumably always populates
    # opts['rev'] (default []), so this guard looks always-true — confirm
    if 'rev' in opts:
        revs = scmutil.revrange(repo, opts['rev'])

    # map the user-facing type name to the wire bundle header; an unknown
    # name maps to None, which fails the membership test below
    bundletype = opts.get('type', 'bzip2').lower()
    btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
    bundletype = btypes.get(bundletype)
    if bundletype not in changegroup.bundletypes:
        raise util.Abort(_('unknown bundle type specified with --type'))

    # --all is equivalent to --base null: everything is outgoing
    if opts.get('all'):
        base = ['null']
    else:
        base = scmutil.revrange(repo, opts.get('base'))
    if base:
        # explicit --base: no remote discovery; the caller asserts the
        # destination already has the base changesets
        if dest:
            raise util.Abort(_("--base is incompatible with specifying "
                               "a destination"))
        common = [repo.lookup(rev) for rev in base]
        # empty/None revs means "all heads" for getbundle
        heads = revs and map(repo.lookup, revs) or revs
        cg = repo.getbundle('bundle', heads=heads, common=common)
        outgoing = None
    else:
        # no --base: discover the common changesets against the remote
        # at dest (defaulting to the configured push/pull paths)
        dest = ui.expandpath(dest or 'default-push', dest or 'default')
        dest, branches = hg.parseurl(dest, opts.get('branch'))
        other = hg.peer(repo, opts, dest)
        revs, checkout = hg.addbranchrevs(repo, other, branches, revs)
        heads = revs and map(repo.lookup, revs) or revs
        outgoing = discovery.findcommonoutgoing(repo, other,
                                                onlyheads=heads,
                                                force=opts.get('force'))
        cg = repo.getlocalbundle('bundle', outgoing)
    if not cg:
        # nothing outgoing; report and exit with status 1 per the docstring
        scmutil.nochangesfound(ui, outgoing and outgoing.excluded)
        return 1

    changegroup.writebundle(cg, fname, bundletype)
1047
1047
@command('cat',
    [('o', 'output', '',
     _('print output to file with formatted name'), _('FORMAT')),
    ('r', 'rev', '', _('print the given revision'), _('REV')),
    ('', 'decode', None, _('apply any matching decode filter')),
    ] + walkopts,
    _('[OPTION]... FILE...'))
def cat(ui, repo, file1, *pats, **opts):
    """output the current or given revision of files

    Print the specified files as they were at the given revision. If
    no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are the same as
    for the export command, with the following additions:

    :``%s``: basename of file being printed
    :``%d``: dirname of file being printed, or '.' if in repository root
    :``%p``: root-relative path name of file being printed

    Returns 0 on success.
    """
    ctx = scmutil.revsingle(repo, opts.get('rev'))
    matcher = scmutil.match(ctx, (file1,) + pats, opts)
    decode = opts.get('decode')
    output = opts.get('output')
    ret = 1
    for path in ctx.walk(matcher):
        # one output stream per matched file (stdout unless -o FORMAT)
        dst = cmdutil.makefileobj(repo, output, ctx.node(), pathname=path)
        contents = ctx[path].data()
        if decode:
            contents = repo.wwritedata(path, contents)
        dst.write(contents)
        dst.close()
        ret = 0  # success once at least one file was printed
    return ret
1085
1085
1086 @command('^clone',
1086 @command('^clone',
1087 [('U', 'noupdate', None,
1087 [('U', 'noupdate', None,
1088 _('the clone will include an empty working copy (only a repository)')),
1088 _('the clone will include an empty working copy (only a repository)')),
1089 ('u', 'updaterev', '', _('revision, tag or branch to check out'), _('REV')),
1089 ('u', 'updaterev', '', _('revision, tag or branch to check out'), _('REV')),
1090 ('r', 'rev', [], _('include the specified changeset'), _('REV')),
1090 ('r', 'rev', [], _('include the specified changeset'), _('REV')),
1091 ('b', 'branch', [], _('clone only the specified branch'), _('BRANCH')),
1091 ('b', 'branch', [], _('clone only the specified branch'), _('BRANCH')),
1092 ('', 'pull', None, _('use pull protocol to copy metadata')),
1092 ('', 'pull', None, _('use pull protocol to copy metadata')),
1093 ('', 'uncompressed', None, _('use uncompressed transfer (fast over LAN)')),
1093 ('', 'uncompressed', None, _('use uncompressed transfer (fast over LAN)')),
1094 ] + remoteopts,
1094 ] + remoteopts,
1095 _('[OPTION]... SOURCE [DEST]'))
1095 _('[OPTION]... SOURCE [DEST]'))
1096 def clone(ui, source, dest=None, **opts):
1096 def clone(ui, source, dest=None, **opts):
1097 """make a copy of an existing repository
1097 """make a copy of an existing repository
1098
1098
1099 Create a copy of an existing repository in a new directory.
1099 Create a copy of an existing repository in a new directory.
1100
1100
1101 If no destination directory name is specified, it defaults to the
1101 If no destination directory name is specified, it defaults to the
1102 basename of the source.
1102 basename of the source.
1103
1103
1104 The location of the source is added to the new repository's
1104 The location of the source is added to the new repository's
1105 ``.hg/hgrc`` file, as the default to be used for future pulls.
1105 ``.hg/hgrc`` file, as the default to be used for future pulls.
1106
1106
1107 Only local paths and ``ssh://`` URLs are supported as
1107 Only local paths and ``ssh://`` URLs are supported as
1108 destinations. For ``ssh://`` destinations, no working directory or
1108 destinations. For ``ssh://`` destinations, no working directory or
1109 ``.hg/hgrc`` will be created on the remote side.
1109 ``.hg/hgrc`` will be created on the remote side.
1110
1110
1111 To pull only a subset of changesets, specify one or more revisions
1111 To pull only a subset of changesets, specify one or more revisions
1112 identifiers with -r/--rev or branches with -b/--branch. The
1112 identifiers with -r/--rev or branches with -b/--branch. The
1113 resulting clone will contain only the specified changesets and
1113 resulting clone will contain only the specified changesets and
1114 their ancestors. These options (or 'clone src#rev dest') imply
1114 their ancestors. These options (or 'clone src#rev dest') imply
1115 --pull, even for local source repositories. Note that specifying a
1115 --pull, even for local source repositories. Note that specifying a
1116 tag will include the tagged changeset but not the changeset
1116 tag will include the tagged changeset but not the changeset
1117 containing the tag.
1117 containing the tag.
1118
1118
1119 To check out a particular version, use -u/--update, or
1119 To check out a particular version, use -u/--update, or
1120 -U/--noupdate to create a clone with no working directory.
1120 -U/--noupdate to create a clone with no working directory.
1121
1121
1122 .. container:: verbose
1122 .. container:: verbose
1123
1123
1124 For efficiency, hardlinks are used for cloning whenever the
1124 For efficiency, hardlinks are used for cloning whenever the
1125 source and destination are on the same filesystem (note this
1125 source and destination are on the same filesystem (note this
1126 applies only to the repository data, not to the working
1126 applies only to the repository data, not to the working
1127 directory). Some filesystems, such as AFS, implement hardlinking
1127 directory). Some filesystems, such as AFS, implement hardlinking
1128 incorrectly, but do not report errors. In these cases, use the
1128 incorrectly, but do not report errors. In these cases, use the
1129 --pull option to avoid hardlinking.
1129 --pull option to avoid hardlinking.
1130
1130
1131 In some cases, you can clone repositories and the working
1131 In some cases, you can clone repositories and the working
1132 directory using full hardlinks with ::
1132 directory using full hardlinks with ::
1133
1133
1134 $ cp -al REPO REPOCLONE
1134 $ cp -al REPO REPOCLONE
1135
1135
1136 This is the fastest way to clone, but it is not always safe. The
1136 This is the fastest way to clone, but it is not always safe. The
1137 operation is not atomic (making sure REPO is not modified during
1137 operation is not atomic (making sure REPO is not modified during
1138 the operation is up to you) and you have to make sure your
1138 the operation is up to you) and you have to make sure your
1139 editor breaks hardlinks (Emacs and most Linux Kernel tools do
1139 editor breaks hardlinks (Emacs and most Linux Kernel tools do
1140 so). Also, this is not compatible with certain extensions that
1140 so). Also, this is not compatible with certain extensions that
1141 place their metadata under the .hg directory, such as mq.
1141 place their metadata under the .hg directory, such as mq.
1142
1142
1143 Mercurial will update the working directory to the first applicable
1143 Mercurial will update the working directory to the first applicable
1144 revision from this list:
1144 revision from this list:
1145
1145
1146 a) null if -U or the source repository has no changesets
1146 a) null if -U or the source repository has no changesets
1147 b) if -u . and the source repository is local, the first parent of
1147 b) if -u . and the source repository is local, the first parent of
1148 the source repository's working directory
1148 the source repository's working directory
1149 c) the changeset specified with -u (if a branch name, this means the
1149 c) the changeset specified with -u (if a branch name, this means the
1150 latest head of that branch)
1150 latest head of that branch)
1151 d) the changeset specified with -r
1151 d) the changeset specified with -r
1152 e) the tipmost head specified with -b
1152 e) the tipmost head specified with -b
1153 f) the tipmost head specified with the url#branch source syntax
1153 f) the tipmost head specified with the url#branch source syntax
1154 g) the tipmost head of the default branch
1154 g) the tipmost head of the default branch
1155 h) tip
1155 h) tip
1156
1156
1157 Examples:
1157 Examples:
1158
1158
1159 - clone a remote repository to a new directory named hg/::
1159 - clone a remote repository to a new directory named hg/::
1160
1160
1161 hg clone http://selenic.com/hg
1161 hg clone http://selenic.com/hg
1162
1162
1163 - create a lightweight local clone::
1163 - create a lightweight local clone::
1164
1164
1165 hg clone project/ project-feature/
1165 hg clone project/ project-feature/
1166
1166
1167 - clone from an absolute path on an ssh server (note double-slash)::
1167 - clone from an absolute path on an ssh server (note double-slash)::
1168
1168
1169 hg clone ssh://user@server//home/projects/alpha/
1169 hg clone ssh://user@server//home/projects/alpha/
1170
1170
1171 - do a high-speed clone over a LAN while checking out a
1171 - do a high-speed clone over a LAN while checking out a
1172 specified version::
1172 specified version::
1173
1173
1174 hg clone --uncompressed http://server/repo -u 1.5
1174 hg clone --uncompressed http://server/repo -u 1.5
1175
1175
1176 - create a repository without changesets after a particular revision::
1176 - create a repository without changesets after a particular revision::
1177
1177
1178 hg clone -r 04e544 experimental/ good/
1178 hg clone -r 04e544 experimental/ good/
1179
1179
1180 - clone (and track) a particular named branch::
1180 - clone (and track) a particular named branch::
1181
1181
1182 hg clone http://selenic.com/hg#stable
1182 hg clone http://selenic.com/hg#stable
1183
1183
1184 See :hg:`help urls` for details on specifying URLs.
1184 See :hg:`help urls` for details on specifying URLs.
1185
1185
1186 Returns 0 on success.
1186 Returns 0 on success.
1187 """
1187 """
1188 if opts.get('noupdate') and opts.get('updaterev'):
1188 if opts.get('noupdate') and opts.get('updaterev'):
1189 raise util.Abort(_("cannot specify both --noupdate and --updaterev"))
1189 raise util.Abort(_("cannot specify both --noupdate and --updaterev"))
1190
1190
1191 r = hg.clone(ui, opts, source, dest,
1191 r = hg.clone(ui, opts, source, dest,
1192 pull=opts.get('pull'),
1192 pull=opts.get('pull'),
1193 stream=opts.get('uncompressed'),
1193 stream=opts.get('uncompressed'),
1194 rev=opts.get('rev'),
1194 rev=opts.get('rev'),
1195 update=opts.get('updaterev') or not opts.get('noupdate'),
1195 update=opts.get('updaterev') or not opts.get('noupdate'),
1196 branch=opts.get('branch'))
1196 branch=opts.get('branch'))
1197
1197
1198 return r is None
1198 return r is None
1199
1199
1200 @command('^commit|ci',
1200 @command('^commit|ci',
1201 [('A', 'addremove', None,
1201 [('A', 'addremove', None,
1202 _('mark new/missing files as added/removed before committing')),
1202 _('mark new/missing files as added/removed before committing')),
1203 ('', 'close-branch', None,
1203 ('', 'close-branch', None,
1204 _('mark a branch as closed, hiding it from the branch list')),
1204 _('mark a branch as closed, hiding it from the branch list')),
1205 ('', 'amend', None, _('amend the parent of the working dir')),
1205 ('', 'amend', None, _('amend the parent of the working dir')),
1206 ] + walkopts + commitopts + commitopts2 + subrepoopts,
1206 ] + walkopts + commitopts + commitopts2 + subrepoopts,
1207 _('[OPTION]... [FILE]...'))
1207 _('[OPTION]... [FILE]...'))
1208 def commit(ui, repo, *pats, **opts):
1208 def commit(ui, repo, *pats, **opts):
1209 """commit the specified files or all outstanding changes
1209 """commit the specified files or all outstanding changes
1210
1210
1211 Commit changes to the given files into the repository. Unlike a
1211 Commit changes to the given files into the repository. Unlike a
1212 centralized SCM, this operation is a local operation. See
1212 centralized SCM, this operation is a local operation. See
1213 :hg:`push` for a way to actively distribute your changes.
1213 :hg:`push` for a way to actively distribute your changes.
1214
1214
1215 If a list of files is omitted, all changes reported by :hg:`status`
1215 If a list of files is omitted, all changes reported by :hg:`status`
1216 will be committed.
1216 will be committed.
1217
1217
1218 If you are committing the result of a merge, do not provide any
1218 If you are committing the result of a merge, do not provide any
1219 filenames or -I/-X filters.
1219 filenames or -I/-X filters.
1220
1220
1221 If no commit message is specified, Mercurial starts your
1221 If no commit message is specified, Mercurial starts your
1222 configured editor where you can enter a message. In case your
1222 configured editor where you can enter a message. In case your
1223 commit fails, you will find a backup of your message in
1223 commit fails, you will find a backup of your message in
1224 ``.hg/last-message.txt``.
1224 ``.hg/last-message.txt``.
1225
1225
1226 The --amend flag can be used to amend the parent of the
1226 The --amend flag can be used to amend the parent of the
1227 working directory with a new commit that contains the changes
1227 working directory with a new commit that contains the changes
1228 in the parent in addition to those currently reported by :hg:`status`,
1228 in the parent in addition to those currently reported by :hg:`status`,
1229 if there are any. The old commit is stored in a backup bundle in
1229 if there are any. The old commit is stored in a backup bundle in
1230 ``.hg/strip-backup`` (see :hg:`help bundle` and :hg:`help unbundle`
1230 ``.hg/strip-backup`` (see :hg:`help bundle` and :hg:`help unbundle`
1231 on how to restore it).
1231 on how to restore it).
1232
1232
1233 Message, user and date are taken from the amended commit unless
1233 Message, user and date are taken from the amended commit unless
1234 specified. When a message isn't specified on the command line,
1234 specified. When a message isn't specified on the command line,
1235 the editor will open with the message of the amended commit.
1235 the editor will open with the message of the amended commit.
1236
1236
1237 It is not possible to amend public changesets (see :hg:`help phases`)
1237 It is not possible to amend public changesets (see :hg:`help phases`)
1238 or changesets that have children.
1238 or changesets that have children.
1239
1239
1240 See :hg:`help dates` for a list of formats valid for -d/--date.
1240 See :hg:`help dates` for a list of formats valid for -d/--date.
1241
1241
1242 Returns 0 on success, 1 if nothing changed.
1242 Returns 0 on success, 1 if nothing changed.
1243 """
1243 """
1244 if opts.get('subrepos'):
1244 if opts.get('subrepos'):
1245 # Let --subrepos on the command line overide config setting.
1245 # Let --subrepos on the command line overide config setting.
1246 ui.setconfig('ui', 'commitsubrepos', True)
1246 ui.setconfig('ui', 'commitsubrepos', True)
1247
1247
1248 extra = {}
1248 extra = {}
1249 if opts.get('close_branch'):
1249 if opts.get('close_branch'):
1250 if repo['.'].node() not in repo.branchheads():
1250 if repo['.'].node() not in repo.branchheads():
1251 # The topo heads set is included in the branch heads set of the
1251 # The topo heads set is included in the branch heads set of the
1252 # current branch, so it's sufficient to test branchheads
1252 # current branch, so it's sufficient to test branchheads
1253 raise util.Abort(_('can only close branch heads'))
1253 raise util.Abort(_('can only close branch heads'))
1254 extra['close'] = 1
1254 extra['close'] = 1
1255
1255
1256 branch = repo[None].branch()
1256 branch = repo[None].branch()
1257 bheads = repo.branchheads(branch)
1257 bheads = repo.branchheads(branch)
1258
1258
1259 if opts.get('amend'):
1259 if opts.get('amend'):
1260 if ui.configbool('ui', 'commitsubrepos'):
1260 if ui.configbool('ui', 'commitsubrepos'):
1261 raise util.Abort(_('cannot amend recursively'))
1261 raise util.Abort(_('cannot amend recursively'))
1262
1262
1263 old = repo['.']
1263 old = repo['.']
1264 if old.phase() == phases.public:
1264 if old.phase() == phases.public:
1265 raise util.Abort(_('cannot amend public changesets'))
1265 raise util.Abort(_('cannot amend public changesets'))
1266 if len(old.parents()) > 1:
1266 if len(old.parents()) > 1:
1267 raise util.Abort(_('cannot amend merge changesets'))
1267 raise util.Abort(_('cannot amend merge changesets'))
1268 if len(repo[None].parents()) > 1:
1268 if len(repo[None].parents()) > 1:
1269 raise util.Abort(_('cannot amend while merging'))
1269 raise util.Abort(_('cannot amend while merging'))
1270 if old.children():
1270 if old.children():
1271 raise util.Abort(_('cannot amend changeset with children'))
1271 raise util.Abort(_('cannot amend changeset with children'))
1272
1272
1273 e = cmdutil.commiteditor
1273 e = cmdutil.commiteditor
1274 if opts.get('force_editor'):
1274 if opts.get('force_editor'):
1275 e = cmdutil.commitforceeditor
1275 e = cmdutil.commitforceeditor
1276
1276
1277 def commitfunc(ui, repo, message, match, opts):
1277 def commitfunc(ui, repo, message, match, opts):
1278 editor = e
1278 editor = e
1279 # message contains text from -m or -l, if it's empty,
1279 # message contains text from -m or -l, if it's empty,
1280 # open the editor with the old message
1280 # open the editor with the old message
1281 if not message:
1281 if not message:
1282 message = old.description()
1282 message = old.description()
1283 editor = cmdutil.commitforceeditor
1283 editor = cmdutil.commitforceeditor
1284 return repo.commit(message,
1284 return repo.commit(message,
1285 opts.get('user') or old.user(),
1285 opts.get('user') or old.user(),
1286 opts.get('date') or old.date(),
1286 opts.get('date') or old.date(),
1287 match,
1287 match,
1288 editor=editor,
1288 editor=editor,
1289 extra=extra)
1289 extra=extra)
1290
1290
1291 node = cmdutil.amend(ui, repo, commitfunc, old, extra, pats, opts)
1291 node = cmdutil.amend(ui, repo, commitfunc, old, extra, pats, opts)
1292 if node == old.node():
1292 if node == old.node():
1293 ui.status(_("nothing changed\n"))
1293 ui.status(_("nothing changed\n"))
1294 return 1
1294 return 1
1295 else:
1295 else:
1296 e = cmdutil.commiteditor
1296 e = cmdutil.commiteditor
1297 if opts.get('force_editor'):
1297 if opts.get('force_editor'):
1298 e = cmdutil.commitforceeditor
1298 e = cmdutil.commitforceeditor
1299
1299
1300 def commitfunc(ui, repo, message, match, opts):
1300 def commitfunc(ui, repo, message, match, opts):
1301 return repo.commit(message, opts.get('user'), opts.get('date'),
1301 return repo.commit(message, opts.get('user'), opts.get('date'),
1302 match, editor=e, extra=extra)
1302 match, editor=e, extra=extra)
1303
1303
1304 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
1304 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
1305
1305
1306 if not node:
1306 if not node:
1307 stat = repo.status(match=scmutil.match(repo[None], pats, opts))
1307 stat = repo.status(match=scmutil.match(repo[None], pats, opts))
1308 if stat[3]:
1308 if stat[3]:
1309 ui.status(_("nothing changed (%d missing files, see "
1309 ui.status(_("nothing changed (%d missing files, see "
1310 "'hg status')\n") % len(stat[3]))
1310 "'hg status')\n") % len(stat[3]))
1311 else:
1311 else:
1312 ui.status(_("nothing changed\n"))
1312 ui.status(_("nothing changed\n"))
1313 return 1
1313 return 1
1314
1314
1315 ctx = repo[node]
1315 ctx = repo[node]
1316 parents = ctx.parents()
1316 parents = ctx.parents()
1317
1317
1318 if (not opts.get('amend') and bheads and node not in bheads and not
1318 if (not opts.get('amend') and bheads and node not in bheads and not
1319 [x for x in parents if x.node() in bheads and x.branch() == branch]):
1319 [x for x in parents if x.node() in bheads and x.branch() == branch]):
1320 ui.status(_('created new head\n'))
1320 ui.status(_('created new head\n'))
1321 # The message is not printed for initial roots. For the other
1321 # The message is not printed for initial roots. For the other
1322 # changesets, it is printed in the following situations:
1322 # changesets, it is printed in the following situations:
1323 #
1323 #
1324 # Par column: for the 2 parents with ...
1324 # Par column: for the 2 parents with ...
1325 # N: null or no parent
1325 # N: null or no parent
1326 # B: parent is on another named branch
1326 # B: parent is on another named branch
1327 # C: parent is a regular non head changeset
1327 # C: parent is a regular non head changeset
1328 # H: parent was a branch head of the current branch
1328 # H: parent was a branch head of the current branch
1329 # Msg column: whether we print "created new head" message
1329 # Msg column: whether we print "created new head" message
1330 # In the following, it is assumed that there already exists some
1330 # In the following, it is assumed that there already exists some
1331 # initial branch heads of the current branch, otherwise nothing is
1331 # initial branch heads of the current branch, otherwise nothing is
1332 # printed anyway.
1332 # printed anyway.
1333 #
1333 #
1334 # Par Msg Comment
1334 # Par Msg Comment
1335 # NN y additional topo root
1335 # NN y additional topo root
1336 #
1336 #
1337 # BN y additional branch root
1337 # BN y additional branch root
1338 # CN y additional topo head
1338 # CN y additional topo head
1339 # HN n usual case
1339 # HN n usual case
1340 #
1340 #
1341 # BB y weird additional branch root
1341 # BB y weird additional branch root
1342 # CB y branch merge
1342 # CB y branch merge
1343 # HB n merge with named branch
1343 # HB n merge with named branch
1344 #
1344 #
1345 # CC y additional head from merge
1345 # CC y additional head from merge
1346 # CH n merge with a head
1346 # CH n merge with a head
1347 #
1347 #
1348 # HH n head merge: head count decreases
1348 # HH n head merge: head count decreases
1349
1349
1350 if not opts.get('close_branch'):
1350 if not opts.get('close_branch'):
1351 for r in parents:
1351 for r in parents:
1352 if r.extra().get('close') and r.branch() == branch:
1352 if r.extra().get('close') and r.branch() == branch:
1353 ui.status(_('reopening closed branch head %d\n') % r)
1353 ui.status(_('reopening closed branch head %d\n') % r)
1354
1354
1355 if ui.debugflag:
1355 if ui.debugflag:
1356 ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
1356 ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
1357 elif ui.verbose:
1357 elif ui.verbose:
1358 ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
1358 ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
1359
1359
1360 @command('copy|cp',
1360 @command('copy|cp',
1361 [('A', 'after', None, _('record a copy that has already occurred')),
1361 [('A', 'after', None, _('record a copy that has already occurred')),
1362 ('f', 'force', None, _('forcibly copy over an existing managed file')),
1362 ('f', 'force', None, _('forcibly copy over an existing managed file')),
1363 ] + walkopts + dryrunopts,
1363 ] + walkopts + dryrunopts,
1364 _('[OPTION]... [SOURCE]... DEST'))
1364 _('[OPTION]... [SOURCE]... DEST'))
1365 def copy(ui, repo, *pats, **opts):
1365 def copy(ui, repo, *pats, **opts):
1366 """mark files as copied for the next commit
1366 """mark files as copied for the next commit
1367
1367
1368 Mark dest as having copies of source files. If dest is a
1368 Mark dest as having copies of source files. If dest is a
1369 directory, copies are put in that directory. If dest is a file,
1369 directory, copies are put in that directory. If dest is a file,
1370 the source must be a single file.
1370 the source must be a single file.
1371
1371
1372 By default, this command copies the contents of files as they
1372 By default, this command copies the contents of files as they
1373 exist in the working directory. If invoked with -A/--after, the
1373 exist in the working directory. If invoked with -A/--after, the
1374 operation is recorded, but no copying is performed.
1374 operation is recorded, but no copying is performed.
1375
1375
1376 This command takes effect with the next commit. To undo a copy
1376 This command takes effect with the next commit. To undo a copy
1377 before that, see :hg:`revert`.
1377 before that, see :hg:`revert`.
1378
1378
1379 Returns 0 on success, 1 if errors are encountered.
1379 Returns 0 on success, 1 if errors are encountered.
1380 """
1380 """
1381 wlock = repo.wlock(False)
1381 wlock = repo.wlock(False)
1382 try:
1382 try:
1383 return cmdutil.copy(ui, repo, pats, opts)
1383 return cmdutil.copy(ui, repo, pats, opts)
1384 finally:
1384 finally:
1385 wlock.release()
1385 wlock.release()
1386
1386
1387 @command('debugancestor', [], _('[INDEX] REV1 REV2'))
1387 @command('debugancestor', [], _('[INDEX] REV1 REV2'))
1388 def debugancestor(ui, repo, *args):
1388 def debugancestor(ui, repo, *args):
1389 """find the ancestor revision of two revisions in a given index"""
1389 """find the ancestor revision of two revisions in a given index"""
1390 if len(args) == 3:
1390 if len(args) == 3:
1391 index, rev1, rev2 = args
1391 index, rev1, rev2 = args
1392 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), index)
1392 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), index)
1393 lookup = r.lookup
1393 lookup = r.lookup
1394 elif len(args) == 2:
1394 elif len(args) == 2:
1395 if not repo:
1395 if not repo:
1396 raise util.Abort(_("there is no Mercurial repository here "
1396 raise util.Abort(_("there is no Mercurial repository here "
1397 "(.hg not found)"))
1397 "(.hg not found)"))
1398 rev1, rev2 = args
1398 rev1, rev2 = args
1399 r = repo.changelog
1399 r = repo.changelog
1400 lookup = repo.lookup
1400 lookup = repo.lookup
1401 else:
1401 else:
1402 raise util.Abort(_('either two or three arguments required'))
1402 raise util.Abort(_('either two or three arguments required'))
1403 a = r.ancestor(lookup(rev1), lookup(rev2))
1403 a = r.ancestor(lookup(rev1), lookup(rev2))
1404 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1404 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1405
1405
1406 @command('debugbuilddag',
1406 @command('debugbuilddag',
1407 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
1407 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
1408 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
1408 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
1409 ('n', 'new-file', None, _('add new file at each rev'))],
1409 ('n', 'new-file', None, _('add new file at each rev'))],
1410 _('[OPTION]... [TEXT]'))
1410 _('[OPTION]... [TEXT]'))
1411 def debugbuilddag(ui, repo, text=None,
1411 def debugbuilddag(ui, repo, text=None,
1412 mergeable_file=False,
1412 mergeable_file=False,
1413 overwritten_file=False,
1413 overwritten_file=False,
1414 new_file=False):
1414 new_file=False):
1415 """builds a repo with a given DAG from scratch in the current empty repo
1415 """builds a repo with a given DAG from scratch in the current empty repo
1416
1416
1417 The description of the DAG is read from stdin if not given on the
1417 The description of the DAG is read from stdin if not given on the
1418 command line.
1418 command line.
1419
1419
1420 Elements:
1420 Elements:
1421
1421
1422 - "+n" is a linear run of n nodes based on the current default parent
1422 - "+n" is a linear run of n nodes based on the current default parent
1423 - "." is a single node based on the current default parent
1423 - "." is a single node based on the current default parent
1424 - "$" resets the default parent to null (implied at the start);
1424 - "$" resets the default parent to null (implied at the start);
1425 otherwise the default parent is always the last node created
1425 otherwise the default parent is always the last node created
1426 - "<p" sets the default parent to the backref p
1426 - "<p" sets the default parent to the backref p
1427 - "*p" is a fork at parent p, which is a backref
1427 - "*p" is a fork at parent p, which is a backref
1428 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
1428 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
1429 - "/p2" is a merge of the preceding node and p2
1429 - "/p2" is a merge of the preceding node and p2
1430 - ":tag" defines a local tag for the preceding node
1430 - ":tag" defines a local tag for the preceding node
1431 - "@branch" sets the named branch for subsequent nodes
1431 - "@branch" sets the named branch for subsequent nodes
1432 - "#...\\n" is a comment up to the end of the line
1432 - "#...\\n" is a comment up to the end of the line
1433
1433
1434 Whitespace between the above elements is ignored.
1434 Whitespace between the above elements is ignored.
1435
1435
1436 A backref is either
1436 A backref is either
1437
1437
1438 - a number n, which references the node curr-n, where curr is the current
1438 - a number n, which references the node curr-n, where curr is the current
1439 node, or
1439 node, or
1440 - the name of a local tag you placed earlier using ":tag", or
1440 - the name of a local tag you placed earlier using ":tag", or
1441 - empty to denote the default parent.
1441 - empty to denote the default parent.
1442
1442
1443 All string valued-elements are either strictly alphanumeric, or must
1443 All string valued-elements are either strictly alphanumeric, or must
1444 be enclosed in double quotes ("..."), with "\\" as escape character.
1444 be enclosed in double quotes ("..."), with "\\" as escape character.
1445 """
1445 """
1446
1446
1447 if text is None:
1447 if text is None:
1448 ui.status(_("reading DAG from stdin\n"))
1448 ui.status(_("reading DAG from stdin\n"))
1449 text = ui.fin.read()
1449 text = ui.fin.read()
1450
1450
1451 cl = repo.changelog
1451 cl = repo.changelog
1452 if len(cl) > 0:
1452 if len(cl) > 0:
1453 raise util.Abort(_('repository is not empty'))
1453 raise util.Abort(_('repository is not empty'))
1454
1454
1455 # determine number of revs in DAG
1455 # determine number of revs in DAG
1456 total = 0
1456 total = 0
1457 for type, data in dagparser.parsedag(text):
1457 for type, data in dagparser.parsedag(text):
1458 if type == 'n':
1458 if type == 'n':
1459 total += 1
1459 total += 1
1460
1460
1461 if mergeable_file:
1461 if mergeable_file:
1462 linesperrev = 2
1462 linesperrev = 2
1463 # make a file with k lines per rev
1463 # make a file with k lines per rev
1464 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
1464 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
1465 initialmergedlines.append("")
1465 initialmergedlines.append("")
1466
1466
1467 tags = []
1467 tags = []
1468
1468
1469 lock = tr = None
1469 lock = tr = None
1470 try:
1470 try:
1471 lock = repo.lock()
1471 lock = repo.lock()
1472 tr = repo.transaction("builddag")
1472 tr = repo.transaction("builddag")
1473
1473
1474 at = -1
1474 at = -1
1475 atbranch = 'default'
1475 atbranch = 'default'
1476 nodeids = []
1476 nodeids = []
1477 id = 0
1477 id = 0
1478 ui.progress(_('building'), id, unit=_('revisions'), total=total)
1478 ui.progress(_('building'), id, unit=_('revisions'), total=total)
1479 for type, data in dagparser.parsedag(text):
1479 for type, data in dagparser.parsedag(text):
1480 if type == 'n':
1480 if type == 'n':
1481 ui.note('node %s\n' % str(data))
1481 ui.note('node %s\n' % str(data))
1482 id, ps = data
1482 id, ps = data
1483
1483
1484 files = []
1484 files = []
1485 fctxs = {}
1485 fctxs = {}
1486
1486
1487 p2 = None
1487 p2 = None
1488 if mergeable_file:
1488 if mergeable_file:
1489 fn = "mf"
1489 fn = "mf"
1490 p1 = repo[ps[0]]
1490 p1 = repo[ps[0]]
1491 if len(ps) > 1:
1491 if len(ps) > 1:
1492 p2 = repo[ps[1]]
1492 p2 = repo[ps[1]]
1493 pa = p1.ancestor(p2)
1493 pa = p1.ancestor(p2)
1494 base, local, other = [x[fn].data() for x in pa, p1, p2]
1494 base, local, other = [x[fn].data() for x in pa, p1, p2]
1495 m3 = simplemerge.Merge3Text(base, local, other)
1495 m3 = simplemerge.Merge3Text(base, local, other)
1496 ml = [l.strip() for l in m3.merge_lines()]
1496 ml = [l.strip() for l in m3.merge_lines()]
1497 ml.append("")
1497 ml.append("")
1498 elif at > 0:
1498 elif at > 0:
1499 ml = p1[fn].data().split("\n")
1499 ml = p1[fn].data().split("\n")
1500 else:
1500 else:
1501 ml = initialmergedlines
1501 ml = initialmergedlines
1502 ml[id * linesperrev] += " r%i" % id
1502 ml[id * linesperrev] += " r%i" % id
1503 mergedtext = "\n".join(ml)
1503 mergedtext = "\n".join(ml)
1504 files.append(fn)
1504 files.append(fn)
1505 fctxs[fn] = context.memfilectx(fn, mergedtext)
1505 fctxs[fn] = context.memfilectx(fn, mergedtext)
1506
1506
1507 if overwritten_file:
1507 if overwritten_file:
1508 fn = "of"
1508 fn = "of"
1509 files.append(fn)
1509 files.append(fn)
1510 fctxs[fn] = context.memfilectx(fn, "r%i\n" % id)
1510 fctxs[fn] = context.memfilectx(fn, "r%i\n" % id)
1511
1511
1512 if new_file:
1512 if new_file:
1513 fn = "nf%i" % id
1513 fn = "nf%i" % id
1514 files.append(fn)
1514 files.append(fn)
1515 fctxs[fn] = context.memfilectx(fn, "r%i\n" % id)
1515 fctxs[fn] = context.memfilectx(fn, "r%i\n" % id)
1516 if len(ps) > 1:
1516 if len(ps) > 1:
1517 if not p2:
1517 if not p2:
1518 p2 = repo[ps[1]]
1518 p2 = repo[ps[1]]
1519 for fn in p2:
1519 for fn in p2:
1520 if fn.startswith("nf"):
1520 if fn.startswith("nf"):
1521 files.append(fn)
1521 files.append(fn)
1522 fctxs[fn] = p2[fn]
1522 fctxs[fn] = p2[fn]
1523
1523
1524 def fctxfn(repo, cx, path):
1524 def fctxfn(repo, cx, path):
1525 return fctxs.get(path)
1525 return fctxs.get(path)
1526
1526
1527 if len(ps) == 0 or ps[0] < 0:
1527 if len(ps) == 0 or ps[0] < 0:
1528 pars = [None, None]
1528 pars = [None, None]
1529 elif len(ps) == 1:
1529 elif len(ps) == 1:
1530 pars = [nodeids[ps[0]], None]
1530 pars = [nodeids[ps[0]], None]
1531 else:
1531 else:
1532 pars = [nodeids[p] for p in ps]
1532 pars = [nodeids[p] for p in ps]
1533 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
1533 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
1534 date=(id, 0),
1534 date=(id, 0),
1535 user="debugbuilddag",
1535 user="debugbuilddag",
1536 extra={'branch': atbranch})
1536 extra={'branch': atbranch})
1537 nodeid = repo.commitctx(cx)
1537 nodeid = repo.commitctx(cx)
1538 nodeids.append(nodeid)
1538 nodeids.append(nodeid)
1539 at = id
1539 at = id
1540 elif type == 'l':
1540 elif type == 'l':
1541 id, name = data
1541 id, name = data
1542 ui.note('tag %s\n' % name)
1542 ui.note('tag %s\n' % name)
1543 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
1543 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
1544 elif type == 'a':
1544 elif type == 'a':
1545 ui.note('branch %s\n' % data)
1545 ui.note('branch %s\n' % data)
1546 atbranch = data
1546 atbranch = data
1547 ui.progress(_('building'), id, unit=_('revisions'), total=total)
1547 ui.progress(_('building'), id, unit=_('revisions'), total=total)
1548 tr.close()
1548 tr.close()
1549
1549
1550 if tags:
1550 if tags:
1551 repo.opener.write("localtags", "".join(tags))
1551 repo.opener.write("localtags", "".join(tags))
1552 finally:
1552 finally:
1553 ui.progress(_('building'), None)
1553 ui.progress(_('building'), None)
1554 release(tr, lock)
1554 release(tr, lock)
1555
1555
@command('debugbundle', [('a', 'all', None, _('show all details'))], _('FILE'))
def debugbundle(ui, bundlepath, all=None, **opts):
    """lists the contents of a bundle"""
    f = url.open(ui, bundlepath)
    try:
        gen = changegroup.readbundle(f, bundlepath)
        if not all:
            # terse mode: walk the changelog group and print only node ids
            chunkdata = gen.changelogheader()
            chain = None
            while True:
                chunkdata = gen.deltachunk(chain)
                if not chunkdata:
                    break
                node = chunkdata['node']
                ui.write("%s\n" % hex(node))
                chain = node
            return

        # verbose mode: dump every chunk of every group in the bundle
        ui.write("format: id, p1, p2, cset, delta base, len(delta)\n")

        def showchunks(named):
            # print one line per delta chunk in the current group
            ui.write("\n%s\n" % named)
            chain = None
            while True:
                chunkdata = gen.deltachunk(chain)
                if not chunkdata:
                    break
                node = chunkdata['node']
                ui.write("%s %s %s %s %s %s\n" %
                         (hex(node), hex(chunkdata['p1']),
                          hex(chunkdata['p2']), hex(chunkdata['cs']),
                          hex(chunkdata['deltabase']),
                          len(chunkdata['delta'])))
                chain = node

        chunkdata = gen.changelogheader()
        showchunks("changelog")
        chunkdata = gen.manifestheader()
        showchunks("manifest")
        while True:
            chunkdata = gen.filelogheader()
            if not chunkdata:
                break
            showchunks(chunkdata['filename'])
    finally:
        f.close()
1605
1605
@command('debugcheckstate', [], '')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate

    Cross-checks every dirstate entry against the manifests of the two
    dirstate parents, warning about each mismatch, and aborts if any
    inconsistency was found.
    """
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            # bug fix: this warning was missing its trailing newline,
            # unlike the three warnings above
            ui.warn(_("%s in manifest1, but listed as state %s\n")
                    % (f, state))
            errors += 1
    if errors:
        error = _(".hg/dirstate inconsistent with current parent's manifest")
        raise util.Abort(error)
1633
1633
@command('debugcommands', [], _('[COMMAND]'))
def debugcommands(ui, cmd='', *args):
    """list all available commands and options"""
    for name, vals in sorted(table.iteritems()):
        # drop aliases ("cmd|alias|...") and the '^' common-command marker
        name = name.split('|')[0].strip('^')
        optnames = ', '.join(i[1] for i in vals[1])
        ui.write('%s: %s\n' % (name, optnames))
1641
1641
@command('debugcomplete',
    [('o', 'options', None, _('show the command options'))],
    _('[-o] CMD'))
def debugcomplete(ui, cmd='', **opts):
    """returns the completion list associated with the given command"""

    if opts.get('options'):
        # -o: complete option names (global options plus, if a command was
        # given, that command's own options)
        opttables = [globalopts]
        if cmd:
            aliases, entry = cmdutil.findcmd(cmd, table, False)
            opttables.append(entry[1])
        optnames = []
        for opttable in opttables:
            for opt in opttable:
                if "(DEPRECATED)" in opt[3]:
                    continue  # hide deprecated options from completion
                if opt[0]:
                    optnames.append('-%s' % opt[0])
                optnames.append('--%s' % opt[1])
        ui.write("%s\n" % "\n".join(optnames))
        return

    # default: complete command names matching the given prefix
    cmdlist = cmdutil.findpossible(cmd, table)
    if ui.verbose:
        cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
    ui.write("%s\n" % "\n".join(sorted(cmdlist)))
1668
1668
@command('debugdag',
    [('t', 'tags', None, _('use tags as labels')),
    ('b', 'branches', None, _('annotate with branch names')),
    ('', 'dots', None, _('use dots for runs')),
    ('s', 'spaces', None, _('separate elements by spaces'))],
    _('[OPTION]... [FILE [REV]...]'))
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labelled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # explicit revlog index file: emit its DAG, labelling requested revs
        rlog = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), file_)
        wanted = set(int(r) for r in revs)
        def events():
            for r in rlog:
                parents = [p for p in rlog.parentrevs(r) if p != -1]
                yield 'n', (r, list(set(parents)))
                if r in wanted:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        # no file: walk the repository changelog
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # map revision number -> list of tag names pointing at it
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)
        def events():
            b = "default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        # annotate whenever the branch changes
                        yield 'a', newb
                        b = newb
                parents = [p for p in cl.parentrevs(r) if p != -1]
                yield 'n', (r, list(set(parents)))
                if tags:
                    for l in labels.get(r, []):
                        yield 'l', (r, l)
    else:
        raise util.Abort(_('need repo for changelog dag'))

    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
    ui.write("\n")
1729
1729
@command('debugdata',
    [('c', 'changelog', False, _('open changelog')),
    ('m', 'manifest', False, _('open manifest'))],
    _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision

    With -c/-m the single positional argument is interpreted as the
    revision; otherwise both FILE and REV are required.
    """
    # style fix: no spaces around '=' in a keyword default (rev=None)
    if opts.get('changelog') or opts.get('manifest'):
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError('debugdata', _('invalid arguments'))
    r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
    try:
        ui.write(r.revision(r.lookup(rev)))
    except KeyError:
        raise util.Abort(_('invalid revision identifier %s') % rev)
1745
1745
@command('debugdate',
    [('e', 'extended', None, _('try extended date formats'))],
    _('[-e] DATE [RANGE]'))
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        parsed = util.parsedate(date, util.extendeddateformats)
    else:
        parsed = util.parsedate(date)
    # parsed is a (timestamp, tzoffset) pair
    ui.write("internal: %s %s\n" % parsed)
    ui.write("standard: %s\n" % util.datestr(parsed))
    if range:
        matcher = util.matchdate(range)
        ui.write("match: %s\n" % matcher(parsed[0]))
1760
1760
@command('debugdiscovery',
    [('', 'old', None, _('use old-style discovery')),
    ('', 'nonheads', None,
     _('use old-style discovery with non-heads included')),
    ] + remoteopts,
    _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
                                      opts.get('branch'))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(12323)

    def doit(localheads, remoteheads):
        # run one discovery round and report how local and remote relate
        if opts.get('old'):
            if localheads:
                raise util.Abort('cannot use localheads with old style '
                                 'discovery')
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                ui.write("unpruned common: %s\n" % " ".join([short(n)
                                                            for n in common]))
                dag = dagutil.revlogdag(repo.changelog)
                # renamed from 'all' to avoid shadowing the builtin
                ancs = dag.ancestorset(dag.internalizeall(common))
                common = dag.externalizeall(dag.headsetofconnecteds(ancs))
        else:
            # renamed from 'any' to avoid shadowing the builtin
            common, anyinc, hds = setdiscovery.findcommonheads(ui, repo,
                                                               remote)
            common = set(common)
        rheads = set(hds)
        lheads = set(repo.heads())
        ui.write("common heads: %s\n" % " ".join([short(n) for n in common]))
        if lheads <= common:
            ui.write("local is subset\n")
        elif rheads <= common:
            ui.write("remote is subset\n")

    serverlogs = opts.get('serverlog')
    if serverlogs:
        # replay discovery from recorded server log files
        for filename in serverlogs:
            logfile = open(filename, 'r')
            try:
                line = logfile.readline()
                while line:
                    parts = line.strip().split(';')
                    op = parts[1]
                    if op == 'cg':
                        pass
                    elif op == 'cgss':
                        doit(parts[2].split(' '), parts[3].split(' '))
                    elif op == 'unb':
                        doit(parts[3].split(' '), parts[2].split(' '))
                    line = logfile.readline()
            finally:
                logfile.close()
    else:
        remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
                                                 opts.get('remote_head'))
        localrevs = opts.get('local_head')
        doit(localrevs, remoterevs)
1826
1826
@command('debugfileset', [], ('REVSPEC'))
def debugfileset(ui, repo, expr):
    '''parse and apply a fileset specification'''
    if ui.verbose:
        # in verbose mode, show the parse tree before evaluating
        ui.note(fileset.parse(expr)[0], "\n")

    for fname in fileset.getfileset(repo[None], expr):
        ui.write("%s\n" % fname)
1836
1836
@command('debugfsinfo', [], _('[PATH]'))
def debugfsinfo(ui, path="."):
    """show information detected about current filesystem"""
    # probe the filesystem with a scratch file; use try/finally so the
    # scratch file is removed even if one of the checks raises
    # (style fix: no spaces around '=' in the keyword default)
    util.writefile('.debugfsinfo', '')
    try:
        ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
        ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
        ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo')
                                           and 'yes' or 'no'))
    finally:
        os.unlink('.debugfsinfo')
1846
1846
@command('debuggetbundle',
    [('H', 'head', [], _('id of head node'), _('ID')),
    ('C', 'common', [], _('id of common node'), _('ID')),
    ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
    _('REPO FILE [-H|-C ID]...'))
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable('getbundle'):
        raise util.Abort("getbundle() not supported by target repository")
    args = {}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    bundle = repo.getbundle('debug', **args)

    # map the user-visible compression name to the on-disk bundle header
    btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
    bundletype = btypes.get(opts.get('type', 'bzip2').lower())
    if bundletype not in changegroup.bundletypes:
        raise util.Abort(_('unknown bundle type specified with --type'))
    changegroup.writebundle(bundle, bundlepath, bundletype)
1874
1874
@command('debugignore', [], '')
def debugignore(ui, repo, *values, **opts):
    """display the combined ignore pattern"""
    ignore = repo.dirstate._ignore
    includepat = getattr(ignore, 'includepat', None)
    if includepat is None:
        raise util.Abort(_("no ignore patterns found"))
    ui.write("%s\n" % includepat)
1884
1884
@command('debugindex',
    [('c', 'changelog', False, _('open changelog')),
    ('m', 'manifest', False, _('open manifest')),
    ('f', 'format', 0, _('revlog format'), _('FORMAT'))],
    _('[-f FORMAT] -c|-m|FILE'))
def debugindex(ui, repo, file_=None, **opts):
    """dump the contents of an index file"""
    # style fix: no spaces around '=' in the keyword default (file_=None)
    r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
    format = opts.get('format', 0)
    if format not in (0, 1):
        raise util.Abort(_("unknown format %d") % format)

    # generaldelta revlogs store a delta parent instead of a chain base
    generaldelta = r.version & revlog.REVLOGGENERALDELTA
    if generaldelta:
        basehdr = ' delta'
    else:
        basehdr = ' base'

    if format == 0:
        ui.write(" rev offset length " + basehdr + " linkrev"
                 " nodeid p1 p2\n")
    elif format == 1:
        ui.write(" rev flag offset length"
                 " size " + basehdr + " link p1 p2"
                 " nodeid\n")

    for i in r:
        node = r.node(i)
        if generaldelta:
            base = r.deltaparent(i)
        else:
            base = r.chainbase(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                pp = [nullid, nullid]
            ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                    i, r.start(i), r.length(i), base, r.linkrev(i),
                    short(node), short(pp[0]), short(pp[1])))
        elif format == 1:
            pr = r.parentrevs(i)
            ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
                    i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
                    base, r.linkrev(i), pr[0], pr[1], short(node)))
1930
1930
@command('debugindexdot', [], _('FILE'))
def debugindexdot(ui, repo, file_):
    """dump an index DAG as a graphviz dot file"""
    r = None
    if repo:
        # prefer the tracked file's filelog when it exists and is non-empty
        filelog = repo.file(file_)
        if len(filelog):
            r = filelog
    if not r:
        # fall back to opening the argument directly as a revlog index
        r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), file_)
    ui.write("digraph G {\n")
    for rev in r:
        node = r.node(rev)
        p1, p2 = r.parents(node)
        ui.write("\t%d -> %d\n" % (r.rev(p1), rev))
        if p2 != nullid:
            ui.write("\t%d -> %d\n" % (r.rev(p2), rev))
    ui.write("}\n")
1949
1949
1950 @command('debuginstall', [], '')
1950 @command('debuginstall', [], '')
1951 def debuginstall(ui):
1951 def debuginstall(ui):
1952 '''test Mercurial installation
1952 '''test Mercurial installation
1953
1953
1954 Returns 0 on success.
1954 Returns 0 on success.
1955 '''
1955 '''
1956
1956
1957 def writetemp(contents):
1957 def writetemp(contents):
1958 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
1958 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
1959 f = os.fdopen(fd, "wb")
1959 f = os.fdopen(fd, "wb")
1960 f.write(contents)
1960 f.write(contents)
1961 f.close()
1961 f.close()
1962 return name
1962 return name
1963
1963
1964 problems = 0
1964 problems = 0
1965
1965
1966 # encoding
1966 # encoding
1967 ui.status(_("Checking encoding (%s)...\n") % encoding.encoding)
1967 ui.status(_("Checking encoding (%s)...\n") % encoding.encoding)
1968 try:
1968 try:
1969 encoding.fromlocal("test")
1969 encoding.fromlocal("test")
1970 except util.Abort, inst:
1970 except util.Abort, inst:
1971 ui.write(" %s\n" % inst)
1971 ui.write(" %s\n" % inst)
1972 ui.write(_(" (check that your locale is properly set)\n"))
1972 ui.write(_(" (check that your locale is properly set)\n"))
1973 problems += 1
1973 problems += 1
1974
1974
1975 # compiled modules
1975 # compiled modules
1976 ui.status(_("Checking installed modules (%s)...\n")
1976 ui.status(_("Checking installed modules (%s)...\n")
1977 % os.path.dirname(__file__))
1977 % os.path.dirname(__file__))
1978 try:
1978 try:
1979 import bdiff, mpatch, base85, osutil
1979 import bdiff, mpatch, base85, osutil
1980 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1980 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1981 except Exception, inst:
1981 except Exception, inst:
1982 ui.write(" %s\n" % inst)
1982 ui.write(" %s\n" % inst)
1983 ui.write(_(" One or more extensions could not be found"))
1983 ui.write(_(" One or more extensions could not be found"))
1984 ui.write(_(" (check that you compiled the extensions)\n"))
1984 ui.write(_(" (check that you compiled the extensions)\n"))
1985 problems += 1
1985 problems += 1
1986
1986
1987 # templates
1987 # templates
1988 import templater
1988 import templater
1989 p = templater.templatepath()
1989 p = templater.templatepath()
1990 ui.status(_("Checking templates (%s)...\n") % ' '.join(p))
1990 ui.status(_("Checking templates (%s)...\n") % ' '.join(p))
1991 try:
1991 try:
1992 templater.templater(templater.templatepath("map-cmdline.default"))
1992 templater.templater(templater.templatepath("map-cmdline.default"))
1993 except Exception, inst:
1993 except Exception, inst:
1994 ui.write(" %s\n" % inst)
1994 ui.write(" %s\n" % inst)
1995 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
1995 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
1996 problems += 1
1996 problems += 1
1997
1997
1998 # editor
1998 # editor
1999 ui.status(_("Checking commit editor...\n"))
1999 ui.status(_("Checking commit editor...\n"))
2000 editor = ui.geteditor()
2000 editor = ui.geteditor()
2001 cmdpath = util.findexe(editor) or util.findexe(editor.split()[0])
2001 cmdpath = util.findexe(editor) or util.findexe(editor.split()[0])
2002 if not cmdpath:
2002 if not cmdpath:
2003 if editor == 'vi':
2003 if editor == 'vi':
2004 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
2004 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
2005 ui.write(_(" (specify a commit editor in your configuration"
2005 ui.write(_(" (specify a commit editor in your configuration"
2006 " file)\n"))
2006 " file)\n"))
2007 else:
2007 else:
2008 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
2008 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
2009 ui.write(_(" (specify a commit editor in your configuration"
2009 ui.write(_(" (specify a commit editor in your configuration"
2010 " file)\n"))
2010 " file)\n"))
2011 problems += 1
2011 problems += 1
2012
2012
2013 # check username
2013 # check username
2014 ui.status(_("Checking username...\n"))
2014 ui.status(_("Checking username...\n"))
2015 try:
2015 try:
2016 ui.username()
2016 ui.username()
2017 except util.Abort, e:
2017 except util.Abort, e:
2018 ui.write(" %s\n" % e)
2018 ui.write(" %s\n" % e)
2019 ui.write(_(" (specify a username in your configuration file)\n"))
2019 ui.write(_(" (specify a username in your configuration file)\n"))
2020 problems += 1
2020 problems += 1
2021
2021
2022 if not problems:
2022 if not problems:
2023 ui.status(_("No problems detected\n"))
2023 ui.status(_("No problems detected\n"))
2024 else:
2024 else:
2025 ui.write(_("%s problems detected,"
2025 ui.write(_("%s problems detected,"
2026 " please check your install!\n") % problems)
2026 " please check your install!\n") % problems)
2027
2027
2028 return problems
2028 return problems
2029
2029
@command('debugknown', [], _('REPO ID...'))
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable('known'):
        raise util.Abort("known() not supported by target repository")
    flags = repo.known([bin(s) for s in ids])
    # one '0'/'1' per queried id, in input order
    ui.write("%s\n" % "".join(f and "1" or "0" for f in flags))
2042
2042
@command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'))
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''

    peer = hg.peer(ui, {}, repopath)
    if not keyinfo:
        # Listing mode: dump every key/value pair in the namespace,
        # escaping non-printable bytes for safe terminal output.
        for key, value in peer.listkeys(namespace).iteritems():
            ui.write("%s\t%s\n" % (key.encode('string-escape'),
                                   value.encode('string-escape')))
        return
    # Push mode: attempt the conditional update and report the result.
    key, old, new = keyinfo
    result = peer.pushkey(namespace, key, old, new)
    ui.status(str(result) + '\n')
    return not result
2063
2063
@command('debugpvec', [], _('A B'))
def debugpvec(ui, repo, a, b=None):
    """compare the parent vectors of two revisions (debug aid)"""
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    # Fix: previously 'rel' was only assigned inside the if/elif chain, so a
    # vector pair matching no branch raised NameError below. Default to '?'.
    rel = "?"
    if pa == pb:
        rel = "="
    elif pa > pb:
        rel = ">"
    elif pa < pb:
        rel = "<"
    elif pa | pb:
        # '|' denotes vectors with no ordering relation here
        rel = "|"
    ui.write(_("a: %s\n") % pa)
    ui.write(_("b: %s\n") % pb)
    ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
             (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
              pa.distance(pb), rel))
2084
2084
@command('debugrebuildstate',
    [('r', 'rev', '', _('revision to rebuild to'), _('REV'))],
    _('[-r REV] [REV]'))
def debugrebuildstate(ui, repo, rev="tip"):
    """rebuild the dirstate as it would look like for the given revision"""
    # Resolve the target changeset before taking the working-dir lock.
    target = scmutil.revsingle(repo, rev)
    wlock = repo.wlock()
    try:
        repo.dirstate.rebuild(target.node(), target.manifest())
    finally:
        # Always release, even if rebuild() raises.
        wlock.release()
2096
2096
@command('debugrename',
    [('r', 'rev', '', _('revision to debug'), _('REV'))],
    _('[-r REV] FILE'))
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    ctx = scmutil.revsingle(repo, opts.get('rev'))
    matcher = scmutil.match(ctx, (file1,) + pats, opts)
    for path in ctx.walk(matcher):
        fctx = ctx[path]
        relpath = matcher.rel(path)
        # renamed() yields (source path, source filenode) or None/False.
        renamed = fctx.filelog().renamed(fctx.filenode())
        if not renamed:
            ui.write(_("%s not renamed\n") % relpath)
        else:
            ui.write(_("%s renamed from %s:%s\n")
                     % (relpath, renamed[0], hex(renamed[1])))
2113
2113
@command('debugrevlog',
    [('c', 'changelog', False, _('open changelog')),
     ('m', 'manifest', False, _('open manifest')),
     ('d', 'dump', False, _('dump index data'))],
    _('-c|-m|FILE'))
def debugrevlog(ui, repo, file_ = None, **opts):
    """show data and statistics about a revlog"""
    r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)

    if opts.get("dump"):
        # Raw dump mode: print one line of index data per revision and exit;
        # the summary statistics below are skipped entirely.
        numrevs = len(r)
        ui.write("# rev p1rev p2rev start end deltastart base p1 p2"
                 " rawsize totalsize compression heads\n")
        ts = 0
        heads = set()
        for rev in xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                # -1 marks a full revision: treat it as its own delta base
                dbase = rev
            cbase = r.chainbase(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            # a revision stops being a head once it appears as a parent
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            # NOTE(review): ts / r.end(rev) raises ZeroDivisionError if the
            # first revisions are all empty -- confirm acceptable for a
            # debug-only command.
            ui.write("%d %d %d %d %d %d %d %d %d %d %d %d %d\n" %
                     (rev, p1, p2, r.start(rev), r.end(rev),
                      r.start(dbase), r.start(cbase),
                      r.start(p1), r.start(p2),
                      rs, ts, ts / r.end(rev), len(heads)))
        return 0

    # Summary mode: decode the revlog version word into the format number
    # (low 16 bits) and feature flags (high bits).
    v = r.version
    format = v & 0xFFFF
    flags = []
    gdelta = False
    if v & revlog.REVLOGNGINLINEDATA:
        flags.append('inline')
    if v & revlog.REVLOGGENERALDELTA:
        gdelta = True
        flags.append('generaldelta')
    if not flags:
        flags = ['(none)']

    # Counters classifying each revision by what its delta is based on.
    nummerges = 0
    numfull = 0
    numprev = 0
    nump1 = 0
    nump2 = 0
    numother = 0
    nump1prev = 0
    nump2prev = 0
    chainlengths = []

    # [min, max, total] accumulators; totals start as Python 2 longs so the
    # sums never overflow.
    datasize = [None, 0, 0L]
    fullsize = [None, 0, 0L]
    deltasize = [None, 0, 0L]

    def addsize(size, l):
        # Fold 'size' into a [min, max, total] accumulator in place.
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            # format 0 (revlogv0) cannot report raw sizes
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # stored in full: starts a new delta chain
            chainlengths.append(0)
            numfull += 1
            addsize(size, fullsize)
        else:
            # stored as a delta: chain is one longer than its base's chain
            chainlengths.append(chainlengths[delta] + 1)
            addsize(size, deltasize)
            if delta == rev - 1:
                numprev += 1
                if delta == p1:
                    nump1prev += 1
                elif delta == p2:
                    nump2prev += 1
            elif delta == p1:
                nump1 += 1
            elif delta == p2:
                nump2 += 1
            elif delta != nullrev:
                numother += 1

    numdeltas = numrevs - numfull
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    # Convert the running totals into averages in place (Python 2 integer
    # division). NOTE(review): these divide by zero for an empty revlog
    # (numrevs == 0), a revlog with no full revisions (numfull == 0), or one
    # with no deltas (numrevs == numfull) -- confirm whether that edge case
    # matters for a debug command.
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    fullsize[2] /= numfull
    deltatotal = deltasize[2]
    deltasize[2] /= numrevs - numfull
    totalsize = fulltotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    compratio = totalrawsize / totalsize

    # Templates for width-parameterized format strings:
    # '%%%dd' % w -> '%<w>d', so columns line up with the largest value.
    basedfmtstr = '%%%dd\n'
    basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        # decimal format wide enough to hold 'max'
        return basedfmtstr % len(str(max))
    def pcfmtstr(max, padding=0):
        # decimal-plus-percentage format wide enough to hold 'max'
        return basepcfmtstr % (len(str(max)), ' ' * padding)

    def pcfmt(value, total):
        # pair a value with its percentage of 'total' for pcfmtstr output
        return (value, 100 * float(value) / total)

    ui.write('format : %d\n' % format)
    ui.write('flags : %s\n' % ', '.join(flags))

    ui.write('\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.write('revisions : ' + fmt2 % numrevs)
    ui.write(' merges : ' + fmt % pcfmt(nummerges, numrevs))
    ui.write(' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs))
    ui.write('revisions : ' + fmt2 % numrevs)
    ui.write(' full : ' + fmt % pcfmt(numfull, numrevs))
    ui.write(' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
    ui.write('revision size : ' + fmt2 % totalsize)
    ui.write(' full : ' + fmt % pcfmt(fulltotal, totalsize))
    ui.write(' deltas : ' + fmt % pcfmt(deltatotal, totalsize))

    ui.write('\n')
    fmt = dfmtstr(max(avgchainlen, compratio))
    ui.write('avg chain length : ' + fmt % avgchainlen)
    ui.write('compression ratio : ' + fmt % compratio)

    if format > 0:
        # size statistics are only meaningful when raw sizes were collected
        ui.write('\n')
        ui.write('uncompressed data size (min/max/avg) : %d / %d / %d\n'
                 % tuple(datasize))
        ui.write('full revision size (min/max/avg) : %d / %d / %d\n'
                 % tuple(fullsize))
        ui.write('delta size (min/max/avg) : %d / %d / %d\n'
                 % tuple(deltasize))

    if numdeltas > 0:
        ui.write('\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.write('deltas against prev : ' + fmt % pcfmt(numprev, numdeltas))
        if numprev > 0:
            ui.write(' where prev = p1 : ' + fmt2 % pcfmt(nump1prev,
                                                          numprev))
            ui.write(' where prev = p2 : ' + fmt2 % pcfmt(nump2prev,
                                                          numprev))
            ui.write(' other : ' + fmt2 % pcfmt(numoprev,
                                                numprev))
        if gdelta:
            # only generaldelta revlogs can delta against non-previous revs
            ui.write('deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas))
            ui.write('deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas))
            ui.write('deltas against other : ' + fmt % pcfmt(numother,
                                                             numdeltas))
2278
2278
# Fix: the synopsis was written as ('REVSPEC') -- a bare parenthesized
# string, not a call to _() -- so it escaped translation, unlike every
# other @command declaration in this file.
@command('debugrevspec', [], _('REVSPEC'))
def debugrevspec(ui, repo, expr):
    """parse and apply a revision specification

    Use --verbose to print the parsed tree before and after aliases
    expansion.
    """
    if ui.verbose:
        # Show the raw parse tree, and the alias-expanded tree only when
        # expansion actually changed something.
        tree = revset.parse(expr)[0]
        ui.note(revset.prettyformat(tree), "\n")
        newtree = revset.findaliases(ui, tree)
        if newtree != tree:
            ui.note(revset.prettyformat(newtree), "\n")
    func = revset.match(ui, expr)
    # Evaluate the revset over every revision and print matching rev numbers.
    for c in func(repo, range(len(repo))):
        ui.write("%s\n" % c)
2295
2295
@command('debugsetparents', [], _('REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care.

    Returns 0 on success.
    """

    # A missing second revision defaults to the null changeset.
    p1node = scmutil.revsingle(repo, rev1).node()
    p2node = scmutil.revsingle(repo, rev2, 'null').node()

    wlock = repo.wlock()
    try:
        repo.setparents(p1node, p2node)
    finally:
        wlock.release()
2314
2314
@command('debugstate',
    [('', 'nodates', None, _('do not display the saved mtime')),
     ('', 'datesort', None, _('sort by saved mtime'))],
    _('[OPTION]...'))
def debugstate(ui, repo, nodates=None, datesort=None):
    """show the contents of the current dirstate"""
    timestr = ""
    showdate = not nodates
    if datesort:
        keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
    else:
        keyfunc = None # sort by filename
    # each entry appears to be (state, mode, size, mtime) -- inferred from
    # how ent[0..3] are used below; confirm against the dirstate code
    for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
        if showdate:
            if ent[3] == -1:
                # Pad or slice to locale representation
                locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ",
                                               time.localtime(0)))
                timestr = 'unset'
                timestr = (timestr[:locale_len] +
                           ' ' * (locale_len - len(timestr)))
            else:
                timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
                                        time.localtime(ent[3]))
        if ent[1] & 020000:
            # symlink bit set in the stored mode
            mode = 'lnk'
        else:
            # permission bits, masked by the process umask
            mode = '%3o' % (ent[1] & 0777 & ~util.umask)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    # also list any recorded copy/rename sources
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
2346
2346
@command('debugsub',
    [('r', 'rev', '',
      _('revision to check'), _('REV'))],
    _('[-r REV] [REV]'))
def debugsub(ui, repo, rev=None):
    """dump the subrepository state recorded in a revision"""
    ctx = scmutil.revsingle(repo, rev, None)
    # Print entries sorted by subrepo path for stable output.
    for path, state in sorted(ctx.substate.items()):
        ui.write('path %s\n' % path)
        ui.write(' source %s\n' % state[0])
        ui.write(' revision %s\n' % state[1])
2357
2357
@command('debugwalk', walkopts, _('[OPTION]... [FILE]...'))
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    matcher = scmutil.match(repo[None], pats, opts)
    paths = list(repo.walk(matcher))
    if not paths:
        return
    # Size the columns to the widest absolute and relative path.
    widest_abs = max(len(path) for path in paths)
    widest_rel = max(len(matcher.rel(path)) for path in paths)
    fmt = 'f %%-%ds %%-%ds %%s' % (widest_abs, widest_rel)
    for path in paths:
        exact = matcher.exact(path) and 'exact' or ''
        line = fmt % (path, matcher.rel(path), exact)
        ui.write("%s\n" % line.rstrip())
2371
2371
@command('debugwireargs',
    [('', 'three', '', 'three'),
     ('', 'four', '', 'four'),
     ('', 'five', '', 'five'),
     ] + remoteopts,
    _('REPO [OPTIONS]... [ONE [TWO]]'))
def debugwireargs(ui, repopath, *vals, **opts):
    """exercise argument passing over the wire protocol (debug aid)"""
    peer = hg.peer(ui, opts, repopath)
    # Strip the shared remote options; whatever is left and non-empty
    # becomes a wire argument.
    for opt in remoteopts:
        del opts[opt[1]]
    args = dict((k, v) for k, v in opts.iteritems() if v)
    # run twice to check that we don't mess up the stream for the next command
    first = peer.debugwireargs(*vals, **args)
    second = peer.debugwireargs(*vals, **args)
    ui.write("%s\n" % first)
    if first != second:
        ui.warn("%s\n" % second)
2392
2392
@command('^diff',
    [('r', 'rev', [], _('revision'), _('REV')),
     ('c', 'change', '', _('change made by revision'), _('REV'))
    ] + diffopts + diffopts2 + walkopts + subrepoopts,
    _('[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...'))
def diff(ui, repo, *pats, **opts):
    """diff repository (or selected files)

    Show differences between revisions for the specified files.

    Differences between files are shown using the unified diff format.

    .. note::
       diff may generate unexpected results for merges, as it will
       default to comparing against the working directory's first
       parent changeset if no revisions are specified.

    When two revision arguments are given, then changes are shown
    between those revisions. If only one revision is specified then
    that revision is compared to the working directory, and, when no
    revisions are specified, the working directory files are compared
    to its parent.

    Alternatively you can specify -c/--change with a revision to see
    the changes in that changeset relative to its first parent.

    Without the -a/--text option, diff will avoid generating diffs of
    files it detects as binary. With -a, diff will generate a diff
    anyway, probably with undesirable results.

    Use the -g/--git option to generate diffs in the git extended diff
    format. For more information, read :hg:`help diffs`.

    .. container:: verbose

      Examples:

      - compare a file in the current working directory to its parent::

          hg diff foo.c

      - compare two historical versions of a directory, with rename info::

          hg diff --git -r 1.0:1.2 lib/

      - get change stats relative to the last change on some date::

          hg diff --stat -r "date('may 2')"

      - diff all newly-added files that contain a keyword::

          hg diff "set:added() and grep(GNU)"

      - compare a revision and its parents::

          hg diff -c 9353         # compare against first parent
          hg diff -r 9353^:9353   # same using revset syntax
          hg diff -r 9353^2:9353  # compare against the second parent

    Returns 0 on success.
    """

    revs = opts.get('rev')
    change = opts.get('change')

    # --rev and --change are mutually exclusive ways of picking endpoints.
    if revs and change:
        msg = _('cannot specify --rev and --change at the same time')
        raise util.Abort(msg)
    if change:
        # single changeset against its first parent
        node2 = scmutil.revsingle(repo, change, None).node()
        node1 = repo[node2].p1().node()
    else:
        node1, node2 = scmutil.revpair(repo, revs)

    if opts.get('reverse'):
        node1, node2 = node2, node1

    diffopts = patch.diffopts(ui, opts)
    matcher = scmutil.match(repo[node2], pats, opts)
    cmdutil.diffordiffstat(ui, repo, diffopts, node1, node2, matcher,
                           stat=opts.get('stat'),
                           listsubrepos=opts.get('subrepos'))
2476
2476
2477 @command('^export',
2477 @command('^export',
2478 [('o', 'output', '',
2478 [('o', 'output', '',
2479 _('print output to file with formatted name'), _('FORMAT')),
2479 _('print output to file with formatted name'), _('FORMAT')),
2480 ('', 'switch-parent', None, _('diff against the second parent')),
2480 ('', 'switch-parent', None, _('diff against the second parent')),
2481 ('r', 'rev', [], _('revisions to export'), _('REV')),
2481 ('r', 'rev', [], _('revisions to export'), _('REV')),
2482 ] + diffopts,
2482 ] + diffopts,
2483 _('[OPTION]... [-o OUTFILESPEC] REV...'))
2483 _('[OPTION]... [-o OUTFILESPEC] REV...'))
2484 def export(ui, repo, *changesets, **opts):
2484 def export(ui, repo, *changesets, **opts):
2485 """dump the header and diffs for one or more changesets
2485 """dump the header and diffs for one or more changesets
2486
2486
2487 Print the changeset header and diffs for one or more revisions.
2487 Print the changeset header and diffs for one or more revisions.
2488
2488
2489 The information shown in the changeset header is: author, date,
2489 The information shown in the changeset header is: author, date,
2490 branch name (if non-default), changeset hash, parent(s) and commit
2490 branch name (if non-default), changeset hash, parent(s) and commit
2491 comment.
2491 comment.
2492
2492
2493 .. note::
2493 .. note::
2494 export may generate unexpected diff output for merge
2494 export may generate unexpected diff output for merge
2495 changesets, as it will compare the merge changeset against its
2495 changesets, as it will compare the merge changeset against its
2496 first parent only.
2496 first parent only.
2497
2497
2498 Output may be to a file, in which case the name of the file is
2498 Output may be to a file, in which case the name of the file is
2499 given using a format string. The formatting rules are as follows:
2499 given using a format string. The formatting rules are as follows:
2500
2500
2501 :``%%``: literal "%" character
2501 :``%%``: literal "%" character
2502 :``%H``: changeset hash (40 hexadecimal digits)
2502 :``%H``: changeset hash (40 hexadecimal digits)
2503 :``%N``: number of patches being generated
2503 :``%N``: number of patches being generated
2504 :``%R``: changeset revision number
2504 :``%R``: changeset revision number
2505 :``%b``: basename of the exporting repository
2505 :``%b``: basename of the exporting repository
2506 :``%h``: short-form changeset hash (12 hexadecimal digits)
2506 :``%h``: short-form changeset hash (12 hexadecimal digits)
2507 :``%m``: first line of the commit message (only alphanumeric characters)
2507 :``%m``: first line of the commit message (only alphanumeric characters)
2508 :``%n``: zero-padded sequence number, starting at 1
2508 :``%n``: zero-padded sequence number, starting at 1
2509 :``%r``: zero-padded changeset revision number
2509 :``%r``: zero-padded changeset revision number
2510
2510
2511 Without the -a/--text option, export will avoid generating diffs
2511 Without the -a/--text option, export will avoid generating diffs
2512 of files it detects as binary. With -a, export will generate a
2512 of files it detects as binary. With -a, export will generate a
2513 diff anyway, probably with undesirable results.
2513 diff anyway, probably with undesirable results.
2514
2514
2515 Use the -g/--git option to generate diffs in the git extended diff
2515 Use the -g/--git option to generate diffs in the git extended diff
2516 format. See :hg:`help diffs` for more information.
2516 format. See :hg:`help diffs` for more information.
2517
2517
2518 With the --switch-parent option, the diff will be against the
2518 With the --switch-parent option, the diff will be against the
2519 second parent. It can be useful to review a merge.
2519 second parent. It can be useful to review a merge.
2520
2520
2521 .. container:: verbose
2521 .. container:: verbose
2522
2522
2523 Examples:
2523 Examples:
2524
2524
2525 - use export and import to transplant a bugfix to the current
2525 - use export and import to transplant a bugfix to the current
2526 branch::
2526 branch::
2527
2527
2528 hg export -r 9353 | hg import -
2528 hg export -r 9353 | hg import -
2529
2529
2530 - export all the changesets between two revisions to a file with
2530 - export all the changesets between two revisions to a file with
2531 rename information::
2531 rename information::
2532
2532
2533 hg export --git -r 123:150 > changes.txt
2533 hg export --git -r 123:150 > changes.txt
2534
2534
2535 - split outgoing changes into a series of patches with
2535 - split outgoing changes into a series of patches with
2536 descriptive names::
2536 descriptive names::
2537
2537
2538 hg export -r "outgoing()" -o "%n-%m.patch"
2538 hg export -r "outgoing()" -o "%n-%m.patch"
2539
2539
2540 Returns 0 on success.
2540 Returns 0 on success.
2541 """
2541 """
2542 changesets += tuple(opts.get('rev', []))
2542 changesets += tuple(opts.get('rev', []))
2543 revs = scmutil.revrange(repo, changesets)
2543 revs = scmutil.revrange(repo, changesets)
2544 if not revs:
2544 if not revs:
2545 raise util.Abort(_("export requires at least one changeset"))
2545 raise util.Abort(_("export requires at least one changeset"))
2546 if len(revs) > 1:
2546 if len(revs) > 1:
2547 ui.note(_('exporting patches:\n'))
2547 ui.note(_('exporting patches:\n'))
2548 else:
2548 else:
2549 ui.note(_('exporting patch:\n'))
2549 ui.note(_('exporting patch:\n'))
2550 cmdutil.export(repo, revs, template=opts.get('output'),
2550 cmdutil.export(repo, revs, template=opts.get('output'),
2551 switch_parent=opts.get('switch_parent'),
2551 switch_parent=opts.get('switch_parent'),
2552 opts=patch.diffopts(ui, opts))
2552 opts=patch.diffopts(ui, opts))
2553
2553
2554 @command('^forget', walkopts, _('[OPTION]... FILE...'))
2554 @command('^forget', walkopts, _('[OPTION]... FILE...'))
2555 def forget(ui, repo, *pats, **opts):
2555 def forget(ui, repo, *pats, **opts):
2556 """forget the specified files on the next commit
2556 """forget the specified files on the next commit
2557
2557
2558 Mark the specified files so they will no longer be tracked
2558 Mark the specified files so they will no longer be tracked
2559 after the next commit.
2559 after the next commit.
2560
2560
2561 This only removes files from the current branch, not from the
2561 This only removes files from the current branch, not from the
2562 entire project history, and it does not delete them from the
2562 entire project history, and it does not delete them from the
2563 working directory.
2563 working directory.
2564
2564
2565 To undo a forget before the next commit, see :hg:`add`.
2565 To undo a forget before the next commit, see :hg:`add`.
2566
2566
2567 .. container:: verbose
2567 .. container:: verbose
2568
2568
2569 Examples:
2569 Examples:
2570
2570
2571 - forget newly-added binary files::
2571 - forget newly-added binary files::
2572
2572
2573 hg forget "set:added() and binary()"
2573 hg forget "set:added() and binary()"
2574
2574
2575 - forget files that would be excluded by .hgignore::
2575 - forget files that would be excluded by .hgignore::
2576
2576
2577 hg forget "set:hgignore()"
2577 hg forget "set:hgignore()"
2578
2578
2579 Returns 0 on success.
2579 Returns 0 on success.
2580 """
2580 """
2581
2581
2582 if not pats:
2582 if not pats:
2583 raise util.Abort(_('no files specified'))
2583 raise util.Abort(_('no files specified'))
2584
2584
2585 m = scmutil.match(repo[None], pats, opts)
2585 m = scmutil.match(repo[None], pats, opts)
2586 rejected = cmdutil.forget(ui, repo, m, prefix="", explicitonly=False)[0]
2586 rejected = cmdutil.forget(ui, repo, m, prefix="", explicitonly=False)[0]
2587 return rejected and 1 or 0
2587 return rejected and 1 or 0
2588
2588
2589 @command(
2589 @command(
2590 'graft',
2590 'graft',
2591 [('c', 'continue', False, _('resume interrupted graft')),
2591 [('c', 'continue', False, _('resume interrupted graft')),
2592 ('e', 'edit', False, _('invoke editor on commit messages')),
2592 ('e', 'edit', False, _('invoke editor on commit messages')),
2593 ('', 'log', None, _('append graft info to log message')),
2593 ('', 'log', None, _('append graft info to log message')),
2594 ('D', 'currentdate', False,
2594 ('D', 'currentdate', False,
2595 _('record the current date as commit date')),
2595 _('record the current date as commit date')),
2596 ('U', 'currentuser', False,
2596 ('U', 'currentuser', False,
2597 _('record the current user as committer'), _('DATE'))]
2597 _('record the current user as committer'), _('DATE'))]
2598 + commitopts2 + mergetoolopts + dryrunopts,
2598 + commitopts2 + mergetoolopts + dryrunopts,
2599 _('[OPTION]... REVISION...'))
2599 _('[OPTION]... REVISION...'))
2600 def graft(ui, repo, *revs, **opts):
2600 def graft(ui, repo, *revs, **opts):
2601 '''copy changes from other branches onto the current branch
2601 '''copy changes from other branches onto the current branch
2602
2602
2603 This command uses Mercurial's merge logic to copy individual
2603 This command uses Mercurial's merge logic to copy individual
2604 changes from other branches without merging branches in the
2604 changes from other branches without merging branches in the
2605 history graph. This is sometimes known as 'backporting' or
2605 history graph. This is sometimes known as 'backporting' or
2606 'cherry-picking'. By default, graft will copy user, date, and
2606 'cherry-picking'. By default, graft will copy user, date, and
2607 description from the source changesets.
2607 description from the source changesets.
2608
2608
2609 Changesets that are ancestors of the current revision, that have
2609 Changesets that are ancestors of the current revision, that have
2610 already been grafted, or that are merges will be skipped.
2610 already been grafted, or that are merges will be skipped.
2611
2611
2612 If --log is specified, log messages will have a comment appended
2612 If --log is specified, log messages will have a comment appended
2613 of the form::
2613 of the form::
2614
2614
2615 (grafted from CHANGESETHASH)
2615 (grafted from CHANGESETHASH)
2616
2616
2617 If a graft merge results in conflicts, the graft process is
2617 If a graft merge results in conflicts, the graft process is
2618 interrupted so that the current merge can be manually resolved.
2618 interrupted so that the current merge can be manually resolved.
2619 Once all conflicts are addressed, the graft process can be
2619 Once all conflicts are addressed, the graft process can be
2620 continued with the -c/--continue option.
2620 continued with the -c/--continue option.
2621
2621
2622 .. note::
2622 .. note::
2623 The -c/--continue option does not reapply earlier options.
2623 The -c/--continue option does not reapply earlier options.
2624
2624
2625 .. container:: verbose
2625 .. container:: verbose
2626
2626
2627 Examples:
2627 Examples:
2628
2628
2629 - copy a single change to the stable branch and edit its description::
2629 - copy a single change to the stable branch and edit its description::
2630
2630
2631 hg update stable
2631 hg update stable
2632 hg graft --edit 9393
2632 hg graft --edit 9393
2633
2633
2634 - graft a range of changesets with one exception, updating dates::
2634 - graft a range of changesets with one exception, updating dates::
2635
2635
2636 hg graft -D "2085::2093 and not 2091"
2636 hg graft -D "2085::2093 and not 2091"
2637
2637
2638 - continue a graft after resolving conflicts::
2638 - continue a graft after resolving conflicts::
2639
2639
2640 hg graft -c
2640 hg graft -c
2641
2641
2642 - show the source of a grafted changeset::
2642 - show the source of a grafted changeset::
2643
2643
2644 hg log --debug -r tip
2644 hg log --debug -r tip
2645
2645
2646 Returns 0 on successful completion.
2646 Returns 0 on successful completion.
2647 '''
2647 '''
2648
2648
2649 if not opts.get('user') and opts.get('currentuser'):
2649 if not opts.get('user') and opts.get('currentuser'):
2650 opts['user'] = ui.username()
2650 opts['user'] = ui.username()
2651 if not opts.get('date') and opts.get('currentdate'):
2651 if not opts.get('date') and opts.get('currentdate'):
2652 opts['date'] = "%d %d" % util.makedate()
2652 opts['date'] = "%d %d" % util.makedate()
2653
2653
2654 editor = None
2654 editor = None
2655 if opts.get('edit'):
2655 if opts.get('edit'):
2656 editor = cmdutil.commitforceeditor
2656 editor = cmdutil.commitforceeditor
2657
2657
2658 cont = False
2658 cont = False
2659 if opts['continue']:
2659 if opts['continue']:
2660 cont = True
2660 cont = True
2661 if revs:
2661 if revs:
2662 raise util.Abort(_("can't specify --continue and revisions"))
2662 raise util.Abort(_("can't specify --continue and revisions"))
2663 # read in unfinished revisions
2663 # read in unfinished revisions
2664 try:
2664 try:
2665 nodes = repo.opener.read('graftstate').splitlines()
2665 nodes = repo.opener.read('graftstate').splitlines()
2666 revs = [repo[node].rev() for node in nodes]
2666 revs = [repo[node].rev() for node in nodes]
2667 except IOError, inst:
2667 except IOError, inst:
2668 if inst.errno != errno.ENOENT:
2668 if inst.errno != errno.ENOENT:
2669 raise
2669 raise
2670 raise util.Abort(_("no graft state found, can't continue"))
2670 raise util.Abort(_("no graft state found, can't continue"))
2671 else:
2671 else:
2672 cmdutil.bailifchanged(repo)
2672 cmdutil.bailifchanged(repo)
2673 if not revs:
2673 if not revs:
2674 raise util.Abort(_('no revisions specified'))
2674 raise util.Abort(_('no revisions specified'))
2675 revs = scmutil.revrange(repo, revs)
2675 revs = scmutil.revrange(repo, revs)
2676
2676
2677 # check for merges
2677 # check for merges
2678 for rev in repo.revs('%ld and merge()', revs):
2678 for rev in repo.revs('%ld and merge()', revs):
2679 ui.warn(_('skipping ungraftable merge revision %s\n') % rev)
2679 ui.warn(_('skipping ungraftable merge revision %s\n') % rev)
2680 revs.remove(rev)
2680 revs.remove(rev)
2681 if not revs:
2681 if not revs:
2682 return -1
2682 return -1
2683
2683
2684 # check for ancestors of dest branch
2684 # check for ancestors of dest branch
2685 for rev in repo.revs('::. and %ld', revs):
2685 for rev in repo.revs('::. and %ld', revs):
2686 ui.warn(_('skipping ancestor revision %s\n') % rev)
2686 ui.warn(_('skipping ancestor revision %s\n') % rev)
2687 revs.remove(rev)
2687 revs.remove(rev)
2688 if not revs:
2688 if not revs:
2689 return -1
2689 return -1
2690
2690
2691 # analyze revs for earlier grafts
2691 # analyze revs for earlier grafts
2692 ids = {}
2692 ids = {}
2693 for ctx in repo.set("%ld", revs):
2693 for ctx in repo.set("%ld", revs):
2694 ids[ctx.hex()] = ctx.rev()
2694 ids[ctx.hex()] = ctx.rev()
2695 n = ctx.extra().get('source')
2695 n = ctx.extra().get('source')
2696 if n:
2696 if n:
2697 ids[n] = ctx.rev()
2697 ids[n] = ctx.rev()
2698
2698
2699 # check ancestors for earlier grafts
2699 # check ancestors for earlier grafts
2700 ui.debug('scanning for duplicate grafts\n')
2700 ui.debug('scanning for duplicate grafts\n')
2701 for ctx in repo.set("::. - ::%ld", revs):
2701 for ctx in repo.set("::. - ::%ld", revs):
2702 n = ctx.extra().get('source')
2702 n = ctx.extra().get('source')
2703 if n in ids:
2703 if n in ids:
2704 r = repo[n].rev()
2704 r = repo[n].rev()
2705 if r in revs:
2705 if r in revs:
2706 ui.warn(_('skipping already grafted revision %s\n') % r)
2706 ui.warn(_('skipping already grafted revision %s\n') % r)
2707 revs.remove(r)
2707 revs.remove(r)
2708 elif ids[n] in revs:
2708 elif ids[n] in revs:
2709 ui.warn(_('skipping already grafted revision %s '
2709 ui.warn(_('skipping already grafted revision %s '
2710 '(same origin %d)\n') % (ids[n], r))
2710 '(same origin %d)\n') % (ids[n], r))
2711 revs.remove(ids[n])
2711 revs.remove(ids[n])
2712 elif ctx.hex() in ids:
2712 elif ctx.hex() in ids:
2713 r = ids[ctx.hex()]
2713 r = ids[ctx.hex()]
2714 ui.warn(_('skipping already grafted revision %s '
2714 ui.warn(_('skipping already grafted revision %s '
2715 '(was grafted from %d)\n') % (r, ctx.rev()))
2715 '(was grafted from %d)\n') % (r, ctx.rev()))
2716 revs.remove(r)
2716 revs.remove(r)
2717 if not revs:
2717 if not revs:
2718 return -1
2718 return -1
2719
2719
2720 wlock = repo.wlock()
2720 wlock = repo.wlock()
2721 try:
2721 try:
2722 for pos, ctx in enumerate(repo.set("%ld", revs)):
2722 for pos, ctx in enumerate(repo.set("%ld", revs)):
2723 current = repo['.']
2723 current = repo['.']
2724
2724
2725 ui.status(_('grafting revision %s\n') % ctx.rev())
2725 ui.status(_('grafting revision %s\n') % ctx.rev())
2726 if opts.get('dry_run'):
2726 if opts.get('dry_run'):
2727 continue
2727 continue
2728
2728
2729 # we don't merge the first commit when continuing
2729 # we don't merge the first commit when continuing
2730 if not cont:
2730 if not cont:
2731 # perform the graft merge with p1(rev) as 'ancestor'
2731 # perform the graft merge with p1(rev) as 'ancestor'
2732 try:
2732 try:
2733 # ui.forcemerge is an internal variable, do not document
2733 # ui.forcemerge is an internal variable, do not document
2734 repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
2734 repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
2735 stats = mergemod.update(repo, ctx.node(), True, True, False,
2735 stats = mergemod.update(repo, ctx.node(), True, True, False,
2736 ctx.p1().node())
2736 ctx.p1().node())
2737 finally:
2737 finally:
2738 ui.setconfig('ui', 'forcemerge', '')
2738 ui.setconfig('ui', 'forcemerge', '')
2739 # drop the second merge parent
2739 # drop the second merge parent
2740 repo.setparents(current.node(), nullid)
2740 repo.setparents(current.node(), nullid)
2741 repo.dirstate.write()
2741 repo.dirstate.write()
2742 # fix up dirstate for copies and renames
2742 # fix up dirstate for copies and renames
2743 cmdutil.duplicatecopies(repo, ctx.rev(), ctx.p1().rev())
2743 cmdutil.duplicatecopies(repo, ctx.rev(), ctx.p1().rev())
2744 # report any conflicts
2744 # report any conflicts
2745 if stats and stats[3] > 0:
2745 if stats and stats[3] > 0:
2746 # write out state for --continue
2746 # write out state for --continue
2747 nodelines = [repo[rev].hex() + "\n" for rev in revs[pos:]]
2747 nodelines = [repo[rev].hex() + "\n" for rev in revs[pos:]]
2748 repo.opener.write('graftstate', ''.join(nodelines))
2748 repo.opener.write('graftstate', ''.join(nodelines))
2749 raise util.Abort(
2749 raise util.Abort(
2750 _("unresolved conflicts, can't continue"),
2750 _("unresolved conflicts, can't continue"),
2751 hint=_('use hg resolve and hg graft --continue'))
2751 hint=_('use hg resolve and hg graft --continue'))
2752 else:
2752 else:
2753 cont = False
2753 cont = False
2754
2754
2755 # commit
2755 # commit
2756 source = ctx.extra().get('source')
2756 source = ctx.extra().get('source')
2757 if not source:
2757 if not source:
2758 source = ctx.hex()
2758 source = ctx.hex()
2759 extra = {'source': source}
2759 extra = {'source': source}
2760 user = ctx.user()
2760 user = ctx.user()
2761 if opts.get('user'):
2761 if opts.get('user'):
2762 user = opts['user']
2762 user = opts['user']
2763 date = ctx.date()
2763 date = ctx.date()
2764 if opts.get('date'):
2764 if opts.get('date'):
2765 date = opts['date']
2765 date = opts['date']
2766 message = ctx.description()
2766 message = ctx.description()
2767 if opts.get('log'):
2767 if opts.get('log'):
2768 message += '\n(grafted from %s)' % ctx.hex()
2768 message += '\n(grafted from %s)' % ctx.hex()
2769 node = repo.commit(text=message, user=user,
2769 node = repo.commit(text=message, user=user,
2770 date=date, extra=extra, editor=editor)
2770 date=date, extra=extra, editor=editor)
2771 if node is None:
2771 if node is None:
2772 ui.status(_('graft for revision %s is empty\n') % ctx.rev())
2772 ui.status(_('graft for revision %s is empty\n') % ctx.rev())
2773 finally:
2773 finally:
2774 wlock.release()
2774 wlock.release()
2775
2775
2776 # remove state when we complete successfully
2776 # remove state when we complete successfully
2777 if not opts.get('dry_run') and os.path.exists(repo.join('graftstate')):
2777 if not opts.get('dry_run') and os.path.exists(repo.join('graftstate')):
2778 util.unlinkpath(repo.join('graftstate'))
2778 util.unlinkpath(repo.join('graftstate'))
2779
2779
2780 return 0
2780 return 0
2781
2781
2782 @command('grep',
2782 @command('grep',
2783 [('0', 'print0', None, _('end fields with NUL')),
2783 [('0', 'print0', None, _('end fields with NUL')),
2784 ('', 'all', None, _('print all revisions that match')),
2784 ('', 'all', None, _('print all revisions that match')),
2785 ('a', 'text', None, _('treat all files as text')),
2785 ('a', 'text', None, _('treat all files as text')),
2786 ('f', 'follow', None,
2786 ('f', 'follow', None,
2787 _('follow changeset history,'
2787 _('follow changeset history,'
2788 ' or file history across copies and renames')),
2788 ' or file history across copies and renames')),
2789 ('i', 'ignore-case', None, _('ignore case when matching')),
2789 ('i', 'ignore-case', None, _('ignore case when matching')),
2790 ('l', 'files-with-matches', None,
2790 ('l', 'files-with-matches', None,
2791 _('print only filenames and revisions that match')),
2791 _('print only filenames and revisions that match')),
2792 ('n', 'line-number', None, _('print matching line numbers')),
2792 ('n', 'line-number', None, _('print matching line numbers')),
2793 ('r', 'rev', [],
2793 ('r', 'rev', [],
2794 _('only search files changed within revision range'), _('REV')),
2794 _('only search files changed within revision range'), _('REV')),
2795 ('u', 'user', None, _('list the author (long with -v)')),
2795 ('u', 'user', None, _('list the author (long with -v)')),
2796 ('d', 'date', None, _('list the date (short with -q)')),
2796 ('d', 'date', None, _('list the date (short with -q)')),
2797 ] + walkopts,
2797 ] + walkopts,
2798 _('[OPTION]... PATTERN [FILE]...'))
2798 _('[OPTION]... PATTERN [FILE]...'))
2799 def grep(ui, repo, pattern, *pats, **opts):
2799 def grep(ui, repo, pattern, *pats, **opts):
2800 """search for a pattern in specified files and revisions
2800 """search for a pattern in specified files and revisions
2801
2801
2802 Search revisions of files for a regular expression.
2802 Search revisions of files for a regular expression.
2803
2803
2804 This command behaves differently than Unix grep. It only accepts
2804 This command behaves differently than Unix grep. It only accepts
2805 Python/Perl regexps. It searches repository history, not the
2805 Python/Perl regexps. It searches repository history, not the
2806 working directory. It always prints the revision number in which a
2806 working directory. It always prints the revision number in which a
2807 match appears.
2807 match appears.
2808
2808
2809 By default, grep only prints output for the first revision of a
2809 By default, grep only prints output for the first revision of a
2810 file in which it finds a match. To get it to print every revision
2810 file in which it finds a match. To get it to print every revision
2811 that contains a change in match status ("-" for a match that
2811 that contains a change in match status ("-" for a match that
2812 becomes a non-match, or "+" for a non-match that becomes a match),
2812 becomes a non-match, or "+" for a non-match that becomes a match),
2813 use the --all flag.
2813 use the --all flag.
2814
2814
2815 Returns 0 if a match is found, 1 otherwise.
2815 Returns 0 if a match is found, 1 otherwise.
2816 """
2816 """
2817 reflags = re.M
2817 reflags = re.M
2818 if opts.get('ignore_case'):
2818 if opts.get('ignore_case'):
2819 reflags |= re.I
2819 reflags |= re.I
2820 try:
2820 try:
2821 regexp = re.compile(pattern, reflags)
2821 regexp = re.compile(pattern, reflags)
2822 except re.error, inst:
2822 except re.error, inst:
2823 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
2823 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
2824 return 1
2824 return 1
2825 sep, eol = ':', '\n'
2825 sep, eol = ':', '\n'
2826 if opts.get('print0'):
2826 if opts.get('print0'):
2827 sep = eol = '\0'
2827 sep = eol = '\0'
2828
2828
2829 getfile = util.lrucachefunc(repo.file)
2829 getfile = util.lrucachefunc(repo.file)
2830
2830
2831 def matchlines(body):
2831 def matchlines(body):
2832 begin = 0
2832 begin = 0
2833 linenum = 0
2833 linenum = 0
2834 while True:
2834 while True:
2835 match = regexp.search(body, begin)
2835 match = regexp.search(body, begin)
2836 if not match:
2836 if not match:
2837 break
2837 break
2838 mstart, mend = match.span()
2838 mstart, mend = match.span()
2839 linenum += body.count('\n', begin, mstart) + 1
2839 linenum += body.count('\n', begin, mstart) + 1
2840 lstart = body.rfind('\n', begin, mstart) + 1 or begin
2840 lstart = body.rfind('\n', begin, mstart) + 1 or begin
2841 begin = body.find('\n', mend) + 1 or len(body) + 1
2841 begin = body.find('\n', mend) + 1 or len(body) + 1
2842 lend = begin - 1
2842 lend = begin - 1
2843 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
2843 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
2844
2844
2845 class linestate(object):
2845 class linestate(object):
2846 def __init__(self, line, linenum, colstart, colend):
2846 def __init__(self, line, linenum, colstart, colend):
2847 self.line = line
2847 self.line = line
2848 self.linenum = linenum
2848 self.linenum = linenum
2849 self.colstart = colstart
2849 self.colstart = colstart
2850 self.colend = colend
2850 self.colend = colend
2851
2851
2852 def __hash__(self):
2852 def __hash__(self):
2853 return hash((self.linenum, self.line))
2853 return hash((self.linenum, self.line))
2854
2854
2855 def __eq__(self, other):
2855 def __eq__(self, other):
2856 return self.line == other.line
2856 return self.line == other.line
2857
2857
2858 matches = {}
2858 matches = {}
2859 copies = {}
2859 copies = {}
2860 def grepbody(fn, rev, body):
2860 def grepbody(fn, rev, body):
2861 matches[rev].setdefault(fn, [])
2861 matches[rev].setdefault(fn, [])
2862 m = matches[rev][fn]
2862 m = matches[rev][fn]
2863 for lnum, cstart, cend, line in matchlines(body):
2863 for lnum, cstart, cend, line in matchlines(body):
2864 s = linestate(line, lnum, cstart, cend)
2864 s = linestate(line, lnum, cstart, cend)
2865 m.append(s)
2865 m.append(s)
2866
2866
2867 def difflinestates(a, b):
2867 def difflinestates(a, b):
2868 sm = difflib.SequenceMatcher(None, a, b)
2868 sm = difflib.SequenceMatcher(None, a, b)
2869 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2869 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2870 if tag == 'insert':
2870 if tag == 'insert':
2871 for i in xrange(blo, bhi):
2871 for i in xrange(blo, bhi):
2872 yield ('+', b[i])
2872 yield ('+', b[i])
2873 elif tag == 'delete':
2873 elif tag == 'delete':
2874 for i in xrange(alo, ahi):
2874 for i in xrange(alo, ahi):
2875 yield ('-', a[i])
2875 yield ('-', a[i])
2876 elif tag == 'replace':
2876 elif tag == 'replace':
2877 for i in xrange(alo, ahi):
2877 for i in xrange(alo, ahi):
2878 yield ('-', a[i])
2878 yield ('-', a[i])
2879 for i in xrange(blo, bhi):
2879 for i in xrange(blo, bhi):
2880 yield ('+', b[i])
2880 yield ('+', b[i])
2881
2881
2882 def display(fn, ctx, pstates, states):
2882 def display(fn, ctx, pstates, states):
2883 rev = ctx.rev()
2883 rev = ctx.rev()
2884 datefunc = ui.quiet and util.shortdate or util.datestr
2884 datefunc = ui.quiet and util.shortdate or util.datestr
2885 found = False
2885 found = False
2886 filerevmatches = {}
2886 filerevmatches = {}
2887 def binary():
2887 def binary():
2888 flog = getfile(fn)
2888 flog = getfile(fn)
2889 return util.binary(flog.read(ctx.filenode(fn)))
2889 return util.binary(flog.read(ctx.filenode(fn)))
2890
2890
2891 if opts.get('all'):
2891 if opts.get('all'):
2892 iter = difflinestates(pstates, states)
2892 iter = difflinestates(pstates, states)
2893 else:
2893 else:
2894 iter = [('', l) for l in states]
2894 iter = [('', l) for l in states]
2895 for change, l in iter:
2895 for change, l in iter:
2896 cols = [fn, str(rev)]
2896 cols = [fn, str(rev)]
2897 before, match, after = None, None, None
2897 before, match, after = None, None, None
2898 if opts.get('line_number'):
2898 if opts.get('line_number'):
2899 cols.append(str(l.linenum))
2899 cols.append(str(l.linenum))
2900 if opts.get('all'):
2900 if opts.get('all'):
2901 cols.append(change)
2901 cols.append(change)
2902 if opts.get('user'):
2902 if opts.get('user'):
2903 cols.append(ui.shortuser(ctx.user()))
2903 cols.append(ui.shortuser(ctx.user()))
2904 if opts.get('date'):
2904 if opts.get('date'):
2905 cols.append(datefunc(ctx.date()))
2905 cols.append(datefunc(ctx.date()))
2906 if opts.get('files_with_matches'):
2906 if opts.get('files_with_matches'):
2907 c = (fn, rev)
2907 c = (fn, rev)
2908 if c in filerevmatches:
2908 if c in filerevmatches:
2909 continue
2909 continue
2910 filerevmatches[c] = 1
2910 filerevmatches[c] = 1
2911 else:
2911 else:
2912 before = l.line[:l.colstart]
2912 before = l.line[:l.colstart]
2913 match = l.line[l.colstart:l.colend]
2913 match = l.line[l.colstart:l.colend]
2914 after = l.line[l.colend:]
2914 after = l.line[l.colend:]
2915 ui.write(sep.join(cols))
2915 ui.write(sep.join(cols))
2916 if before is not None:
2916 if before is not None:
2917 if not opts.get('text') and binary():
2917 if not opts.get('text') and binary():
2918 ui.write(sep + " Binary file matches")
2918 ui.write(sep + " Binary file matches")
2919 else:
2919 else:
2920 ui.write(sep + before)
2920 ui.write(sep + before)
2921 ui.write(match, label='grep.match')
2921 ui.write(match, label='grep.match')
2922 ui.write(after)
2922 ui.write(after)
2923 ui.write(eol)
2923 ui.write(eol)
2924 found = True
2924 found = True
2925 return found
2925 return found
2926
2926
2927 skip = {}
2927 skip = {}
2928 revfiles = {}
2928 revfiles = {}
2929 matchfn = scmutil.match(repo[None], pats, opts)
2929 matchfn = scmutil.match(repo[None], pats, opts)
2930 found = False
2930 found = False
2931 follow = opts.get('follow')
2931 follow = opts.get('follow')
2932
2932
2933 def prep(ctx, fns):
2933 def prep(ctx, fns):
2934 rev = ctx.rev()
2934 rev = ctx.rev()
2935 pctx = ctx.p1()
2935 pctx = ctx.p1()
2936 parent = pctx.rev()
2936 parent = pctx.rev()
2937 matches.setdefault(rev, {})
2937 matches.setdefault(rev, {})
2938 matches.setdefault(parent, {})
2938 matches.setdefault(parent, {})
2939 files = revfiles.setdefault(rev, [])
2939 files = revfiles.setdefault(rev, [])
2940 for fn in fns:
2940 for fn in fns:
2941 flog = getfile(fn)
2941 flog = getfile(fn)
2942 try:
2942 try:
2943 fnode = ctx.filenode(fn)
2943 fnode = ctx.filenode(fn)
2944 except error.LookupError:
2944 except error.LookupError:
2945 continue
2945 continue
2946
2946
2947 copied = flog.renamed(fnode)
2947 copied = flog.renamed(fnode)
2948 copy = follow and copied and copied[0]
2948 copy = follow and copied and copied[0]
2949 if copy:
2949 if copy:
2950 copies.setdefault(rev, {})[fn] = copy
2950 copies.setdefault(rev, {})[fn] = copy
2951 if fn in skip:
2951 if fn in skip:
2952 if copy:
2952 if copy:
2953 skip[copy] = True
2953 skip[copy] = True
2954 continue
2954 continue
2955 files.append(fn)
2955 files.append(fn)
2956
2956
2957 if fn not in matches[rev]:
2957 if fn not in matches[rev]:
2958 grepbody(fn, rev, flog.read(fnode))
2958 grepbody(fn, rev, flog.read(fnode))
2959
2959
2960 pfn = copy or fn
2960 pfn = copy or fn
2961 if pfn not in matches[parent]:
2961 if pfn not in matches[parent]:
2962 try:
2962 try:
2963 fnode = pctx.filenode(pfn)
2963 fnode = pctx.filenode(pfn)
2964 grepbody(pfn, parent, flog.read(fnode))
2964 grepbody(pfn, parent, flog.read(fnode))
2965 except error.LookupError:
2965 except error.LookupError:
2966 pass
2966 pass
2967
2967
2968 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
2968 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
2969 rev = ctx.rev()
2969 rev = ctx.rev()
2970 parent = ctx.p1().rev()
2970 parent = ctx.p1().rev()
2971 for fn in sorted(revfiles.get(rev, [])):
2971 for fn in sorted(revfiles.get(rev, [])):
2972 states = matches[rev][fn]
2972 states = matches[rev][fn]
2973 copy = copies.get(rev, {}).get(fn)
2973 copy = copies.get(rev, {}).get(fn)
2974 if fn in skip:
2974 if fn in skip:
2975 if copy:
2975 if copy:
2976 skip[copy] = True
2976 skip[copy] = True
2977 continue
2977 continue
2978 pstates = matches.get(parent, {}).get(copy or fn, [])
2978 pstates = matches.get(parent, {}).get(copy or fn, [])
2979 if pstates or states:
2979 if pstates or states:
2980 r = display(fn, ctx, pstates, states)
2980 r = display(fn, ctx, pstates, states)
2981 found = found or r
2981 found = found or r
2982 if r and not opts.get('all'):
2982 if r and not opts.get('all'):
2983 skip[fn] = True
2983 skip[fn] = True
2984 if copy:
2984 if copy:
2985 skip[copy] = True
2985 skip[copy] = True
2986 del matches[rev]
2986 del matches[rev]
2987 del revfiles[rev]
2987 del revfiles[rev]
2988
2988
2989 return not found
2989 return not found
2990
2990
2991 @command('heads',
2991 @command('heads',
2992 [('r', 'rev', '',
2992 [('r', 'rev', '',
2993 _('show only heads which are descendants of STARTREV'), _('STARTREV')),
2993 _('show only heads which are descendants of STARTREV'), _('STARTREV')),
2994 ('t', 'topo', False, _('show topological heads only')),
2994 ('t', 'topo', False, _('show topological heads only')),
2995 ('a', 'active', False, _('show active branchheads only (DEPRECATED)')),
2995 ('a', 'active', False, _('show active branchheads only (DEPRECATED)')),
2996 ('c', 'closed', False, _('show normal and closed branch heads')),
2996 ('c', 'closed', False, _('show normal and closed branch heads')),
2997 ] + templateopts,
2997 ] + templateopts,
2998 _('[-ac] [-r STARTREV] [REV]...'))
2998 _('[-ac] [-r STARTREV] [REV]...'))
2999 def heads(ui, repo, *branchrevs, **opts):
2999 def heads(ui, repo, *branchrevs, **opts):
3000 """show current repository heads or show branch heads
3000 """show current repository heads or show branch heads
3001
3001
3002 With no arguments, show all repository branch heads.
3002 With no arguments, show all repository branch heads.
3003
3003
3004 Repository "heads" are changesets with no child changesets. They are
3004 Repository "heads" are changesets with no child changesets. They are
3005 where development generally takes place and are the usual targets
3005 where development generally takes place and are the usual targets
3006 for update and merge operations. Branch heads are changesets that have
3006 for update and merge operations. Branch heads are changesets that have
3007 no child changeset on the same branch.
3007 no child changeset on the same branch.
3008
3008
3009 If one or more REVs are given, only branch heads on the branches
3009 If one or more REVs are given, only branch heads on the branches
3010 associated with the specified changesets are shown. This means
3010 associated with the specified changesets are shown. This means
3011 that you can use :hg:`heads foo` to see the heads on a branch
3011 that you can use :hg:`heads foo` to see the heads on a branch
3012 named ``foo``.
3012 named ``foo``.
3013
3013
3014 If -c/--closed is specified, also show branch heads marked closed
3014 If -c/--closed is specified, also show branch heads marked closed
3015 (see :hg:`commit --close-branch`).
3015 (see :hg:`commit --close-branch`).
3016
3016
3017 If STARTREV is specified, only those heads that are descendants of
3017 If STARTREV is specified, only those heads that are descendants of
3018 STARTREV will be displayed.
3018 STARTREV will be displayed.
3019
3019
3020 If -t/--topo is specified, named branch mechanics will be ignored and only
3020 If -t/--topo is specified, named branch mechanics will be ignored and only
3021 changesets without children will be shown.
3021 changesets without children will be shown.
3022
3022
3023 Returns 0 if matching heads are found, 1 if not.
3023 Returns 0 if matching heads are found, 1 if not.
3024 """
3024 """
3025
3025
3026 start = None
3026 start = None
3027 if 'rev' in opts:
3027 if 'rev' in opts:
3028 start = scmutil.revsingle(repo, opts['rev'], None).node()
3028 start = scmutil.revsingle(repo, opts['rev'], None).node()
3029
3029
3030 if opts.get('topo'):
3030 if opts.get('topo'):
3031 heads = [repo[h] for h in repo.heads(start)]
3031 heads = [repo[h] for h in repo.heads(start)]
3032 else:
3032 else:
3033 heads = []
3033 heads = []
3034 for branch in repo.branchmap():
3034 for branch in repo.branchmap():
3035 heads += repo.branchheads(branch, start, opts.get('closed'))
3035 heads += repo.branchheads(branch, start, opts.get('closed'))
3036 heads = [repo[h] for h in heads]
3036 heads = [repo[h] for h in heads]
3037
3037
3038 if branchrevs:
3038 if branchrevs:
3039 branches = set(repo[br].branch() for br in branchrevs)
3039 branches = set(repo[br].branch() for br in branchrevs)
3040 heads = [h for h in heads if h.branch() in branches]
3040 heads = [h for h in heads if h.branch() in branches]
3041
3041
3042 if opts.get('active') and branchrevs:
3042 if opts.get('active') and branchrevs:
3043 dagheads = repo.heads(start)
3043 dagheads = repo.heads(start)
3044 heads = [h for h in heads if h.node() in dagheads]
3044 heads = [h for h in heads if h.node() in dagheads]
3045
3045
3046 if branchrevs:
3046 if branchrevs:
3047 haveheads = set(h.branch() for h in heads)
3047 haveheads = set(h.branch() for h in heads)
3048 if branches - haveheads:
3048 if branches - haveheads:
3049 headless = ', '.join(b for b in branches - haveheads)
3049 headless = ', '.join(b for b in branches - haveheads)
3050 msg = _('no open branch heads found on branches %s')
3050 msg = _('no open branch heads found on branches %s')
3051 if opts.get('rev'):
3051 if opts.get('rev'):
3052 msg += _(' (started at %s)') % opts['rev']
3052 msg += _(' (started at %s)') % opts['rev']
3053 ui.warn((msg + '\n') % headless)
3053 ui.warn((msg + '\n') % headless)
3054
3054
3055 if not heads:
3055 if not heads:
3056 return 1
3056 return 1
3057
3057
3058 heads = sorted(heads, key=lambda x: -x.rev())
3058 heads = sorted(heads, key=lambda x: -x.rev())
3059 displayer = cmdutil.show_changeset(ui, repo, opts)
3059 displayer = cmdutil.show_changeset(ui, repo, opts)
3060 for ctx in heads:
3060 for ctx in heads:
3061 displayer.show(ctx)
3061 displayer.show(ctx)
3062 displayer.close()
3062 displayer.close()
3063
3063
@command('help',
    [('e', 'extension', None, _('show only help for extensions')),
     ('c', 'command', None, _('show only help for commands'))],
    _('[-ec] [TOPIC]'))
def help_(ui, name=None, unknowncmd=False, full=True, **opts):
    """show help for a given topic or a help overview

    With no arguments, print a list of commands with short help messages.

    Given a topic, extension, or command name, print help for that
    topic.

    Returns 0 if successful.
    """

    # Cap output width at 80 columns (minus a 2-column margin), even on
    # wider terminals.
    textwidth = min(ui.termwidth(), 80) - 2

    def optrst(options):
        # Render a list of option tuples as an RST table.  Each option is
        # either a 4-tuple (short, long, default, desc) or a 5-tuple with a
        # trailing value label.
        data = []
        multioccur = False
        for option in options:
            if len(option) == 5:
                shortopt, longopt, default, desc, optlabel = option
            else:
                shortopt, longopt, default, desc = option
                optlabel = _("VALUE") # default label

            # hide deprecated options unless --verbose was given
            if _("DEPRECATED") in desc and not ui.verbose:
                continue

            so = ''
            if shortopt:
                so = '-' + shortopt
            lo = '--' + longopt
            if default:
                desc += _(" (default: %s)") % default

            # list-valued options may be given multiple times; mark them
            if isinstance(default, list):
                lo += " %s [+]" % optlabel
                multioccur = True
            elif (default is not None) and not isinstance(default, bool):
                # non-boolean options take a value argument
                lo += " %s" % optlabel

            data.append((so, lo, desc))

        rst = minirst.maketable(data, 1)

        if multioccur:
            rst += _("\n[+] marked option can be specified multiple times\n")

        return rst

    # list all option lists
    def opttext(optlist, width):
        # Format a list of (title, options) sections into wrapped text.
        rst = ''
        if not optlist:
            return ''

        for title, options in optlist:
            rst += '\n%s\n' % title
            if options:
                rst += "\n"
                rst += optrst(options)
                rst += '\n'

        return '\n' + minirst.format(rst, width)

    def addglobalopts(optlist, aliases):
        # Append global options (verbose mode) or a short usage hint to
        # optlist, depending on verbosity and which topic is being shown.
        if ui.quiet:
            return []

        if ui.verbose:
            optlist.append((_("global options:"), globalopts))
            if name == 'shortlist':
                optlist.append((_('use "hg help" for the full list '
                                  'of commands'), ()))
        else:
            if name == 'shortlist':
                msg = _('use "hg help" for the full list of commands '
                        'or "hg -v" for details')
            elif name and not full:
                msg = _('use "hg help %s" to show the full help text') % name
            elif aliases:
                msg = _('use "hg -v help%s" to show builtin aliases and '
                        'global options') % (name and " " + name or "")
            else:
                msg = _('use "hg -v help %s" to show more info') % name
            optlist.append((msg, ()))

    def helpcmd(name):
        # Show help for a single command; falls back to a filtered command
        # list when the name is an ambiguous prefix.
        try:
            aliases, entry = cmdutil.findcmd(name, table, strict=unknowncmd)
        except error.AmbiguousCommand, inst:
            # py3k fix: except vars can't be used outside the scope of the
            # except block, nor can be used inside a lambda. python issue4617
            prefix = inst.args[0]
            select = lambda c: c.lstrip('^').startswith(prefix)
            helplist(select)
            return

        # check if it's an invalid alias and display its error if it is
        if getattr(entry[0], 'badalias', False):
            if not unknowncmd:
                entry[0](ui)
            return

        rst = ""

        # synopsis
        if len(entry) > 2:
            if entry[2].startswith('hg'):
                rst += "%s\n" % entry[2]
            else:
                rst += 'hg %s %s\n' % (aliases[0], entry[2])
        else:
            rst += 'hg %s\n' % aliases[0]

        # aliases
        if full and not ui.quiet and len(aliases) > 1:
            rst += _("\naliases: %s\n") % ', '.join(aliases[1:])

        # description
        doc = gettext(entry[0].__doc__)
        if not doc:
            doc = _("(no help text available)")
        if util.safehasattr(entry[0], 'definition'): # aliased command
            if entry[0].definition.startswith('!'): # shell alias
                doc = _('shell alias for::\n\n %s') % entry[0].definition[1:]
            else:
                doc = _('alias for: hg %s\n\n%s') % (entry[0].definition, doc)
        if ui.quiet or not full:
            # first line only in quiet/short mode
            doc = doc.splitlines()[0]
        rst += "\n" + doc + "\n"

        # check if this command shadows a non-trivial (multi-line)
        # extension help text
        try:
            mod = extensions.find(name)
            doc = gettext(mod.__doc__) or ''
            if '\n' in doc.strip():
                msg = _('use "hg help -e %s" to show help for '
                        'the %s extension') % (name, name)
                rst += '\n%s\n' % msg
        except KeyError:
            pass

        # options
        if not ui.quiet and entry[1]:
            rst += '\n'
            rst += _("options:")
            rst += '\n\n'
            rst += optrst(entry[1])

        if ui.verbose:
            rst += '\n'
            rst += _("global options:")
            rst += '\n\n'
            rst += optrst(globalopts)

        # keep ".. container:: verbose" sections only in verbose mode
        keep = ui.verbose and ['verbose'] or []
        formatted, pruned = minirst.format(rst, textwidth, keep=keep)
        ui.write(formatted)

        if not ui.verbose:
            if not full:
                ui.write(_('\nuse "hg help %s" to show the full help text\n')
                         % name)
            elif not ui.quiet:
                ui.write(_('\nuse "hg -v help %s" to show more info\n') % name)


    def helplist(select=None):
        # Print the command list (optionally filtered by the select
        # predicate), plus extensions, help topics and global options when
        # no specific topic was requested.
        # list of commands
        if name == "shortlist":
            header = _('basic commands:\n\n')
        else:
            header = _('list of commands:\n\n')

        h = {}      # command name -> first line of its help text
        cmds = {}   # command name -> full "name|alias" spec
        for c, e in table.iteritems():
            f = c.split("|", 1)[0]
            if select and not select(f):
                continue
            if (not select and name != 'shortlist' and
                e[0].__module__ != __name__):
                continue
            if name == "shortlist" and not f.startswith("^"):
                # shortlist shows only commands marked with a leading "^"
                continue
            f = f.lstrip("^")
            if not ui.debugflag and f.startswith("debug"):
                continue
            doc = e[0].__doc__
            if doc and 'DEPRECATED' in doc and not ui.verbose:
                continue
            doc = gettext(doc)
            if not doc:
                doc = _("(no help text available)")
            h[f] = doc.splitlines()[0].rstrip()
            cmds[f] = c.lstrip("^")

        if not h:
            ui.status(_('no commands defined\n'))
            return

        ui.status(header)
        fns = sorted(h)
        m = max(map(len, fns))  # width of the widest command name
        for f in fns:
            if ui.verbose:
                commands = cmds[f].replace("|",", ")
                ui.write(" %s:\n %s\n"%(commands, h[f]))
            else:
                ui.write('%s\n' % (util.wrap(h[f], textwidth,
                                             initindent=' %-*s ' % (m, f),
                                             hangindent=' ' * (m + 4))))

        if not name:
            # full overview: also list enabled extensions and help topics
            text = help.listexts(_('enabled extensions:'), extensions.enabled())
            if text:
                ui.write("\n%s" % minirst.format(text, textwidth))

            ui.write(_("\nadditional help topics:\n\n"))
            topics = []
            for names, header, doc in help.helptable:
                # use the longest of each topic's names as its display name
                topics.append((sorted(names, key=len, reverse=True)[0], header))
            topics_len = max([len(s[0]) for s in topics])
            for t, desc in topics:
                ui.write(" %-*s %s\n" % (topics_len, t, desc))

        optlist = []
        addglobalopts(optlist, True)
        ui.write(opttext(optlist, textwidth))

    def helptopic(name):
        # Show help for a named help topic; raises UnknownCommand when no
        # topic matches so the dispatch loop below can try other handlers.
        for names, header, doc in help.helptable:
            if name in names:
                break
        else:
            raise error.UnknownCommand(name)

        # description
        if not doc:
            doc = _("(no help text available)")
        if util.safehasattr(doc, '__call__'):
            # some topics generate their text lazily via a callable
            doc = doc()

        ui.write("%s\n\n" % header)
        ui.write("%s" % minirst.format(doc, textwidth, indent=4))
        try:
            cmdutil.findcmd(name, table)
            ui.write(_('\nuse "hg help -c %s" to see help for '
                       'the %s command\n') % (name, name))
        except error.UnknownCommand:
            pass

    def helpext(name):
        # Show help for an (enabled or disabled) extension, then list its
        # commands.  Raises UnknownCommand when nothing is known about it.
        try:
            mod = extensions.find(name)
            doc = gettext(mod.__doc__) or _('no help text available')
        except KeyError:
            mod = None
            doc = extensions.disabledext(name)
            if not doc:
                raise error.UnknownCommand(name)

        if '\n' not in doc:
            head, tail = doc, ""
        else:
            head, tail = doc.split('\n', 1)
        ui.write(_('%s extension - %s\n\n') % (name.split('.')[-1], head))
        if tail:
            ui.write(minirst.format(tail, textwidth))
            ui.status('\n')

        if mod:
            try:
                ct = mod.cmdtable
            except AttributeError:
                ct = {}
            modcmds = set([c.split('|', 1)[0] for c in ct])
            helplist(modcmds.__contains__)
        else:
            ui.write(_('use "hg help extensions" for information on enabling '
                       'extensions\n'))

    def helpextcmd(name):
        # Show which disabled extension provides the given command name.
        cmd, ext, mod = extensions.disabledcmd(ui, name,
                                               ui.configbool('ui', 'strict'))
        doc = gettext(mod.__doc__).splitlines()[0]

        msg = help.listexts(_("'%s' is provided by the following "
                              "extension:") % cmd, {ext: doc}, indent=4)
        ui.write(minirst.format(msg, textwidth))
        ui.write('\n')
        ui.write(_('use "hg help extensions" for information on enabling '
                   'extensions\n'))

    if name and name != 'shortlist':
        # Try each applicable handler in turn; remember the last
        # UnknownCommand and re-raise it only if none of them succeeded.
        i = None
        if unknowncmd:
            queries = (helpextcmd,)
        elif opts.get('extension'):
            queries = (helpext,)
        elif opts.get('command'):
            queries = (helpcmd,)
        else:
            queries = (helptopic, helpcmd, helpext, helpextcmd)
        for f in queries:
            try:
                f(name)
                i = None
                break
            except error.UnknownCommand, inst:
                i = inst
        if i:
            raise i
    else:
        # program name
        ui.status(_("Mercurial Distributed SCM\n"))
        ui.status('\n')
        helplist()
3386
3386
3387
3387
@command('identify|id',
    [('r', 'rev', '',
     _('identify the specified revision'), _('REV')),
    ('n', 'num', None, _('show local revision number')),
    ('i', 'id', None, _('show global revision id')),
    ('b', 'branch', None, _('show branch')),
    ('t', 'tags', None, _('show tags')),
    ('B', 'bookmarks', None, _('show bookmarks')),
    ] + remoteopts,
    _('[-nibtB] [-r REV] [SOURCE]'))
def identify(ui, repo, source=None, rev=None,
             num=None, id=None, branch=None, tags=None, bookmarks=None, **opts):
    """identify the working copy or specified revision

    Print a summary identifying the repository state at REV using one or
    two parent hash identifiers, followed by a "+" if the working
    directory has uncommitted changes, the branch name (if not default),
    a list of tags, and a list of bookmarks.

    When REV is not given, print a summary of the current state of the
    repository.

    Specifying a path to a repository root or Mercurial bundle will
    cause lookup to operate on that repository/bundle.

    .. container:: verbose

      Examples:

      - generate a build identifier for the working directory::

          hg id --id > build-id.dat

      - find the revision corresponding to a tag::

          hg id -n -r 1.3

      - check the most recent revision of a remote repository::

          hg id -r tip http://selenic.com/hg/

    Returns 0 if successful.
    """

    if not repo and not source:
        raise util.Abort(_("there is no Mercurial repository here "
                           "(.hg not found)"))

    # full 40-char hashes with --debug, short form otherwise
    hexfunc = ui.debugflag and hex or short
    # with no selector flags, print the default summary (id + decorations)
    default = not (num or id or branch or tags or bookmarks)
    output = []
    revs = []

    if source:
        # operate on the given repository/bundle instead of the local repo
        source, branches = hg.parseurl(ui.expandpath(source))
        repo = hg.peer(ui, opts, source)
        revs, checkout = hg.addbranchrevs(repo, repo, branches, None)

    if not repo.local():
        # remote peers can only answer id and bookmark queries
        if num or branch or tags:
            raise util.Abort(
                _("can't query remote revision number, branch, or tags"))
        if not rev and revs:
            rev = revs[0]
        if not rev:
            rev = "tip"

        remoterev = repo.lookup(rev)
        if default or id:
            output = [hexfunc(remoterev)]

        def getbms():
            # bookmarks pointing at remoterev, via the pushkey protocol
            bms = []

            if 'bookmarks' in repo.listkeys('namespaces'):
                hexremoterev = hex(remoterev)
                bms = [bm for bm, bmr in repo.listkeys('bookmarks').iteritems()
                       if bmr == hexremoterev]

            return bms

        if bookmarks:
            output.extend(getbms())
        elif default and not ui.quiet:
            # multiple bookmarks for a single parent separated by '/'
            bm = '/'.join(getbms())
            if bm:
                output.append(bm)
    else:
        if not rev:
            # working directory: may have one or two parents, and gets a
            # trailing "+" when there are uncommitted changes
            ctx = repo[None]
            parents = ctx.parents()
            changed = ""
            if default or id or num:
                changed = util.any(repo.status()) and "+" or ""
            if default or id:
                output = ["%s%s" %
                  ('+'.join([hexfunc(p.node()) for p in parents]), changed)]
            if num:
                output.append("%s%s" %
                  ('+'.join([str(p.rev()) for p in parents]), changed))
        else:
            ctx = scmutil.revsingle(repo, rev)
            if default or id:
                output = [hexfunc(ctx.node())]
            if num:
                output.append(str(ctx.rev()))

        if default and not ui.quiet:
            # default summary: branch (if not "default"), tags, bookmarks
            b = ctx.branch()
            if b != 'default':
                output.append("(%s)" % b)

            # multiple tags for a single parent separated by '/'
            t = '/'.join(ctx.tags())
            if t:
                output.append(t)

            # multiple bookmarks for a single parent separated by '/'
            bm = '/'.join(ctx.bookmarks())
            if bm:
                output.append(bm)
        else:
            # explicit flags: emit exactly what was asked for
            if branch:
                output.append(ctx.branch())

            if tags:
                output.extend(ctx.tags())

            if bookmarks:
                output.extend(ctx.bookmarks())

    ui.write("%s\n" % ' '.join(output))
3521
3521
3522 @command('import|patch',
3522 @command('import|patch',
3523 [('p', 'strip', 1,
3523 [('p', 'strip', 1,
3524 _('directory strip option for patch. This has the same '
3524 _('directory strip option for patch. This has the same '
3525 'meaning as the corresponding patch option'), _('NUM')),
3525 'meaning as the corresponding patch option'), _('NUM')),
3526 ('b', 'base', '', _('base path (DEPRECATED)'), _('PATH')),
3526 ('b', 'base', '', _('base path (DEPRECATED)'), _('PATH')),
3527 ('e', 'edit', False, _('invoke editor on commit messages')),
3527 ('e', 'edit', False, _('invoke editor on commit messages')),
3528 ('f', 'force', None, _('skip check for outstanding uncommitted changes')),
3528 ('f', 'force', None, _('skip check for outstanding uncommitted changes')),
3529 ('', 'no-commit', None,
3529 ('', 'no-commit', None,
3530 _("don't commit, just update the working directory")),
3530 _("don't commit, just update the working directory")),
3531 ('', 'bypass', None,
3531 ('', 'bypass', None,
3532 _("apply patch without touching the working directory")),
3532 _("apply patch without touching the working directory")),
3533 ('', 'exact', None,
3533 ('', 'exact', None,
3534 _('apply patch to the nodes from which it was generated')),
3534 _('apply patch to the nodes from which it was generated')),
3535 ('', 'import-branch', None,
3535 ('', 'import-branch', None,
3536 _('use any branch information in patch (implied by --exact)'))] +
3536 _('use any branch information in patch (implied by --exact)'))] +
3537 commitopts + commitopts2 + similarityopts,
3537 commitopts + commitopts2 + similarityopts,
3538 _('[OPTION]... PATCH...'))
3538 _('[OPTION]... PATCH...'))
def import_(ui, repo, patch1=None, *patches, **opts):
    """import an ordered set of patches

    Import a list of patches and commit them individually (unless
    --no-commit is specified).

    If there are outstanding changes in the working directory, import
    will abort unless given the -f/--force flag.

    You can import a patch straight from a mail message. Even patches
    as attachments work (to use the body part, it must have type
    text/plain or text/x-patch). From and Subject headers of email
    message are used as default committer and commit message. All
    text/plain body parts before first diff are added to commit
    message.

    If the imported patch was generated by :hg:`export`, user and
    description from patch override values from message headers and
    body. Values given on command line with -m/--message and -u/--user
    override these.

    If --exact is specified, import will set the working directory to
    the parent of each patch before applying it, and will abort if the
    resulting changeset has a different ID than the one recorded in
    the patch. This may happen due to character set problems or other
    deficiencies in the text patch format.

    Use --bypass to apply and commit patches directly to the
    repository, not touching the working directory. Without --exact,
    patches will be applied on top of the working directory parent
    revision.

    With -s/--similarity, hg will attempt to discover renames and
    copies in the patch in the same way as :hg:`addremove`.

    To read a patch from standard input, use "-" as the patch name. If
    a URL is specified, the patch will be downloaded from it.
    See :hg:`help dates` for a list of formats valid for -d/--date.

    .. container:: verbose

      Examples:

      - import a traditional patch from a website and detect renames::

          hg import -s 80 http://example.com/bugfix.patch

      - import a changeset from an hgweb server::

          hg import http://www.selenic.com/hg/rev/5ca8c111e9aa

      - import all the patches in an Unix-style mbox::

          hg import incoming-patches.mbox

      - attempt to exactly restore an exported changeset (not always
        possible)::

          hg import --exact proposed-fix.patch

      Returns 0 on success.
    """

    if not patch1:
        raise util.Abort(_('need at least one patch to import'))

    patches = (patch1,) + patches

    # Normalize --date eagerly so a bad date spec aborts before any
    # patch is touched (util.parsedate presumably raises on bad input
    # -- TODO confirm).
    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)

    editor = cmdutil.commiteditor
    if opts.get('edit'):
        editor = cmdutil.commitforceeditor

    # --bypass means "do not touch the working directory"; several
    # option combinations below are validated against that mode.
    update = not opts.get('bypass')
    if not update and opts.get('no_commit'):
        raise util.Abort(_('cannot use --no-commit with --bypass'))
    try:
        sim = float(opts.get('similarity') or 0)
    except ValueError:
        raise util.Abort(_('similarity must be a number'))
    if sim < 0 or sim > 100:
        raise util.Abort(_('similarity must be between 0 and 100'))
    if sim and not update:
        raise util.Abort(_('cannot use --similarity with --bypass'))

    # Refuse to run on a dirty working directory unless --force was
    # given (and we are actually going to modify the working dir).
    if (opts.get('exact') or not opts.get('force')) and update:
        cmdutil.bailifchanged(repo)

    base = opts["base"]
    strip = opts["strip"]
    wlock = lock = tr = None
    msgs = []

    def checkexact(repo, n, nodeid):
        # With --exact, a commit whose hash differs from the nodeid
        # recorded in the patch means information was lost; undo the
        # commit we just made and abort.
        if opts.get('exact') and hex(n) != nodeid:
            repo.rollback()
            raise util.Abort(_('patch is damaged or loses information'))

    def tryone(ui, hunk, parents):
        # Apply a single extracted patch; returns (status-message, node)
        # or (None, None) when the hunk contained no usable patch.
        tmpname, message, user, date, branch, nodeid, p1, p2 = \
            patch.extract(ui, hunk)

        if not tmpname:
            return (None, None)
        msg = _('applied to working directory')

        try:
            # Commit message precedence: command line > patch header >
            # editor prompt (message left as None launches the editor).
            cmdline_message = cmdutil.logmessage(ui, opts)
            if cmdline_message:
                # pickup the cmdline msg
                message = cmdline_message
            elif message:
                # pickup the patch msg
                message = message.strip()
            else:
                # launch the editor
                message = None
            ui.debug('message:\n%s\n' % message)

            # Decide which parents the new changeset should have.
            if len(parents) == 1:
                parents.append(repo[nullid])
            if opts.get('exact'):
                if not nodeid or not p1:
                    raise util.Abort(_('not a Mercurial patch'))
                p1 = repo[p1]
                p2 = repo[p2 or nullid]
            elif p2:
                try:
                    p1 = repo[p1]
                    p2 = repo[p2]
                    # Without any options, consider p2 only if the
                    # patch is being applied on top of the recorded
                    # first parent.
                    if p1 != parents[0]:
                        p1 = parents[0]
                        p2 = repo[nullid]
                except error.RepoError:
                    # Recorded parents unknown in this repo; fall back
                    # to the current parents.
                    p1, p2 = parents
            else:
                p1, p2 = parents

            n = None
            if update:
                # Normal mode: apply the patch in the working directory.
                if p1 != parents[0]:
                    hg.clean(repo, p1.node())
                if p2 != parents[1]:
                    repo.setparents(p1.node(), p2.node())

                if opts.get('exact') or opts.get('import_branch'):
                    repo.dirstate.setbranch(branch or 'default')

                files = set()
                patch.patch(ui, repo, tmpname, strip=strip, files=files,
                            eolmode=None, similarity=sim / 100.0)
                files = list(files)
                if opts.get('no_commit'):
                    # Collect messages; they are concatenated and saved
                    # as the last-message at the end of the command.
                    if message:
                        msgs.append(message)
                else:
                    if opts.get('exact') or p2:
                        # If you got here, you either use --force and know what
                        # you are doing or used --exact or a merge patch while
                        # being updated to its first parent.
                        m = None
                    else:
                        m = scmutil.matchfiles(repo, files or [])
                    n = repo.commit(message, opts.get('user') or user,
                                    opts.get('date') or date, match=m,
                                    editor=editor)
                    checkexact(repo, n, nodeid)
            else:
                # --bypass mode: build the changeset in-memory via a
                # filestore, never touching the working directory.
                if opts.get('exact') or opts.get('import_branch'):
                    branch = branch or 'default'
                else:
                    branch = p1.branch()
                store = patch.filestore()
                try:
                    files = set()
                    try:
                        patch.patchrepo(ui, repo, p1, store, tmpname, strip,
                                        files, eolmode=None)
                    except patch.PatchError, e:
                        raise util.Abort(str(e))
                    memctx = patch.makememctx(repo, (p1.node(), p2.node()),
                                              message,
                                              opts.get('user') or user,
                                              opts.get('date') or date,
                                              branch, files, store,
                                              editor=cmdutil.commiteditor)
                    repo.savecommitmessage(memctx.description())
                    n = memctx.commit()
                    checkexact(repo, n, nodeid)
                finally:
                    store.close()
            if n:
                # i18n: refers to a short changeset id
                msg = _('created %s') % short(n)
            return (msg, n)
        finally:
            # Always remove the temporary patch file patch.extract made.
            os.unlink(tmpname)

    try:
        try:
            wlock = repo.wlock()
            if not opts.get('no_commit'):
                # A single transaction spans all patches so a failure
                # rolls back every commit made by this invocation.
                lock = repo.lock()
                tr = repo.transaction('import')
            parents = repo.parents()
            for patchurl in patches:
                if patchurl == '-':
                    ui.status(_('applying patch from stdin\n'))
                    patchfile = ui.fin
                    patchurl = 'stdin'      # for error message
                else:
                    patchurl = os.path.join(base, patchurl)
                    ui.status(_('applying %s\n') % patchurl)
                    patchfile = url.open(ui, patchurl)

                haspatch = False
                for hunk in patch.split(patchfile):
                    (msg, node) = tryone(ui, hunk, parents)
                    if msg:
                        haspatch = True
                        ui.note(msg + '\n')
                    # Chain subsequent hunks onto the changeset we just
                    # created when not updating the working directory.
                    if update or opts.get('exact'):
                        parents = repo.parents()
                    else:
                        parents = [repo[node]]

                if not haspatch:
                    raise util.Abort(_('%s: no diffs found') % patchurl)

            if tr:
                tr.close()
            if msgs:
                repo.savecommitmessage('\n* * *\n'.join(msgs))
        except: # re-raises
            # wlock.release() indirectly calls dirstate.write(): since
            # we're crashing, we do not want to change the working dir
            # parent after all, so make sure it writes nothing
            repo.dirstate.invalidate()
            raise
    finally:
        if tr:
            tr.release()
        release(lock, wlock)
3788
3788
@command('incoming|in',
    [('f', 'force', None,
      _('run even if remote repository is unrelated')),
    ('n', 'newest-first', None, _('show newest record first')),
    ('', 'bundle', '',
     _('file to store the bundles into'), _('FILE')),
    ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
    ('B', 'bookmarks', False, _("compare bookmarks")),
    ('b', 'branch', [],
     _('a specific branch you would like to pull'), _('BRANCH')),
    ] + logopts + remoteopts + subrepoopts,
    _('[-p] [-n] [-M] [-f] [-r REV]... [--bundle FILENAME] [SOURCE]'))
def incoming(ui, repo, source="default", **opts):
    """show new changesets found in source

    Show new changesets found in the specified path/URL or the default
    pull location. These are the changesets that would have been pulled
    if a pull at the time you issued this command.

    For remote repository, using --bundle avoids downloading the
    changesets twice if the incoming is followed by a pull.

    See pull for valid source format details.

    Returns 0 if there are incoming changes, 1 otherwise.
    """
    if opts.get('bundle') and opts.get('subrepos'):
        raise util.Abort(_('cannot combine --bundle and --subrepos'))

    # With -B/--bookmarks, compare only bookmarks and return early;
    # changesets are not examined at all on this path.
    if opts.get('bookmarks'):
        source, branches = hg.parseurl(ui.expandpath(source),
                                       opts.get('branch'))
        other = hg.peer(repo, opts, source)
        if 'bookmarks' not in other.listkeys('namespaces'):
            ui.warn(_("remote doesn't support bookmarks\n"))
            return 0
        ui.status(_('comparing with %s\n') % util.hidepassword(source))
        return bookmarks.diff(ui, repo, other)

    # Stash the expanded source on the repo so subrepos can inherit it
    # (presumably read by subrepo code -- verify against callers);
    # always cleaned up, even if hg.incoming raises.
    repo._subtoppath = ui.expandpath(source)
    try:
        return hg.incoming(ui, repo, source, opts)
    finally:
        del repo._subtoppath
3833
3833
3834
3834
@command('^init', remoteopts, _('[-e CMD] [--remotecmd CMD] [DEST]'))
def init(ui, dest=".", **opts):
    """create a new repository in the given directory

    Initialize a new repository in the given directory. If the given
    directory does not exist, it will be created.

    If no directory is given, the current directory is used.

    It is possible to specify an ``ssh://`` URL as the destination.
    See :hg:`help urls` for more information.

    Returns 0 on success.
    """
    # hg.peer with create=True performs the actual repository creation;
    # per the docstring this covers both local paths and ssh:// URLs.
    hg.peer(ui, opts, ui.expandpath(dest), create=True)
3850
3850
@command('locate',
    [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
    ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
    ('f', 'fullpath', None, _('print complete paths from the filesystem root')),
    ] + walkopts,
    _('[OPTION]... [PATTERN]...'))
def locate(ui, repo, *pats, **opts):
    """locate files matching specific patterns

    Print files under Mercurial control in the working directory whose
    names match the given patterns.

    By default, this command searches all directories in the working
    directory. To search just the current directory and its
    subdirectories, use "--include .".

    If no patterns are given to match, this command prints the names
    of all files under Mercurial control in the working directory.

    If you want to feed the output of this command into the "xargs"
    command, use the -0 option to both this command and "xargs". This
    will avoid the problem of "xargs" treating single filenames that
    contain whitespace as multiple filenames.

    Returns 0 if a match is found, 1 otherwise.
    """
    # Line terminator: NUL with -0/--print0, newline otherwise.
    end = opts.get('print0') and '\0' or '\n'
    rev = scmutil.revsingle(repo, opts.get('rev'), None).node()

    # ret stays 1 (no match) until at least one file is printed.
    ret = 1
    m = scmutil.match(repo[rev], pats, opts, default='relglob')
    # Silence complaints about patterns that match nothing.
    m.bad = lambda x, y: False
    for abs in repo[rev].walk(m):
        # Without --rev, only report files tracked in the dirstate.
        if not rev and abs not in repo.dirstate:
            continue
        if opts.get('fullpath'):
            ui.write(repo.wjoin(abs), end)
        else:
            ui.write(((pats and m.rel(abs)) or abs), end)
        ret = 0

    return ret
3893
3893
3894 @command('^log|history',
3894 @command('^log|history',
3895 [('f', 'follow', None,
3895 [('f', 'follow', None,
3896 _('follow changeset history, or file history across copies and renames')),
3896 _('follow changeset history, or file history across copies and renames')),
3897 ('', 'follow-first', None,
3897 ('', 'follow-first', None,
3898 _('only follow the first parent of merge changesets (DEPRECATED)')),
3898 _('only follow the first parent of merge changesets (DEPRECATED)')),
3899 ('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
3899 ('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
3900 ('C', 'copies', None, _('show copied files')),
3900 ('C', 'copies', None, _('show copied files')),
3901 ('k', 'keyword', [],
3901 ('k', 'keyword', [],
3902 _('do case-insensitive search for a given text'), _('TEXT')),
3902 _('do case-insensitive search for a given text'), _('TEXT')),
3903 ('r', 'rev', [], _('show the specified revision or range'), _('REV')),
3903 ('r', 'rev', [], _('show the specified revision or range'), _('REV')),
3904 ('', 'removed', None, _('include revisions where files were removed')),
3904 ('', 'removed', None, _('include revisions where files were removed')),
3905 ('m', 'only-merges', None, _('show only merges (DEPRECATED)')),
3905 ('m', 'only-merges', None, _('show only merges (DEPRECATED)')),
3906 ('u', 'user', [], _('revisions committed by user'), _('USER')),
3906 ('u', 'user', [], _('revisions committed by user'), _('USER')),
3907 ('', 'only-branch', [],
3907 ('', 'only-branch', [],
3908 _('show only changesets within the given named branch (DEPRECATED)'),
3908 _('show only changesets within the given named branch (DEPRECATED)'),
3909 _('BRANCH')),
3909 _('BRANCH')),
3910 ('b', 'branch', [],
3910 ('b', 'branch', [],
3911 _('show changesets within the given named branch'), _('BRANCH')),
3911 _('show changesets within the given named branch'), _('BRANCH')),
3912 ('P', 'prune', [],
3912 ('P', 'prune', [],
3913 _('do not display revision or any of its ancestors'), _('REV')),
3913 _('do not display revision or any of its ancestors'), _('REV')),
3914 ('', 'hidden', False, _('show hidden changesets (DEPRECATED)')),
3914 ('', 'hidden', False, _('show hidden changesets (DEPRECATED)')),
3915 ] + logopts + walkopts,
3915 ] + logopts + walkopts,
3916 _('[OPTION]... [FILE]'))
3916 _('[OPTION]... [FILE]'))
3917 def log(ui, repo, *pats, **opts):
3917 def log(ui, repo, *pats, **opts):
3918 """show revision history of entire repository or files
3918 """show revision history of entire repository or files
3919
3919
3920 Print the revision history of the specified files or the entire
3920 Print the revision history of the specified files or the entire
3921 project.
3921 project.
3922
3922
3923 If no revision range is specified, the default is ``tip:0`` unless
3923 If no revision range is specified, the default is ``tip:0`` unless
3924 --follow is set, in which case the working directory parent is
3924 --follow is set, in which case the working directory parent is
3925 used as the starting revision.
3925 used as the starting revision.
3926
3926
3927 File history is shown without following rename or copy history of
3927 File history is shown without following rename or copy history of
3928 files. Use -f/--follow with a filename to follow history across
3928 files. Use -f/--follow with a filename to follow history across
3929 renames and copies. --follow without a filename will only show
3929 renames and copies. --follow without a filename will only show
3930 ancestors or descendants of the starting revision.
3930 ancestors or descendants of the starting revision.
3931
3931
3932 By default this command prints revision number and changeset id,
3932 By default this command prints revision number and changeset id,
3933 tags, non-trivial parents, user, date and time, and a summary for
3933 tags, non-trivial parents, user, date and time, and a summary for
3934 each commit. When the -v/--verbose switch is used, the list of
3934 each commit. When the -v/--verbose switch is used, the list of
3935 changed files and full commit message are shown.
3935 changed files and full commit message are shown.
3936
3936
3937 .. note::
3937 .. note::
3938 log -p/--patch may generate unexpected diff output for merge
3938 log -p/--patch may generate unexpected diff output for merge
3939 changesets, as it will only compare the merge changeset against
3939 changesets, as it will only compare the merge changeset against
3940 its first parent. Also, only files different from BOTH parents
3940 its first parent. Also, only files different from BOTH parents
3941 will appear in files:.
3941 will appear in files:.
3942
3942
3943 .. note::
3943 .. note::
3944 for performance reasons, log FILE may omit duplicate changes
3944 for performance reasons, log FILE may omit duplicate changes
3945 made on branches and will not show deletions. To see all
3945 made on branches and will not show deletions. To see all
3946 changes including duplicates and deletions, use the --removed
3946 changes including duplicates and deletions, use the --removed
3947 switch.
3947 switch.
3948
3948
3949 .. container:: verbose
3949 .. container:: verbose
3950
3950
3951 Some examples:
3951 Some examples:
3952
3952
3953 - changesets with full descriptions and file lists::
3953 - changesets with full descriptions and file lists::
3954
3954
3955 hg log -v
3955 hg log -v
3956
3956
3957 - changesets ancestral to the working directory::
3957 - changesets ancestral to the working directory::
3958
3958
3959 hg log -f
3959 hg log -f
3960
3960
3961 - last 10 commits on the current branch::
3961 - last 10 commits on the current branch::
3962
3962
3963 hg log -l 10 -b .
3963 hg log -l 10 -b .
3964
3964
3965 - changesets showing all modifications of a file, including removals::
3965 - changesets showing all modifications of a file, including removals::
3966
3966
3967 hg log --removed file.c
3967 hg log --removed file.c
3968
3968
3969 - all changesets that touch a directory, with diffs, excluding merges::
3969 - all changesets that touch a directory, with diffs, excluding merges::
3970
3970
3971 hg log -Mp lib/
3971 hg log -Mp lib/
3972
3972
3973 - all revision numbers that match a keyword::
3973 - all revision numbers that match a keyword::
3974
3974
3975 hg log -k bug --template "{rev}\\n"
3975 hg log -k bug --template "{rev}\\n"
3976
3976
3977 - check if a given changeset is included is a tagged release::
3977 - check if a given changeset is included is a tagged release::
3978
3978
3979 hg log -r "a21ccf and ancestor(1.9)"
3979 hg log -r "a21ccf and ancestor(1.9)"
3980
3980
3981 - find all changesets by some user in a date range::
3981 - find all changesets by some user in a date range::
3982
3982
3983 hg log -k alice -d "may 2008 to jul 2008"
3983 hg log -k alice -d "may 2008 to jul 2008"
3984
3984
3985 - summary of all changesets after the last tag::
3985 - summary of all changesets after the last tag::
3986
3986
3987 hg log -r "last(tagged())::" --template "{desc|firstline}\\n"
3987 hg log -r "last(tagged())::" --template "{desc|firstline}\\n"
3988
3988
3989 See :hg:`help dates` for a list of formats valid for -d/--date.
3989 See :hg:`help dates` for a list of formats valid for -d/--date.
3990
3990
3991 See :hg:`help revisions` and :hg:`help revsets` for more about
3991 See :hg:`help revisions` and :hg:`help revsets` for more about
3992 specifying revisions.
3992 specifying revisions.
3993
3993
3994 See :hg:`help templates` for more about pre-packaged styles and
3994 See :hg:`help templates` for more about pre-packaged styles and
3995 specifying custom templates.
3995 specifying custom templates.
3996
3996
3997 Returns 0 on success.
3997 Returns 0 on success.
3998 """
3998 """
3999
3999
4000 matchfn = scmutil.match(repo[None], pats, opts)
4000 matchfn = scmutil.match(repo[None], pats, opts)
4001 limit = cmdutil.loglimit(opts)
4001 limit = cmdutil.loglimit(opts)
4002 count = 0
4002 count = 0
4003
4003
4004 getrenamed, endrev = None, None
4004 getrenamed, endrev = None, None
4005 if opts.get('copies'):
4005 if opts.get('copies'):
4006 if opts.get('rev'):
4006 if opts.get('rev'):
4007 endrev = max(scmutil.revrange(repo, opts.get('rev'))) + 1
4007 endrev = max(scmutil.revrange(repo, opts.get('rev'))) + 1
4008 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
4008 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
4009
4009
4010 df = False
4010 df = False
4011 if opts["date"]:
4011 if opts["date"]:
4012 df = util.matchdate(opts["date"])
4012 df = util.matchdate(opts["date"])
4013
4013
4014 branches = opts.get('branch', []) + opts.get('only_branch', [])
4014 branches = opts.get('branch', []) + opts.get('only_branch', [])
4015 opts['branch'] = [repo.lookupbranch(b) for b in branches]
4015 opts['branch'] = [repo.lookupbranch(b) for b in branches]
4016
4016
4017 displayer = cmdutil.show_changeset(ui, repo, opts, True)
4017 displayer = cmdutil.show_changeset(ui, repo, opts, True)
4018 def prep(ctx, fns):
4018 def prep(ctx, fns):
4019 rev = ctx.rev()
4019 rev = ctx.rev()
4020 parents = [p for p in repo.changelog.parentrevs(rev)
4020 parents = [p for p in repo.changelog.parentrevs(rev)
4021 if p != nullrev]
4021 if p != nullrev]
4022 if opts.get('no_merges') and len(parents) == 2:
4022 if opts.get('no_merges') and len(parents) == 2:
4023 return
4023 return
4024 if opts.get('only_merges') and len(parents) != 2:
4024 if opts.get('only_merges') and len(parents) != 2:
4025 return
4025 return
4026 if opts.get('branch') and ctx.branch() not in opts['branch']:
4026 if opts.get('branch') and ctx.branch() not in opts['branch']:
4027 return
4027 return
4028 if not opts.get('hidden') and ctx.hidden():
4028 if not opts.get('hidden') and ctx.hidden():
4029 return
4029 return
4030 if df and not df(ctx.date()[0]):
4030 if df and not df(ctx.date()[0]):
4031 return
4031 return
4032
4032
4033 lower = encoding.lower
4033 lower = encoding.lower
4034 if opts.get('user'):
4034 if opts.get('user'):
4035 luser = lower(ctx.user())
4035 luser = lower(ctx.user())
4036 for k in [lower(x) for x in opts['user']]:
4036 for k in [lower(x) for x in opts['user']]:
4037 if (k in luser):
4037 if (k in luser):
4038 break
4038 break
4039 else:
4039 else:
4040 return
4040 return
4041 if opts.get('keyword'):
4041 if opts.get('keyword'):
4042 luser = lower(ctx.user())
4042 luser = lower(ctx.user())
4043 ldesc = lower(ctx.description())
4043 ldesc = lower(ctx.description())
4044 lfiles = lower(" ".join(ctx.files()))
4044 lfiles = lower(" ".join(ctx.files()))
4045 for k in [lower(x) for x in opts['keyword']]:
4045 for k in [lower(x) for x in opts['keyword']]:
4046 if (k in luser or k in ldesc or k in lfiles):
4046 if (k in luser or k in ldesc or k in lfiles):
4047 break
4047 break
4048 else:
4048 else:
4049 return
4049 return
4050
4050
4051 copies = None
4051 copies = None
4052 if getrenamed is not None and rev:
4052 if getrenamed is not None and rev:
4053 copies = []
4053 copies = []
4054 for fn in ctx.files():
4054 for fn in ctx.files():
4055 rename = getrenamed(fn, rev)
4055 rename = getrenamed(fn, rev)
4056 if rename:
4056 if rename:
4057 copies.append((fn, rename[0]))
4057 copies.append((fn, rename[0]))
4058
4058
4059 revmatchfn = None
4059 revmatchfn = None
4060 if opts.get('patch') or opts.get('stat'):
4060 if opts.get('patch') or opts.get('stat'):
4061 if opts.get('follow') or opts.get('follow_first'):
4061 if opts.get('follow') or opts.get('follow_first'):
4062 # note: this might be wrong when following through merges
4062 # note: this might be wrong when following through merges
4063 revmatchfn = scmutil.match(repo[None], fns, default='path')
4063 revmatchfn = scmutil.match(repo[None], fns, default='path')
4064 else:
4064 else:
4065 revmatchfn = matchfn
4065 revmatchfn = matchfn
4066
4066
4067 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
4067 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
4068
4068
4069 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
4069 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
4070 if count == limit:
4070 if count == limit:
4071 break
4071 break
4072 if displayer.flush(ctx.rev()):
4072 if displayer.flush(ctx.rev()):
4073 count += 1
4073 count += 1
4074 displayer.close()
4074 displayer.close()
4075
4075
@command('manifest',
    [('r', 'rev', '', _('revision to display'), _('REV')),
     ('', 'all', False, _("list files from all revisions"))],
    _('[-r REV]'))
def manifest(ui, repo, node=None, rev=None, **opts):
    """output the current or given revision of the project manifest

    Print a list of version controlled files for the given revision.
    If no revision is given, the first parent of the working directory
    is used, or the null revision if no revision is checked out.

    With -v, print file permissions, symlink and executable bits.
    With --debug, print file revision hashes.

    If option --all is specified, the list of all files from all revisions
    is printed. This includes deleted and renamed files.

    Returns 0 on success.
    """
    if opts.get('all'):
        # --all is exclusive with any revision selector
        if rev or node:
            raise util.Abort(_("can't specify a revision with --all"))

        # Enumerate every file ever tracked by scanning the store's
        # per-file revlog indexes ("data/<path>.i") rather than reading
        # any single manifest; this naturally includes deleted and
        # renamed files.
        res = []
        prefix = "data/"
        suffix = ".i"
        plen = len(prefix)
        slen = len(suffix)
        # hold the repo lock so the store file list is stable while scanning
        lock = repo.lock()
        try:
            for fn, b, size in repo.store.datafiles():
                # skip empty revlogs and anything outside the data/ area;
                # strip the prefix/suffix to recover the tracked path
                if size != 0 and fn[-slen:] == suffix and fn[:plen] == prefix:
                    res.append(fn[plen:-slen])
        finally:
            lock.release()
        for f in sorted(res):
            ui.write("%s\n" % f)
        return

    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    # the positional argument and -r/--rev are interchangeable
    if not node:
        node = rev

    # flag character from ctx.flags() -> human-readable mode prefix for -v
    decor = {'l':'644 @ ', 'x':'755 * ', '':'644 '}
    ctx = scmutil.revsingle(repo, node)
    for f in ctx:
        if ui.debugflag:
            # --debug: prepend the 40-char hex file revision hash
            ui.write("%40s " % hex(ctx.manifest()[f]))
        if ui.verbose:
            ui.write(decor[ctx.flags(f)])
        ui.write("%s\n" % f)
4129
4129
@command('^merge',
    [('f', 'force', None, _('force a merge with outstanding changes')),
    ('r', 'rev', '', _('revision to merge'), _('REV')),
    ('P', 'preview', None,
     _('review revisions to merge (no merge is performed)'))
    ] + mergetoolopts,
    _('[-P] [-f] [[-r] REV]'))
def merge(ui, repo, node=None, **opts):
    """merge working directory with another revision

    The current working directory is updated with all changes made in
    the requested revision since the last common predecessor revision.

    Files that changed between either parent are marked as changed for
    the next commit and a commit must be performed before any further
    updates to the repository are allowed. The next commit will have
    two parents.

    ``--tool`` can be used to specify the merge tool used for file
    merges. It overrides the HGMERGE environment variable and your
    configuration files. See :hg:`help merge-tools` for options.

    If no revision is specified, the working directory's parent is a
    head revision, and the current branch contains exactly one other
    head, the other head is merged with by default. Otherwise, an
    explicit revision with which to merge with must be provided.

    :hg:`resolve` must be used to resolve unresolved files.

    To undo an uncommitted merge, use :hg:`update --clean .` which
    will check out a clean copy of the original merge parent, losing
    all changes.

    Returns 0 on success, 1 if there are unresolved files.
    """

    # the positional argument and -r/--rev are interchangeable
    if opts.get('rev') and node:
        raise util.Abort(_("please specify just one revision"))
    if not node:
        node = opts.get('rev')

    if not node:
        # No explicit revision: auto-select the "other" head of the
        # current branch, but only when that choice is unambiguous.
        branch = repo[None].branch()
        bheads = repo.branchheads(branch)
        if len(bheads) > 2:
            # three or more heads: the default choice would be arbitrary
            raise util.Abort(_("branch '%s' has %d heads - "
                               "please merge with an explicit rev")
                             % (branch, len(bheads)),
                             hint=_("run 'hg heads .' to see heads"))

        parent = repo.dirstate.p1()
        if len(bheads) == 1:
            # a single branch head means there is nothing on this branch
            # to merge with; distinguish "other branches have heads" from
            # "truly nothing to merge"
            if len(repo.heads()) > 1:
                raise util.Abort(_("branch '%s' has one head - "
                                   "please merge with an explicit rev")
                                 % branch,
                                 hint=_("run 'hg heads' to see all heads"))
            msg, hint = _('nothing to merge'), None
            if parent != repo.lookup(branch):
                # working dir is behind the branch tip: suggest update
                hint = _("use 'hg update' instead")
            raise util.Abort(msg, hint=hint)

        if parent not in bheads:
            raise util.Abort(_('working directory not at a head revision'),
                             hint=_("use 'hg update' or merge with an "
                                    "explicit revision"))
        # exactly two heads and we are on one of them: merge with the other
        node = parent == bheads[0] and bheads[-1] or bheads[0]
    else:
        node = scmutil.revsingle(repo, node).node()

    if opts.get('preview'):
        # find nodes that are ancestors of p2 but not of p1
        p1 = repo.lookup('.')
        p2 = repo.lookup(node)
        nodes = repo.changelog.findmissing(common=[p1], heads=[p2])

        # show what would be merged, then bail out without merging
        displayer = cmdutil.show_changeset(ui, repo, opts)
        for node in nodes:
            displayer.show(repo[node])
        displayer.close()
        return 0

    try:
        # ui.forcemerge is an internal variable, do not document
        repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
        return hg.merge(repo, node, force=opts.get('force'))
    finally:
        # always clear the override, even if the merge aborts
        ui.setconfig('ui', 'forcemerge', '')
4218
4218
@command('outgoing|out',
    [('f', 'force', None, _('run even when the destination is unrelated')),
    ('r', 'rev', [],
     _('a changeset intended to be included in the destination'), _('REV')),
    ('n', 'newest-first', None, _('show newest record first')),
    ('B', 'bookmarks', False, _('compare bookmarks')),
    ('b', 'branch', [], _('a specific branch you would like to push'),
     _('BRANCH')),
    ] + logopts + remoteopts + subrepoopts,
    _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]'))
def outgoing(ui, repo, dest=None, **opts):
    """show changesets not found in the destination

    Show changesets not found in the specified destination repository
    or the default push location. These are the changesets that would
    be pushed if a push was requested.

    See pull for details of valid destination formats.

    Returns 0 if there are outgoing changes, 1 otherwise.
    """

    if opts.get('bookmarks'):
        # -B/--bookmarks: compare bookmarks with the remote instead of
        # changesets; destination resolution mirrors a push
        dest = ui.expandpath(dest or 'default-push', dest or 'default')
        dest, branches = hg.parseurl(dest, opts.get('branch'))
        other = hg.peer(repo, opts, dest)
        if 'bookmarks' not in other.listkeys('namespaces'):
            # remote lacks the bookmarks pushkey namespace; warn, success
            ui.warn(_("remote doesn't support bookmarks\n"))
            return 0
        ui.status(_('comparing with %s\n') % util.hidepassword(dest))
        return bookmarks.diff(ui, other, repo)

    # stash the resolved destination on the repo so subrepos use the
    # same top-level path; removed again once hg.outgoing() returns
    repo._subtoppath = ui.expandpath(dest or 'default-push', dest or 'default')
    try:
        return hg.outgoing(ui, repo, dest, opts)
    finally:
        del repo._subtoppath
4256
4256
@command('parents',
    [('r', 'rev', '', _('show parents of the specified revision'), _('REV')),
    ] + templateopts,
    _('[-r REV] [FILE]'))
def parents(ui, repo, file_=None, **opts):
    """show the parents of the working directory or revision

    Print the working directory's parent revisions. If a revision is
    given via -r/--rev, the parent of that revision will be printed.
    If a file argument is given, the revision in which the file was
    last changed (before the working directory revision or the
    argument to --rev if given) is printed.

    Returns 0 on success.
    """

    ctx = scmutil.revsingle(repo, opts.get('rev'), None)

    if file_:
        # a file argument must be a single, explicit path (no patterns)
        m = scmutil.match(ctx, (file_,), opts)
        if m.anypats() or len(m.files()) != 1:
            raise util.Abort(_('can only specify an explicit filename'))
        file_ = m.files()[0]
        filenodes = []
        for cp in ctx.parents():
            if not cp:
                continue
            try:
                filenodes.append(cp.filenode(file_))
            except error.LookupError:
                # file does not exist in this parent; skip it
                pass
        if not filenodes:
            raise util.Abort(_("'%s' not found in manifest!") % file_)
        # map each file revision back to the changeset that introduced it
        fl = repo.file(file_)
        p = [repo.lookup(fl.linkrev(fl.rev(fn))) for fn in filenodes]
    else:
        p = [cp.node() for cp in ctx.parents()]

    displayer = cmdutil.show_changeset(ui, repo, opts)
    for n in p:
        # suppress the null parent (e.g. for root changesets)
        if n != nullid:
            displayer.show(repo[n])
    displayer.close()
4300
4300
@command('paths', [], _('[NAME]'))
def paths(ui, repo, search=None):
    """show aliases for remote repositories

    Show definition of symbolic path name NAME. If no name is given,
    show definition of all available names.

    Option -q/--quiet suppresses all output when searching for NAME
    and shows only the path names when listing all definitions.

    Path names are defined in the [paths] section of your
    configuration file and in ``/etc/mercurial/hgrc``. If run inside a
    repository, ``.hg/hgrc`` is used, too.

    The path names ``default`` and ``default-push`` have a special
    meaning. When performing a push or pull operation, they are used
    as fallbacks if no location is specified on the command-line.
    When ``default-push`` is set, it will be used for push and
    ``default`` will be used for pull; otherwise ``default`` is used
    as the fallback for both. When cloning a repository, the clone
    source is written as ``default`` in ``.hg/hgrc``. Note that
    ``default`` and ``default-push`` apply to all inbound (e.g.
    :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email` and
    :hg:`bundle`) operations.

    See :hg:`help urls` for more information.

    Returns 0 on success.
    """
    entries = ui.configitems("paths")

    if not search:
        # listing mode: dump every configured alias; -q prints names only
        for alias, location in entries:
            if ui.quiet:
                ui.write("%s\n" % alias)
            else:
                ui.write("%s = %s\n" % (alias, util.hidepassword(location)))
        return

    # lookup mode: print the location of the first matching alias
    for alias, location in entries:
        if alias == search:
            ui.status("%s\n" % util.hidepassword(location))
            return
    # no match: complain unless -q suppressed output, signal failure
    if not ui.quiet:
        ui.warn(_("not found!\n"))
    return 1
4344
4344
@command('^phase',
    [('p', 'public', False, _('set changeset phase to public')),
     ('d', 'draft', False, _('set changeset phase to draft')),
     ('s', 'secret', False, _('set changeset phase to secret')),
     ('f', 'force', False, _('allow to move boundary backward')),
     ('r', 'rev', [], _('target revision'), _('REV')),
    ],
    _('[-p|-d|-s] [-f] [-r] REV...'))
def phase(ui, repo, *revs, **opts):
    """set or show the current phase name

    With no argument, show the phase name of specified revisions.

    With one of -p/--public, -d/--draft or -s/--secret, change the
    phase value of the specified revisions.

    Unless -f/--force is specified, :hg:`phase` won't move changeset from a
    lower phase to an higher phase. Phases are ordered as follows::

        public < draft < secret

    Return 0 on success, 1 if no phases were changed or some could not
    be changed.
    """
    # search for a unique phase argument
    # (the index into phases.phasenames doubles as the phase value)
    targetphase = None
    for idx, name in enumerate(phases.phasenames):
        if opts[name]:
            if targetphase is not None:
                raise util.Abort(_('only one phase can be specified'))
            targetphase = idx

    # look for specified revision: positional args and -r/--rev combine
    revs = list(revs)
    revs.extend(opts['rev'])
    if not revs:
        raise util.Abort(_('no revisions specified'))

    revs = scmutil.revrange(repo, revs)

    lock = None
    ret = 0
    if targetphase is None:
        # display mode: no phase flag given, just print "rev: phasename"
        for r in revs:
            ctx = repo[r]
            ui.write('%i: %s\n' % (ctx.rev(), ctx.phasestr()))
    else:
        # set mode: moving phase boundaries requires the repo lock
        lock = repo.lock()
        try:
            # set phase
            if not revs:
                raise util.Abort(_('empty revision set'))
            nodes = [repo[r].node() for r in revs]
            # snapshot the per-rev phases so we can report what changed
            olddata = repo._phasecache.getphaserevs(repo)[:]
            phases.advanceboundary(repo, targetphase, nodes)
            if opts['force']:
                # --force also allows moving the boundary backward
                # (e.g. public -> draft)
                phases.retractboundary(repo, targetphase, nodes)
        finally:
            lock.release()
        if olddata is not None:
            # compare old vs. new phase data to count actual changes and
            # detect nodes that could not be moved without --force
            changes = 0
            newdata = repo._phasecache.getphaserevs(repo)
            changes = sum(o != newdata[i] for i, o in enumerate(olddata))
            rejected = [n for n in nodes
                        if newdata[repo[n].rev()] < targetphase]
            if rejected:
                ui.warn(_('cannot move %i changesets to a more permissive '
                          'phase, use --force\n') % len(rejected))
                ret = 1
            if changes:
                msg = _('phase changed for %i changesets\n') % changes
                if ret:
                    # partial success: make the summary visible
                    ui.status(msg)
                else:
                    ui.note(msg)
            else:
                ui.warn(_('no phases changed\n'))
                ret = 1
    return ret
4425
4425
def postincoming(ui, repo, modheads, optupdate, checkout):
    """Report on (and optionally update after) incoming changesets.

    modheads is the number of heads added/changed by the pull or
    unbundle; optupdate is the -u/--update flag; checkout is the
    revision to update to (may be None for the default destination).

    Returns None when there is nothing to do, 0/hg.update()'s result
    when updating, otherwise prints a hint about heads.
    """
    if modheads == 0:
        # nothing came in; stay quiet
        return
    if optupdate:
        # remember the pre-update parent so the active bookmark can
        # be moved along with the working directory
        movemarkfrom = repo['.'].node()
        try:
            ret = hg.update(repo, checkout)
        except util.Abort, inst:
            # update refused (e.g. crosses branches): warn but treat the
            # pull itself as successful
            ui.warn(_("not updating: %s\n") % str(inst))
            return 0
        if not ret and not checkout:
            # clean update to the default destination: drag the current
            # bookmark forward if it pointed at the old parent
            if bookmarks.update(repo, [movemarkfrom], repo['.'].node()):
                ui.status(_("updating bookmark %s\n") % repo._bookmarkcurrent)
        return ret
    if modheads > 1:
        # multiple heads arrived: suggest the appropriate heads/merge hint
        currentbranchheads = len(repo.branchheads())
        if currentbranchheads == modheads:
            ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
        elif currentbranchheads > 1:
            ui.status(_("(run 'hg heads .' to see heads, 'hg merge' to "
                        "merge)\n"))
        else:
            ui.status(_("(run 'hg heads' to see heads)\n"))
    else:
        ui.status(_("(run 'hg update' to get a working copy)\n"))
4451
4451
4452 @command('^pull',
4452 @command('^pull',
4453 [('u', 'update', None,
4453 [('u', 'update', None,
4454 _('update to new branch head if changesets were pulled')),
4454 _('update to new branch head if changesets were pulled')),
4455 ('f', 'force', None, _('run even when remote repository is unrelated')),
4455 ('f', 'force', None, _('run even when remote repository is unrelated')),
4456 ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
4456 ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
4457 ('B', 'bookmark', [], _("bookmark to pull"), _('BOOKMARK')),
4457 ('B', 'bookmark', [], _("bookmark to pull"), _('BOOKMARK')),
4458 ('b', 'branch', [], _('a specific branch you would like to pull'),
4458 ('b', 'branch', [], _('a specific branch you would like to pull'),
4459 _('BRANCH')),
4459 _('BRANCH')),
4460 ] + remoteopts,
4460 ] + remoteopts,
4461 _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]'))
4461 _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]'))
def pull(ui, repo, source="default", **opts):
    """pull changes from the specified source

    Pull changes from a remote repository to a local one.

    This finds all changes from the repository at the specified path
    or URL and adds them to a local repository (the current one unless
    -R is specified). By default, this does not update the copy of the
    project in the working directory.

    Use :hg:`incoming` if you want to see what would have been added
    by a pull at the time you issued this command. If you then decide
    to add those changes to the repository, you should use :hg:`pull
    -r X` where ``X`` is the last changeset listed by :hg:`incoming`.

    If SOURCE is omitted, the 'default' path will be used.
    See :hg:`help urls` for more information.

    Returns 0 on success, 1 if an update had unresolved files.
    """
    # Expand path aliases and split off any requested branches (-b / #branch).
    source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
    other = hg.peer(repo, opts, source)
    ui.status(_('pulling from %s\n') % util.hidepassword(source))
    # revs: revision specs to pull; checkout: revision to update to afterwards.
    revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev'))

    if opts.get('bookmark'):
        if not revs:
            revs = []
        # rb maps remote bookmark name -> node; also reused at the bottom of
        # this function (both uses are guarded by opts.get('bookmark')).
        rb = other.listkeys('bookmarks')
        for b in opts['bookmark']:
            if b not in rb:
                raise util.Abort(_('remote bookmark %s not found!') % b)
            # Pull the changeset each requested bookmark points to.
            revs.append(rb[b])

    if revs:
        try:
            # Resolve symbolic revisions to binary node ids on the remote.
            revs = [other.lookup(rev) for rev in revs]
        except error.CapabilityError:
            err = _("other repository doesn't support revision lookup, "
                    "so a rev cannot be specified.")
            raise util.Abort(err)

    modheads = repo.pull(other, heads=revs, force=opts.get('force'))
    # Sync local copies of remote bookmarks with their new positions.
    bookmarks.updatefromremote(ui, repo, other, source)
    if checkout:
        checkout = str(repo.changelog.rev(other.lookup(checkout)))
    # _subtoppath tells subrepo code where the top-level pull came from.
    repo._subtoppath = source
    try:
        ret = postincoming(ui, repo, modheads, opts.get('update'), checkout)

    finally:
        del repo._subtoppath

    # update specified bookmarks
    if opts.get('bookmark'):
        for b in opts['bookmark']:
            # explicit pull overrides local bookmark if any
            ui.status(_("importing bookmark %s\n") % b)
            repo._bookmarks[b] = repo[rb[b]].node()
        bookmarks.write(repo)

    return ret
4524
4524
@command('^push',
    [('f', 'force', None, _('force push')),
    ('r', 'rev', [],
     _('a changeset intended to be included in the destination'),
     _('REV')),
    ('B', 'bookmark', [], _("bookmark to push"), _('BOOKMARK')),
    ('b', 'branch', [],
     _('a specific branch you would like to push'), _('BRANCH')),
    ('', 'new-branch', False, _('allow pushing a new branch')),
    ] + remoteopts,
    _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]'))
def push(ui, repo, dest=None, **opts):
    """push changes to the specified destination

    Push changesets from the local repository to the specified
    destination.

    This operation is symmetrical to pull: it is identical to a pull
    in the destination repository from the current one.

    By default, push will not allow creation of new heads at the
    destination, since multiple heads would make it unclear which head
    to use. In this situation, it is recommended to pull and merge
    before pushing.

    Use --new-branch if you want to allow push to create a new named
    branch that is not present at the destination. This allows you to
    only create a new branch without forcing other changes.

    Use -f/--force to override the default behavior and push all
    changesets on all branches.

    If -r/--rev is used, the specified revision and all its ancestors
    will be pushed to the remote repository.

    Please see :hg:`help urls` for important details about ``ssh://``
    URLs. If DESTINATION is omitted, a default path will be used.

    Returns 0 if push was successful, 1 if nothing to push.
    """

    if opts.get('bookmark'):
        for b in opts['bookmark']:
            # translate -B options to -r so changesets get pushed
            if b in repo._bookmarks:
                opts.setdefault('rev', []).append(b)
            else:
                # if we try to push a deleted bookmark, translate it to null
                # this lets simultaneous -r, -b options continue working
                opts.setdefault('rev', []).append("null")

    # 'default-push' takes precedence over 'default' when both are configured.
    dest = ui.expandpath(dest or 'default-push', dest or 'default')
    dest, branches = hg.parseurl(dest, opts.get('branch'))
    ui.status(_('pushing to %s\n') % util.hidepassword(dest))
    revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
    other = hg.peer(repo, opts, dest)
    if revs:
        # Resolve revision specs locally before pushing.
        revs = [repo.lookup(rev) for rev in revs]

    # _subtoppath tells subrepo code where the top-level push is going.
    repo._subtoppath = dest
    try:
        # push subrepos depth-first for coherent ordering
        c = repo['']
        subs = c.substate # only repos that are committed
        for s in sorted(subs):
            # A subrepo push returning 0 aborts the whole push early.
            if c.sub(s).push(opts) == 0:
                return False
    finally:
        del repo._subtoppath
    result = repo.push(other, opts.get('force'), revs=revs,
                       newbranch=opts.get('new_branch'))

    # Invert: repo.push's truthy "pushed something" becomes exit code 0,
    # matching "Returns 0 if push was successful, 1 if nothing to push".
    result = not result

    if opts.get('bookmark'):
        # rb maps remote bookmark name -> node, via the pushkey namespace.
        rb = other.listkeys('bookmarks')
        for b in opts['bookmark']:
            # explicit push overrides remote bookmark if any
            if b in repo._bookmarks:
                ui.status(_("exporting bookmark %s\n") % b)
                new = repo[b].hex()
            elif b in rb:
                ui.status(_("deleting remote bookmark %s\n") % b)
                new = '' # delete
            else:
                ui.warn(_('bookmark %s does not exist on the local '
                          'or remote repository!\n') % b)
                return 2
            old = rb.get(b, '')
            # pushkey performs the remote-side compare-and-set update.
            r = other.pushkey('bookmarks', b, old, new)
            if not r:
                ui.warn(_('updating bookmark %s failed!\n') % b)
                # Only escalate to 2 if the changeset push itself succeeded.
                if not result:
                    result = 2

    return result
4621
4621
@command('recover', [])
def recover(ui, repo):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an
    interrupted operation. It should only be necessary when Mercurial
    suggests it.

    Returns 0 if successful, 1 if nothing to recover or verify fails.
    """
    # Nothing to recover: report failure without touching the repository.
    if not repo.recover():
        return 1
    # A transaction was rolled back; verify the repository and report
    # its result as ours.
    return hg.verify(repo)
4637
4637
4638 @command('^remove|rm',
4638 @command('^remove|rm',
4639 [('A', 'after', None, _('record delete for missing files')),
4639 [('A', 'after', None, _('record delete for missing files')),
4640 ('f', 'force', None,
4640 ('f', 'force', None,
4641 _('remove (and delete) file even if added or modified')),
4641 _('remove (and delete) file even if added or modified')),
4642 ] + walkopts,
4642 ] + walkopts,
4643 _('[OPTION]... FILE...'))
4643 _('[OPTION]... FILE...'))
4644 def remove(ui, repo, *pats, **opts):
4644 def remove(ui, repo, *pats, **opts):
4645 """remove the specified files on the next commit
4645 """remove the specified files on the next commit
4646
4646
4647 Schedule the indicated files for removal from the current branch.
4647 Schedule the indicated files for removal from the current branch.
4648
4648
4649 This command schedules the files to be removed at the next commit.
4649 This command schedules the files to be removed at the next commit.
4650 To undo a remove before that, see :hg:`revert`. To undo added
4650 To undo a remove before that, see :hg:`revert`. To undo added
4651 files, see :hg:`forget`.
4651 files, see :hg:`forget`.
4652
4652
4653 .. container:: verbose
4653 .. container:: verbose
4654
4654
4655 -A/--after can be used to remove only files that have already
4655 -A/--after can be used to remove only files that have already
4656 been deleted, -f/--force can be used to force deletion, and -Af
4656 been deleted, -f/--force can be used to force deletion, and -Af
4657 can be used to remove files from the next revision without
4657 can be used to remove files from the next revision without
4658 deleting them from the working directory.
4658 deleting them from the working directory.
4659
4659
4660 The following table details the behavior of remove for different
4660 The following table details the behavior of remove for different
4661 file states (columns) and option combinations (rows). The file
4661 file states (columns) and option combinations (rows). The file
4662 states are Added [A], Clean [C], Modified [M] and Missing [!]
4662 states are Added [A], Clean [C], Modified [M] and Missing [!]
4663 (as reported by :hg:`status`). The actions are Warn, Remove
4663 (as reported by :hg:`status`). The actions are Warn, Remove
4664 (from branch) and Delete (from disk):
4664 (from branch) and Delete (from disk):
4665
4665
4666 ======= == == == ==
4666 ======= == == == ==
4667 A C M !
4667 A C M !
4668 ======= == == == ==
4668 ======= == == == ==
4669 none W RD W R
4669 none W RD W R
4670 -f R RD RD R
4670 -f R RD RD R
4671 -A W W W R
4671 -A W W W R
4672 -Af R R R R
4672 -Af R R R R
4673 ======= == == == ==
4673 ======= == == == ==
4674
4674
4675 Note that remove never deletes files in Added [A] state from the
4675 Note that remove never deletes files in Added [A] state from the
4676 working directory, not even if option --force is specified.
4676 working directory, not even if option --force is specified.
4677
4677
4678 Returns 0 on success, 1 if any warnings encountered.
4678 Returns 0 on success, 1 if any warnings encountered.
4679 """
4679 """
4680
4680
4681 ret = 0
4681 ret = 0
4682 after, force = opts.get('after'), opts.get('force')
4682 after, force = opts.get('after'), opts.get('force')
4683 if not pats and not after:
4683 if not pats and not after:
4684 raise util.Abort(_('no files specified'))
4684 raise util.Abort(_('no files specified'))
4685
4685
4686 m = scmutil.match(repo[None], pats, opts)
4686 m = scmutil.match(repo[None], pats, opts)
4687 s = repo.status(match=m, clean=True)
4687 s = repo.status(match=m, clean=True)
4688 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
4688 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
4689
4689
4690 for f in m.files():
4690 for f in m.files():
4691 if f not in repo.dirstate and not os.path.isdir(m.rel(f)):
4691 if f not in repo.dirstate and not os.path.isdir(m.rel(f)):
4692 if os.path.exists(m.rel(f)):
4692 if os.path.exists(m.rel(f)):
4693 ui.warn(_('not removing %s: file is untracked\n') % m.rel(f))
4693 ui.warn(_('not removing %s: file is untracked\n') % m.rel(f))
4694 ret = 1
4694 ret = 1
4695
4695
4696 if force:
4696 if force:
4697 list = modified + deleted + clean + added
4697 list = modified + deleted + clean + added
4698 elif after:
4698 elif after:
4699 list = deleted
4699 list = deleted
4700 for f in modified + added + clean:
4700 for f in modified + added + clean:
4701 ui.warn(_('not removing %s: file still exists (use -f'
4701 ui.warn(_('not removing %s: file still exists (use -f'
4702 ' to force removal)\n') % m.rel(f))
4702 ' to force removal)\n') % m.rel(f))
4703 ret = 1
4703 ret = 1
4704 else:
4704 else:
4705 list = deleted + clean
4705 list = deleted + clean
4706 for f in modified:
4706 for f in modified:
4707 ui.warn(_('not removing %s: file is modified (use -f'
4707 ui.warn(_('not removing %s: file is modified (use -f'
4708 ' to force removal)\n') % m.rel(f))
4708 ' to force removal)\n') % m.rel(f))
4709 ret = 1
4709 ret = 1
4710 for f in added:
4710 for f in added:
4711 ui.warn(_('not removing %s: file has been marked for add'
4711 ui.warn(_('not removing %s: file has been marked for add'
4712 ' (use forget to undo)\n') % m.rel(f))
4712 ' (use forget to undo)\n') % m.rel(f))
4713 ret = 1
4713 ret = 1
4714
4714
4715 for f in sorted(list):
4715 for f in sorted(list):
4716 if ui.verbose or not m.exact(f):
4716 if ui.verbose or not m.exact(f):
4717 ui.status(_('removing %s\n') % m.rel(f))
4717 ui.status(_('removing %s\n') % m.rel(f))
4718
4718
4719 wlock = repo.wlock()
4719 wlock = repo.wlock()
4720 try:
4720 try:
4721 if not after:
4721 if not after:
4722 for f in list:
4722 for f in list:
4723 if f in added:
4723 if f in added:
4724 continue # we never unlink added files on remove
4724 continue # we never unlink added files on remove
4725 try:
4725 try:
4726 util.unlinkpath(repo.wjoin(f))
4726 util.unlinkpath(repo.wjoin(f))
4727 except OSError, inst:
4727 except OSError, inst:
4728 if inst.errno != errno.ENOENT:
4728 if inst.errno != errno.ENOENT:
4729 raise
4729 raise
4730 repo[None].forget(list)
4730 repo[None].forget(list)
4731 finally:
4731 finally:
4732 wlock.release()
4732 wlock.release()
4733
4733
4734 return ret
4734 return ret
4735
4735
@command('rename|move|mv',
    [('A', 'after', None, _('record a rename that has already occurred')),
    ('f', 'force', None, _('forcibly copy over an existing managed file')),
    ] + walkopts + dryrunopts,
    _('[OPTION]... SOURCE... DEST'))
def rename(ui, repo, *pats, **opts):
    """rename files; equivalent of copy + remove

    Mark dest as copies of sources; mark sources for deletion. If dest
    is a directory, copies are put in that directory. If dest is a
    file, there can only be one source.

    By default, this command copies the contents of files as they
    exist in the working directory. If invoked with -A/--after, the
    operation is recorded, but no copying is performed.

    This command takes effect at the next commit. To undo a rename
    before that, see :hg:`revert`.

    Returns 0 on success, 1 if errors are encountered.
    """
    # Rename is copy-then-remove; delegate to cmdutil.copy under the
    # working-directory lock and report its result.
    wlock = repo.wlock(False)
    try:
        result = cmdutil.copy(ui, repo, pats, opts, rename=True)
    finally:
        wlock.release()
    return result
4762
4762
@command('resolve',
    [('a', 'all', None, _('select all unresolved files')),
    ('l', 'list', None, _('list state of files needing merge')),
    ('m', 'mark', None, _('mark files as resolved')),
    ('u', 'unmark', None, _('mark files as unresolved')),
    ('n', 'no-status', None, _('hide status prefix'))]
    + mergetoolopts + walkopts,
    _('[OPTION]... [FILE]...'))
def resolve(ui, repo, *pats, **opts):
    """redo merges or set/view the merge status of files

    Merges with unresolved conflicts are often the result of
    non-interactive merging using the ``internal:merge`` configuration
    setting, or a command-line merge tool like ``diff3``. The resolve
    command is used to manage the files involved in a merge, after
    :hg:`merge` has been run, and before :hg:`commit` is run (i.e. the
    working directory must have two parents). See :hg:`help
    merge-tools` for information on configuring merge tools.

    The resolve command can be used in the following ways:

    - :hg:`resolve [--tool TOOL] FILE...`: attempt to re-merge the specified
      files, discarding any previous merge attempts. Re-merging is not
      performed for files already marked as resolved. Use ``--all/-a``
      to select all unresolved files. ``--tool`` can be used to specify
      the merge tool used for the given files. It overrides the HGMERGE
      environment variable and your configuration files. Previous file
      contents are saved with a ``.orig`` suffix.

    - :hg:`resolve -m [FILE]`: mark a file as having been resolved
      (e.g. after having manually fixed-up the files). The default is
      to mark all unresolved files.

    - :hg:`resolve -u [FILE]...`: mark a file as unresolved. The
      default is to mark all resolved files.

    - :hg:`resolve -l`: list files which had or still have conflicts.
      In the printed list, ``U`` = unresolved and ``R`` = resolved.

    Note that Mercurial will not let you commit files with unresolved
    merge conflicts. You must use :hg:`resolve -m ...` before you can
    commit after a conflicting merge.

    Returns 0 on success, 1 if any files fail a resolve attempt.
    """

    # Unpack the mutually-constrained mode flags. NOTE(review): 'all'
    # shadows the builtin; left unchanged here to keep the code identical.
    all, mark, unmark, show, nostatus = \
        [opts.get(o) for o in 'all mark unmark list no_status'.split()]

    # -l/-m/-u are mutually exclusive modes.
    if (show and (mark or unmark)) or (mark and unmark):
        raise util.Abort(_("too many options specified"))
    if pats and all:
        raise util.Abort(_("can't specify --all and patterns"))
    if not (all or pats or show or mark or unmark):
        raise util.Abort(_('no files or directories specified; '
                           'use --all to remerge all files'))

    ms = mergemod.mergestate(repo)
    m = scmutil.match(repo[None], pats, opts)
    ret = 0

    # Iterate every file tracked by the merge state, acting on matches.
    for f in ms:
        if m(f):
            if show:
                # -l: print state ('u'/'r' from the merge state) or bare name.
                if nostatus:
                    ui.write("%s\n" % f)
                else:
                    ui.write("%s %s\n" % (ms[f].upper(), f),
                             label='resolve.' +
                             {'u': 'unresolved', 'r': 'resolved'}[ms[f]])
            elif mark:
                ms.mark(f, "r")
            elif unmark:
                ms.mark(f, "u")
            else:
                # Default mode: actually re-run the merge for this file.
                wctx = repo[None]
                mctx = wctx.parents()[-1]

                # backup pre-resolve (merge uses .orig for its own purposes)
                a = repo.wjoin(f)
                util.copyfile(a, a + ".resolve")

                try:
                    # resolve file
                    ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
                    if ms.resolve(f, wctx, mctx):
                        ret = 1
                finally:
                    # Always clear the forced merge tool, even on failure.
                    ui.setconfig('ui', 'forcemerge', '')

                # replace filemerge's .orig file with our resolve file
                util.rename(a + ".resolve", a + ".orig")

    # Persist any state changes made above.
    ms.commit()
    return ret
4858
4858
@command('revert',
    [('a', 'all', None, _('revert all changes when no arguments given')),
    ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
    ('r', 'rev', '', _('revert to the specified revision'), _('REV')),
    ('C', 'no-backup', None, _('do not save backup copies of files')),
    ] + walkopts + dryrunopts,
    _('[OPTION]... [-r REV] [NAME]...'))
def revert(ui, repo, *pats, **opts):
    """restore files to their checkout state

    .. note::
       To check out earlier revisions, you should use :hg:`update REV`.
       To cancel a merge (and lose your changes), use :hg:`update --clean .`.

    With no revision specified, revert the specified files or directories
    to the contents they had in the parent of the working directory.
    This restores the contents of files to an unmodified
    state and unschedules adds, removes, copies, and renames. If the
    working directory has two parents, you must explicitly specify a
    revision.

    Using the -r/--rev or -d/--date options, revert the given files or
    directories to their states as of a specific revision. Because
    revert does not change the working directory parents, this will
    cause these files to appear modified. This can be helpful to "back
    out" some or all of an earlier change. See :hg:`backout` for a
    related method.

    Modified files are saved with a .orig suffix before reverting.
    To disable these backups, use --no-backup.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    Returns 0 on success.
    """

    # -d/--date is translated into an equivalent -r/--rev; both at once
    # is an error.
    if opts.get("date"):
        if opts.get("rev"):
            raise util.Abort(_("you can't specify a revision and a date"))
        opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])

    parent, p2 = repo.dirstate.parents()
    if not opts.get('rev') and p2 != nullid:
        # revert after merge is a trap for new users (issue2915)
        raise util.Abort(_('uncommitted merge with no revision specified'),
                         hint=_('use "hg update" or see "hg help revert"'))

    ctx = scmutil.revsingle(repo, opts.get('rev'))

    # Without file arguments or --all, abort — but pick the most helpful
    # hint for the user's exact situation first.
    if not pats and not opts.get('all'):
        msg = _("no files or directories specified")
        if p2 != nullid:
            # In an uncommitted merge (second parent set).
            hint = _("uncommitted merge, use --all to discard all changes,"
                     " or 'hg update -C .' to abort the merge")
            raise util.Abort(msg, hint=hint)
        dirty = util.any(repo.status())
        node = ctx.node()
        if node != parent:
            # Reverting to a revision other than the working parent.
            if dirty:
                hint = _("uncommitted changes, use --all to discard all"
                         " changes, or 'hg update %s' to update") % ctx.rev()
            else:
                hint = _("use --all to revert all files,"
                         " or 'hg update %s' to update") % ctx.rev()
        elif dirty:
            hint = _("uncommitted changes, use --all to discard all changes")
        else:
            hint = _("use --all to revert all files")
        raise util.Abort(msg, hint=hint)

    # The actual revert work is shared with other commands via cmdutil.
    return cmdutil.revert(ui, repo, ctx, (parent, p2), *pats, **opts)
4930
4930
@command('rollback', dryrunopts +
         [('f', 'force', False, _('ignore safety measures'))])
def rollback(ui, repo, **opts):
    """roll back the last transaction (dangerous)

    This command should be used with care. There is only one level of
    rollback, and there is no way to undo a rollback. It will also
    restore the dirstate at the time of the last transaction, losing
    any dirstate changes since that time. This command does not alter
    the working directory.

    Transactions are used to encapsulate the effects of all commands
    that create new changesets or propagate existing changesets into a
    repository. For example, the following commands are transactional,
    and their effects can be rolled back:

    - commit
    - import
    - pull
    - push (with this repository as the destination)
    - unbundle

    To avoid permanent data loss, rollback will refuse to rollback a
    commit transaction if it isn't checked out. Use --force to
    override this protection.

    This command is not intended for use on public repositories. Once
    changes are visible for pull by other users, rolling a transaction
    back locally is ineffective (someone else may already have pulled
    the changes). Furthermore, a race is possible with readers of the
    repository; for example an in-progress pull from the repository
    may fail if a rollback is performed.

    Returns 0 on success, 1 if no rollback data is available.
    """
    # All real work (and the success/failure return code) is delegated
    # to the repository object; this command is only argument plumbing.
    dryrun = opts.get('dry_run')
    force = opts.get('force')
    return repo.rollback(dryrun=dryrun, force=force)
4968
4968
@command('root', [])
def root(ui, repo):
    """print the root (top) of the current working directory

    Print the root directory of the current repository.

    Returns 0 on success.
    """
    # repo.root is an absolute path; just echo it with a newline.
    ui.write("%s\n" % repo.root)
4978
4978
@command('^serve',
    [('A', 'accesslog', '', _('name of access log file to write to'),
     _('FILE')),
    ('d', 'daemon', None, _('run server in background')),
    ('', 'daemon-pipefds', '', _('used internally by daemon mode'), _('NUM')),
    ('E', 'errorlog', '', _('name of error log file to write to'), _('FILE')),
    # use string type, then we can check if something was passed
    ('p', 'port', '', _('port to listen on (default: 8000)'), _('PORT')),
    ('a', 'address', '', _('address to listen on (default: all interfaces)'),
     _('ADDR')),
    ('', 'prefix', '', _('prefix path to serve from (default: server root)'),
     _('PREFIX')),
    ('n', 'name', '',
     _('name to show in web pages (default: working directory)'), _('NAME')),
    ('', 'web-conf', '',
     _('name of the hgweb config file (see "hg help hgweb")'), _('FILE')),
    ('', 'webdir-conf', '', _('name of the hgweb config file (DEPRECATED)'),
     _('FILE')),
    ('', 'pid-file', '', _('name of file to write process ID to'), _('FILE')),
    ('', 'stdio', None, _('for remote clients')),
    ('', 'cmdserver', '', _('for remote clients'), _('MODE')),
    ('t', 'templates', '', _('web templates to use'), _('TEMPLATE')),
    ('', 'style', '', _('template style to use'), _('STYLE')),
    ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
    ('', 'certificate', '', _('SSL certificate file'), _('FILE'))],
    _('[OPTION]...'))
def serve(ui, repo, **opts):
    """start stand-alone webserver

    Start a local HTTP repository browser and pull server. You can use
    this for ad-hoc sharing and browsing of repositories. It is
    recommended to use a real web server to serve a repository for
    longer periods of time.

    Please note that the server does not implement access control.
    This means that, by default, anybody can read from the server and
    nobody can write to it by default. Set the ``web.allow_push``
    option to ``*`` to allow everybody to push to the server. You
    should use a real web server if you need to authenticate users.

    By default, the server logs accesses to stdout and errors to
    stderr. Use the -A/--accesslog and -E/--errorlog options to log to
    files.

    To have the server choose a free port number to listen on, specify
    a port number of 0; in this case, the server will print the port
    number it uses.

    Returns 0 on success.
    """

    if opts["stdio"] and opts["cmdserver"]:
        raise util.Abort(_("cannot use --stdio with --cmdserver"))

    def checkrepo():
        # both the ssh server and the command server are
        # repository-local and cannot run without one
        if repo is None:
            raise error.RepoError(_("There is no Mercurial repository here"
                                    " (.hg not found)"))

    if opts["stdio"]:
        checkrepo()
        sshserver.sshserver(ui, repo).serve_forever()

    if opts["cmdserver"]:
        checkrepo()
        return commandserver.server(ui, repo, opts["cmdserver"]).serve()

    # this way we can check if something was given in the command-line
    if opts.get('port'):
        opts['port'] = util.getport(opts.get('port'))

    if repo:
        baseui = repo.baseui
    else:
        baseui = ui
    # mirror command-line web options into the [web] config section so
    # hgweb picks them up
    webopts = ("name templates style address port prefix ipv6"
               " accesslog errorlog certificate encoding")
    for key in webopts.split():
        value = opts.get(key, '')
        if value in (None, ''): # should check against default options instead
            continue
        baseui.setconfig("web", key, value)
        if repo and repo.ui != baseui:
            repo.ui.setconfig("web", key, value)

    webconf = opts.get('web_conf') or opts.get('webdir_conf')
    if not webconf:
        if not repo:
            raise error.RepoError(_("There is no Mercurial repository"
                                    " here (.hg not found)"))
        # no hgweb config: serve the current repository itself
        webconf = repo.root

    app = hgweb.hgweb(webconf, baseui=ui)

    class httpservice(object):
        def init(self):
            util.setsignalhandler()
            self.httpd = hgweb.server.create_server(ui, app)

            if opts['port'] and not ui.verbose:
                return

            prefix = ''
            if self.httpd.prefix:
                prefix = self.httpd.prefix.strip('/') + '/'

            port = ':%d' % self.httpd.port
            if port == ':80':
                port = ''

            bindaddr = self.httpd.addr
            if bindaddr == '0.0.0.0':
                bindaddr = '*'
            elif ':' in bindaddr: # IPv6
                bindaddr = '[%s]' % bindaddr

            fqaddr = self.httpd.fqaddr
            if ':' in fqaddr:
                fqaddr = '[%s]' % fqaddr

            if opts['port']:
                write = ui.status
            else:
                write = ui.write
            write(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
                  (fqaddr, port, prefix, bindaddr, self.httpd.port))

        def run(self):
            self.httpd.serve_forever()

    svc = httpservice()
    cmdutil.service(opts, initfn=svc.init, runfn=svc.run)
5111
5111
@command('showconfig|debugconfig',
    [('u', 'untrusted', None, _('show untrusted configuration options'))],
    _('[-u] [NAME]...'))
def showconfig(ui, repo, *values, **opts):
    """show combined config settings from all hgrc files

    With no arguments, print names and values of all config items.

    With one argument of the form section.name, print just the value
    of that config item.

    With multiple arguments, print names and values of all config
    items with matching section names.

    With --debug, the source (filename and line number) is printed
    for each config item.

    Returns 0 on success.
    """

    for path in scmutil.rcpath():
        ui.debug('read config from: %s\n' % path)
    untrusted = bool(opts.get('untrusted'))
    if values:
        # arguments without a dot select whole sections; arguments with
        # a dot select a single item -- the two forms don't mix, and at
        # most one single item may be requested
        sections = [v for v in values if '.' not in v]
        items = [v for v in values if '.' in v]
        if len(items) > 1 or items and sections:
            raise util.Abort(_('only one config item permitted'))
    for section, name, value in ui.walkconfig(untrusted=untrusted):
        value = str(value).replace('\n', '\\n')
        sectname = section + '.' + name
        if not values:
            # no filter: dump everything as section.name=value
            ui.debug('%s: ' %
                     ui.configsource(section, name, untrusted))
            ui.write('%s=%s\n' % (sectname, value))
            continue
        for v in values:
            if v == section:
                ui.debug('%s: ' %
                         ui.configsource(section, name, untrusted))
                ui.write('%s=%s\n' % (sectname, value))
            elif v == sectname:
                # exact item match: print the bare value
                ui.debug('%s: ' %
                         ui.configsource(section, name, untrusted))
                ui.write(value, '\n')
5157
5157
@command('^status|st',
    [('A', 'all', None, _('show status of all files')),
    ('m', 'modified', None, _('show only modified files')),
    ('a', 'added', None, _('show only added files')),
    ('r', 'removed', None, _('show only removed files')),
    ('d', 'deleted', None, _('show only deleted (but tracked) files')),
    ('c', 'clean', None, _('show only files without changes')),
    ('u', 'unknown', None, _('show only unknown (not tracked) files')),
    ('i', 'ignored', None, _('show only ignored files')),
    ('n', 'no-status', None, _('hide status prefix')),
    ('C', 'copies', None, _('show source of copied files')),
    ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
    ('', 'rev', [], _('show difference from revision'), _('REV')),
    ('', 'change', '', _('list the changed files of a revision'), _('REV')),
    ] + walkopts + subrepoopts,
    _('[OPTION]... [FILE]...'))
def status(ui, repo, *pats, **opts):
    """show changed files in the working directory

    Show status of files in the repository. If names are given, only
    files that match are shown. Files that are clean or ignored or
    the source of a copy/move operation, are not listed unless
    -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
    Unless options described with "show only ..." are given, the
    options -mardu are used.

    Option -q/--quiet hides untracked (unknown and ignored) files
    unless explicitly requested with -u/--unknown or -i/--ignored.

    .. note::
       status may appear to disagree with diff if permissions have
       changed or a merge has occurred. The standard diff format does
       not report permission changes and diff only reports changes
       relative to one merge parent.

    If one revision is given, it is used as the base revision.
    If two revisions are given, the differences between them are
    shown. The --change option can also be used as a shortcut to list
    the changed files of a revision from its first parent.

    The codes used to show the status of files are::

      M = modified
      A = added
      R = removed
      C = clean
      ! = missing (deleted by non-hg command, but still tracked)
      ? = not tracked
      I = ignored
        = origin of the previous file listed as A (added)

    .. container:: verbose

      Examples:

      - show changes in the working directory relative to a
        changeset::

          hg status --rev 9353

      - show all changes including copies in an existing changeset::

          hg status --copies --change 9353

      - get a NUL separated list of added files, suitable for xargs::

          hg status -an0

    Returns 0 on success.
    """

    revs = opts.get('rev')
    change = opts.get('change')

    # resolve the pair of nodes being compared
    if revs and change:
        raise util.Abort(
            _('cannot specify --rev and --change at the same time'))
    elif change:
        node2 = scmutil.revsingle(repo, change, None).node()
        node1 = repo[node2].p1().node()
    else:
        node1, node2 = scmutil.revpair(repo, revs)

    if pats:
        cwd = repo.getcwd()
    else:
        cwd = ''
    if opts.get('print0'):
        end = '\0'
    else:
        end = '\n'
    copy = {}
    states = 'modified added removed deleted unknown ignored clean'.split()
    # which of the seven states the user asked to see
    show = [k for k in states if opts.get(k)]
    if opts.get('all'):
        if ui.quiet:
            show += states[:4] + ['clean']
        else:
            show += states
    if not show:
        # default selection: -mard, plus unknown unless quiet
        if ui.quiet:
            show = states[:4]
        else:
            show = states[:5]

    stat = repo.status(node1, node2, scmutil.match(repo[node2], pats, opts),
                       'ignored' in show, 'clean' in show, 'unknown' in show,
                       opts.get('subrepos'))
    changestates = zip(states, 'MAR!?IC', stat)

    if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'):
        copy = copies.pathcopies(repo[node1], repo[node2])

    fm = ui.formatter('status', opts)
    if opts.get('no_status'):
        # '%.0s' consumes the status character without printing it
        fmt = '%.0s%s' + end
    else:
        fmt = '%s %s' + end

    for state, char, files in changestates:
        if state not in show:
            continue
        label = 'status.' + state
        for f in files:
            fm.startitem()
            fm.write("status path", fmt, char,
                     repo.pathto(f, cwd), label=label)
            if f in copy:
                fm.write("copy", '  %s' + end, repo.pathto(copy[f], cwd),
                         label='status.copied')
    fm.end()
5275
5275
5276 @command('^summary|sum',
5276 @command('^summary|sum',
5277 [('', 'remote', None, _('check for push and pull'))], '[--remote]')
5277 [('', 'remote', None, _('check for push and pull'))], '[--remote]')
5278 def summary(ui, repo, **opts):
5278 def summary(ui, repo, **opts):
5279 """summarize working directory state
5279 """summarize working directory state
5280
5280
5281 This generates a brief summary of the working directory state,
5281 This generates a brief summary of the working directory state,
5282 including parents, branch, commit status, and available updates.
5282 including parents, branch, commit status, and available updates.
5283
5283
5284 With the --remote option, this will check the default paths for
5284 With the --remote option, this will check the default paths for
5285 incoming and outgoing changes. This can be time-consuming.
5285 incoming and outgoing changes. This can be time-consuming.
5286
5286
5287 Returns 0 on success.
5287 Returns 0 on success.
5288 """
5288 """
5289
5289
5290 ctx = repo[None]
5290 ctx = repo[None]
5291 parents = ctx.parents()
5291 parents = ctx.parents()
5292 pnode = parents[0].node()
5292 pnode = parents[0].node()
5293 marks = []
5293 marks = []
5294
5294
5295 for p in parents:
5295 for p in parents:
5296 # label with log.changeset (instead of log.parent) since this
5296 # label with log.changeset (instead of log.parent) since this
5297 # shows a working directory parent *changeset*:
5297 # shows a working directory parent *changeset*:
5298 ui.write(_('parent: %d:%s ') % (p.rev(), str(p)),
5298 ui.write(_('parent: %d:%s ') % (p.rev(), str(p)),
5299 label='log.changeset')
5299 label='log.changeset')
5300 ui.write(' '.join(p.tags()), label='log.tag')
5300 ui.write(' '.join(p.tags()), label='log.tag')
5301 if p.bookmarks():
5301 if p.bookmarks():
5302 marks.extend(p.bookmarks())
5302 marks.extend(p.bookmarks())
5303 if p.rev() == -1:
5303 if p.rev() == -1:
5304 if not len(repo):
5304 if not len(repo):
5305 ui.write(_(' (empty repository)'))
5305 ui.write(_(' (empty repository)'))
5306 else:
5306 else:
5307 ui.write(_(' (no revision checked out)'))
5307 ui.write(_(' (no revision checked out)'))
5308 ui.write('\n')
5308 ui.write('\n')
5309 if p.description():
5309 if p.description():
5310 ui.status(' ' + p.description().splitlines()[0].strip() + '\n',
5310 ui.status(' ' + p.description().splitlines()[0].strip() + '\n',
5311 label='log.summary')
5311 label='log.summary')
5312
5312
5313 branch = ctx.branch()
5313 branch = ctx.branch()
5314 bheads = repo.branchheads(branch)
5314 bheads = repo.branchheads(branch)
5315 m = _('branch: %s\n') % branch
5315 m = _('branch: %s\n') % branch
5316 if branch != 'default':
5316 if branch != 'default':
5317 ui.write(m, label='log.branch')
5317 ui.write(m, label='log.branch')
5318 else:
5318 else:
5319 ui.status(m, label='log.branch')
5319 ui.status(m, label='log.branch')
5320
5320
5321 if marks:
5321 if marks:
5322 current = repo._bookmarkcurrent
5322 current = repo._bookmarkcurrent
5323 ui.write(_('bookmarks:'), label='log.bookmark')
5323 ui.write(_('bookmarks:'), label='log.bookmark')
5324 if current is not None:
5324 if current is not None:
5325 try:
5325 try:
5326 marks.remove(current)
5326 marks.remove(current)
5327 ui.write(' *' + current, label='bookmarks.current')
5327 ui.write(' *' + current, label='bookmarks.current')
5328 except ValueError:
5328 except ValueError:
5329 # current bookmark not in parent ctx marks
5329 # current bookmark not in parent ctx marks
5330 pass
5330 pass
5331 for m in marks:
5331 for m in marks:
5332 ui.write(' ' + m, label='log.bookmark')
5332 ui.write(' ' + m, label='log.bookmark')
5333 ui.write('\n', label='log.bookmark')
5333 ui.write('\n', label='log.bookmark')
5334
5334
5335 st = list(repo.status(unknown=True))[:6]
5335 st = list(repo.status(unknown=True))[:6]
5336
5336
5337 c = repo.dirstate.copies()
5337 c = repo.dirstate.copies()
5338 copied, renamed = [], []
5338 copied, renamed = [], []
5339 for d, s in c.iteritems():
5339 for d, s in c.iteritems():
5340 if s in st[2]:
5340 if s in st[2]:
5341 st[2].remove(s)
5341 st[2].remove(s)
5342 renamed.append(d)
5342 renamed.append(d)
5343 else:
5343 else:
5344 copied.append(d)
5344 copied.append(d)
5345 if d in st[1]:
5345 if d in st[1]:
5346 st[1].remove(d)
5346 st[1].remove(d)
5347 st.insert(3, renamed)
5347 st.insert(3, renamed)
5348 st.insert(4, copied)
5348 st.insert(4, copied)
5349
5349
5350 ms = mergemod.mergestate(repo)
5350 ms = mergemod.mergestate(repo)
5351 st.append([f for f in ms if ms[f] == 'u'])
5351 st.append([f for f in ms if ms[f] == 'u'])
5352
5352
5353 subs = [s for s in ctx.substate if ctx.sub(s).dirty()]
5353 subs = [s for s in ctx.substate if ctx.sub(s).dirty()]
5354 st.append(subs)
5354 st.append(subs)
5355
5355
5356 labels = [ui.label(_('%d modified'), 'status.modified'),
5356 labels = [ui.label(_('%d modified'), 'status.modified'),
5357 ui.label(_('%d added'), 'status.added'),
5357 ui.label(_('%d added'), 'status.added'),
5358 ui.label(_('%d removed'), 'status.removed'),
5358 ui.label(_('%d removed'), 'status.removed'),
5359 ui.label(_('%d renamed'), 'status.copied'),
5359 ui.label(_('%d renamed'), 'status.copied'),
5360 ui.label(_('%d copied'), 'status.copied'),
5360 ui.label(_('%d copied'), 'status.copied'),
5361 ui.label(_('%d deleted'), 'status.deleted'),
5361 ui.label(_('%d deleted'), 'status.deleted'),
5362 ui.label(_('%d unknown'), 'status.unknown'),
5362 ui.label(_('%d unknown'), 'status.unknown'),
5363 ui.label(_('%d ignored'), 'status.ignored'),
5363 ui.label(_('%d ignored'), 'status.ignored'),
5364 ui.label(_('%d unresolved'), 'resolve.unresolved'),
5364 ui.label(_('%d unresolved'), 'resolve.unresolved'),
5365 ui.label(_('%d subrepos'), 'status.modified')]
5365 ui.label(_('%d subrepos'), 'status.modified')]
5366 t = []
5366 t = []
5367 for s, l in zip(st, labels):
5367 for s, l in zip(st, labels):
5368 if s:
5368 if s:
5369 t.append(l % len(s))
5369 t.append(l % len(s))
5370
5370
5371 t = ', '.join(t)
5371 t = ', '.join(t)
5372 cleanworkdir = False
5372 cleanworkdir = False
5373
5373
5374 if len(parents) > 1:
5374 if len(parents) > 1:
5375 t += _(' (merge)')
5375 t += _(' (merge)')
5376 elif branch != parents[0].branch():
5376 elif branch != parents[0].branch():
5377 t += _(' (new branch)')
5377 t += _(' (new branch)')
5378 elif (parents[0].extra().get('close') and
5378 elif (parents[0].extra().get('close') and
5379 pnode in repo.branchheads(branch, closed=True)):
5379 pnode in repo.branchheads(branch, closed=True)):
5380 t += _(' (head closed)')
5380 t += _(' (head closed)')
5381 elif not (st[0] or st[1] or st[2] or st[3] or st[4] or st[9]):
5381 elif not (st[0] or st[1] or st[2] or st[3] or st[4] or st[9]):
5382 t += _(' (clean)')
5382 t += _(' (clean)')
5383 cleanworkdir = True
5383 cleanworkdir = True
5384 elif pnode not in bheads:
5384 elif pnode not in bheads:
5385 t += _(' (new branch head)')
5385 t += _(' (new branch head)')
5386
5386
5387 if cleanworkdir:
5387 if cleanworkdir:
5388 ui.status(_('commit: %s\n') % t.strip())
5388 ui.status(_('commit: %s\n') % t.strip())
5389 else:
5389 else:
5390 ui.write(_('commit: %s\n') % t.strip())
5390 ui.write(_('commit: %s\n') % t.strip())
5391
5391
5392 # all ancestors of branch heads - all ancestors of parent = new csets
5392 # all ancestors of branch heads - all ancestors of parent = new csets
5393 new = [0] * len(repo)
5393 new = [0] * len(repo)
5394 cl = repo.changelog
5394 cl = repo.changelog
5395 for a in [cl.rev(n) for n in bheads]:
5395 for a in [cl.rev(n) for n in bheads]:
5396 new[a] = 1
5396 new[a] = 1
5397 for a in cl.ancestors(*[cl.rev(n) for n in bheads]):
5397 for a in cl.ancestors(*[cl.rev(n) for n in bheads]):
5398 new[a] = 1
5398 new[a] = 1
5399 for a in [p.rev() for p in parents]:
5399 for a in [p.rev() for p in parents]:
5400 if a >= 0:
5400 if a >= 0:
5401 new[a] = 0
5401 new[a] = 0
5402 for a in cl.ancestors(*[p.rev() for p in parents]):
5402 for a in cl.ancestors(*[p.rev() for p in parents]):
5403 new[a] = 0
5403 new[a] = 0
5404 new = sum(new)
5404 new = sum(new)
5405
5405
5406 if new == 0:
5406 if new == 0:
5407 ui.status(_('update: (current)\n'))
5407 ui.status(_('update: (current)\n'))
5408 elif pnode not in bheads:
5408 elif pnode not in bheads:
5409 ui.write(_('update: %d new changesets (update)\n') % new)
5409 ui.write(_('update: %d new changesets (update)\n') % new)
5410 else:
5410 else:
5411 ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
5411 ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
5412 (new, len(bheads)))
5412 (new, len(bheads)))
5413
5413
5414 if opts.get('remote'):
5414 if opts.get('remote'):
5415 t = []
5415 t = []
5416 source, branches = hg.parseurl(ui.expandpath('default'))
5416 source, branches = hg.parseurl(ui.expandpath('default'))
5417 other = hg.peer(repo, {}, source)
5417 other = hg.peer(repo, {}, source)
5418 revs, checkout = hg.addbranchrevs(repo, other, branches,
5418 revs, checkout = hg.addbranchrevs(repo, other, branches,
5419 opts.get('rev'))
5419 opts.get('rev'))
5420 ui.debug('comparing with %s\n' % util.hidepassword(source))
5420 ui.debug('comparing with %s\n' % util.hidepassword(source))
5421 repo.ui.pushbuffer()
5421 repo.ui.pushbuffer()
5422 commoninc = discovery.findcommonincoming(repo, other)
5422 commoninc = discovery.findcommonincoming(repo, other)
5423 _common, incoming, _rheads = commoninc
5423 _common, incoming, _rheads = commoninc
5424 repo.ui.popbuffer()
5424 repo.ui.popbuffer()
5425 if incoming:
5425 if incoming:
5426 t.append(_('1 or more incoming'))
5426 t.append(_('1 or more incoming'))
5427
5427
5428 dest, branches = hg.parseurl(ui.expandpath('default-push', 'default'))
5428 dest, branches = hg.parseurl(ui.expandpath('default-push', 'default'))
5429 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
5429 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
5430 if source != dest:
5430 if source != dest:
5431 other = hg.peer(repo, {}, dest)
5431 other = hg.peer(repo, {}, dest)
5432 commoninc = None
5432 commoninc = None
5433 ui.debug('comparing with %s\n' % util.hidepassword(dest))
5433 ui.debug('comparing with %s\n' % util.hidepassword(dest))
5434 repo.ui.pushbuffer()
5434 repo.ui.pushbuffer()
5435 outgoing = discovery.findcommonoutgoing(repo, other,
5435 outgoing = discovery.findcommonoutgoing(repo, other,
5436 commoninc=commoninc)
5436 commoninc=commoninc)
5437 repo.ui.popbuffer()
5437 repo.ui.popbuffer()
5438 o = outgoing.missing
5438 o = outgoing.missing
5439 if o:
5439 if o:
5440 t.append(_('%d outgoing') % len(o))
5440 t.append(_('%d outgoing') % len(o))
5441 if 'bookmarks' in other.listkeys('namespaces'):
5441 if 'bookmarks' in other.listkeys('namespaces'):
5442 lmarks = repo.listkeys('bookmarks')
5442 lmarks = repo.listkeys('bookmarks')
5443 rmarks = other.listkeys('bookmarks')
5443 rmarks = other.listkeys('bookmarks')
5444 diff = set(rmarks) - set(lmarks)
5444 diff = set(rmarks) - set(lmarks)
5445 if len(diff) > 0:
5445 if len(diff) > 0:
5446 t.append(_('%d incoming bookmarks') % len(diff))
5446 t.append(_('%d incoming bookmarks') % len(diff))
5447 diff = set(lmarks) - set(rmarks)
5447 diff = set(lmarks) - set(rmarks)
5448 if len(diff) > 0:
5448 if len(diff) > 0:
5449 t.append(_('%d outgoing bookmarks') % len(diff))
5449 t.append(_('%d outgoing bookmarks') % len(diff))
5450
5450
5451 if t:
5451 if t:
5452 ui.write(_('remote: %s\n') % (', '.join(t)))
5452 ui.write(_('remote: %s\n') % (', '.join(t)))
5453 else:
5453 else:
5454 ui.status(_('remote: (synced)\n'))
5454 ui.status(_('remote: (synced)\n'))
5455
5455
5456 @command('tag',
5456 @command('tag',
5457 [('f', 'force', None, _('force tag')),
5457 [('f', 'force', None, _('force tag')),
5458 ('l', 'local', None, _('make the tag local')),
5458 ('l', 'local', None, _('make the tag local')),
5459 ('r', 'rev', '', _('revision to tag'), _('REV')),
5459 ('r', 'rev', '', _('revision to tag'), _('REV')),
5460 ('', 'remove', None, _('remove a tag')),
5460 ('', 'remove', None, _('remove a tag')),
5461 # -l/--local is already there, commitopts cannot be used
5461 # -l/--local is already there, commitopts cannot be used
5462 ('e', 'edit', None, _('edit commit message')),
5462 ('e', 'edit', None, _('edit commit message')),
5463 ('m', 'message', '', _('use <text> as commit message'), _('TEXT')),
5463 ('m', 'message', '', _('use <text> as commit message'), _('TEXT')),
5464 ] + commitopts2,
5464 ] + commitopts2,
5465 _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...'))
5465 _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...'))
5466 def tag(ui, repo, name1, *names, **opts):
5466 def tag(ui, repo, name1, *names, **opts):
5467 """add one or more tags for the current or given revision
5467 """add one or more tags for the current or given revision
5468
5468
5469 Name a particular revision using <name>.
5469 Name a particular revision using <name>.
5470
5470
5471 Tags are used to name particular revisions of the repository and are
5471 Tags are used to name particular revisions of the repository and are
5472 very useful to compare different revisions, to go back to significant
5472 very useful to compare different revisions, to go back to significant
5473 earlier versions or to mark branch points as releases, etc. Changing
5473 earlier versions or to mark branch points as releases, etc. Changing
5474 an existing tag is normally disallowed; use -f/--force to override.
5474 an existing tag is normally disallowed; use -f/--force to override.
5475
5475
5476 If no revision is given, the parent of the working directory is
5476 If no revision is given, the parent of the working directory is
5477 used, or tip if no revision is checked out.
5477 used, or tip if no revision is checked out.
5478
5478
5479 To facilitate version control, distribution, and merging of tags,
5479 To facilitate version control, distribution, and merging of tags,
5480 they are stored as a file named ".hgtags" which is managed similarly
5480 they are stored as a file named ".hgtags" which is managed similarly
5481 to other project files and can be hand-edited if necessary. This
5481 to other project files and can be hand-edited if necessary. This
5482 also means that tagging creates a new commit. The file
5482 also means that tagging creates a new commit. The file
5483 ".hg/localtags" is used for local tags (not shared among
5483 ".hg/localtags" is used for local tags (not shared among
5484 repositories).
5484 repositories).
5485
5485
5486 Tag commits are usually made at the head of a branch. If the parent
5486 Tag commits are usually made at the head of a branch. If the parent
5487 of the working directory is not a branch head, :hg:`tag` aborts; use
5487 of the working directory is not a branch head, :hg:`tag` aborts; use
5488 -f/--force to force the tag commit to be based on a non-head
5488 -f/--force to force the tag commit to be based on a non-head
5489 changeset.
5489 changeset.
5490
5490
5491 See :hg:`help dates` for a list of formats valid for -d/--date.
5491 See :hg:`help dates` for a list of formats valid for -d/--date.
5492
5492
5493 Since tag names have priority over branch names during revision
5493 Since tag names have priority over branch names during revision
5494 lookup, using an existing branch name as a tag name is discouraged.
5494 lookup, using an existing branch name as a tag name is discouraged.
5495
5495
5496 Returns 0 on success.
5496 Returns 0 on success.
5497 """
5497 """
5498 wlock = lock = None
5498 wlock = lock = None
5499 try:
5499 try:
5500 wlock = repo.wlock()
5500 wlock = repo.wlock()
5501 lock = repo.lock()
5501 lock = repo.lock()
5502 rev_ = "."
5502 rev_ = "."
5503 names = [t.strip() for t in (name1,) + names]
5503 names = [t.strip() for t in (name1,) + names]
5504 if len(names) != len(set(names)):
5504 if len(names) != len(set(names)):
5505 raise util.Abort(_('tag names must be unique'))
5505 raise util.Abort(_('tag names must be unique'))
5506 for n in names:
5506 for n in names:
5507 if n in ['tip', '.', 'null']:
5507 if n in ['tip', '.', 'null']:
5508 raise util.Abort(_("the name '%s' is reserved") % n)
5508 raise util.Abort(_("the name '%s' is reserved") % n)
5509 if not n:
5509 if not n:
5510 raise util.Abort(_('tag names cannot consist entirely of '
5510 raise util.Abort(_('tag names cannot consist entirely of '
5511 'whitespace'))
5511 'whitespace'))
5512 if opts.get('rev') and opts.get('remove'):
5512 if opts.get('rev') and opts.get('remove'):
5513 raise util.Abort(_("--rev and --remove are incompatible"))
5513 raise util.Abort(_("--rev and --remove are incompatible"))
5514 if opts.get('rev'):
5514 if opts.get('rev'):
5515 rev_ = opts['rev']
5515 rev_ = opts['rev']
5516 message = opts.get('message')
5516 message = opts.get('message')
5517 if opts.get('remove'):
5517 if opts.get('remove'):
5518 expectedtype = opts.get('local') and 'local' or 'global'
5518 expectedtype = opts.get('local') and 'local' or 'global'
5519 for n in names:
5519 for n in names:
5520 if not repo.tagtype(n):
5520 if not repo.tagtype(n):
5521 raise util.Abort(_("tag '%s' does not exist") % n)
5521 raise util.Abort(_("tag '%s' does not exist") % n)
5522 if repo.tagtype(n) != expectedtype:
5522 if repo.tagtype(n) != expectedtype:
5523 if expectedtype == 'global':
5523 if expectedtype == 'global':
5524 raise util.Abort(_("tag '%s' is not a global tag") % n)
5524 raise util.Abort(_("tag '%s' is not a global tag") % n)
5525 else:
5525 else:
5526 raise util.Abort(_("tag '%s' is not a local tag") % n)
5526 raise util.Abort(_("tag '%s' is not a local tag") % n)
5527 rev_ = nullid
5527 rev_ = nullid
5528 if not message:
5528 if not message:
5529 # we don't translate commit messages
5529 # we don't translate commit messages
5530 message = 'Removed tag %s' % ', '.join(names)
5530 message = 'Removed tag %s' % ', '.join(names)
5531 elif not opts.get('force'):
5531 elif not opts.get('force'):
5532 for n in names:
5532 for n in names:
5533 if n in repo.tags():
5533 if n in repo.tags():
5534 raise util.Abort(_("tag '%s' already exists "
5534 raise util.Abort(_("tag '%s' already exists "
5535 "(use -f to force)") % n)
5535 "(use -f to force)") % n)
5536 if not opts.get('local'):
5536 if not opts.get('local'):
5537 p1, p2 = repo.dirstate.parents()
5537 p1, p2 = repo.dirstate.parents()
5538 if p2 != nullid:
5538 if p2 != nullid:
5539 raise util.Abort(_('uncommitted merge'))
5539 raise util.Abort(_('uncommitted merge'))
5540 bheads = repo.branchheads()
5540 bheads = repo.branchheads()
5541 if not opts.get('force') and bheads and p1 not in bheads:
5541 if not opts.get('force') and bheads and p1 not in bheads:
5542 raise util.Abort(_('not at a branch head (use -f to force)'))
5542 raise util.Abort(_('not at a branch head (use -f to force)'))
5543 r = scmutil.revsingle(repo, rev_).node()
5543 r = scmutil.revsingle(repo, rev_).node()
5544
5544
5545 if not message:
5545 if not message:
5546 # we don't translate commit messages
5546 # we don't translate commit messages
5547 message = ('Added tag %s for changeset %s' %
5547 message = ('Added tag %s for changeset %s' %
5548 (', '.join(names), short(r)))
5548 (', '.join(names), short(r)))
5549
5549
5550 date = opts.get('date')
5550 date = opts.get('date')
5551 if date:
5551 if date:
5552 date = util.parsedate(date)
5552 date = util.parsedate(date)
5553
5553
5554 if opts.get('edit'):
5554 if opts.get('edit'):
5555 message = ui.edit(message, ui.username())
5555 message = ui.edit(message, ui.username())
5556
5556
5557 repo.tag(names, r, message, opts.get('local'), opts.get('user'), date)
5557 repo.tag(names, r, message, opts.get('local'), opts.get('user'), date)
5558 finally:
5558 finally:
5559 release(lock, wlock)
5559 release(lock, wlock)
5560
5560
5561 @command('tags', [], '')
5561 @command('tags', [], '')
5562 def tags(ui, repo):
5562 def tags(ui, repo):
5563 """list repository tags
5563 """list repository tags
5564
5564
5565 This lists both regular and local tags. When the -v/--verbose
5565 This lists both regular and local tags. When the -v/--verbose
5566 switch is used, a third column "local" is printed for local tags.
5566 switch is used, a third column "local" is printed for local tags.
5567
5567
5568 Returns 0 on success.
5568 Returns 0 on success.
5569 """
5569 """
5570
5570
5571 hexfunc = ui.debugflag and hex or short
5571 hexfunc = ui.debugflag and hex or short
5572 tagtype = ""
5572 tagtype = ""
5573
5573
5574 for t, n in reversed(repo.tagslist()):
5574 for t, n in reversed(repo.tagslist()):
5575 if ui.quiet:
5575 if ui.quiet:
5576 ui.write("%s\n" % t, label='tags.normal')
5576 ui.write("%s\n" % t, label='tags.normal')
5577 continue
5577 continue
5578
5578
5579 hn = hexfunc(n)
5579 hn = hexfunc(n)
5580 r = "%5d:%s" % (repo.changelog.rev(n), hn)
5580 r = "%5d:%s" % (repo.changelog.rev(n), hn)
5581 rev = ui.label(r, 'log.changeset')
5581 rev = ui.label(r, 'log.changeset')
5582 spaces = " " * (30 - encoding.colwidth(t))
5582 spaces = " " * (30 - encoding.colwidth(t))
5583
5583
5584 tag = ui.label(t, 'tags.normal')
5584 tag = ui.label(t, 'tags.normal')
5585 if ui.verbose:
5585 if ui.verbose:
5586 if repo.tagtype(t) == 'local':
5586 if repo.tagtype(t) == 'local':
5587 tagtype = " local"
5587 tagtype = " local"
5588 tag = ui.label(t, 'tags.local')
5588 tag = ui.label(t, 'tags.local')
5589 else:
5589 else:
5590 tagtype = ""
5590 tagtype = ""
5591 ui.write("%s%s %s%s\n" % (tag, spaces, rev, tagtype))
5591 ui.write("%s%s %s%s\n" % (tag, spaces, rev, tagtype))
5592
5592
5593 @command('tip',
5593 @command('tip',
5594 [('p', 'patch', None, _('show patch')),
5594 [('p', 'patch', None, _('show patch')),
5595 ('g', 'git', None, _('use git extended diff format')),
5595 ('g', 'git', None, _('use git extended diff format')),
5596 ] + templateopts,
5596 ] + templateopts,
5597 _('[-p] [-g]'))
5597 _('[-p] [-g]'))
5598 def tip(ui, repo, **opts):
5598 def tip(ui, repo, **opts):
5599 """show the tip revision
5599 """show the tip revision
5600
5600
5601 The tip revision (usually just called the tip) is the changeset
5601 The tip revision (usually just called the tip) is the changeset
5602 most recently added to the repository (and therefore the most
5602 most recently added to the repository (and therefore the most
5603 recently changed head).
5603 recently changed head).
5604
5604
5605 If you have just made a commit, that commit will be the tip. If
5605 If you have just made a commit, that commit will be the tip. If
5606 you have just pulled changes from another repository, the tip of
5606 you have just pulled changes from another repository, the tip of
5607 that repository becomes the current tip. The "tip" tag is special
5607 that repository becomes the current tip. The "tip" tag is special
5608 and cannot be renamed or assigned to a different changeset.
5608 and cannot be renamed or assigned to a different changeset.
5609
5609
5610 Returns 0 on success.
5610 Returns 0 on success.
5611 """
5611 """
5612 displayer = cmdutil.show_changeset(ui, repo, opts)
5612 displayer = cmdutil.show_changeset(ui, repo, opts)
5613 displayer.show(repo[len(repo) - 1])
5613 displayer.show(repo[len(repo) - 1])
5614 displayer.close()
5614 displayer.close()
5615
5615
5616 @command('unbundle',
5616 @command('unbundle',
5617 [('u', 'update', None,
5617 [('u', 'update', None,
5618 _('update to new branch head if changesets were unbundled'))],
5618 _('update to new branch head if changesets were unbundled'))],
5619 _('[-u] FILE...'))
5619 _('[-u] FILE...'))
5620 def unbundle(ui, repo, fname1, *fnames, **opts):
5620 def unbundle(ui, repo, fname1, *fnames, **opts):
5621 """apply one or more changegroup files
5621 """apply one or more changegroup files
5622
5622
5623 Apply one or more compressed changegroup files generated by the
5623 Apply one or more compressed changegroup files generated by the
5624 bundle command.
5624 bundle command.
5625
5625
5626 Returns 0 on success, 1 if an update has unresolved files.
5626 Returns 0 on success, 1 if an update has unresolved files.
5627 """
5627 """
5628 fnames = (fname1,) + fnames
5628 fnames = (fname1,) + fnames
5629
5629
5630 lock = repo.lock()
5630 lock = repo.lock()
5631 wc = repo['.']
5631 wc = repo['.']
5632 try:
5632 try:
5633 for fname in fnames:
5633 for fname in fnames:
5634 f = url.open(ui, fname)
5634 f = url.open(ui, fname)
5635 gen = changegroup.readbundle(f, fname)
5635 gen = changegroup.readbundle(f, fname)
5636 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
5636 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
5637 finally:
5637 finally:
5638 lock.release()
5638 lock.release()
5639 bookmarks.updatecurrentbookmark(repo, wc.node(), wc.branch())
5639 bookmarks.updatecurrentbookmark(repo, wc.node(), wc.branch())
5640 return postincoming(ui, repo, modheads, opts.get('update'), None)
5640 return postincoming(ui, repo, modheads, opts.get('update'), None)
5641
5641
5642 @command('^update|up|checkout|co',
5642 @command('^update|up|checkout|co',
5643 [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
5643 [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
5644 ('c', 'check', None,
5644 ('c', 'check', None,
5645 _('update across branches if no uncommitted changes')),
5645 _('update across branches if no uncommitted changes')),
5646 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
5646 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
5647 ('r', 'rev', '', _('revision'), _('REV'))],
5647 ('r', 'rev', '', _('revision'), _('REV'))],
5648 _('[-c] [-C] [-d DATE] [[-r] REV]'))
5648 _('[-c] [-C] [-d DATE] [[-r] REV]'))
5649 def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False):
5649 def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False):
5650 """update working directory (or switch revisions)
5650 """update working directory (or switch revisions)
5651
5651
5652 Update the repository's working directory to the specified
5652 Update the repository's working directory to the specified
5653 changeset. If no changeset is specified, update to the tip of the
5653 changeset. If no changeset is specified, update to the tip of the
5654 current named branch and move the current bookmark (see :hg:`help
5654 current named branch and move the current bookmark (see :hg:`help
5655 bookmarks`).
5655 bookmarks`).
5656
5656
5657 If the changeset is not a descendant of the working directory's
5657 If the changeset is not a descendant of the working directory's
5658 parent, the update is aborted. With the -c/--check option, the
5658 parent, the update is aborted. With the -c/--check option, the
5659 working directory is checked for uncommitted changes; if none are
5659 working directory is checked for uncommitted changes; if none are
5660 found, the working directory is updated to the specified
5660 found, the working directory is updated to the specified
5661 changeset.
5661 changeset.
5662
5662
5663 Update sets the working directory's parent revison to the specified
5663 Update sets the working directory's parent revison to the specified
5664 changeset (see :hg:`help parents`).
5664 changeset (see :hg:`help parents`).
5665
5665
5666 The following rules apply when the working directory contains
5666 The following rules apply when the working directory contains
5667 uncommitted changes:
5667 uncommitted changes:
5668
5668
5669 1. If neither -c/--check nor -C/--clean is specified, and if
5669 1. If neither -c/--check nor -C/--clean is specified, and if
5670 the requested changeset is an ancestor or descendant of
5670 the requested changeset is an ancestor or descendant of
5671 the working directory's parent, the uncommitted changes
5671 the working directory's parent, the uncommitted changes
5672 are merged into the requested changeset and the merged
5672 are merged into the requested changeset and the merged
5673 result is left uncommitted. If the requested changeset is
5673 result is left uncommitted. If the requested changeset is
5674 not an ancestor or descendant (that is, it is on another
5674 not an ancestor or descendant (that is, it is on another
5675 branch), the update is aborted and the uncommitted changes
5675 branch), the update is aborted and the uncommitted changes
5676 are preserved.
5676 are preserved.
5677
5677
5678 2. With the -c/--check option, the update is aborted and the
5678 2. With the -c/--check option, the update is aborted and the
5679 uncommitted changes are preserved.
5679 uncommitted changes are preserved.
5680
5680
5681 3. With the -C/--clean option, uncommitted changes are discarded and
5681 3. With the -C/--clean option, uncommitted changes are discarded and
5682 the working directory is updated to the requested changeset.
5682 the working directory is updated to the requested changeset.
5683
5683
5684 Use null as the changeset to remove the working directory (like
5684 Use null as the changeset to remove the working directory (like
5685 :hg:`clone -U`).
5685 :hg:`clone -U`).
5686
5686
5687 If you want to revert just one file to an older revision, use
5687 If you want to revert just one file to an older revision, use
5688 :hg:`revert [-r REV] NAME`.
5688 :hg:`revert [-r REV] NAME`.
5689
5689
5690 See :hg:`help dates` for a list of formats valid for -d/--date.
5690 See :hg:`help dates` for a list of formats valid for -d/--date.
5691
5691
5692 Returns 0 on success, 1 if there are unresolved files.
5692 Returns 0 on success, 1 if there are unresolved files.
5693 """
5693 """
5694 if rev and node:
5694 if rev and node:
5695 raise util.Abort(_("please specify just one revision"))
5695 raise util.Abort(_("please specify just one revision"))
5696
5696
5697 if rev is None or rev == '':
5697 if rev is None or rev == '':
5698 rev = node
5698 rev = node
5699
5699
5700 # with no argument, we also move the current bookmark, if any
5700 # with no argument, we also move the current bookmark, if any
5701 movemarkfrom = None
5701 movemarkfrom = None
5702 if rev is None or node == '':
5702 if rev is None or node == '':
5703 movemarkfrom = repo['.'].node()
5703 movemarkfrom = repo['.'].node()
5704
5704
5705 # if we defined a bookmark, we have to remember the original bookmark name
5705 # if we defined a bookmark, we have to remember the original bookmark name
5706 brev = rev
5706 brev = rev
5707 rev = scmutil.revsingle(repo, rev, rev).rev()
5707 rev = scmutil.revsingle(repo, rev, rev).rev()
5708
5708
5709 if check and clean:
5709 if check and clean:
5710 raise util.Abort(_("cannot specify both -c/--check and -C/--clean"))
5710 raise util.Abort(_("cannot specify both -c/--check and -C/--clean"))
5711
5711
5712 if date:
5712 if date:
5713 if rev is not None:
5713 if rev is not None:
5714 raise util.Abort(_("you can't specify a revision and a date"))
5714 raise util.Abort(_("you can't specify a revision and a date"))
5715 rev = cmdutil.finddate(ui, repo, date)
5715 rev = cmdutil.finddate(ui, repo, date)
5716
5716
5717 if check:
5717 if check:
5718 c = repo[None]
5718 c = repo[None]
5719 if c.dirty(merge=False, branch=False):
5719 if c.dirty(merge=False, branch=False):
5720 raise util.Abort(_("uncommitted local changes"))
5720 raise util.Abort(_("uncommitted local changes"))
5721 if rev is None:
5721 if rev is None:
5722 rev = repo[repo[None].branch()].rev()
5722 rev = repo[repo[None].branch()].rev()
5723 mergemod._checkunknown(repo, repo[None], repo[rev])
5723 mergemod._checkunknown(repo, repo[None], repo[rev])
5724
5724
5725 if clean:
5725 if clean:
5726 ret = hg.clean(repo, rev)
5726 ret = hg.clean(repo, rev)
5727 else:
5727 else:
5728 ret = hg.update(repo, rev)
5728 ret = hg.update(repo, rev)
5729
5729
5730 if not ret and movemarkfrom:
5730 if not ret and movemarkfrom:
5731 if bookmarks.update(repo, [movemarkfrom], repo['.'].node()):
5731 if bookmarks.update(repo, [movemarkfrom], repo['.'].node()):
5732 ui.status(_("updating bookmark %s\n") % repo._bookmarkcurrent)
5732 ui.status(_("updating bookmark %s\n") % repo._bookmarkcurrent)
5733 elif brev in repo._bookmarks:
5733 elif brev in repo._bookmarks:
5734 bookmarks.setcurrent(repo, brev)
5734 bookmarks.setcurrent(repo, brev)
5735 elif brev:
5735 elif brev:
5736 bookmarks.unsetcurrent(repo)
5736 bookmarks.unsetcurrent(repo)
5737
5737
5738 return ret
5738 return ret
5739
5739
5740 @command('verify', [])
5740 @command('verify', [])
5741 def verify(ui, repo):
5741 def verify(ui, repo):
5742 """verify the integrity of the repository
5742 """verify the integrity of the repository
5743
5743
5744 Verify the integrity of the current repository.
5744 Verify the integrity of the current repository.
5745
5745
5746 This will perform an extensive check of the repository's
5746 This will perform an extensive check of the repository's
5747 integrity, validating the hashes and checksums of each entry in
5747 integrity, validating the hashes and checksums of each entry in
5748 the changelog, manifest, and tracked files, as well as the
5748 the changelog, manifest, and tracked files, as well as the
5749 integrity of their crosslinks and indices.
5749 integrity of their crosslinks and indices.
5750
5750
5751 Returns 0 on success, 1 if errors are encountered.
5751 Returns 0 on success, 1 if errors are encountered.
5752 """
5752 """
5753 return hg.verify(repo)
5753 return hg.verify(repo)
5754
5754
5755 @command('version', [])
5755 @command('version', [])
5756 def version_(ui):
5756 def version_(ui):
5757 """output version and copyright information"""
5757 """output version and copyright information"""
5758 ui.write(_("Mercurial Distributed SCM (version %s)\n")
5758 ui.write(_("Mercurial Distributed SCM (version %s)\n")
5759 % util.version())
5759 % util.version())
5760 ui.status(_(
5760 ui.status(_(
5761 "(see http://mercurial.selenic.com for more information)\n"
5761 "(see http://mercurial.selenic.com for more information)\n"
5762 "\nCopyright (C) 2005-2012 Matt Mackall and others\n"
5762 "\nCopyright (C) 2005-2012 Matt Mackall and others\n"
5763 "This is free software; see the source for copying conditions. "
5763 "This is free software; see the source for copying conditions. "
5764 "There is NO\nwarranty; "
5764 "There is NO\nwarranty; "
5765 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
5765 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
5766 ))
5766 ))
5767
5767
5768 norepo = ("clone init version help debugcommands debugcomplete"
5768 norepo = ("clone init version help debugcommands debugcomplete"
5769 " debugdate debuginstall debugfsinfo debugpushkey debugwireargs"
5769 " debugdate debuginstall debugfsinfo debugpushkey debugwireargs"
5770 " debugknown debuggetbundle debugbundle")
5770 " debugknown debuggetbundle debugbundle")
5771 optionalrepo = ("identify paths serve showconfig debugancestor debugdag"
5771 optionalrepo = ("identify paths serve showconfig debugancestor debugdag"
5772 " debugdata debugindex debugindexdot debugrevlog")
5772 " debugdata debugindex debugindexdot debugrevlog")
@@ -1,789 +1,789 b''
1 # dispatch.py - command dispatching for mercurial
1 # dispatch.py - command dispatching for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 import os, sys, atexit, signal, pdb, socket, errno, shlex, time, traceback, re
9 import os, sys, atexit, signal, pdb, socket, errno, shlex, time, traceback, re
10 import util, commands, hg, fancyopts, extensions, hook, error
10 import util, commands, hg, fancyopts, extensions, hook, error
11 import cmdutil, encoding
11 import cmdutil, encoding
12 import ui as uimod
12 import ui as uimod
13
13
class request(object):
    """Bundle the arguments and I/O context for one dispatch invocation.

    ``args`` is the raw command-line argument list; ``ui`` and ``repo``
    may be pre-built objects or None.  ``fin``/``fout``/``ferr`` override
    the ui's streams when provided (see dispatch()).
    """

    def __init__(self, args, ui=None, repo=None, fin=None, fout=None,
                 ferr=None):
        self.args = args
        self.ui = ui
        self.repo = repo

        # input/output/error streams
        self.fin = fin
        self.fout = fout
        self.ferr = ferr
25
25
def run():
    "run the command in sys.argv"
    # Exit status is masked to a byte; a falsy dispatch() result means 0.
    req = request(sys.argv[1:])
    status = dispatch(req) or 0
    sys.exit(status & 255)
29
29
30 def dispatch(req):
30 def dispatch(req):
31 "run the command specified in req.args"
31 "run the command specified in req.args"
32 if req.ferr:
32 if req.ferr:
33 ferr = req.ferr
33 ferr = req.ferr
34 elif req.ui:
34 elif req.ui:
35 ferr = req.ui.ferr
35 ferr = req.ui.ferr
36 else:
36 else:
37 ferr = sys.stderr
37 ferr = sys.stderr
38
38
39 try:
39 try:
40 if not req.ui:
40 if not req.ui:
41 req.ui = uimod.ui()
41 req.ui = uimod.ui()
42 if '--traceback' in req.args:
42 if '--traceback' in req.args:
43 req.ui.setconfig('ui', 'traceback', 'on')
43 req.ui.setconfig('ui', 'traceback', 'on')
44
44
45 # set ui streams from the request
45 # set ui streams from the request
46 if req.fin:
46 if req.fin:
47 req.ui.fin = req.fin
47 req.ui.fin = req.fin
48 if req.fout:
48 if req.fout:
49 req.ui.fout = req.fout
49 req.ui.fout = req.fout
50 if req.ferr:
50 if req.ferr:
51 req.ui.ferr = req.ferr
51 req.ui.ferr = req.ferr
52 except util.Abort, inst:
52 except util.Abort, inst:
53 ferr.write(_("abort: %s\n") % inst)
53 ferr.write(_("abort: %s\n") % inst)
54 if inst.hint:
54 if inst.hint:
55 ferr.write(_("(%s)\n") % inst.hint)
55 ferr.write(_("(%s)\n") % inst.hint)
56 return -1
56 return -1
57 except error.ParseError, inst:
57 except error.ParseError, inst:
58 if len(inst.args) > 1:
58 if len(inst.args) > 1:
59 ferr.write(_("hg: parse error at %s: %s\n") %
59 ferr.write(_("hg: parse error at %s: %s\n") %
60 (inst.args[1], inst.args[0]))
60 (inst.args[1], inst.args[0]))
61 else:
61 else:
62 ferr.write(_("hg: parse error: %s\n") % inst.args[0])
62 ferr.write(_("hg: parse error: %s\n") % inst.args[0])
63 return -1
63 return -1
64
64
65 return _runcatch(req)
65 return _runcatch(req)
66
66
67 def _runcatch(req):
67 def _runcatch(req):
68 def catchterm(*args):
68 def catchterm(*args):
69 raise error.SignalInterrupt
69 raise error.SignalInterrupt
70
70
71 ui = req.ui
71 ui = req.ui
72 try:
72 try:
73 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
73 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
74 num = getattr(signal, name, None)
74 num = getattr(signal, name, None)
75 if num:
75 if num:
76 signal.signal(num, catchterm)
76 signal.signal(num, catchterm)
77 except ValueError:
77 except ValueError:
78 pass # happens if called in a thread
78 pass # happens if called in a thread
79
79
80 try:
80 try:
81 try:
81 try:
82 # enter the debugger before command execution
82 # enter the debugger before command execution
83 if '--debugger' in req.args:
83 if '--debugger' in req.args:
84 ui.warn(_("entering debugger - "
84 ui.warn(_("entering debugger - "
85 "type c to continue starting hg or h for help\n"))
85 "type c to continue starting hg or h for help\n"))
86 pdb.set_trace()
86 pdb.set_trace()
87 try:
87 try:
88 return _dispatch(req)
88 return _dispatch(req)
89 finally:
89 finally:
90 ui.flush()
90 ui.flush()
91 except:
91 except: # re-raises
92 # enter the debugger when we hit an exception
92 # enter the debugger when we hit an exception
93 if '--debugger' in req.args:
93 if '--debugger' in req.args:
94 traceback.print_exc()
94 traceback.print_exc()
95 pdb.post_mortem(sys.exc_info()[2])
95 pdb.post_mortem(sys.exc_info()[2])
96 ui.traceback()
96 ui.traceback()
97 raise
97 raise
98
98
99 # Global exception handling, alphabetically
99 # Global exception handling, alphabetically
100 # Mercurial-specific first, followed by built-in and library exceptions
100 # Mercurial-specific first, followed by built-in and library exceptions
101 except error.AmbiguousCommand, inst:
101 except error.AmbiguousCommand, inst:
102 ui.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
102 ui.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
103 (inst.args[0], " ".join(inst.args[1])))
103 (inst.args[0], " ".join(inst.args[1])))
104 except error.ParseError, inst:
104 except error.ParseError, inst:
105 if len(inst.args) > 1:
105 if len(inst.args) > 1:
106 ui.warn(_("hg: parse error at %s: %s\n") %
106 ui.warn(_("hg: parse error at %s: %s\n") %
107 (inst.args[1], inst.args[0]))
107 (inst.args[1], inst.args[0]))
108 else:
108 else:
109 ui.warn(_("hg: parse error: %s\n") % inst.args[0])
109 ui.warn(_("hg: parse error: %s\n") % inst.args[0])
110 return -1
110 return -1
111 except error.LockHeld, inst:
111 except error.LockHeld, inst:
112 if inst.errno == errno.ETIMEDOUT:
112 if inst.errno == errno.ETIMEDOUT:
113 reason = _('timed out waiting for lock held by %s') % inst.locker
113 reason = _('timed out waiting for lock held by %s') % inst.locker
114 else:
114 else:
115 reason = _('lock held by %s') % inst.locker
115 reason = _('lock held by %s') % inst.locker
116 ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
116 ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
117 except error.LockUnavailable, inst:
117 except error.LockUnavailable, inst:
118 ui.warn(_("abort: could not lock %s: %s\n") %
118 ui.warn(_("abort: could not lock %s: %s\n") %
119 (inst.desc or inst.filename, inst.strerror))
119 (inst.desc or inst.filename, inst.strerror))
120 except error.CommandError, inst:
120 except error.CommandError, inst:
121 if inst.args[0]:
121 if inst.args[0]:
122 ui.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
122 ui.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
123 commands.help_(ui, inst.args[0], full=False, command=True)
123 commands.help_(ui, inst.args[0], full=False, command=True)
124 else:
124 else:
125 ui.warn(_("hg: %s\n") % inst.args[1])
125 ui.warn(_("hg: %s\n") % inst.args[1])
126 commands.help_(ui, 'shortlist')
126 commands.help_(ui, 'shortlist')
127 except error.OutOfBandError, inst:
127 except error.OutOfBandError, inst:
128 ui.warn(_("abort: remote error:\n"))
128 ui.warn(_("abort: remote error:\n"))
129 ui.warn(''.join(inst.args))
129 ui.warn(''.join(inst.args))
130 except error.RepoError, inst:
130 except error.RepoError, inst:
131 ui.warn(_("abort: %s!\n") % inst)
131 ui.warn(_("abort: %s!\n") % inst)
132 if inst.hint:
132 if inst.hint:
133 ui.warn(_("(%s)\n") % inst.hint)
133 ui.warn(_("(%s)\n") % inst.hint)
134 except error.ResponseError, inst:
134 except error.ResponseError, inst:
135 ui.warn(_("abort: %s") % inst.args[0])
135 ui.warn(_("abort: %s") % inst.args[0])
136 if not isinstance(inst.args[1], basestring):
136 if not isinstance(inst.args[1], basestring):
137 ui.warn(" %r\n" % (inst.args[1],))
137 ui.warn(" %r\n" % (inst.args[1],))
138 elif not inst.args[1]:
138 elif not inst.args[1]:
139 ui.warn(_(" empty string\n"))
139 ui.warn(_(" empty string\n"))
140 else:
140 else:
141 ui.warn("\n%r\n" % util.ellipsis(inst.args[1]))
141 ui.warn("\n%r\n" % util.ellipsis(inst.args[1]))
142 except error.RevlogError, inst:
142 except error.RevlogError, inst:
143 ui.warn(_("abort: %s!\n") % inst)
143 ui.warn(_("abort: %s!\n") % inst)
144 except error.SignalInterrupt:
144 except error.SignalInterrupt:
145 ui.warn(_("killed!\n"))
145 ui.warn(_("killed!\n"))
146 except error.UnknownCommand, inst:
146 except error.UnknownCommand, inst:
147 ui.warn(_("hg: unknown command '%s'\n") % inst.args[0])
147 ui.warn(_("hg: unknown command '%s'\n") % inst.args[0])
148 try:
148 try:
149 # check if the command is in a disabled extension
149 # check if the command is in a disabled extension
150 # (but don't check for extensions themselves)
150 # (but don't check for extensions themselves)
151 commands.help_(ui, inst.args[0], unknowncmd=True)
151 commands.help_(ui, inst.args[0], unknowncmd=True)
152 except error.UnknownCommand:
152 except error.UnknownCommand:
153 commands.help_(ui, 'shortlist')
153 commands.help_(ui, 'shortlist')
154 except util.Abort, inst:
154 except util.Abort, inst:
155 ui.warn(_("abort: %s\n") % inst)
155 ui.warn(_("abort: %s\n") % inst)
156 if inst.hint:
156 if inst.hint:
157 ui.warn(_("(%s)\n") % inst.hint)
157 ui.warn(_("(%s)\n") % inst.hint)
158 except ImportError, inst:
158 except ImportError, inst:
159 ui.warn(_("abort: %s!\n") % inst)
159 ui.warn(_("abort: %s!\n") % inst)
160 m = str(inst).split()[-1]
160 m = str(inst).split()[-1]
161 if m in "mpatch bdiff".split():
161 if m in "mpatch bdiff".split():
162 ui.warn(_("(did you forget to compile extensions?)\n"))
162 ui.warn(_("(did you forget to compile extensions?)\n"))
163 elif m in "zlib".split():
163 elif m in "zlib".split():
164 ui.warn(_("(is your Python install correct?)\n"))
164 ui.warn(_("(is your Python install correct?)\n"))
165 except IOError, inst:
165 except IOError, inst:
166 if util.safehasattr(inst, "code"):
166 if util.safehasattr(inst, "code"):
167 ui.warn(_("abort: %s\n") % inst)
167 ui.warn(_("abort: %s\n") % inst)
168 elif util.safehasattr(inst, "reason"):
168 elif util.safehasattr(inst, "reason"):
169 try: # usually it is in the form (errno, strerror)
169 try: # usually it is in the form (errno, strerror)
170 reason = inst.reason.args[1]
170 reason = inst.reason.args[1]
171 except (AttributeError, IndexError):
171 except (AttributeError, IndexError):
172 # it might be anything, for example a string
172 # it might be anything, for example a string
173 reason = inst.reason
173 reason = inst.reason
174 ui.warn(_("abort: error: %s\n") % reason)
174 ui.warn(_("abort: error: %s\n") % reason)
175 elif util.safehasattr(inst, "args") and inst.args[0] == errno.EPIPE:
175 elif util.safehasattr(inst, "args") and inst.args[0] == errno.EPIPE:
176 if ui.debugflag:
176 if ui.debugflag:
177 ui.warn(_("broken pipe\n"))
177 ui.warn(_("broken pipe\n"))
178 elif getattr(inst, "strerror", None):
178 elif getattr(inst, "strerror", None):
179 if getattr(inst, "filename", None):
179 if getattr(inst, "filename", None):
180 ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
180 ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
181 else:
181 else:
182 ui.warn(_("abort: %s\n") % inst.strerror)
182 ui.warn(_("abort: %s\n") % inst.strerror)
183 else:
183 else:
184 raise
184 raise
185 except OSError, inst:
185 except OSError, inst:
186 if getattr(inst, "filename", None):
186 if getattr(inst, "filename", None):
187 ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
187 ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
188 else:
188 else:
189 ui.warn(_("abort: %s\n") % inst.strerror)
189 ui.warn(_("abort: %s\n") % inst.strerror)
190 except KeyboardInterrupt:
190 except KeyboardInterrupt:
191 try:
191 try:
192 ui.warn(_("interrupted!\n"))
192 ui.warn(_("interrupted!\n"))
193 except IOError, inst:
193 except IOError, inst:
194 if inst.errno == errno.EPIPE:
194 if inst.errno == errno.EPIPE:
195 if ui.debugflag:
195 if ui.debugflag:
196 ui.warn(_("\nbroken pipe\n"))
196 ui.warn(_("\nbroken pipe\n"))
197 else:
197 else:
198 raise
198 raise
199 except MemoryError:
199 except MemoryError:
200 ui.warn(_("abort: out of memory\n"))
200 ui.warn(_("abort: out of memory\n"))
201 except SystemExit, inst:
201 except SystemExit, inst:
202 # Commands shouldn't sys.exit directly, but give a return code.
202 # Commands shouldn't sys.exit directly, but give a return code.
203 # Just in case catch this and and pass exit code to caller.
203 # Just in case catch this and and pass exit code to caller.
204 return inst.code
204 return inst.code
205 except socket.error, inst:
205 except socket.error, inst:
206 ui.warn(_("abort: %s\n") % inst.args[-1])
206 ui.warn(_("abort: %s\n") % inst.args[-1])
207 except:
207 except: # re-raises
208 ui.warn(_("** unknown exception encountered,"
208 ui.warn(_("** unknown exception encountered,"
209 " please report by visiting\n"))
209 " please report by visiting\n"))
210 ui.warn(_("** http://mercurial.selenic.com/wiki/BugTracker\n"))
210 ui.warn(_("** http://mercurial.selenic.com/wiki/BugTracker\n"))
211 ui.warn(_("** Python %s\n") % sys.version.replace('\n', ''))
211 ui.warn(_("** Python %s\n") % sys.version.replace('\n', ''))
212 ui.warn(_("** Mercurial Distributed SCM (version %s)\n")
212 ui.warn(_("** Mercurial Distributed SCM (version %s)\n")
213 % util.version())
213 % util.version())
214 ui.warn(_("** Extensions loaded: %s\n")
214 ui.warn(_("** Extensions loaded: %s\n")
215 % ", ".join([x[0] for x in extensions.extensions()]))
215 % ", ".join([x[0] for x in extensions.extensions()]))
216 raise
216 raise
217
217
218 return -1
218 return -1
219
219
def aliasargs(fn, givenargs):
    """Combine an alias function's baked-in args with the user's args.

    Positional references like $1 in the baked-in args consume entries
    from ``givenargs``; the remaining user args are appended at the end.
    Raises util.Abort when a reference points past the supplied args.
    """
    args = getattr(fn, 'args', [])
    if args:
        cmd = ' '.join(map(util.shellquote, args))

        nums = []   # indices of givenargs consumed via $N references
        def replacer(m):
            num = int(m.group(1)) - 1
            nums.append(num)
            if num < len(givenargs):
                return givenargs[num]
            raise util.Abort(_('too few arguments for command alias'))
        cmd = re.sub(r'\$(\d+|\$)', replacer, cmd)
        # keep only the user args that were not consumed above
        givenargs = [x for i, x in enumerate(givenargs)
                     if i not in nums]
        args = shlex.split(cmd)
    return args + givenargs
237
237
class cmdalias(object):
    """A callable wrapper for a [alias] configuration entry.

    Resolves the alias definition against ``cmdtable`` at construction
    time.  Shell aliases (definitions starting with '!') run through
    util.system; broken definitions install a warning stub and set
    ``badalias``.
    """
    def __init__(self, name, definition, cmdtable):
        self.name = self.cmd = name
        self.cmdname = ''
        self.definition = definition
        self.args = []
        self.opts = []
        self.help = ''
        self.norepo = True
        self.optionalrepo = False
        self.badalias = False

        # Does this alias shadow an existing command?  If so, remember the
        # canonical name of the shadowed command for the debug message.
        try:
            aliases, entry = cmdutil.findcmd(self.name, cmdtable)
            for alias, e in cmdtable.iteritems():
                if e is entry:
                    self.cmd = alias
                    break
            self.shadows = True
        except error.UnknownCommand:
            self.shadows = False

        if not self.definition:
            def fn(ui, *args):
                ui.warn(_("no definition for alias '%s'\n") % self.name)
                return 1
            self.fn = fn
            self.badalias = True
            return

        if self.definition.startswith('!'):
            # shell alias: everything after '!' is a shell command line
            self.shell = True
            def fn(ui, *args):
                env = {'HG_ARGS': ' '.join((self.name,) + args)}
                def _checkvar(m):
                    if m.groups()[0] == '$':
                        return m.group()
                    elif int(m.groups()[0]) <= len(args):
                        return m.group()
                    else:
                        ui.debug("No argument found for substitution "
                                 "of %i variable in alias '%s' definition."
                                 % (int(m.groups()[0]), self.name))
                        return ''
                cmd = re.sub(r'\$(\d+|\$)', _checkvar, self.definition[1:])
                replace = dict((str(i + 1), arg) for i, arg in enumerate(args))
                replace['0'] = self.name
                replace['@'] = ' '.join(args)
                cmd = util.interpolate(r'\$', replace, cmd, escape_prefix=True)
                return util.system(cmd, environ=env, out=ui.fout)
            self.fn = fn
            return

        args = shlex.split(self.definition)
        self.cmdname = cmd = args.pop(0)
        args = map(util.expandpath, args)

        # repository-selection options cannot be baked into an alias
        for invalidarg in ("--cwd", "-R", "--repository", "--repo"):
            if _earlygetopt([invalidarg], args):
                def fn(ui, *args):
                    ui.warn(_("error in definition for alias '%s': %s may only "
                              "be given on the command line\n")
                            % (self.name, invalidarg))
                    return 1

                self.fn = fn
                self.badalias = True
                return

        try:
            tableentry = cmdutil.findcmd(cmd, cmdtable, False)[1]
            if len(tableentry) > 2:
                self.fn, self.opts, self.help = tableentry
            else:
                self.fn, self.opts = tableentry

            self.args = aliasargs(self.fn, args)
            if cmd not in commands.norepo.split(' '):
                self.norepo = False
            if cmd in commands.optionalrepo.split(' '):
                self.optionalrepo = True
            if self.help.startswith("hg " + cmd):
                # drop prefix in old-style help lines so hg shows the alias
                self.help = self.help[4 + len(cmd):]
            self.__doc__ = self.fn.__doc__

        except error.UnknownCommand:
            def fn(ui, *args):
                ui.warn(_("alias '%s' resolves to unknown command '%s'\n") \
                        % (self.name, cmd))
                try:
                    # check if the command is in a disabled extension
                    commands.help_(ui, cmd, unknowncmd=True)
                except error.UnknownCommand:
                    pass
                return 1
            self.fn = fn
            self.badalias = True
        except error.AmbiguousCommand:
            def fn(ui, *args):
                ui.warn(_("alias '%s' resolves to ambiguous command '%s'\n") \
                        % (self.name, cmd))
                return 1
            self.fn = fn
            self.badalias = True

    def __call__(self, ui, *args, **opts):
        if self.shadows:
            ui.debug("alias '%s' shadows command '%s'\n" %
                     (self.name, self.cmdname))

        if util.safehasattr(self, 'shell'):
            return self.fn(ui, *args, **opts)
        else:
            try:
                util.checksignature(self.fn)(ui, *args, **opts)
            except error.SignatureError:
                # show what the alias expanded to, then let the caller report
                args = ' '.join([self.cmdname] + self.args)
                ui.debug("alias '%s' expands to '%s'\n" % (self.name, args))
                raise
358
358
def addaliases(ui, cmdtable):
    """Install [alias] config entries into ``cmdtable``.

    Also extends commands.norepo / commands.optionalrepo for aliases that
    inherit those properties from their target command.
    """
    # aliases are processed after extensions have been loaded, so they
    # may use extension commands. Aliases can also use other alias definitions,
    # but only if they have been defined prior to the current definition.
    for alias, definition in ui.configitems('alias'):
        aliasdef = cmdalias(alias, definition, cmdtable)

        try:
            # skip re-registration when an identical alias already exists
            olddef = cmdtable[aliasdef.cmd][0]
            if olddef.definition == aliasdef.definition:
                continue
        except (KeyError, AttributeError):
            # definition might not exist or it might not be a cmdalias
            pass

        cmdtable[aliasdef.name] = (aliasdef, aliasdef.opts, aliasdef.help)
        if aliasdef.norepo:
            commands.norepo += ' %s' % alias
        if aliasdef.optionalrepo:
            commands.optionalrepo += ' %s' % alias
379
379
380 def _parse(ui, args):
380 def _parse(ui, args):
381 options = {}
381 options = {}
382 cmdoptions = {}
382 cmdoptions = {}
383
383
384 try:
384 try:
385 args = fancyopts.fancyopts(args, commands.globalopts, options)
385 args = fancyopts.fancyopts(args, commands.globalopts, options)
386 except fancyopts.getopt.GetoptError, inst:
386 except fancyopts.getopt.GetoptError, inst:
387 raise error.CommandError(None, inst)
387 raise error.CommandError(None, inst)
388
388
389 if args:
389 if args:
390 cmd, args = args[0], args[1:]
390 cmd, args = args[0], args[1:]
391 aliases, entry = cmdutil.findcmd(cmd, commands.table,
391 aliases, entry = cmdutil.findcmd(cmd, commands.table,
392 ui.configbool("ui", "strict"))
392 ui.configbool("ui", "strict"))
393 cmd = aliases[0]
393 cmd = aliases[0]
394 args = aliasargs(entry[0], args)
394 args = aliasargs(entry[0], args)
395 defaults = ui.config("defaults", cmd)
395 defaults = ui.config("defaults", cmd)
396 if defaults:
396 if defaults:
397 args = map(util.expandpath, shlex.split(defaults)) + args
397 args = map(util.expandpath, shlex.split(defaults)) + args
398 c = list(entry[1])
398 c = list(entry[1])
399 else:
399 else:
400 cmd = None
400 cmd = None
401 c = []
401 c = []
402
402
403 # combine global options into local
403 # combine global options into local
404 for o in commands.globalopts:
404 for o in commands.globalopts:
405 c.append((o[0], o[1], options[o[1]], o[3]))
405 c.append((o[0], o[1], options[o[1]], o[3]))
406
406
407 try:
407 try:
408 args = fancyopts.fancyopts(args, c, cmdoptions, True)
408 args = fancyopts.fancyopts(args, c, cmdoptions, True)
409 except fancyopts.getopt.GetoptError, inst:
409 except fancyopts.getopt.GetoptError, inst:
410 raise error.CommandError(cmd, inst)
410 raise error.CommandError(cmd, inst)
411
411
412 # separate global options back out
412 # separate global options back out
413 for o in commands.globalopts:
413 for o in commands.globalopts:
414 n = o[1]
414 n = o[1]
415 options[n] = cmdoptions[n]
415 options[n] = cmdoptions[n]
416 del cmdoptions[n]
416 del cmdoptions[n]
417
417
418 return (cmd, cmd and entry[0] or None, args, options, cmdoptions)
418 return (cmd, cmd and entry[0] or None, args, options, cmdoptions)
419
419
def _parseconfig(ui, config):
    """parse the --config options from the command line"""
    parsed = []

    for cfg in config:
        try:
            # expected shape: section.name=value
            name, value = cfg.split('=', 1)
            section, name = name.split('.', 1)
            if not section or not name:
                raise IndexError
            ui.setconfig(section, name, value)
            parsed.append((section, name, value))
        except (IndexError, ValueError):
            raise util.Abort(_('malformed --config option: %r '
                               '(use --config section.name=value)') % cfg)

    return parsed
437
437
438 def _earlygetopt(aliases, args):
438 def _earlygetopt(aliases, args):
439 """Return list of values for an option (or aliases).
439 """Return list of values for an option (or aliases).
440
440
441 The values are listed in the order they appear in args.
441 The values are listed in the order they appear in args.
442 The options and values are removed from args.
442 The options and values are removed from args.
443 """
443 """
444 try:
444 try:
445 argcount = args.index("--")
445 argcount = args.index("--")
446 except ValueError:
446 except ValueError:
447 argcount = len(args)
447 argcount = len(args)
448 shortopts = [opt for opt in aliases if len(opt) == 2]
448 shortopts = [opt for opt in aliases if len(opt) == 2]
449 values = []
449 values = []
450 pos = 0
450 pos = 0
451 while pos < argcount:
451 while pos < argcount:
452 if args[pos] in aliases:
452 if args[pos] in aliases:
453 if pos + 1 >= argcount:
453 if pos + 1 >= argcount:
454 # ignore and let getopt report an error if there is no value
454 # ignore and let getopt report an error if there is no value
455 break
455 break
456 del args[pos]
456 del args[pos]
457 values.append(args.pop(pos))
457 values.append(args.pop(pos))
458 argcount -= 2
458 argcount -= 2
459 elif args[pos][:2] in shortopts:
459 elif args[pos][:2] in shortopts:
460 # short option can have no following space, e.g. hg log -Rfoo
460 # short option can have no following space, e.g. hg log -Rfoo
461 values.append(args.pop(pos)[2:])
461 values.append(args.pop(pos)[2:])
462 argcount -= 1
462 argcount -= 1
463 else:
463 else:
464 pos += 1
464 pos += 1
465 return values
465 return values
466
466
def runcommand(lui, repo, cmd, fullargs, ui, options, d, cmdpats, cmdoptions):
    """Run ``d`` wrapped in the command's pre-/post- hooks.

    A non-zero pre-hook result aborts the command and is returned as-is.
    """
    # run pre-hook, and abort if it fails
    ret = hook.hook(lui, repo, "pre-%s" % cmd, False, args=" ".join(fullargs),
                    pats=cmdpats, opts=cmdoptions)
    if ret:
        return ret
    ret = _runcommand(ui, options, cmd, d)
    # run post-hook, passing command result
    hook.hook(lui, repo, "post-%s" % cmd, False, args=" ".join(fullargs),
              result=ret, pats=cmdpats, opts=cmdoptions)
    return ret
478
478
479 def _getlocal(ui, rpath):
479 def _getlocal(ui, rpath):
480 """Return (path, local ui object) for the given target path.
480 """Return (path, local ui object) for the given target path.
481
481
482 Takes paths in [cwd]/.hg/hgrc into account."
482 Takes paths in [cwd]/.hg/hgrc into account."
483 """
483 """
484 try:
484 try:
485 wd = os.getcwd()
485 wd = os.getcwd()
486 except OSError, e:
486 except OSError, e:
487 raise util.Abort(_("error getting current working directory: %s") %
487 raise util.Abort(_("error getting current working directory: %s") %
488 e.strerror)
488 e.strerror)
489 path = cmdutil.findrepo(wd) or ""
489 path = cmdutil.findrepo(wd) or ""
490 if not path:
490 if not path:
491 lui = ui
491 lui = ui
492 else:
492 else:
493 lui = ui.copy()
493 lui = ui.copy()
494 lui.readconfig(os.path.join(path, ".hg", "hgrc"), path)
494 lui.readconfig(os.path.join(path, ".hg", "hgrc"), path)
495
495
496 if rpath and rpath[-1]:
496 if rpath and rpath[-1]:
497 path = lui.expandpath(rpath[-1])
497 path = lui.expandpath(rpath[-1])
498 lui = ui.copy()
498 lui = ui.copy()
499 lui.readconfig(os.path.join(path, ".hg", "hgrc"), path)
499 lui.readconfig(os.path.join(path, ".hg", "hgrc"), path)
500
500
501 return path, lui
501 return path, lui
502
502
def _checkshellalias(lui, ui, args):
    """If args names a shell alias, return a thunk that runs it.

    Returns None when no shell alias applies; in that case any temporary
    changes to commands.norepo/optionalrepo are rolled back.
    """
    options = {}

    try:
        args = fancyopts.fancyopts(args, commands.globalopts, options)
    except fancyopts.getopt.GetoptError:
        return

    if not args:
        return

    # snapshot the registries so we can undo addaliases() side effects
    norepo = commands.norepo
    optionalrepo = commands.optionalrepo
    def restorecommands():
        commands.norepo = norepo
        commands.optionalrepo = optionalrepo

    cmdtable = commands.table.copy()
    addaliases(lui, cmdtable)

    cmd = args[0]
    try:
        aliases, entry = cmdutil.findcmd(cmd, cmdtable,
                                         lui.configbool("ui", "strict"))
    except (error.AmbiguousCommand, error.UnknownCommand):
        restorecommands()
        return

    cmd = aliases[0]
    fn = entry[0]

    if cmd and util.safehasattr(fn, 'shell'):
        d = lambda: fn(ui, *args[1:])
        return lambda: runcommand(lui, None, cmd, args[:1], ui, options, d,
                                  [], {})

    restorecommands()
540
540
541 _loaded = set()
541 _loaded = set()
542 def _dispatch(req):
542 def _dispatch(req):
543 args = req.args
543 args = req.args
544 ui = req.ui
544 ui = req.ui
545
545
546 # read --config before doing anything else
546 # read --config before doing anything else
547 # (e.g. to change trust settings for reading .hg/hgrc)
547 # (e.g. to change trust settings for reading .hg/hgrc)
548 cfgs = _parseconfig(ui, _earlygetopt(['--config'], args))
548 cfgs = _parseconfig(ui, _earlygetopt(['--config'], args))
549
549
550 # check for cwd
550 # check for cwd
551 cwd = _earlygetopt(['--cwd'], args)
551 cwd = _earlygetopt(['--cwd'], args)
552 if cwd:
552 if cwd:
553 os.chdir(cwd[-1])
553 os.chdir(cwd[-1])
554
554
555 rpath = _earlygetopt(["-R", "--repository", "--repo"], args)
555 rpath = _earlygetopt(["-R", "--repository", "--repo"], args)
556 path, lui = _getlocal(ui, rpath)
556 path, lui = _getlocal(ui, rpath)
557
557
558 # Now that we're operating in the right directory/repository with
558 # Now that we're operating in the right directory/repository with
559 # the right config settings, check for shell aliases
559 # the right config settings, check for shell aliases
560 shellaliasfn = _checkshellalias(lui, ui, args)
560 shellaliasfn = _checkshellalias(lui, ui, args)
561 if shellaliasfn:
561 if shellaliasfn:
562 return shellaliasfn()
562 return shellaliasfn()
563
563
564 # Configure extensions in phases: uisetup, extsetup, cmdtable, and
564 # Configure extensions in phases: uisetup, extsetup, cmdtable, and
565 # reposetup. Programs like TortoiseHg will call _dispatch several
565 # reposetup. Programs like TortoiseHg will call _dispatch several
566 # times so we keep track of configured extensions in _loaded.
566 # times so we keep track of configured extensions in _loaded.
567 extensions.loadall(lui)
567 extensions.loadall(lui)
568 exts = [ext for ext in extensions.extensions() if ext[0] not in _loaded]
568 exts = [ext for ext in extensions.extensions() if ext[0] not in _loaded]
569 # Propagate any changes to lui.__class__ by extensions
569 # Propagate any changes to lui.__class__ by extensions
570 ui.__class__ = lui.__class__
570 ui.__class__ = lui.__class__
571
571
572 # (uisetup and extsetup are handled in extensions.loadall)
572 # (uisetup and extsetup are handled in extensions.loadall)
573
573
574 for name, module in exts:
574 for name, module in exts:
575 cmdtable = getattr(module, 'cmdtable', {})
575 cmdtable = getattr(module, 'cmdtable', {})
576 overrides = [cmd for cmd in cmdtable if cmd in commands.table]
576 overrides = [cmd for cmd in cmdtable if cmd in commands.table]
577 if overrides:
577 if overrides:
578 ui.warn(_("extension '%s' overrides commands: %s\n")
578 ui.warn(_("extension '%s' overrides commands: %s\n")
579 % (name, " ".join(overrides)))
579 % (name, " ".join(overrides)))
580 commands.table.update(cmdtable)
580 commands.table.update(cmdtable)
581 _loaded.add(name)
581 _loaded.add(name)
582
582
583 # (reposetup is handled in hg.repository)
583 # (reposetup is handled in hg.repository)
584
584
585 addaliases(lui, commands.table)
585 addaliases(lui, commands.table)
586
586
587 # check for fallback encoding
587 # check for fallback encoding
588 fallback = lui.config('ui', 'fallbackencoding')
588 fallback = lui.config('ui', 'fallbackencoding')
589 if fallback:
589 if fallback:
590 encoding.fallbackencoding = fallback
590 encoding.fallbackencoding = fallback
591
591
592 fullargs = args
592 fullargs = args
593 cmd, func, args, options, cmdoptions = _parse(lui, args)
593 cmd, func, args, options, cmdoptions = _parse(lui, args)
594
594
595 if options["config"]:
595 if options["config"]:
596 raise util.Abort(_("option --config may not be abbreviated!"))
596 raise util.Abort(_("option --config may not be abbreviated!"))
597 if options["cwd"]:
597 if options["cwd"]:
598 raise util.Abort(_("option --cwd may not be abbreviated!"))
598 raise util.Abort(_("option --cwd may not be abbreviated!"))
599 if options["repository"]:
599 if options["repository"]:
600 raise util.Abort(_(
600 raise util.Abort(_(
601 "option -R has to be separated from other options (e.g. not -qR) "
601 "option -R has to be separated from other options (e.g. not -qR) "
602 "and --repository may only be abbreviated as --repo!"))
602 "and --repository may only be abbreviated as --repo!"))
603
603
604 if options["encoding"]:
604 if options["encoding"]:
605 encoding.encoding = options["encoding"]
605 encoding.encoding = options["encoding"]
606 if options["encodingmode"]:
606 if options["encodingmode"]:
607 encoding.encodingmode = options["encodingmode"]
607 encoding.encodingmode = options["encodingmode"]
608 if options["time"]:
608 if options["time"]:
609 def get_times():
609 def get_times():
610 t = os.times()
610 t = os.times()
611 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
611 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
612 t = (t[0], t[1], t[2], t[3], time.clock())
612 t = (t[0], t[1], t[2], t[3], time.clock())
613 return t
613 return t
614 s = get_times()
614 s = get_times()
615 def print_time():
615 def print_time():
616 t = get_times()
616 t = get_times()
617 ui.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
617 ui.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
618 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
618 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
619 atexit.register(print_time)
619 atexit.register(print_time)
620
620
621 uis = set([ui, lui])
621 uis = set([ui, lui])
622
622
623 if req.repo:
623 if req.repo:
624 uis.add(req.repo.ui)
624 uis.add(req.repo.ui)
625
625
626 # copy configs that were passed on the cmdline (--config) to the repo ui
626 # copy configs that were passed on the cmdline (--config) to the repo ui
627 for cfg in cfgs:
627 for cfg in cfgs:
628 req.repo.ui.setconfig(*cfg)
628 req.repo.ui.setconfig(*cfg)
629
629
630 if options['verbose'] or options['debug'] or options['quiet']:
630 if options['verbose'] or options['debug'] or options['quiet']:
631 for opt in ('verbose', 'debug', 'quiet'):
631 for opt in ('verbose', 'debug', 'quiet'):
632 val = str(bool(options[opt]))
632 val = str(bool(options[opt]))
633 for ui_ in uis:
633 for ui_ in uis:
634 ui_.setconfig('ui', opt, val)
634 ui_.setconfig('ui', opt, val)
635
635
636 if options['traceback']:
636 if options['traceback']:
637 for ui_ in uis:
637 for ui_ in uis:
638 ui_.setconfig('ui', 'traceback', 'on')
638 ui_.setconfig('ui', 'traceback', 'on')
639
639
640 if options['noninteractive']:
640 if options['noninteractive']:
641 for ui_ in uis:
641 for ui_ in uis:
642 ui_.setconfig('ui', 'interactive', 'off')
642 ui_.setconfig('ui', 'interactive', 'off')
643
643
644 if cmdoptions.get('insecure', False):
644 if cmdoptions.get('insecure', False):
645 for ui_ in uis:
645 for ui_ in uis:
646 ui_.setconfig('web', 'cacerts', '')
646 ui_.setconfig('web', 'cacerts', '')
647
647
648 if options['version']:
648 if options['version']:
649 return commands.version_(ui)
649 return commands.version_(ui)
650 if options['help']:
650 if options['help']:
651 return commands.help_(ui, cmd)
651 return commands.help_(ui, cmd)
652 elif not cmd:
652 elif not cmd:
653 return commands.help_(ui, 'shortlist')
653 return commands.help_(ui, 'shortlist')
654
654
655 repo = None
655 repo = None
656 cmdpats = args[:]
656 cmdpats = args[:]
657 if cmd not in commands.norepo.split():
657 if cmd not in commands.norepo.split():
658 # use the repo from the request only if we don't have -R
658 # use the repo from the request only if we don't have -R
659 if not rpath and not cwd:
659 if not rpath and not cwd:
660 repo = req.repo
660 repo = req.repo
661
661
662 if repo:
662 if repo:
663 # set the descriptors of the repo ui to those of ui
663 # set the descriptors of the repo ui to those of ui
664 repo.ui.fin = ui.fin
664 repo.ui.fin = ui.fin
665 repo.ui.fout = ui.fout
665 repo.ui.fout = ui.fout
666 repo.ui.ferr = ui.ferr
666 repo.ui.ferr = ui.ferr
667 else:
667 else:
668 try:
668 try:
669 repo = hg.repository(ui, path=path)
669 repo = hg.repository(ui, path=path)
670 if not repo.local():
670 if not repo.local():
671 raise util.Abort(_("repository '%s' is not local") % path)
671 raise util.Abort(_("repository '%s' is not local") % path)
672 repo.ui.setconfig("bundle", "mainreporoot", repo.root)
672 repo.ui.setconfig("bundle", "mainreporoot", repo.root)
673 except error.RequirementError:
673 except error.RequirementError:
674 raise
674 raise
675 except error.RepoError:
675 except error.RepoError:
676 if cmd not in commands.optionalrepo.split():
676 if cmd not in commands.optionalrepo.split():
677 if args and not path: # try to infer -R from command args
677 if args and not path: # try to infer -R from command args
678 repos = map(cmdutil.findrepo, args)
678 repos = map(cmdutil.findrepo, args)
679 guess = repos[0]
679 guess = repos[0]
680 if guess and repos.count(guess) == len(repos):
680 if guess and repos.count(guess) == len(repos):
681 req.args = ['--repository', guess] + fullargs
681 req.args = ['--repository', guess] + fullargs
682 return _dispatch(req)
682 return _dispatch(req)
683 if not path:
683 if not path:
684 raise error.RepoError(_("no repository found in '%s'"
684 raise error.RepoError(_("no repository found in '%s'"
685 " (.hg not found)")
685 " (.hg not found)")
686 % os.getcwd())
686 % os.getcwd())
687 raise
687 raise
688 if repo:
688 if repo:
689 ui = repo.ui
689 ui = repo.ui
690 args.insert(0, repo)
690 args.insert(0, repo)
691 elif rpath:
691 elif rpath:
692 ui.warn(_("warning: --repository ignored\n"))
692 ui.warn(_("warning: --repository ignored\n"))
693
693
694 msg = ' '.join(' ' in a and repr(a) or a for a in fullargs)
694 msg = ' '.join(' ' in a and repr(a) or a for a in fullargs)
695 ui.log("command", msg + "\n")
695 ui.log("command", msg + "\n")
696 d = lambda: util.checksignature(func)(ui, *args, **cmdoptions)
696 d = lambda: util.checksignature(func)(ui, *args, **cmdoptions)
697 try:
697 try:
698 return runcommand(lui, repo, cmd, fullargs, ui, options, d,
698 return runcommand(lui, repo, cmd, fullargs, ui, options, d,
699 cmdpats, cmdoptions)
699 cmdpats, cmdoptions)
700 finally:
700 finally:
701 if repo and repo != req.repo:
701 if repo and repo != req.repo:
702 repo.close()
702 repo.close()
703
703
704 def lsprofile(ui, func, fp):
704 def lsprofile(ui, func, fp):
705 format = ui.config('profiling', 'format', default='text')
705 format = ui.config('profiling', 'format', default='text')
706 field = ui.config('profiling', 'sort', default='inlinetime')
706 field = ui.config('profiling', 'sort', default='inlinetime')
707 climit = ui.configint('profiling', 'nested', default=5)
707 climit = ui.configint('profiling', 'nested', default=5)
708
708
709 if format not in ['text', 'kcachegrind']:
709 if format not in ['text', 'kcachegrind']:
710 ui.warn(_("unrecognized profiling format '%s'"
710 ui.warn(_("unrecognized profiling format '%s'"
711 " - Ignored\n") % format)
711 " - Ignored\n") % format)
712 format = 'text'
712 format = 'text'
713
713
714 try:
714 try:
715 from mercurial import lsprof
715 from mercurial import lsprof
716 except ImportError:
716 except ImportError:
717 raise util.Abort(_(
717 raise util.Abort(_(
718 'lsprof not available - install from '
718 'lsprof not available - install from '
719 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
719 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
720 p = lsprof.Profiler()
720 p = lsprof.Profiler()
721 p.enable(subcalls=True)
721 p.enable(subcalls=True)
722 try:
722 try:
723 return func()
723 return func()
724 finally:
724 finally:
725 p.disable()
725 p.disable()
726
726
727 if format == 'kcachegrind':
727 if format == 'kcachegrind':
728 import lsprofcalltree
728 import lsprofcalltree
729 calltree = lsprofcalltree.KCacheGrind(p)
729 calltree = lsprofcalltree.KCacheGrind(p)
730 calltree.output(fp)
730 calltree.output(fp)
731 else:
731 else:
732 # format == 'text'
732 # format == 'text'
733 stats = lsprof.Stats(p.getstats())
733 stats = lsprof.Stats(p.getstats())
734 stats.sort(field)
734 stats.sort(field)
735 stats.pprint(limit=30, file=fp, climit=climit)
735 stats.pprint(limit=30, file=fp, climit=climit)
736
736
737 def statprofile(ui, func, fp):
737 def statprofile(ui, func, fp):
738 try:
738 try:
739 import statprof
739 import statprof
740 except ImportError:
740 except ImportError:
741 raise util.Abort(_(
741 raise util.Abort(_(
742 'statprof not available - install using "easy_install statprof"'))
742 'statprof not available - install using "easy_install statprof"'))
743
743
744 freq = ui.configint('profiling', 'freq', default=1000)
744 freq = ui.configint('profiling', 'freq', default=1000)
745 if freq > 0:
745 if freq > 0:
746 statprof.reset(freq)
746 statprof.reset(freq)
747 else:
747 else:
748 ui.warn(_("invalid sampling frequency '%s' - ignoring\n") % freq)
748 ui.warn(_("invalid sampling frequency '%s' - ignoring\n") % freq)
749
749
750 statprof.start()
750 statprof.start()
751 try:
751 try:
752 return func()
752 return func()
753 finally:
753 finally:
754 statprof.stop()
754 statprof.stop()
755 statprof.display(fp)
755 statprof.display(fp)
756
756
757 def _runcommand(ui, options, cmd, cmdfunc):
757 def _runcommand(ui, options, cmd, cmdfunc):
758 def checkargs():
758 def checkargs():
759 try:
759 try:
760 return cmdfunc()
760 return cmdfunc()
761 except error.SignatureError:
761 except error.SignatureError:
762 raise error.CommandError(cmd, _("invalid arguments"))
762 raise error.CommandError(cmd, _("invalid arguments"))
763
763
764 if options['profile']:
764 if options['profile']:
765 profiler = os.getenv('HGPROF')
765 profiler = os.getenv('HGPROF')
766 if profiler is None:
766 if profiler is None:
767 profiler = ui.config('profiling', 'type', default='ls')
767 profiler = ui.config('profiling', 'type', default='ls')
768 if profiler not in ('ls', 'stat'):
768 if profiler not in ('ls', 'stat'):
769 ui.warn(_("unrecognized profiler '%s' - ignored\n") % profiler)
769 ui.warn(_("unrecognized profiler '%s' - ignored\n") % profiler)
770 profiler = 'ls'
770 profiler = 'ls'
771
771
772 output = ui.config('profiling', 'output')
772 output = ui.config('profiling', 'output')
773
773
774 if output:
774 if output:
775 path = ui.expandpath(output)
775 path = ui.expandpath(output)
776 fp = open(path, 'wb')
776 fp = open(path, 'wb')
777 else:
777 else:
778 fp = sys.stderr
778 fp = sys.stderr
779
779
780 try:
780 try:
781 if profiler == 'ls':
781 if profiler == 'ls':
782 return lsprofile(ui, checkargs, fp)
782 return lsprofile(ui, checkargs, fp)
783 else:
783 else:
784 return statprofile(ui, checkargs, fp)
784 return statprofile(ui, checkargs, fp)
785 finally:
785 finally:
786 if output:
786 if output:
787 fp.close()
787 fp.close()
788 else:
788 else:
789 return checkargs()
789 return checkargs()
@@ -1,586 +1,586 b''
1 # hg.py - repository classes for mercurial
1 # hg.py - repository classes for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 from i18n import _
9 from i18n import _
10 from lock import release
10 from lock import release
11 from node import hex, nullid
11 from node import hex, nullid
12 import localrepo, bundlerepo, httprepo, sshrepo, statichttprepo, bookmarks
12 import localrepo, bundlerepo, httprepo, sshrepo, statichttprepo, bookmarks
13 import lock, util, extensions, error, node, scmutil
13 import lock, util, extensions, error, node, scmutil
14 import cmdutil, discovery
14 import cmdutil, discovery
15 import merge as mergemod
15 import merge as mergemod
16 import verify as verifymod
16 import verify as verifymod
17 import errno, os, shutil
17 import errno, os, shutil
18
18
19 def _local(path):
19 def _local(path):
20 path = util.expandpath(util.urllocalpath(path))
20 path = util.expandpath(util.urllocalpath(path))
21 return (os.path.isfile(path) and bundlerepo or localrepo)
21 return (os.path.isfile(path) and bundlerepo or localrepo)
22
22
23 def addbranchrevs(lrepo, repo, branches, revs):
23 def addbranchrevs(lrepo, repo, branches, revs):
24 hashbranch, branches = branches
24 hashbranch, branches = branches
25 if not hashbranch and not branches:
25 if not hashbranch and not branches:
26 return revs or None, revs and revs[0] or None
26 return revs or None, revs and revs[0] or None
27 revs = revs and list(revs) or []
27 revs = revs and list(revs) or []
28 if not repo.capable('branchmap'):
28 if not repo.capable('branchmap'):
29 if branches:
29 if branches:
30 raise util.Abort(_("remote branch lookup not supported"))
30 raise util.Abort(_("remote branch lookup not supported"))
31 revs.append(hashbranch)
31 revs.append(hashbranch)
32 return revs, revs[0]
32 return revs, revs[0]
33 branchmap = repo.branchmap()
33 branchmap = repo.branchmap()
34
34
35 def primary(branch):
35 def primary(branch):
36 if branch == '.':
36 if branch == '.':
37 if not lrepo or not lrepo.local():
37 if not lrepo or not lrepo.local():
38 raise util.Abort(_("dirstate branch not accessible"))
38 raise util.Abort(_("dirstate branch not accessible"))
39 branch = lrepo.dirstate.branch()
39 branch = lrepo.dirstate.branch()
40 if branch in branchmap:
40 if branch in branchmap:
41 revs.extend(node.hex(r) for r in reversed(branchmap[branch]))
41 revs.extend(node.hex(r) for r in reversed(branchmap[branch]))
42 return True
42 return True
43 else:
43 else:
44 return False
44 return False
45
45
46 for branch in branches:
46 for branch in branches:
47 if not primary(branch):
47 if not primary(branch):
48 raise error.RepoLookupError(_("unknown branch '%s'") % branch)
48 raise error.RepoLookupError(_("unknown branch '%s'") % branch)
49 if hashbranch:
49 if hashbranch:
50 if not primary(hashbranch):
50 if not primary(hashbranch):
51 revs.append(hashbranch)
51 revs.append(hashbranch)
52 return revs, revs[0]
52 return revs, revs[0]
53
53
54 def parseurl(path, branches=None):
54 def parseurl(path, branches=None):
55 '''parse url#branch, returning (url, (branch, branches))'''
55 '''parse url#branch, returning (url, (branch, branches))'''
56
56
57 u = util.url(path)
57 u = util.url(path)
58 branch = None
58 branch = None
59 if u.fragment:
59 if u.fragment:
60 branch = u.fragment
60 branch = u.fragment
61 u.fragment = None
61 u.fragment = None
62 return str(u), (branch, branches or [])
62 return str(u), (branch, branches or [])
63
63
64 schemes = {
64 schemes = {
65 'bundle': bundlerepo,
65 'bundle': bundlerepo,
66 'file': _local,
66 'file': _local,
67 'http': httprepo,
67 'http': httprepo,
68 'https': httprepo,
68 'https': httprepo,
69 'ssh': sshrepo,
69 'ssh': sshrepo,
70 'static-http': statichttprepo,
70 'static-http': statichttprepo,
71 }
71 }
72
72
73 def _peerlookup(path):
73 def _peerlookup(path):
74 u = util.url(path)
74 u = util.url(path)
75 scheme = u.scheme or 'file'
75 scheme = u.scheme or 'file'
76 thing = schemes.get(scheme) or schemes['file']
76 thing = schemes.get(scheme) or schemes['file']
77 try:
77 try:
78 return thing(path)
78 return thing(path)
79 except TypeError:
79 except TypeError:
80 return thing
80 return thing
81
81
82 def islocal(repo):
82 def islocal(repo):
83 '''return true if repo or path is local'''
83 '''return true if repo or path is local'''
84 if isinstance(repo, str):
84 if isinstance(repo, str):
85 try:
85 try:
86 return _peerlookup(repo).islocal(repo)
86 return _peerlookup(repo).islocal(repo)
87 except AttributeError:
87 except AttributeError:
88 return False
88 return False
89 return repo.local()
89 return repo.local()
90
90
91 def repository(ui, path='', create=False):
91 def repository(ui, path='', create=False):
92 """return a repository object for the specified path"""
92 """return a repository object for the specified path"""
93 repo = _peerlookup(path).instance(ui, path, create)
93 repo = _peerlookup(path).instance(ui, path, create)
94 ui = getattr(repo, "ui", ui)
94 ui = getattr(repo, "ui", ui)
95 for name, module in extensions.extensions():
95 for name, module in extensions.extensions():
96 hook = getattr(module, 'reposetup', None)
96 hook = getattr(module, 'reposetup', None)
97 if hook:
97 if hook:
98 hook(ui, repo)
98 hook(ui, repo)
99 return repo
99 return repo
100
100
101 def peer(uiorrepo, opts, path, create=False):
101 def peer(uiorrepo, opts, path, create=False):
102 '''return a repository peer for the specified path'''
102 '''return a repository peer for the specified path'''
103 rui = remoteui(uiorrepo, opts)
103 rui = remoteui(uiorrepo, opts)
104 return repository(rui, path, create)
104 return repository(rui, path, create)
105
105
106 def defaultdest(source):
106 def defaultdest(source):
107 '''return default destination of clone if none is given'''
107 '''return default destination of clone if none is given'''
108 return os.path.basename(os.path.normpath(source))
108 return os.path.basename(os.path.normpath(source))
109
109
110 def share(ui, source, dest=None, update=True):
110 def share(ui, source, dest=None, update=True):
111 '''create a shared repository'''
111 '''create a shared repository'''
112
112
113 if not islocal(source):
113 if not islocal(source):
114 raise util.Abort(_('can only share local repositories'))
114 raise util.Abort(_('can only share local repositories'))
115
115
116 if not dest:
116 if not dest:
117 dest = defaultdest(source)
117 dest = defaultdest(source)
118 else:
118 else:
119 dest = ui.expandpath(dest)
119 dest = ui.expandpath(dest)
120
120
121 if isinstance(source, str):
121 if isinstance(source, str):
122 origsource = ui.expandpath(source)
122 origsource = ui.expandpath(source)
123 source, branches = parseurl(origsource)
123 source, branches = parseurl(origsource)
124 srcrepo = repository(ui, source)
124 srcrepo = repository(ui, source)
125 rev, checkout = addbranchrevs(srcrepo, srcrepo, branches, None)
125 rev, checkout = addbranchrevs(srcrepo, srcrepo, branches, None)
126 else:
126 else:
127 srcrepo = source
127 srcrepo = source
128 origsource = source = srcrepo.url()
128 origsource = source = srcrepo.url()
129 checkout = None
129 checkout = None
130
130
131 sharedpath = srcrepo.sharedpath # if our source is already sharing
131 sharedpath = srcrepo.sharedpath # if our source is already sharing
132
132
133 root = os.path.realpath(dest)
133 root = os.path.realpath(dest)
134 roothg = os.path.join(root, '.hg')
134 roothg = os.path.join(root, '.hg')
135
135
136 if os.path.exists(roothg):
136 if os.path.exists(roothg):
137 raise util.Abort(_('destination already exists'))
137 raise util.Abort(_('destination already exists'))
138
138
139 if not os.path.isdir(root):
139 if not os.path.isdir(root):
140 os.mkdir(root)
140 os.mkdir(root)
141 util.makedir(roothg, notindexed=True)
141 util.makedir(roothg, notindexed=True)
142
142
143 requirements = ''
143 requirements = ''
144 try:
144 try:
145 requirements = srcrepo.opener.read('requires')
145 requirements = srcrepo.opener.read('requires')
146 except IOError, inst:
146 except IOError, inst:
147 if inst.errno != errno.ENOENT:
147 if inst.errno != errno.ENOENT:
148 raise
148 raise
149
149
150 requirements += 'shared\n'
150 requirements += 'shared\n'
151 util.writefile(os.path.join(roothg, 'requires'), requirements)
151 util.writefile(os.path.join(roothg, 'requires'), requirements)
152 util.writefile(os.path.join(roothg, 'sharedpath'), sharedpath)
152 util.writefile(os.path.join(roothg, 'sharedpath'), sharedpath)
153
153
154 r = repository(ui, root)
154 r = repository(ui, root)
155
155
156 default = srcrepo.ui.config('paths', 'default')
156 default = srcrepo.ui.config('paths', 'default')
157 if default:
157 if default:
158 fp = r.opener("hgrc", "w", text=True)
158 fp = r.opener("hgrc", "w", text=True)
159 fp.write("[paths]\n")
159 fp.write("[paths]\n")
160 fp.write("default = %s\n" % default)
160 fp.write("default = %s\n" % default)
161 fp.close()
161 fp.close()
162
162
163 if update:
163 if update:
164 r.ui.status(_("updating working directory\n"))
164 r.ui.status(_("updating working directory\n"))
165 if update is not True:
165 if update is not True:
166 checkout = update
166 checkout = update
167 for test in (checkout, 'default', 'tip'):
167 for test in (checkout, 'default', 'tip'):
168 if test is None:
168 if test is None:
169 continue
169 continue
170 try:
170 try:
171 uprev = r.lookup(test)
171 uprev = r.lookup(test)
172 break
172 break
173 except error.RepoLookupError:
173 except error.RepoLookupError:
174 continue
174 continue
175 _update(r, uprev)
175 _update(r, uprev)
176
176
177 def copystore(ui, srcrepo, destpath):
177 def copystore(ui, srcrepo, destpath):
178 '''copy files from store of srcrepo in destpath
178 '''copy files from store of srcrepo in destpath
179
179
180 returns destlock
180 returns destlock
181 '''
181 '''
182 destlock = None
182 destlock = None
183 try:
183 try:
184 hardlink = None
184 hardlink = None
185 num = 0
185 num = 0
186 srcpublishing = srcrepo.ui.configbool('phases', 'publish', True)
186 srcpublishing = srcrepo.ui.configbool('phases', 'publish', True)
187 for f in srcrepo.store.copylist():
187 for f in srcrepo.store.copylist():
188 if srcpublishing and f.endswith('phaseroots'):
188 if srcpublishing and f.endswith('phaseroots'):
189 continue
189 continue
190 src = os.path.join(srcrepo.sharedpath, f)
190 src = os.path.join(srcrepo.sharedpath, f)
191 dst = os.path.join(destpath, f)
191 dst = os.path.join(destpath, f)
192 dstbase = os.path.dirname(dst)
192 dstbase = os.path.dirname(dst)
193 if dstbase and not os.path.exists(dstbase):
193 if dstbase and not os.path.exists(dstbase):
194 os.mkdir(dstbase)
194 os.mkdir(dstbase)
195 if os.path.exists(src):
195 if os.path.exists(src):
196 if dst.endswith('data'):
196 if dst.endswith('data'):
197 # lock to avoid premature writing to the target
197 # lock to avoid premature writing to the target
198 destlock = lock.lock(os.path.join(dstbase, "lock"))
198 destlock = lock.lock(os.path.join(dstbase, "lock"))
199 hardlink, n = util.copyfiles(src, dst, hardlink)
199 hardlink, n = util.copyfiles(src, dst, hardlink)
200 num += n
200 num += n
201 if hardlink:
201 if hardlink:
202 ui.debug("linked %d files\n" % num)
202 ui.debug("linked %d files\n" % num)
203 else:
203 else:
204 ui.debug("copied %d files\n" % num)
204 ui.debug("copied %d files\n" % num)
205 return destlock
205 return destlock
206 except:
206 except: # re-raises
207 release(destlock)
207 release(destlock)
208 raise
208 raise
209
209
210 def clone(ui, peeropts, source, dest=None, pull=False, rev=None,
210 def clone(ui, peeropts, source, dest=None, pull=False, rev=None,
211 update=True, stream=False, branch=None):
211 update=True, stream=False, branch=None):
212 """Make a copy of an existing repository.
212 """Make a copy of an existing repository.
213
213
214 Create a copy of an existing repository in a new directory. The
214 Create a copy of an existing repository in a new directory. The
215 source and destination are URLs, as passed to the repository
215 source and destination are URLs, as passed to the repository
216 function. Returns a pair of repository objects, the source and
216 function. Returns a pair of repository objects, the source and
217 newly created destination.
217 newly created destination.
218
218
219 The location of the source is added to the new repository's
219 The location of the source is added to the new repository's
220 .hg/hgrc file, as the default to be used for future pulls and
220 .hg/hgrc file, as the default to be used for future pulls and
221 pushes.
221 pushes.
222
222
223 If an exception is raised, the partly cloned/updated destination
223 If an exception is raised, the partly cloned/updated destination
224 repository will be deleted.
224 repository will be deleted.
225
225
226 Arguments:
226 Arguments:
227
227
228 source: repository object or URL
228 source: repository object or URL
229
229
230 dest: URL of destination repository to create (defaults to base
230 dest: URL of destination repository to create (defaults to base
231 name of source repository)
231 name of source repository)
232
232
233 pull: always pull from source repository, even in local case
233 pull: always pull from source repository, even in local case
234
234
235 stream: stream raw data uncompressed from repository (fast over
235 stream: stream raw data uncompressed from repository (fast over
236 LAN, slow over WAN)
236 LAN, slow over WAN)
237
237
238 rev: revision to clone up to (implies pull=True)
238 rev: revision to clone up to (implies pull=True)
239
239
240 update: update working directory after clone completes, if
240 update: update working directory after clone completes, if
241 destination is local repository (True means update to default rev,
241 destination is local repository (True means update to default rev,
242 anything else is treated as a revision)
242 anything else is treated as a revision)
243
243
244 branch: branches to clone
244 branch: branches to clone
245 """
245 """
246
246
247 if isinstance(source, str):
247 if isinstance(source, str):
248 origsource = ui.expandpath(source)
248 origsource = ui.expandpath(source)
249 source, branch = parseurl(origsource, branch)
249 source, branch = parseurl(origsource, branch)
250 srcrepo = repository(remoteui(ui, peeropts), source)
250 srcrepo = repository(remoteui(ui, peeropts), source)
251 else:
251 else:
252 srcrepo = source
252 srcrepo = source
253 branch = (None, branch or [])
253 branch = (None, branch or [])
254 origsource = source = srcrepo.url()
254 origsource = source = srcrepo.url()
255 rev, checkout = addbranchrevs(srcrepo, srcrepo, branch, rev)
255 rev, checkout = addbranchrevs(srcrepo, srcrepo, branch, rev)
256
256
257 if dest is None:
257 if dest is None:
258 dest = defaultdest(source)
258 dest = defaultdest(source)
259 ui.status(_("destination directory: %s\n") % dest)
259 ui.status(_("destination directory: %s\n") % dest)
260 else:
260 else:
261 dest = ui.expandpath(dest)
261 dest = ui.expandpath(dest)
262
262
263 dest = util.urllocalpath(dest)
263 dest = util.urllocalpath(dest)
264 source = util.urllocalpath(source)
264 source = util.urllocalpath(source)
265
265
266 if os.path.exists(dest):
266 if os.path.exists(dest):
267 if not os.path.isdir(dest):
267 if not os.path.isdir(dest):
268 raise util.Abort(_("destination '%s' already exists") % dest)
268 raise util.Abort(_("destination '%s' already exists") % dest)
269 elif os.listdir(dest):
269 elif os.listdir(dest):
270 raise util.Abort(_("destination '%s' is not empty") % dest)
270 raise util.Abort(_("destination '%s' is not empty") % dest)
271
271
272 class DirCleanup(object):
272 class DirCleanup(object):
273 def __init__(self, dir_):
273 def __init__(self, dir_):
274 self.rmtree = shutil.rmtree
274 self.rmtree = shutil.rmtree
275 self.dir_ = dir_
275 self.dir_ = dir_
276 def close(self):
276 def close(self):
277 self.dir_ = None
277 self.dir_ = None
278 def cleanup(self):
278 def cleanup(self):
279 if self.dir_:
279 if self.dir_:
280 self.rmtree(self.dir_, True)
280 self.rmtree(self.dir_, True)
281
281
282 srclock = destlock = dircleanup = None
282 srclock = destlock = dircleanup = None
283 try:
283 try:
284 abspath = origsource
284 abspath = origsource
285 if islocal(origsource):
285 if islocal(origsource):
286 abspath = os.path.abspath(util.urllocalpath(origsource))
286 abspath = os.path.abspath(util.urllocalpath(origsource))
287
287
288 if islocal(dest):
288 if islocal(dest):
289 dircleanup = DirCleanup(dest)
289 dircleanup = DirCleanup(dest)
290
290
291 copy = False
291 copy = False
292 if srcrepo.cancopy() and islocal(dest) and not srcrepo.revs("secret()"):
292 if srcrepo.cancopy() and islocal(dest) and not srcrepo.revs("secret()"):
293 copy = not pull and not rev
293 copy = not pull and not rev
294
294
295 if copy:
295 if copy:
296 try:
296 try:
297 # we use a lock here because if we race with commit, we
297 # we use a lock here because if we race with commit, we
298 # can end up with extra data in the cloned revlogs that's
298 # can end up with extra data in the cloned revlogs that's
299 # not pointed to by changesets, thus causing verify to
299 # not pointed to by changesets, thus causing verify to
300 # fail
300 # fail
301 srclock = srcrepo.lock(wait=False)
301 srclock = srcrepo.lock(wait=False)
302 except error.LockError:
302 except error.LockError:
303 copy = False
303 copy = False
304
304
305 if copy:
305 if copy:
306 srcrepo.hook('preoutgoing', throw=True, source='clone')
306 srcrepo.hook('preoutgoing', throw=True, source='clone')
307 hgdir = os.path.realpath(os.path.join(dest, ".hg"))
307 hgdir = os.path.realpath(os.path.join(dest, ".hg"))
308 if not os.path.exists(dest):
308 if not os.path.exists(dest):
309 os.mkdir(dest)
309 os.mkdir(dest)
310 else:
310 else:
311 # only clean up directories we create ourselves
311 # only clean up directories we create ourselves
312 dircleanup.dir_ = hgdir
312 dircleanup.dir_ = hgdir
313 try:
313 try:
314 destpath = hgdir
314 destpath = hgdir
315 util.makedir(destpath, notindexed=True)
315 util.makedir(destpath, notindexed=True)
316 except OSError, inst:
316 except OSError, inst:
317 if inst.errno == errno.EEXIST:
317 if inst.errno == errno.EEXIST:
318 dircleanup.close()
318 dircleanup.close()
319 raise util.Abort(_("destination '%s' already exists")
319 raise util.Abort(_("destination '%s' already exists")
320 % dest)
320 % dest)
321 raise
321 raise
322
322
323 destlock = copystore(ui, srcrepo, destpath)
323 destlock = copystore(ui, srcrepo, destpath)
324
324
325 # we need to re-init the repo after manually copying the data
325 # we need to re-init the repo after manually copying the data
326 # into it
326 # into it
327 destrepo = repository(remoteui(ui, peeropts), dest)
327 destrepo = repository(remoteui(ui, peeropts), dest)
328 srcrepo.hook('outgoing', source='clone',
328 srcrepo.hook('outgoing', source='clone',
329 node=node.hex(node.nullid))
329 node=node.hex(node.nullid))
330 else:
330 else:
331 try:
331 try:
332 destrepo = repository(remoteui(ui, peeropts), dest,
332 destrepo = repository(remoteui(ui, peeropts), dest,
333 create=True)
333 create=True)
334 except OSError, inst:
334 except OSError, inst:
335 if inst.errno == errno.EEXIST:
335 if inst.errno == errno.EEXIST:
336 dircleanup.close()
336 dircleanup.close()
337 raise util.Abort(_("destination '%s' already exists")
337 raise util.Abort(_("destination '%s' already exists")
338 % dest)
338 % dest)
339 raise
339 raise
340
340
341 revs = None
341 revs = None
342 if rev:
342 if rev:
343 if not srcrepo.capable('lookup'):
343 if not srcrepo.capable('lookup'):
344 raise util.Abort(_("src repository does not support "
344 raise util.Abort(_("src repository does not support "
345 "revision lookup and so doesn't "
345 "revision lookup and so doesn't "
346 "support clone by revision"))
346 "support clone by revision"))
347 revs = [srcrepo.lookup(r) for r in rev]
347 revs = [srcrepo.lookup(r) for r in rev]
348 checkout = revs[0]
348 checkout = revs[0]
349 if destrepo.local():
349 if destrepo.local():
350 destrepo.clone(srcrepo, heads=revs, stream=stream)
350 destrepo.clone(srcrepo, heads=revs, stream=stream)
351 elif srcrepo.local():
351 elif srcrepo.local():
352 srcrepo.push(destrepo, revs=revs)
352 srcrepo.push(destrepo, revs=revs)
353 else:
353 else:
354 raise util.Abort(_("clone from remote to remote not supported"))
354 raise util.Abort(_("clone from remote to remote not supported"))
355
355
356 if dircleanup:
356 if dircleanup:
357 dircleanup.close()
357 dircleanup.close()
358
358
359 # clone all bookmarks except divergent ones
359 # clone all bookmarks except divergent ones
360 if destrepo.local() and srcrepo.capable("pushkey"):
360 if destrepo.local() and srcrepo.capable("pushkey"):
361 rb = srcrepo.listkeys('bookmarks')
361 rb = srcrepo.listkeys('bookmarks')
362 for k, n in rb.iteritems():
362 for k, n in rb.iteritems():
363 try:
363 try:
364 m = destrepo.lookup(n)
364 m = destrepo.lookup(n)
365 destrepo._bookmarks[k] = m
365 destrepo._bookmarks[k] = m
366 except error.RepoLookupError:
366 except error.RepoLookupError:
367 pass
367 pass
368 if rb:
368 if rb:
369 bookmarks.write(destrepo)
369 bookmarks.write(destrepo)
370 elif srcrepo.local() and destrepo.capable("pushkey"):
370 elif srcrepo.local() and destrepo.capable("pushkey"):
371 for k, n in srcrepo._bookmarks.iteritems():
371 for k, n in srcrepo._bookmarks.iteritems():
372 destrepo.pushkey('bookmarks', k, '', hex(n))
372 destrepo.pushkey('bookmarks', k, '', hex(n))
373
373
374 if destrepo.local():
374 if destrepo.local():
375 fp = destrepo.opener("hgrc", "w", text=True)
375 fp = destrepo.opener("hgrc", "w", text=True)
376 fp.write("[paths]\n")
376 fp.write("[paths]\n")
377 u = util.url(abspath)
377 u = util.url(abspath)
378 u.passwd = None
378 u.passwd = None
379 defaulturl = str(u)
379 defaulturl = str(u)
380 fp.write("default = %s\n" % defaulturl)
380 fp.write("default = %s\n" % defaulturl)
381 fp.close()
381 fp.close()
382
382
383 destrepo.ui.setconfig('paths', 'default', defaulturl)
383 destrepo.ui.setconfig('paths', 'default', defaulturl)
384
384
385 if update:
385 if update:
386 if update is not True:
386 if update is not True:
387 checkout = update
387 checkout = update
388 if srcrepo.local():
388 if srcrepo.local():
389 checkout = srcrepo.lookup(update)
389 checkout = srcrepo.lookup(update)
390 for test in (checkout, 'default', 'tip'):
390 for test in (checkout, 'default', 'tip'):
391 if test is None:
391 if test is None:
392 continue
392 continue
393 try:
393 try:
394 uprev = destrepo.lookup(test)
394 uprev = destrepo.lookup(test)
395 break
395 break
396 except error.RepoLookupError:
396 except error.RepoLookupError:
397 continue
397 continue
398 bn = destrepo[uprev].branch()
398 bn = destrepo[uprev].branch()
399 destrepo.ui.status(_("updating to branch %s\n") % bn)
399 destrepo.ui.status(_("updating to branch %s\n") % bn)
400 _update(destrepo, uprev)
400 _update(destrepo, uprev)
401
401
402 return srcrepo, destrepo
402 return srcrepo, destrepo
403 finally:
403 finally:
404 release(srclock, destlock)
404 release(srclock, destlock)
405 if dircleanup is not None:
405 if dircleanup is not None:
406 dircleanup.cleanup()
406 dircleanup.cleanup()
407 if srcrepo is not None:
407 if srcrepo is not None:
408 srcrepo.close()
408 srcrepo.close()
409
409
def _showstats(repo, stats):
    """Print a one-line summary of an update/merge result.

    stats is the (updated, merged, removed, unresolved) 4-tuple
    returned by mergemod.update().
    """
    msg = _("%d files updated, %d files merged, "
            "%d files removed, %d files unresolved\n")
    repo.ui.status(msg % stats)
413
413
def update(repo, node):
    """update the working directory to node, merging linear changes"""
    # branchmerge=False, force=False: a plain (non-destructive) update
    stats = mergemod.update(repo, node, False, False, None)
    _showstats(repo, stats)
    unresolved = stats[3]
    if unresolved:
        repo.ui.status(_("use 'hg resolve' to retry unresolved file merges\n"))
    # non-zero unresolved count signals failure to the caller
    return unresolved > 0
421
421
# naming conflict in clone(): clone() rebinds the name 'update' locally,
# so keep a module-private alias to the function defined above
_update = update
424
424
def clean(repo, node, show_stats=True):
    """forcibly switch the working directory to node, clobbering changes"""
    # force=True discards local modifications outright
    mergestats = mergemod.update(repo, node, False, True, None)
    if show_stats:
        _showstats(repo, mergestats)
    return mergestats[3] > 0
431
431
def merge(repo, node, force=None, remind=True):
    """Branch merge with node, resolving changes. Return true if any
    unresolved conflicts."""
    # branchmerge=True performs a real merge with the working parent
    mergestats = mergemod.update(repo, node, True, force, False)
    _showstats(repo, mergestats)
    unresolved = mergestats[3]
    if unresolved:
        repo.ui.status(_("use 'hg resolve' to retry unresolved file merges "
                         "or 'hg update -C .' to abandon\n"))
    elif remind:
        repo.ui.status(_("(branch merge, don't forget to commit)\n"))
    return unresolved > 0
443
443
def _incoming(displaychlist, subreporecurse, ui, repo, source,
        opts, buffered=False):
    """
    Helper for incoming / gincoming.
    displaychlist gets called with
        (remoterepo, incomingchangesetlist, displayer) parameters,
    and is supposed to contain only code that can't be unified.
    """
    # resolve the source URL and any #branch fragment it carries
    source, branches = parseurl(ui.expandpath(source), opts.get('branch'))
    other = peer(repo, opts, source)
    ui.status(_('comparing with %s\n') % util.hidepassword(source))
    revs, checkout = addbranchrevs(repo, other, branches, opts.get('rev'))

    if revs:
        revs = [other.lookup(rev) for rev in revs]
    # getremotechanges may wrap 'other' in a bundle repo; cleanupfn must
    # run whether or not display succeeds, hence the try/finally below
    other, chlist, cleanupfn = bundlerepo.getremotechanges(ui, repo, other,
                                revs, opts["bundle"], opts["force"])
    try:
        if not chlist:
            ui.status(_("no changes found\n"))
            return subreporecurse()

        displayer = cmdutil.show_changeset(ui, other, opts, buffered)

        # XXX once graphlog extension makes it into core,
        # should be replaced by a if graph/else
        displaychlist(other, chlist, displayer)

        displayer.close()
    finally:
        cleanupfn()
    subreporecurse()
    return 0 # exit code is zero since we found incoming changes
477
477
def incoming(ui, repo, source, opts):
    """Show new changesets found in source; return 0 if any were found."""
    def subreporecurse():
        ret = 1
        if opts.get('subrepos'):
            ctx = repo[None]
            for subpath in sorted(ctx.substate):
                sub = ctx.sub(subpath)
                ret = min(ret, sub.incoming(ui, source, opts))
        return ret

    def display(other, chlist, displayer):
        limit = cmdutil.loglimit(opts)
        if opts.get('newest_first'):
            chlist.reverse()
        skipmerges = opts.get('no_merges')
        shown = 0
        for ctxnode in chlist:
            if limit is not None and shown >= limit:
                break
            parents = [p for p in other.changelog.parents(ctxnode)
                       if p != nullid]
            # a changeset with two non-null parents is a merge
            if skipmerges and len(parents) == 2:
                continue
            shown += 1
            displayer.show(other[ctxnode])
    return _incoming(display, subreporecurse, ui, repo, source, opts)
502
502
def _outgoing(ui, repo, dest, opts):
    """Return the nodes missing from dest, or None when nothing is missing."""
    dest = ui.expandpath(dest or 'default-push', dest or 'default')
    dest, branches = parseurl(dest, opts.get('branch'))
    ui.status(_('comparing with %s\n') % util.hidepassword(dest))
    revs, checkout = addbranchrevs(repo, repo, branches, opts.get('rev'))
    if revs:
        revs = [repo.lookup(r) for r in revs]

    other = peer(repo, opts, dest)
    outgoing = discovery.findcommonoutgoing(repo, other, revs,
                                            force=opts.get('force'))
    missing = outgoing.missing
    if not missing:
        scmutil.nochangesfound(repo.ui, outgoing.excluded)
        return None
    return missing
519
519
def outgoing(ui, repo, dest, opts):
    """Show changesets not found in dest; return 0 if any were shown."""
    def recurse():
        ret = 1
        if opts.get('subrepos'):
            ctx = repo[None]
            for subpath in sorted(ctx.substate):
                sub = ctx.sub(subpath)
                ret = min(ret, sub.outgoing(ui, dest, opts))
        return ret

    limit = cmdutil.loglimit(opts)
    missing = _outgoing(ui, repo, dest, opts)
    if missing is None:
        # nothing outgoing here; still give subrepos a chance
        return recurse()

    if opts.get('newest_first'):
        missing.reverse()
    displayer = cmdutil.show_changeset(ui, repo, opts)
    skipmerges = opts.get('no_merges')
    shown = 0
    for ctxnode in missing:
        if limit is not None and shown >= limit:
            break
        parents = [p for p in repo.changelog.parents(ctxnode)
                   if p != nullid]
        # a changeset with two non-null parents is a merge
        if skipmerges and len(parents) == 2:
            continue
        shown += 1
        displayer.show(repo[ctxnode])
    displayer.close()
    recurse()
    return 0 # exit code is zero since we found outgoing changes
550
550
def revert(repo, node, choose):
    """revert changes to revision in node without updating dirstate"""
    stats = mergemod.update(repo, node, False, True, choose)
    # index 3 counts unresolved files; any means failure
    return stats[3] > 0
554
554
def verify(repo):
    """verify the consistency of a repository"""
    # delegates entirely to the verify module; returns its exit status
    return verifymod.verify(repo)
558
558
def remoteui(src, opts):
    'build a remote ui from ui or repo and opts'
    if util.safehasattr(src, 'baseui'): # looks like a repository
        dst = src.baseui.copy() # drop repo-specific config
        src = src.ui # copy target options from repo
    else: # assume it's a global ui object
        dst = src.copy() # keep all global options

    # copy ssh-specific options
    for opt in 'ssh', 'remotecmd':
        value = opts.get(opt) or src.config('ui', opt)
        if value:
            dst.setconfig("ui", opt, value)

    # copy bundle-specific options
    reporoot = src.config('bundle', 'mainreporoot')
    if reporoot:
        dst.setconfig('bundle', 'mainreporoot', reporoot)

    # copy selected local settings to the remote ui
    for sect in ('auth', 'hostfingerprints', 'http_proxy'):
        for key, val in src.configitems(sect):
            dst.setconfig(sect, key, val)
    cacerts = src.config('web', 'cacerts')
    if cacerts:
        dst.setconfig('web', 'cacerts', util.expandpath(cacerts))

    return dst
@@ -1,764 +1,764 b''
1 # This library is free software; you can redistribute it and/or
1 # This library is free software; you can redistribute it and/or
2 # modify it under the terms of the GNU Lesser General Public
2 # modify it under the terms of the GNU Lesser General Public
3 # License as published by the Free Software Foundation; either
3 # License as published by the Free Software Foundation; either
4 # version 2.1 of the License, or (at your option) any later version.
4 # version 2.1 of the License, or (at your option) any later version.
5 #
5 #
6 # This library is distributed in the hope that it will be useful,
6 # This library is distributed in the hope that it will be useful,
7 # but WITHOUT ANY WARRANTY; without even the implied warranty of
7 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
8 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
9 # Lesser General Public License for more details.
9 # Lesser General Public License for more details.
10 #
10 #
11 # You should have received a copy of the GNU Lesser General Public
11 # You should have received a copy of the GNU Lesser General Public
12 # License along with this library; if not, see
12 # License along with this library; if not, see
13 # <http://www.gnu.org/licenses/>.
13 # <http://www.gnu.org/licenses/>.
14
14
15 # This file is part of urlgrabber, a high-level cross-protocol url-grabber
15 # This file is part of urlgrabber, a high-level cross-protocol url-grabber
16 # Copyright 2002-2004 Michael D. Stenner, Ryan Tomayko
16 # Copyright 2002-2004 Michael D. Stenner, Ryan Tomayko
17
17
18 # Modified by Benoit Boissinot:
18 # Modified by Benoit Boissinot:
19 # - fix for digest auth (inspired from urllib2.py @ Python v2.4)
19 # - fix for digest auth (inspired from urllib2.py @ Python v2.4)
20 # Modified by Dirkjan Ochtman:
20 # Modified by Dirkjan Ochtman:
21 # - import md5 function from a local util module
21 # - import md5 function from a local util module
22 # Modified by Martin Geisler:
22 # Modified by Martin Geisler:
23 # - moved md5 function from local util module to this module
23 # - moved md5 function from local util module to this module
24 # Modified by Augie Fackler:
24 # Modified by Augie Fackler:
25 # - add safesend method and use it to prevent broken pipe errors
25 # - add safesend method and use it to prevent broken pipe errors
26 # on large POST requests
26 # on large POST requests
27
27
28 """An HTTP handler for urllib2 that supports HTTP 1.1 and keepalive.
28 """An HTTP handler for urllib2 that supports HTTP 1.1 and keepalive.
29
29
30 >>> import urllib2
30 >>> import urllib2
31 >>> from keepalive import HTTPHandler
31 >>> from keepalive import HTTPHandler
32 >>> keepalive_handler = HTTPHandler()
32 >>> keepalive_handler = HTTPHandler()
33 >>> opener = urllib2.build_opener(keepalive_handler)
33 >>> opener = urllib2.build_opener(keepalive_handler)
34 >>> urllib2.install_opener(opener)
34 >>> urllib2.install_opener(opener)
35 >>>
35 >>>
36 >>> fo = urllib2.urlopen('http://www.python.org')
36 >>> fo = urllib2.urlopen('http://www.python.org')
37
37
38 If a connection to a given host is requested, and all of the existing
38 If a connection to a given host is requested, and all of the existing
39 connections are still in use, another connection will be opened. If
39 connections are still in use, another connection will be opened. If
40 the handler tries to use an existing connection but it fails in some
40 the handler tries to use an existing connection but it fails in some
41 way, it will be closed and removed from the pool.
41 way, it will be closed and removed from the pool.
42
42
43 To remove the handler, simply re-run build_opener with no arguments, and
43 To remove the handler, simply re-run build_opener with no arguments, and
44 install that opener.
44 install that opener.
45
45
46 You can explicitly close connections by using the close_connection()
46 You can explicitly close connections by using the close_connection()
47 method of the returned file-like object (described below) or you can
47 method of the returned file-like object (described below) or you can
48 use the handler methods:
48 use the handler methods:
49
49
50 close_connection(host)
50 close_connection(host)
51 close_all()
51 close_all()
52 open_connections()
52 open_connections()
53
53
54 NOTE: using the close_connection and close_all methods of the handler
54 NOTE: using the close_connection and close_all methods of the handler
55 should be done with care when using multiple threads.
55 should be done with care when using multiple threads.
56 * there is nothing that prevents another thread from creating new
56 * there is nothing that prevents another thread from creating new
57 connections immediately after connections are closed
57 connections immediately after connections are closed
58 * no checks are done to prevent in-use connections from being closed
58 * no checks are done to prevent in-use connections from being closed
59
59
60 >>> keepalive_handler.close_all()
60 >>> keepalive_handler.close_all()
61
61
62 EXTRA ATTRIBUTES AND METHODS
62 EXTRA ATTRIBUTES AND METHODS
63
63
64 Upon a status of 200, the object returned has a few additional
64 Upon a status of 200, the object returned has a few additional
65 attributes and methods, which should not be used if you want to
65 attributes and methods, which should not be used if you want to
66 remain consistent with the normal urllib2-returned objects:
66 remain consistent with the normal urllib2-returned objects:
67
67
68 close_connection() - close the connection to the host
68 close_connection() - close the connection to the host
69 readlines() - you know, readlines()
69 readlines() - you know, readlines()
70 status - the return status (ie 404)
70 status - the return status (ie 404)
71 reason - english translation of status (ie 'File not found')
71 reason - english translation of status (ie 'File not found')
72
72
73 If you want the best of both worlds, use this inside an
73 If you want the best of both worlds, use this inside an
74 AttributeError-catching try:
74 AttributeError-catching try:
75
75
76 >>> try: status = fo.status
76 >>> try: status = fo.status
77 >>> except AttributeError: status = None
77 >>> except AttributeError: status = None
78
78
79 Unfortunately, these are ONLY there if status == 200, so it's not
79 Unfortunately, these are ONLY there if status == 200, so it's not
80 easy to distinguish between non-200 responses. The reason is that
80 easy to distinguish between non-200 responses. The reason is that
81 urllib2 tries to do clever things with error codes 301, 302, 401,
81 urllib2 tries to do clever things with error codes 301, 302, 401,
82 and 407, and it wraps the object upon return.
82 and 407, and it wraps the object upon return.
83
83
84 For python versions earlier than 2.4, you can avoid this fancy error
84 For python versions earlier than 2.4, you can avoid this fancy error
85 handling by setting the module-level global HANDLE_ERRORS to zero.
85 handling by setting the module-level global HANDLE_ERRORS to zero.
86 You see, prior to 2.4, it's the HTTP Handler's job to determine what
86 You see, prior to 2.4, it's the HTTP Handler's job to determine what
87 to handle specially, and what to just pass up. HANDLE_ERRORS == 0
87 to handle specially, and what to just pass up. HANDLE_ERRORS == 0
88 means "pass everything up". In python 2.4, however, this job no
88 means "pass everything up". In python 2.4, however, this job no
89 longer belongs to the HTTP Handler and is now done by a NEW handler,
89 longer belongs to the HTTP Handler and is now done by a NEW handler,
90 HTTPErrorProcessor. Here's the bottom line:
90 HTTPErrorProcessor. Here's the bottom line:
91
91
92 python version < 2.4
92 python version < 2.4
93 HANDLE_ERRORS == 1 (default) pass up 200, treat the rest as
93 HANDLE_ERRORS == 1 (default) pass up 200, treat the rest as
94 errors
94 errors
95 HANDLE_ERRORS == 0 pass everything up, error processing is
95 HANDLE_ERRORS == 0 pass everything up, error processing is
96 left to the calling code
96 left to the calling code
97 python version >= 2.4
97 python version >= 2.4
98 HANDLE_ERRORS == 1 pass up 200, treat the rest as errors
98 HANDLE_ERRORS == 1 pass up 200, treat the rest as errors
99 HANDLE_ERRORS == 0 (default) pass everything up, let the
99 HANDLE_ERRORS == 0 (default) pass everything up, let the
100 other handlers (specifically,
100 other handlers (specifically,
101 HTTPErrorProcessor) decide what to do
101 HTTPErrorProcessor) decide what to do
102
102
103 In practice, setting the variable either way makes little difference
103 In practice, setting the variable either way makes little difference
104 in python 2.4, so for the most consistent behavior across versions,
104 in python 2.4, so for the most consistent behavior across versions,
105 you probably just want to use the defaults, which will give you
105 you probably just want to use the defaults, which will give you
106 exceptions on errors.
106 exceptions on errors.
107
107
108 """
108 """
109
109
110 # $Id: keepalive.py,v 1.14 2006/04/04 21:00:32 mstenner Exp $
110 # $Id: keepalive.py,v 1.14 2006/04/04 21:00:32 mstenner Exp $
111
111
112 import errno
112 import errno
113 import httplib
113 import httplib
114 import socket
114 import socket
115 import thread
115 import thread
116 import urllib2
116 import urllib2
117
117
# module-level debug hook; when set, handlers call DEBUG.info()/error()
DEBUG = None

import sys
# Before Python 2.4 the HTTP handler itself had to treat non-200
# responses as errors; newer Pythons delegate that to HTTPErrorProcessor
# (see the module docstring), so error handling defaults to off.
if sys.version_info < (2, 4):
    HANDLE_ERRORS = 1
else: HANDLE_ERRORS = 0
class ConnectionManager(object):
    """
    Thread-safe registry of keep-alive connections.

    Tracks which connections belong to which host, and whether each
    connection is ready (idle) or currently serving a request.
    """
    def __init__(self):
        self._lock = thread.allocate_lock()
        self._hostmap = {} # map hosts to a list of connections
        self._connmap = {} # map connections to host
        self._readymap = {} # map connection to ready state

    def add(self, host, connection, ready):
        """Register a connection for host with the given ready state."""
        self._lock.acquire()
        try:
            self._hostmap.setdefault(host, []).append(connection)
            self._connmap[connection] = host
            self._readymap[connection] = ready
        finally:
            self._lock.release()

    def remove(self, connection):
        """Forget a connection; unknown connections are ignored."""
        self._lock.acquire()
        try:
            host = self._connmap.pop(connection, None)
            if host is not None:
                del self._readymap[connection]
                self._hostmap[host].remove(connection)
                if not self._hostmap[host]:
                    del self._hostmap[host]
        finally:
            self._lock.release()

    def set_ready(self, connection, ready):
        """Mark a connection's ready state; unknown connections are ignored."""
        try:
            self._readymap[connection] = ready
        except KeyError:
            pass

    def get_ready_conn(self, host):
        """Return an idle connection to host (marking it busy), or None."""
        conn = None
        self._lock.acquire()
        try:
            for candidate in self._hostmap.get(host, []):
                if self._readymap[candidate]:
                    # claim it: no longer ready until the request finishes
                    self._readymap[candidate] = 0
                    conn = candidate
                    break
        finally:
            self._lock.release()
        return conn

    def get_all(self, host=None):
        """Return connections for host, or a host->connections dict."""
        if host:
            return list(self._hostmap.get(host, []))
        return dict(self._hostmap)
187
187
class KeepAliveHandler(object):
    """urllib2-style HTTP handler that pools and re-uses connections.

    Holds a ConnectionManager and implements do_open() so that an idle
    pooled connection is tried before a new one is created.  Meant to be
    mixed into a urllib2 handler class (see HTTPHandler below), which
    supplies self.parent.
    """
    def __init__(self):
        self._cm = ConnectionManager()

    #### Connection Management
    def open_connections(self):
        """return a list of connected hosts and the number of connections
        to each.  [('foo.com:80', 2), ('bar.org', 1)]"""
        return [(host, len(li)) for (host, li) in self._cm.get_all().items()]

    def close_connection(self, host):
        """close connection(s) to <host>
        host is the host:port spec, as in 'www.cnn.com:8080' as passed in.
        no error occurs if there is no connection to that host."""
        for h in self._cm.get_all(host):
            self._cm.remove(h)
            h.close()

    def close_all(self):
        """close all open connections"""
        # iteritems() is Python-2-only, consistent with the rest of the file
        for host, conns in self._cm.get_all().iteritems():
            for h in conns:
                self._cm.remove(h)
                h.close()

    def _request_closed(self, request, host, connection):
        """tells us that this request is now closed and the
        connection is ready for another request"""
        self._cm.set_ready(connection, 1)

    def _remove_connection(self, host, connection, close=0):
        # close=0 merely evicts from the pool; close=1 also closes the socket
        if close:
            connection.close()
        self._cm.remove(connection)

    #### Transaction Execution
    def http_open(self, req):
        # urllib2 protocol hook: delegate to do_open with our connection class
        return self.do_open(HTTPConnection, req)

    def do_open(self, http_class, req):
        """Issue *req*, preferring a pooled idle connection to *host*.

        Cycles through idle connections, discarding dead ones; the
        while/else falls through to creating a brand-new connection only
        when no pooled connection produced a response (no break taken).
        Returns the response, or hands non-200 statuses to the parent
        opener's error machinery when HANDLE_ERRORS is set.
        """
        host = req.get_host()
        if not host:
            raise urllib2.URLError('no host given')

        try:
            h = self._cm.get_ready_conn(host)
            while h:
                r = self._reuse_connection(h, req, host)

                # if this response is non-None, then it worked and we're
                # done.  Break out, skipping the else block.
                if r:
                    break

                # connection is bad - possibly closed by server
                # discard it and ask for the next free connection
                h.close()
                self._cm.remove(h)
                h = self._cm.get_ready_conn(host)
            else:
                # no (working) free connections were found.  Create a new one.
                h = http_class(host)
                if DEBUG:
                    DEBUG.info("creating new connection to %s (%d)",
                               host, id(h))
                # added as busy (ready=0): the transaction is in flight
                self._cm.add(host, h, 0)
                self._start_transaction(h, req)
                r = h.getresponse()
        except (socket.error, httplib.HTTPException), err:
            raise urllib2.URLError(err)

        # if not a persistent connection, don't try to reuse it
        if r.will_close:
            self._cm.remove(h)

        if DEBUG:
            DEBUG.info("STATUS: %s, %s", r.status, r.reason)
        # decorate the response so HTTPResponse.close()/close_connection()
        # can notify this handler, and to mimic urllib2's addinfourl API
        r._handler = self
        r._host = host
        r._url = req.get_full_url()
        r._connection = h
        r.code = r.status
        r.headers = r.msg
        r.msg = r.reason

        if r.status == 200 or not HANDLE_ERRORS:
            return r
        else:
            # let the opener's installed error handlers process non-200s
            return self.parent.error('http', req, r,
                                     r.status, r.msg, r.headers)

    def _reuse_connection(self, h, req, host):
        """start the transaction with a re-used connection
        return a response object (r) upon success or None on failure.
        This DOES not close or remove bad connections in cases where
        it returns.  However, if an unexpected exception occurs, it
        will close and remove the connection before re-raising.
        """
        try:
            self._start_transaction(h, req)
            r = h.getresponse()
            # note: just because we got something back doesn't mean it
            # worked.  We'll check the version below, too.
        except (socket.error, httplib.HTTPException):
            r = None
        except:   # re-raises
            # adding this block just in case we've missed
            # something we will still raise the exception, but
            # lets try and close the connection and remove it
            # first.  We previously got into a nasty loop
            # where an exception was uncaught, and so the
            # connection stayed open.  On the next try, the
            # same exception was raised, etc.  The tradeoff is
            # that it's now possible this call will raise
            # a DIFFERENT exception
            if DEBUG:
                DEBUG.error("unexpected exception - closing "
                            "connection to %s (%d)", host, id(h))
            self._cm.remove(h)
            h.close()
            raise

        if r is None or r.version == 9:
            # httplib falls back to assuming HTTP 0.9 if it gets a
            # bad header back.  This is most likely to happen if
            # the socket has been closed by the server since we
            # last used the connection.
            if DEBUG:
                DEBUG.info("failed to re-use connection to %s (%d)",
                           host, id(h))
            r = None
        else:
            if DEBUG:
                DEBUG.info("re-using connection to %s (%d)", host, id(h))

        return r

    def _start_transaction(self, h, req):
        # What follows mostly reimplements HTTPConnection.request()
        # except it adds self.parent.addheaders in the mix.
        headers = req.headers.copy()
        if sys.version_info >= (2, 4):
            headers.update(req.unredirected_hdrs)
        headers.update(self.parent.addheaders)
        # lower-case all header names so the skip/duplicate checks below
        # are case-insensitive
        headers = dict((n.lower(), v) for n, v in headers.items())
        skipheaders = {}
        for n in ('host', 'accept-encoding'):
            if n in headers:
                # tell putrequest() not to auto-emit these; we send our own
                skipheaders['skip_' + n.replace('-', '_')] = 1
        try:
            if req.has_data():
                data = req.get_data()
                h.putrequest('POST', req.get_selector(), **skipheaders)
                if 'content-type' not in headers:
                    h.putheader('Content-type',
                                'application/x-www-form-urlencoded')
                if 'content-length' not in headers:
                    h.putheader('Content-length', '%d' % len(data))
            else:
                h.putrequest('GET', req.get_selector(), **skipheaders)
        except (socket.error), err:
            raise urllib2.URLError(err)
        for k, v in headers.items():
            h.putheader(k, v)
        h.endheaders()
        if req.has_data():
            h.send(data)
355
355
class HTTPHandler(KeepAliveHandler, urllib2.HTTPHandler):
    # Concrete urllib2 handler: KeepAliveHandler supplies http_open() and
    # the connection pooling; urllib2.HTTPHandler supplies everything else
    # (including self.parent wiring done by the opener).
    pass
358
358
class HTTPResponse(httplib.HTTPResponse):
    # we need to subclass HTTPResponse in order to
    # 1) add readline() and readlines() methods
    # 2) add close_connection() methods
    # 3) add info() and geturl() methods

    # in order to add readline(), read must be modified to deal with a
    # buffer.  example: readline must read a buffer and then spit back
    # one line at a time.  The only real alternative is to read one
    # BYTE at a time (ick).  Once something has been read, it can't be
    # put back (ok, maybe it can, but that's even uglier than this),
    # so if you THEN do a normal read, you must first take stuff from
    # the buffer.

    # the read method wraps the original to accommodate buffering,
    # although read() never adds to the buffer.
    # Both readline and readlines have been stolen with almost no
    # modification from socket.py


    def __init__(self, sock, debuglevel=0, strict=0, method=None):
        if method: # the httplib in python 2.3 uses the method arg
            httplib.HTTPResponse.__init__(self, sock, debuglevel, method)
        else: # 2.2 doesn't
            httplib.HTTPResponse.__init__(self, sock, debuglevel)
        self.fileno = sock.fileno
        self.code = None
        self._rbuf = ''          # readline() look-ahead buffer
        # NOTE(review): 8096 looks like a typo for 8192, but it is only a
        # buffer size hint, so it is left untouched here
        self._rbufsize = 8096
        self._handler = None # inserted by the handler later
        self._host = None # (same)
        self._url = None # (same)
        self._connection = None # (same)

    # keep a handle on the unbuffered base-class read(); the wrappers
    # below layer _rbuf handling on top of it
    _raw_read = httplib.HTTPResponse.read

    def close(self):
        # close the body stream, then tell the owning handler the pooled
        # connection is free for the next request
        if self.fp:
            self.fp.close()
            self.fp = None
        if self._handler:
            self._handler._request_closed(self, self._host,
                                          self._connection)

    def close_connection(self):
        # unlike close(), this also tears down the underlying socket so
        # the connection is NOT returned to the pool
        self._handler._remove_connection(self._host, self._connection, close=1)
        self.close()

    def info(self):
        # urllib2-compatible accessor for the response headers
        return self.headers

    def geturl(self):
        # urllib2-compatible accessor for the final request URL
        return self._url

    def read(self, amt=None):
        # serve (part of) the request from the look-ahead buffer first,
        # then fall through to the raw httplib read for the remainder
        # the _rbuf test is only in this first if for speed.  It's not
        # logically necessary
        if self._rbuf and not amt is None:
            L = len(self._rbuf)
            if amt > L:
                # buffer only covers part of the request; read the rest below
                amt -= L
            else:
                # buffer fully satisfies the request
                s = self._rbuf[:amt]
                self._rbuf = self._rbuf[amt:]
                return s

        s = self._rbuf + self._raw_read(amt)
        self._rbuf = ''
        return s

    # stolen from Python SVN #68532 to fix issue1088
    def _read_chunked(self, amt):
        chunk_left = self.chunk_left
        value = ''

        # XXX This accumulates chunks by repeated string concatenation,
        # which is not efficient as the number or size of chunks gets big.
        while True:
            if chunk_left is None:
                # start of a new chunk: parse the hex size line
                line = self.fp.readline()
                i = line.find(';')
                if i >= 0:
                    line = line[:i] # strip chunk-extensions
                try:
                    chunk_left = int(line, 16)
                except ValueError:
                    # close the connection as protocol synchronisation is
                    # probably lost
                    self.close()
                    raise httplib.IncompleteRead(value)
                if chunk_left == 0:
                    # terminating zero-size chunk; trailers follow below
                    break
            if amt is None:
                value += self._safe_read(chunk_left)
            elif amt < chunk_left:
                # caller wants less than this chunk holds: remember the rest
                value += self._safe_read(amt)
                self.chunk_left = chunk_left - amt
                return value
            elif amt == chunk_left:
                value += self._safe_read(amt)
                self._safe_read(2)  # toss the CRLF at the end of the chunk
                self.chunk_left = None
                return value
            else:
                value += self._safe_read(chunk_left)
                amt -= chunk_left

            # we read the whole chunk, get another
            self._safe_read(2)      # toss the CRLF at the end of the chunk
            chunk_left = None

        # read and discard trailer up to the CRLF terminator
        ### note: we shouldn't have any trailers!
        while True:
            line = self.fp.readline()
            if not line:
                # a vanishingly small number of sites EOF without
                # sending the trailer
                break
            if line == '\r\n':
                break

        # we read everything; close the "file"
        self.close()

        return value

    def readline(self, limit=-1):
        # return one '\n'-terminated line (or up to *limit* bytes),
        # pulling data through _raw_read into _rbuf as needed
        i = self._rbuf.find('\n')
        while i < 0 and not (0 < limit <= len(self._rbuf)):
            new = self._raw_read(self._rbufsize)
            if not new:
                break
            i = new.find('\n')
            if i >= 0:
                # translate newline position to an index into the full buffer
                i = i + len(self._rbuf)
            self._rbuf = self._rbuf + new
        if i < 0:
            # EOF with no newline: return whatever is buffered
            i = len(self._rbuf)
        else:
            i = i + 1  # include the newline itself
        if 0 <= limit < len(self._rbuf):
            i = limit
        data, self._rbuf = self._rbuf[:i], self._rbuf[i:]
        return data

    def readlines(self, sizehint = 0):
        # NOTE(review): 'list' shadows the builtin; renaming would be a
        # behavior-neutral cleanup for a future change
        total = 0
        list = []
        while True:
            line = self.readline()
            if not line:
                break
            list.append(line)
            total += len(line)
            if sizehint and total >= sizehint:
                # socket.py semantics: stop once the hint is reached
                break
        return list
517
517
def safesend(self, str):
    """Send `str' to the server.

    Shamelessly ripped off from httplib to patch a bad behavior.

    Installed as HTTPConnection.send (see below).  `str' may also be a
    read()-able object, in which case it is streamed in 8 KiB blocks.
    """
    # _broken_pipe_resp is an attribute we set in this function
    # if the socket is closed while we're sending data but
    # the server sent us a response before hanging up.
    # In that case, we want to pretend to send the rest of the
    # outgoing data, and then let the user use getresponse()
    # (which we wrap) to get this last response before
    # opening a new socket.
    if getattr(self, '_broken_pipe_resp', None) is not None:
        return

    if self.sock is None:
        if self.auto_open:
            self.connect()
        else:
            raise httplib.NotConnected

    # send the data to the server. if we get a broken pipe, then close
    # the socket. we want to reconnect when somebody tries to send again.
    #
    # NOTE: we DO propagate the error, though, because we cannot simply
    # ignore the error... the caller will know if they can retry.
    if self.debuglevel > 0:
        print "send:", repr(str)
    try:
        blocksize = 8192
        read = getattr(str, 'read', None)
        if read is not None:
            # file-like payload: stream it block by block
            if self.debuglevel > 0:
                print "sendIng a read()able"
            data = read(blocksize)
            while data:
                self.sock.sendall(data)
                data = read(blocksize)
        else:
            self.sock.sendall(str)
    except socket.error, v:
        reraise = True
        if v[0] == errno.EPIPE: # Broken pipe
            # only try to salvage a response if the request line and
            # headers had already been fully sent
            # (name-mangled access to HTTPConnection's private __state)
            if self._HTTPConnection__state == httplib._CS_REQ_SENT:
                self._broken_pipe_resp = None
                self._broken_pipe_resp = self.getresponse()
                reraise = False
            self.close()
        if reraise:
            raise
568
568
def wrapgetresponse(cls):
    """Build a getresponse() replacement that is sane about broken pipes.

    safesend() may stash an early server reply in _broken_pipe_resp when
    the socket dies mid-send; the wrapper hands that stashed response
    back instead of reading from the already-closed socket, and
    otherwise defers to cls.getresponse().
    """
    def safegetresponse(self):
        cached = getattr(self, '_broken_pipe_resp', None)
        if cached is not None:
            return cached
        return cls.getresponse(self)
    # preserve the wrapped method's documentation
    safegetresponse.__doc__ = cls.getresponse.__doc__
    return safegetresponse
583
583
class HTTPConnection(httplib.HTTPConnection):
    # use the modified response class
    response_class = HTTPResponse
    # patch in the broken-pipe-tolerant send/getresponse defined above
    send = safesend
    getresponse = wrapgetresponse(httplib.HTTPConnection)
589
589
590
590
591 #########################################################################
591 #########################################################################
592 ##### TEST FUNCTIONS
592 ##### TEST FUNCTIONS
593 #########################################################################
593 #########################################################################
594
594
def error_handler(url):
    """Manual test: fetch *url* with HANDLE_ERRORS off and on.

    Intended to be run against a non-200 URL; prints the status either
    way and restores the global afterwards.  Leaves the keepalive opener
    installed as the default urllib2 opener (side effect).
    """
    global HANDLE_ERRORS
    orig = HANDLE_ERRORS
    keepalive_handler = HTTPHandler()
    opener = urllib2.build_opener(keepalive_handler)
    urllib2.install_opener(opener)
    pos = {0: 'off', 1: 'on'}
    for i in (0, 1):
        print " fancy error handling %s (HANDLE_ERRORS = %i)" % (pos[i], i)
        HANDLE_ERRORS = i
        try:
            fo = urllib2.urlopen(url)
            fo.read()
            fo.close()
            try:
                # only present when HANDLE_ERRORS left the raw response
                status, reason = fo.status, fo.reason
            except AttributeError:
                status, reason = None, None
        except IOError, e:
            print " EXCEPTION: %s" % e
            raise
        else:
            print " status = %s, reason = %s" % (status, reason)
    HANDLE_ERRORS = orig
    hosts = keepalive_handler.open_connections()
    print "open connections:", hosts
    keepalive_handler.close_all()
622
622
def md5(s):
    """Hash *s*, lazily picking hashlib's md5 (or the old md5 module).

    On first call the module-level name `md5` is rebound to the real
    implementation, so the import cost is paid only once.
    """
    global md5
    try:
        from hashlib import md5 as impl
    except ImportError:
        # pre-hashlib Pythons
        from md5 import md5 as impl
    md5 = impl
    return impl(s)
631
631
632 def continuity(url):
632 def continuity(url):
633 format = '%25s: %s'
633 format = '%25s: %s'
634
634
635 # first fetch the file with the normal http handler
635 # first fetch the file with the normal http handler
636 opener = urllib2.build_opener()
636 opener = urllib2.build_opener()
637 urllib2.install_opener(opener)
637 urllib2.install_opener(opener)
638 fo = urllib2.urlopen(url)
638 fo = urllib2.urlopen(url)
639 foo = fo.read()
639 foo = fo.read()
640 fo.close()
640 fo.close()
641 m = md5.new(foo)
641 m = md5.new(foo)
642 print format % ('normal urllib', m.hexdigest())
642 print format % ('normal urllib', m.hexdigest())
643
643
644 # now install the keepalive handler and try again
644 # now install the keepalive handler and try again
645 opener = urllib2.build_opener(HTTPHandler())
645 opener = urllib2.build_opener(HTTPHandler())
646 urllib2.install_opener(opener)
646 urllib2.install_opener(opener)
647
647
648 fo = urllib2.urlopen(url)
648 fo = urllib2.urlopen(url)
649 foo = fo.read()
649 foo = fo.read()
650 fo.close()
650 fo.close()
651 m = md5.new(foo)
651 m = md5.new(foo)
652 print format % ('keepalive read', m.hexdigest())
652 print format % ('keepalive read', m.hexdigest())
653
653
654 fo = urllib2.urlopen(url)
654 fo = urllib2.urlopen(url)
655 foo = ''
655 foo = ''
656 while True:
656 while True:
657 f = fo.readline()
657 f = fo.readline()
658 if f:
658 if f:
659 foo = foo + f
659 foo = foo + f
660 else: break
660 else: break
661 fo.close()
661 fo.close()
662 m = md5.new(foo)
662 m = md5.new(foo)
663 print format % ('keepalive readline', m.hexdigest())
663 print format % ('keepalive readline', m.hexdigest())
664
664
def comp(N, url):
    """Manual test: time N fetches without and with keepalive and print
    the speed-up factor.  Installs openers globally (side effect)."""
    print ' making %i connections to:\n %s' % (N, url)

    sys.stdout.write(' first using the normal urllib handlers')
    # first use normal opener
    opener = urllib2.build_opener()
    urllib2.install_opener(opener)
    t1 = fetch(N, url)
    print ' TIME: %.3f s' % t1

    sys.stdout.write(' now using the keepalive handler ')
    # now install the keepalive handler and try again
    opener = urllib2.build_opener(HTTPHandler())
    urllib2.install_opener(opener)
    t2 = fetch(N, url)
    print ' TIME: %.3f s' % t2
    print ' improvement factor: %.2f' % (t1 / t2)
682
682
683 def fetch(N, url, delay=0):
683 def fetch(N, url, delay=0):
684 import time
684 import time
685 lens = []
685 lens = []
686 starttime = time.time()
686 starttime = time.time()
687 for i in range(N):
687 for i in range(N):
688 if delay and i > 0:
688 if delay and i > 0:
689 time.sleep(delay)
689 time.sleep(delay)
690 fo = urllib2.urlopen(url)
690 fo = urllib2.urlopen(url)
691 foo = fo.read()
691 foo = fo.read()
692 fo.close()
692 fo.close()
693 lens.append(len(foo))
693 lens.append(len(foo))
694 diff = time.time() - starttime
694 diff = time.time() - starttime
695
695
696 j = 0
696 j = 0
697 for i in lens[1:]:
697 for i in lens[1:]:
698 j = j + 1
698 j = j + 1
699 if not i == lens[0]:
699 if not i == lens[0]:
700 print "WARNING: inconsistent length on read %i: %i" % (j, i)
700 print "WARNING: inconsistent length on read %i: %i" % (j, i)
701
701
702 return diff
702 return diff
703
703
704 def test_timeout(url):
704 def test_timeout(url):
705 global DEBUG
705 global DEBUG
706 dbbackup = DEBUG
706 dbbackup = DEBUG
707 class FakeLogger(object):
707 class FakeLogger(object):
708 def debug(self, msg, *args):
708 def debug(self, msg, *args):
709 print msg % args
709 print msg % args
710 info = warning = error = debug
710 info = warning = error = debug
711 DEBUG = FakeLogger()
711 DEBUG = FakeLogger()
712 print " fetching the file to establish a connection"
712 print " fetching the file to establish a connection"
713 fo = urllib2.urlopen(url)
713 fo = urllib2.urlopen(url)
714 data1 = fo.read()
714 data1 = fo.read()
715 fo.close()
715 fo.close()
716
716
717 i = 20
717 i = 20
718 print " waiting %i seconds for the server to close the connection" % i
718 print " waiting %i seconds for the server to close the connection" % i
719 while i > 0:
719 while i > 0:
720 sys.stdout.write('\r %2i' % i)
720 sys.stdout.write('\r %2i' % i)
721 sys.stdout.flush()
721 sys.stdout.flush()
722 time.sleep(1)
722 time.sleep(1)
723 i -= 1
723 i -= 1
724 sys.stderr.write('\r')
724 sys.stderr.write('\r')
725
725
726 print " fetching the file a second time"
726 print " fetching the file a second time"
727 fo = urllib2.urlopen(url)
727 fo = urllib2.urlopen(url)
728 data2 = fo.read()
728 data2 = fo.read()
729 fo.close()
729 fo.close()
730
730
731 if data1 == data2:
731 if data1 == data2:
732 print ' data are identical'
732 print ' data are identical'
733 else:
733 else:
734 print ' ERROR: DATA DIFFER'
734 print ' ERROR: DATA DIFFER'
735
735
736 DEBUG = dbbackup
736 DEBUG = dbbackup
737
737
738
738
def test(url, N=10):
    """Run the full manual test suite against *url* (error handling,
    continuity, speed comparison, dropped-connection recovery)."""
    # NOTE(review): "hander" below is a typo in the output string; left
    # untouched here since this change only adds documentation
    print "checking error hander (do this on a non-200)"
    try: error_handler(url)
    except IOError:
        print "exiting - exception will prevent further tests"
        sys.exit()
    print
    print "performing continuity test (making sure stuff isn't corrupted)"
    continuity(url)
    print
    print "performing speed comparison"
    comp(N, url)
    print
    print "performing dropped-connection check"
    test_timeout(url)
754
754
if __name__ == '__main__':
    # manual test driver:  python keepalive.py <N> <url>
    import time
    import sys
    try:
        N = int(sys.argv[1])
        url = sys.argv[2]
    except (IndexError, ValueError):
        # missing or non-integer arguments: print usage instead
        print "%s <integer> <url>" % sys.argv[0]
    else:
        test(url, N)
@@ -1,2347 +1,2347 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import bin, hex, nullid, nullrev, short
8 from node import bin, hex, nullid, nullrev, short
9 from i18n import _
9 from i18n import _
10 import repo, changegroup, subrepo, discovery, pushkey
10 import repo, changegroup, subrepo, discovery, pushkey
11 import changelog, dirstate, filelog, manifest, context, bookmarks, phases
11 import changelog, dirstate, filelog, manifest, context, bookmarks, phases
12 import lock, transaction, store, encoding
12 import lock, transaction, store, encoding
13 import scmutil, util, extensions, hook, error, revset
13 import scmutil, util, extensions, hook, error, revset
14 import match as matchmod
14 import match as matchmod
15 import merge as mergemod
15 import merge as mergemod
16 import tags as tagsmod
16 import tags as tagsmod
17 from lock import release
17 from lock import release
18 import weakref, errno, os, time, inspect
18 import weakref, errno, os, time, inspect
19 propertycache = util.propertycache
19 propertycache = util.propertycache
20 filecache = scmutil.filecache
20 filecache = scmutil.filecache
21
21
22 class storecache(filecache):
22 class storecache(filecache):
23 """filecache for files in the store"""
23 """filecache for files in the store"""
24 def join(self, obj, fname):
24 def join(self, obj, fname):
25 return obj.sjoin(fname)
25 return obj.sjoin(fname)
26
26
27 class localrepository(repo.repository):
27 class localrepository(repo.repository):
28 capabilities = set(('lookup', 'changegroupsubset', 'branchmap', 'pushkey',
28 capabilities = set(('lookup', 'changegroupsubset', 'branchmap', 'pushkey',
29 'known', 'getbundle'))
29 'known', 'getbundle'))
30 supportedformats = set(('revlogv1', 'generaldelta'))
30 supportedformats = set(('revlogv1', 'generaldelta'))
31 supported = supportedformats | set(('store', 'fncache', 'shared',
31 supported = supportedformats | set(('store', 'fncache', 'shared',
32 'dotencode'))
32 'dotencode'))
33
33
34 def __init__(self, baseui, path=None, create=False):
34 def __init__(self, baseui, path=None, create=False):
35 repo.repository.__init__(self)
35 repo.repository.__init__(self)
36 self.root = os.path.realpath(util.expandpath(path))
36 self.root = os.path.realpath(util.expandpath(path))
37 self.path = os.path.join(self.root, ".hg")
37 self.path = os.path.join(self.root, ".hg")
38 self.origroot = path
38 self.origroot = path
39 self.auditor = scmutil.pathauditor(self.root, self._checknested)
39 self.auditor = scmutil.pathauditor(self.root, self._checknested)
40 self.opener = scmutil.opener(self.path)
40 self.opener = scmutil.opener(self.path)
41 self.wopener = scmutil.opener(self.root)
41 self.wopener = scmutil.opener(self.root)
42 self.baseui = baseui
42 self.baseui = baseui
43 self.ui = baseui.copy()
43 self.ui = baseui.copy()
44 # A list of callback to shape the phase if no data were found.
44 # A list of callback to shape the phase if no data were found.
45 # Callback are in the form: func(repo, roots) --> processed root.
45 # Callback are in the form: func(repo, roots) --> processed root.
46 # This list it to be filled by extension during repo setup
46 # This list it to be filled by extension during repo setup
47 self._phasedefaults = []
47 self._phasedefaults = []
48
48
49 try:
49 try:
50 self.ui.readconfig(self.join("hgrc"), self.root)
50 self.ui.readconfig(self.join("hgrc"), self.root)
51 extensions.loadall(self.ui)
51 extensions.loadall(self.ui)
52 except IOError:
52 except IOError:
53 pass
53 pass
54
54
55 if not os.path.isdir(self.path):
55 if not os.path.isdir(self.path):
56 if create:
56 if create:
57 if not os.path.exists(path):
57 if not os.path.exists(path):
58 util.makedirs(path)
58 util.makedirs(path)
59 util.makedir(self.path, notindexed=True)
59 util.makedir(self.path, notindexed=True)
60 requirements = ["revlogv1"]
60 requirements = ["revlogv1"]
61 if self.ui.configbool('format', 'usestore', True):
61 if self.ui.configbool('format', 'usestore', True):
62 os.mkdir(os.path.join(self.path, "store"))
62 os.mkdir(os.path.join(self.path, "store"))
63 requirements.append("store")
63 requirements.append("store")
64 if self.ui.configbool('format', 'usefncache', True):
64 if self.ui.configbool('format', 'usefncache', True):
65 requirements.append("fncache")
65 requirements.append("fncache")
66 if self.ui.configbool('format', 'dotencode', True):
66 if self.ui.configbool('format', 'dotencode', True):
67 requirements.append('dotencode')
67 requirements.append('dotencode')
68 # create an invalid changelog
68 # create an invalid changelog
69 self.opener.append(
69 self.opener.append(
70 "00changelog.i",
70 "00changelog.i",
71 '\0\0\0\2' # represents revlogv2
71 '\0\0\0\2' # represents revlogv2
72 ' dummy changelog to prevent using the old repo layout'
72 ' dummy changelog to prevent using the old repo layout'
73 )
73 )
74 if self.ui.configbool('format', 'generaldelta', False):
74 if self.ui.configbool('format', 'generaldelta', False):
75 requirements.append("generaldelta")
75 requirements.append("generaldelta")
76 requirements = set(requirements)
76 requirements = set(requirements)
77 else:
77 else:
78 raise error.RepoError(_("repository %s not found") % path)
78 raise error.RepoError(_("repository %s not found") % path)
79 elif create:
79 elif create:
80 raise error.RepoError(_("repository %s already exists") % path)
80 raise error.RepoError(_("repository %s already exists") % path)
81 else:
81 else:
82 try:
82 try:
83 requirements = scmutil.readrequires(self.opener, self.supported)
83 requirements = scmutil.readrequires(self.opener, self.supported)
84 except IOError, inst:
84 except IOError, inst:
85 if inst.errno != errno.ENOENT:
85 if inst.errno != errno.ENOENT:
86 raise
86 raise
87 requirements = set()
87 requirements = set()
88
88
89 self.sharedpath = self.path
89 self.sharedpath = self.path
90 try:
90 try:
91 s = os.path.realpath(self.opener.read("sharedpath").rstrip('\n'))
91 s = os.path.realpath(self.opener.read("sharedpath").rstrip('\n'))
92 if not os.path.exists(s):
92 if not os.path.exists(s):
93 raise error.RepoError(
93 raise error.RepoError(
94 _('.hg/sharedpath points to nonexistent directory %s') % s)
94 _('.hg/sharedpath points to nonexistent directory %s') % s)
95 self.sharedpath = s
95 self.sharedpath = s
96 except IOError, inst:
96 except IOError, inst:
97 if inst.errno != errno.ENOENT:
97 if inst.errno != errno.ENOENT:
98 raise
98 raise
99
99
100 self.store = store.store(requirements, self.sharedpath, scmutil.opener)
100 self.store = store.store(requirements, self.sharedpath, scmutil.opener)
101 self.spath = self.store.path
101 self.spath = self.store.path
102 self.sopener = self.store.opener
102 self.sopener = self.store.opener
103 self.sjoin = self.store.join
103 self.sjoin = self.store.join
104 self.opener.createmode = self.store.createmode
104 self.opener.createmode = self.store.createmode
105 self._applyrequirements(requirements)
105 self._applyrequirements(requirements)
106 if create:
106 if create:
107 self._writerequirements()
107 self._writerequirements()
108
108
109
109
110 self._branchcache = None
110 self._branchcache = None
111 self._branchcachetip = None
111 self._branchcachetip = None
112 self.filterpats = {}
112 self.filterpats = {}
113 self._datafilters = {}
113 self._datafilters = {}
114 self._transref = self._lockref = self._wlockref = None
114 self._transref = self._lockref = self._wlockref = None
115
115
116 # A cache for various files under .hg/ that tracks file changes,
116 # A cache for various files under .hg/ that tracks file changes,
117 # (used by the filecache decorator)
117 # (used by the filecache decorator)
118 #
118 #
119 # Maps a property name to its util.filecacheentry
119 # Maps a property name to its util.filecacheentry
120 self._filecache = {}
120 self._filecache = {}
121
121
122 def _applyrequirements(self, requirements):
122 def _applyrequirements(self, requirements):
123 self.requirements = requirements
123 self.requirements = requirements
124 openerreqs = set(('revlogv1', 'generaldelta'))
124 openerreqs = set(('revlogv1', 'generaldelta'))
125 self.sopener.options = dict((r, 1) for r in requirements
125 self.sopener.options = dict((r, 1) for r in requirements
126 if r in openerreqs)
126 if r in openerreqs)
127
127
128 def _writerequirements(self):
128 def _writerequirements(self):
129 reqfile = self.opener("requires", "w")
129 reqfile = self.opener("requires", "w")
130 for r in self.requirements:
130 for r in self.requirements:
131 reqfile.write("%s\n" % r)
131 reqfile.write("%s\n" % r)
132 reqfile.close()
132 reqfile.close()
133
133
134 def _checknested(self, path):
134 def _checknested(self, path):
135 """Determine if path is a legal nested repository."""
135 """Determine if path is a legal nested repository."""
136 if not path.startswith(self.root):
136 if not path.startswith(self.root):
137 return False
137 return False
138 subpath = path[len(self.root) + 1:]
138 subpath = path[len(self.root) + 1:]
139 normsubpath = util.pconvert(subpath)
139 normsubpath = util.pconvert(subpath)
140
140
141 # XXX: Checking against the current working copy is wrong in
141 # XXX: Checking against the current working copy is wrong in
142 # the sense that it can reject things like
142 # the sense that it can reject things like
143 #
143 #
144 # $ hg cat -r 10 sub/x.txt
144 # $ hg cat -r 10 sub/x.txt
145 #
145 #
146 # if sub/ is no longer a subrepository in the working copy
146 # if sub/ is no longer a subrepository in the working copy
147 # parent revision.
147 # parent revision.
148 #
148 #
149 # However, it can of course also allow things that would have
149 # However, it can of course also allow things that would have
150 # been rejected before, such as the above cat command if sub/
150 # been rejected before, such as the above cat command if sub/
151 # is a subrepository now, but was a normal directory before.
151 # is a subrepository now, but was a normal directory before.
152 # The old path auditor would have rejected by mistake since it
152 # The old path auditor would have rejected by mistake since it
153 # panics when it sees sub/.hg/.
153 # panics when it sees sub/.hg/.
154 #
154 #
155 # All in all, checking against the working copy seems sensible
155 # All in all, checking against the working copy seems sensible
156 # since we want to prevent access to nested repositories on
156 # since we want to prevent access to nested repositories on
157 # the filesystem *now*.
157 # the filesystem *now*.
158 ctx = self[None]
158 ctx = self[None]
159 parts = util.splitpath(subpath)
159 parts = util.splitpath(subpath)
160 while parts:
160 while parts:
161 prefix = '/'.join(parts)
161 prefix = '/'.join(parts)
162 if prefix in ctx.substate:
162 if prefix in ctx.substate:
163 if prefix == normsubpath:
163 if prefix == normsubpath:
164 return True
164 return True
165 else:
165 else:
166 sub = ctx.sub(prefix)
166 sub = ctx.sub(prefix)
167 return sub.checknested(subpath[len(prefix) + 1:])
167 return sub.checknested(subpath[len(prefix) + 1:])
168 else:
168 else:
169 parts.pop()
169 parts.pop()
170 return False
170 return False
171
171
172 @filecache('bookmarks')
172 @filecache('bookmarks')
173 def _bookmarks(self):
173 def _bookmarks(self):
174 return bookmarks.read(self)
174 return bookmarks.read(self)
175
175
176 @filecache('bookmarks.current')
176 @filecache('bookmarks.current')
177 def _bookmarkcurrent(self):
177 def _bookmarkcurrent(self):
178 return bookmarks.readcurrent(self)
178 return bookmarks.readcurrent(self)
179
179
180 def _writebookmarks(self, marks):
180 def _writebookmarks(self, marks):
181 bookmarks.write(self)
181 bookmarks.write(self)
182
182
183 @storecache('phaseroots')
183 @storecache('phaseroots')
184 def _phasecache(self):
184 def _phasecache(self):
185 return phases.phasecache(self, self._phasedefaults)
185 return phases.phasecache(self, self._phasedefaults)
186
186
187 @storecache('00changelog.i')
187 @storecache('00changelog.i')
188 def changelog(self):
188 def changelog(self):
189 c = changelog.changelog(self.sopener)
189 c = changelog.changelog(self.sopener)
190 if 'HG_PENDING' in os.environ:
190 if 'HG_PENDING' in os.environ:
191 p = os.environ['HG_PENDING']
191 p = os.environ['HG_PENDING']
192 if p.startswith(self.root):
192 if p.startswith(self.root):
193 c.readpending('00changelog.i.a')
193 c.readpending('00changelog.i.a')
194 return c
194 return c
195
195
196 @storecache('00manifest.i')
196 @storecache('00manifest.i')
197 def manifest(self):
197 def manifest(self):
198 return manifest.manifest(self.sopener)
198 return manifest.manifest(self.sopener)
199
199
200 @filecache('dirstate')
200 @filecache('dirstate')
201 def dirstate(self):
201 def dirstate(self):
202 warned = [0]
202 warned = [0]
203 def validate(node):
203 def validate(node):
204 try:
204 try:
205 self.changelog.rev(node)
205 self.changelog.rev(node)
206 return node
206 return node
207 except error.LookupError:
207 except error.LookupError:
208 if not warned[0]:
208 if not warned[0]:
209 warned[0] = True
209 warned[0] = True
210 self.ui.warn(_("warning: ignoring unknown"
210 self.ui.warn(_("warning: ignoring unknown"
211 " working parent %s!\n") % short(node))
211 " working parent %s!\n") % short(node))
212 return nullid
212 return nullid
213
213
214 return dirstate.dirstate(self.opener, self.ui, self.root, validate)
214 return dirstate.dirstate(self.opener, self.ui, self.root, validate)
215
215
216 def __getitem__(self, changeid):
216 def __getitem__(self, changeid):
217 if changeid is None:
217 if changeid is None:
218 return context.workingctx(self)
218 return context.workingctx(self)
219 return context.changectx(self, changeid)
219 return context.changectx(self, changeid)
220
220
221 def __contains__(self, changeid):
221 def __contains__(self, changeid):
222 try:
222 try:
223 return bool(self.lookup(changeid))
223 return bool(self.lookup(changeid))
224 except error.RepoLookupError:
224 except error.RepoLookupError:
225 return False
225 return False
226
226
227 def __nonzero__(self):
227 def __nonzero__(self):
228 return True
228 return True
229
229
230 def __len__(self):
230 def __len__(self):
231 return len(self.changelog)
231 return len(self.changelog)
232
232
233 def __iter__(self):
233 def __iter__(self):
234 for i in xrange(len(self)):
234 for i in xrange(len(self)):
235 yield i
235 yield i
236
236
237 def revs(self, expr, *args):
237 def revs(self, expr, *args):
238 '''Return a list of revisions matching the given revset'''
238 '''Return a list of revisions matching the given revset'''
239 expr = revset.formatspec(expr, *args)
239 expr = revset.formatspec(expr, *args)
240 m = revset.match(None, expr)
240 m = revset.match(None, expr)
241 return [r for r in m(self, range(len(self)))]
241 return [r for r in m(self, range(len(self)))]
242
242
243 def set(self, expr, *args):
243 def set(self, expr, *args):
244 '''
244 '''
245 Yield a context for each matching revision, after doing arg
245 Yield a context for each matching revision, after doing arg
246 replacement via revset.formatspec
246 replacement via revset.formatspec
247 '''
247 '''
248 for r in self.revs(expr, *args):
248 for r in self.revs(expr, *args):
249 yield self[r]
249 yield self[r]
250
250
251 def url(self):
251 def url(self):
252 return 'file:' + self.root
252 return 'file:' + self.root
253
253
254 def hook(self, name, throw=False, **args):
254 def hook(self, name, throw=False, **args):
255 return hook.hook(self.ui, self, name, throw, **args)
255 return hook.hook(self.ui, self, name, throw, **args)
256
256
257 tag_disallowed = ':\r\n'
257 tag_disallowed = ':\r\n'
258
258
259 def _tag(self, names, node, message, local, user, date, extra={}):
259 def _tag(self, names, node, message, local, user, date, extra={}):
260 if isinstance(names, str):
260 if isinstance(names, str):
261 allchars = names
261 allchars = names
262 names = (names,)
262 names = (names,)
263 else:
263 else:
264 allchars = ''.join(names)
264 allchars = ''.join(names)
265 for c in self.tag_disallowed:
265 for c in self.tag_disallowed:
266 if c in allchars:
266 if c in allchars:
267 raise util.Abort(_('%r cannot be used in a tag name') % c)
267 raise util.Abort(_('%r cannot be used in a tag name') % c)
268
268
269 branches = self.branchmap()
269 branches = self.branchmap()
270 for name in names:
270 for name in names:
271 self.hook('pretag', throw=True, node=hex(node), tag=name,
271 self.hook('pretag', throw=True, node=hex(node), tag=name,
272 local=local)
272 local=local)
273 if name in branches:
273 if name in branches:
274 self.ui.warn(_("warning: tag %s conflicts with existing"
274 self.ui.warn(_("warning: tag %s conflicts with existing"
275 " branch name\n") % name)
275 " branch name\n") % name)
276
276
277 def writetags(fp, names, munge, prevtags):
277 def writetags(fp, names, munge, prevtags):
278 fp.seek(0, 2)
278 fp.seek(0, 2)
279 if prevtags and prevtags[-1] != '\n':
279 if prevtags and prevtags[-1] != '\n':
280 fp.write('\n')
280 fp.write('\n')
281 for name in names:
281 for name in names:
282 m = munge and munge(name) or name
282 m = munge and munge(name) or name
283 if (self._tagscache.tagtypes and
283 if (self._tagscache.tagtypes and
284 name in self._tagscache.tagtypes):
284 name in self._tagscache.tagtypes):
285 old = self.tags().get(name, nullid)
285 old = self.tags().get(name, nullid)
286 fp.write('%s %s\n' % (hex(old), m))
286 fp.write('%s %s\n' % (hex(old), m))
287 fp.write('%s %s\n' % (hex(node), m))
287 fp.write('%s %s\n' % (hex(node), m))
288 fp.close()
288 fp.close()
289
289
290 prevtags = ''
290 prevtags = ''
291 if local:
291 if local:
292 try:
292 try:
293 fp = self.opener('localtags', 'r+')
293 fp = self.opener('localtags', 'r+')
294 except IOError:
294 except IOError:
295 fp = self.opener('localtags', 'a')
295 fp = self.opener('localtags', 'a')
296 else:
296 else:
297 prevtags = fp.read()
297 prevtags = fp.read()
298
298
299 # local tags are stored in the current charset
299 # local tags are stored in the current charset
300 writetags(fp, names, None, prevtags)
300 writetags(fp, names, None, prevtags)
301 for name in names:
301 for name in names:
302 self.hook('tag', node=hex(node), tag=name, local=local)
302 self.hook('tag', node=hex(node), tag=name, local=local)
303 return
303 return
304
304
305 try:
305 try:
306 fp = self.wfile('.hgtags', 'rb+')
306 fp = self.wfile('.hgtags', 'rb+')
307 except IOError, e:
307 except IOError, e:
308 if e.errno != errno.ENOENT:
308 if e.errno != errno.ENOENT:
309 raise
309 raise
310 fp = self.wfile('.hgtags', 'ab')
310 fp = self.wfile('.hgtags', 'ab')
311 else:
311 else:
312 prevtags = fp.read()
312 prevtags = fp.read()
313
313
314 # committed tags are stored in UTF-8
314 # committed tags are stored in UTF-8
315 writetags(fp, names, encoding.fromlocal, prevtags)
315 writetags(fp, names, encoding.fromlocal, prevtags)
316
316
317 fp.close()
317 fp.close()
318
318
319 self.invalidatecaches()
319 self.invalidatecaches()
320
320
321 if '.hgtags' not in self.dirstate:
321 if '.hgtags' not in self.dirstate:
322 self[None].add(['.hgtags'])
322 self[None].add(['.hgtags'])
323
323
324 m = matchmod.exact(self.root, '', ['.hgtags'])
324 m = matchmod.exact(self.root, '', ['.hgtags'])
325 tagnode = self.commit(message, user, date, extra=extra, match=m)
325 tagnode = self.commit(message, user, date, extra=extra, match=m)
326
326
327 for name in names:
327 for name in names:
328 self.hook('tag', node=hex(node), tag=name, local=local)
328 self.hook('tag', node=hex(node), tag=name, local=local)
329
329
330 return tagnode
330 return tagnode
331
331
332 def tag(self, names, node, message, local, user, date):
332 def tag(self, names, node, message, local, user, date):
333 '''tag a revision with one or more symbolic names.
333 '''tag a revision with one or more symbolic names.
334
334
335 names is a list of strings or, when adding a single tag, names may be a
335 names is a list of strings or, when adding a single tag, names may be a
336 string.
336 string.
337
337
338 if local is True, the tags are stored in a per-repository file.
338 if local is True, the tags are stored in a per-repository file.
339 otherwise, they are stored in the .hgtags file, and a new
339 otherwise, they are stored in the .hgtags file, and a new
340 changeset is committed with the change.
340 changeset is committed with the change.
341
341
342 keyword arguments:
342 keyword arguments:
343
343
344 local: whether to store tags in non-version-controlled file
344 local: whether to store tags in non-version-controlled file
345 (default False)
345 (default False)
346
346
347 message: commit message to use if committing
347 message: commit message to use if committing
348
348
349 user: name of user to use if committing
349 user: name of user to use if committing
350
350
351 date: date tuple to use if committing'''
351 date: date tuple to use if committing'''
352
352
353 if not local:
353 if not local:
354 for x in self.status()[:5]:
354 for x in self.status()[:5]:
355 if '.hgtags' in x:
355 if '.hgtags' in x:
356 raise util.Abort(_('working copy of .hgtags is changed '
356 raise util.Abort(_('working copy of .hgtags is changed '
357 '(please commit .hgtags manually)'))
357 '(please commit .hgtags manually)'))
358
358
359 self.tags() # instantiate the cache
359 self.tags() # instantiate the cache
360 self._tag(names, node, message, local, user, date)
360 self._tag(names, node, message, local, user, date)
361
361
362 @propertycache
362 @propertycache
363 def _tagscache(self):
363 def _tagscache(self):
364 '''Returns a tagscache object that contains various tags related
364 '''Returns a tagscache object that contains various tags related
365 caches.'''
365 caches.'''
366
366
367 # This simplifies its cache management by having one decorated
367 # This simplifies its cache management by having one decorated
368 # function (this one) and the rest simply fetch things from it.
368 # function (this one) and the rest simply fetch things from it.
369 class tagscache(object):
369 class tagscache(object):
370 def __init__(self):
370 def __init__(self):
371 # These two define the set of tags for this repository. tags
371 # These two define the set of tags for this repository. tags
372 # maps tag name to node; tagtypes maps tag name to 'global' or
372 # maps tag name to node; tagtypes maps tag name to 'global' or
373 # 'local'. (Global tags are defined by .hgtags across all
373 # 'local'. (Global tags are defined by .hgtags across all
374 # heads, and local tags are defined in .hg/localtags.)
374 # heads, and local tags are defined in .hg/localtags.)
375 # They constitute the in-memory cache of tags.
375 # They constitute the in-memory cache of tags.
376 self.tags = self.tagtypes = None
376 self.tags = self.tagtypes = None
377
377
378 self.nodetagscache = self.tagslist = None
378 self.nodetagscache = self.tagslist = None
379
379
380 cache = tagscache()
380 cache = tagscache()
381 cache.tags, cache.tagtypes = self._findtags()
381 cache.tags, cache.tagtypes = self._findtags()
382
382
383 return cache
383 return cache
384
384
385 def tags(self):
385 def tags(self):
386 '''return a mapping of tag to node'''
386 '''return a mapping of tag to node'''
387 t = {}
387 t = {}
388 for k, v in self._tagscache.tags.iteritems():
388 for k, v in self._tagscache.tags.iteritems():
389 try:
389 try:
390 # ignore tags to unknown nodes
390 # ignore tags to unknown nodes
391 self.changelog.rev(v)
391 self.changelog.rev(v)
392 t[k] = v
392 t[k] = v
393 except (error.LookupError, ValueError):
393 except (error.LookupError, ValueError):
394 pass
394 pass
395 return t
395 return t
396
396
397 def _findtags(self):
397 def _findtags(self):
398 '''Do the hard work of finding tags. Return a pair of dicts
398 '''Do the hard work of finding tags. Return a pair of dicts
399 (tags, tagtypes) where tags maps tag name to node, and tagtypes
399 (tags, tagtypes) where tags maps tag name to node, and tagtypes
400 maps tag name to a string like \'global\' or \'local\'.
400 maps tag name to a string like \'global\' or \'local\'.
401 Subclasses or extensions are free to add their own tags, but
401 Subclasses or extensions are free to add their own tags, but
402 should be aware that the returned dicts will be retained for the
402 should be aware that the returned dicts will be retained for the
403 duration of the localrepo object.'''
403 duration of the localrepo object.'''
404
404
405 # XXX what tagtype should subclasses/extensions use? Currently
405 # XXX what tagtype should subclasses/extensions use? Currently
406 # mq and bookmarks add tags, but do not set the tagtype at all.
406 # mq and bookmarks add tags, but do not set the tagtype at all.
407 # Should each extension invent its own tag type? Should there
407 # Should each extension invent its own tag type? Should there
408 # be one tagtype for all such "virtual" tags? Or is the status
408 # be one tagtype for all such "virtual" tags? Or is the status
409 # quo fine?
409 # quo fine?
410
410
411 alltags = {} # map tag name to (node, hist)
411 alltags = {} # map tag name to (node, hist)
412 tagtypes = {}
412 tagtypes = {}
413
413
414 tagsmod.findglobaltags(self.ui, self, alltags, tagtypes)
414 tagsmod.findglobaltags(self.ui, self, alltags, tagtypes)
415 tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)
415 tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)
416
416
417 # Build the return dicts. Have to re-encode tag names because
417 # Build the return dicts. Have to re-encode tag names because
418 # the tags module always uses UTF-8 (in order not to lose info
418 # the tags module always uses UTF-8 (in order not to lose info
419 # writing to the cache), but the rest of Mercurial wants them in
419 # writing to the cache), but the rest of Mercurial wants them in
420 # local encoding.
420 # local encoding.
421 tags = {}
421 tags = {}
422 for (name, (node, hist)) in alltags.iteritems():
422 for (name, (node, hist)) in alltags.iteritems():
423 if node != nullid:
423 if node != nullid:
424 tags[encoding.tolocal(name)] = node
424 tags[encoding.tolocal(name)] = node
425 tags['tip'] = self.changelog.tip()
425 tags['tip'] = self.changelog.tip()
426 tagtypes = dict([(encoding.tolocal(name), value)
426 tagtypes = dict([(encoding.tolocal(name), value)
427 for (name, value) in tagtypes.iteritems()])
427 for (name, value) in tagtypes.iteritems()])
428 return (tags, tagtypes)
428 return (tags, tagtypes)
429
429
430 def tagtype(self, tagname):
430 def tagtype(self, tagname):
431 '''
431 '''
432 return the type of the given tag. result can be:
432 return the type of the given tag. result can be:
433
433
434 'local' : a local tag
434 'local' : a local tag
435 'global' : a global tag
435 'global' : a global tag
436 None : tag does not exist
436 None : tag does not exist
437 '''
437 '''
438
438
439 return self._tagscache.tagtypes.get(tagname)
439 return self._tagscache.tagtypes.get(tagname)
440
440
441 def tagslist(self):
441 def tagslist(self):
442 '''return a list of tags ordered by revision'''
442 '''return a list of tags ordered by revision'''
443 if not self._tagscache.tagslist:
443 if not self._tagscache.tagslist:
444 l = []
444 l = []
445 for t, n in self.tags().iteritems():
445 for t, n in self.tags().iteritems():
446 r = self.changelog.rev(n)
446 r = self.changelog.rev(n)
447 l.append((r, t, n))
447 l.append((r, t, n))
448 self._tagscache.tagslist = [(t, n) for r, t, n in sorted(l)]
448 self._tagscache.tagslist = [(t, n) for r, t, n in sorted(l)]
449
449
450 return self._tagscache.tagslist
450 return self._tagscache.tagslist
451
451
452 def nodetags(self, node):
452 def nodetags(self, node):
453 '''return the tags associated with a node'''
453 '''return the tags associated with a node'''
454 if not self._tagscache.nodetagscache:
454 if not self._tagscache.nodetagscache:
455 nodetagscache = {}
455 nodetagscache = {}
456 for t, n in self._tagscache.tags.iteritems():
456 for t, n in self._tagscache.tags.iteritems():
457 nodetagscache.setdefault(n, []).append(t)
457 nodetagscache.setdefault(n, []).append(t)
458 for tags in nodetagscache.itervalues():
458 for tags in nodetagscache.itervalues():
459 tags.sort()
459 tags.sort()
460 self._tagscache.nodetagscache = nodetagscache
460 self._tagscache.nodetagscache = nodetagscache
461 return self._tagscache.nodetagscache.get(node, [])
461 return self._tagscache.nodetagscache.get(node, [])
462
462
463 def nodebookmarks(self, node):
463 def nodebookmarks(self, node):
464 marks = []
464 marks = []
465 for bookmark, n in self._bookmarks.iteritems():
465 for bookmark, n in self._bookmarks.iteritems():
466 if n == node:
466 if n == node:
467 marks.append(bookmark)
467 marks.append(bookmark)
468 return sorted(marks)
468 return sorted(marks)
469
469
470 def _branchtags(self, partial, lrev):
470 def _branchtags(self, partial, lrev):
471 # TODO: rename this function?
471 # TODO: rename this function?
472 tiprev = len(self) - 1
472 tiprev = len(self) - 1
473 if lrev != tiprev:
473 if lrev != tiprev:
474 ctxgen = (self[r] for r in xrange(lrev + 1, tiprev + 1))
474 ctxgen = (self[r] for r in xrange(lrev + 1, tiprev + 1))
475 self._updatebranchcache(partial, ctxgen)
475 self._updatebranchcache(partial, ctxgen)
476 self._writebranchcache(partial, self.changelog.tip(), tiprev)
476 self._writebranchcache(partial, self.changelog.tip(), tiprev)
477
477
478 return partial
478 return partial
479
479
480 def updatebranchcache(self):
480 def updatebranchcache(self):
481 tip = self.changelog.tip()
481 tip = self.changelog.tip()
482 if self._branchcache is not None and self._branchcachetip == tip:
482 if self._branchcache is not None and self._branchcachetip == tip:
483 return
483 return
484
484
485 oldtip = self._branchcachetip
485 oldtip = self._branchcachetip
486 self._branchcachetip = tip
486 self._branchcachetip = tip
487 if oldtip is None or oldtip not in self.changelog.nodemap:
487 if oldtip is None or oldtip not in self.changelog.nodemap:
488 partial, last, lrev = self._readbranchcache()
488 partial, last, lrev = self._readbranchcache()
489 else:
489 else:
490 lrev = self.changelog.rev(oldtip)
490 lrev = self.changelog.rev(oldtip)
491 partial = self._branchcache
491 partial = self._branchcache
492
492
493 self._branchtags(partial, lrev)
493 self._branchtags(partial, lrev)
494 # this private cache holds all heads (not just the branch tips)
494 # this private cache holds all heads (not just the branch tips)
495 self._branchcache = partial
495 self._branchcache = partial
496
496
497 def branchmap(self):
497 def branchmap(self):
498 '''returns a dictionary {branch: [branchheads]}'''
498 '''returns a dictionary {branch: [branchheads]}'''
499 self.updatebranchcache()
499 self.updatebranchcache()
500 return self._branchcache
500 return self._branchcache
501
501
502 def branchtags(self):
502 def branchtags(self):
503 '''return a dict where branch names map to the tipmost head of
503 '''return a dict where branch names map to the tipmost head of
504 the branch, open heads come before closed'''
504 the branch, open heads come before closed'''
505 bt = {}
505 bt = {}
506 for bn, heads in self.branchmap().iteritems():
506 for bn, heads in self.branchmap().iteritems():
507 tip = heads[-1]
507 tip = heads[-1]
508 for h in reversed(heads):
508 for h in reversed(heads):
509 if 'close' not in self.changelog.read(h)[5]:
509 if 'close' not in self.changelog.read(h)[5]:
510 tip = h
510 tip = h
511 break
511 break
512 bt[bn] = tip
512 bt[bn] = tip
513 return bt
513 return bt
514
514
515 def _readbranchcache(self):
515 def _readbranchcache(self):
516 partial = {}
516 partial = {}
517 try:
517 try:
518 f = self.opener("cache/branchheads")
518 f = self.opener("cache/branchheads")
519 lines = f.read().split('\n')
519 lines = f.read().split('\n')
520 f.close()
520 f.close()
521 except (IOError, OSError):
521 except (IOError, OSError):
522 return {}, nullid, nullrev
522 return {}, nullid, nullrev
523
523
524 try:
524 try:
525 last, lrev = lines.pop(0).split(" ", 1)
525 last, lrev = lines.pop(0).split(" ", 1)
526 last, lrev = bin(last), int(lrev)
526 last, lrev = bin(last), int(lrev)
527 if lrev >= len(self) or self[lrev].node() != last:
527 if lrev >= len(self) or self[lrev].node() != last:
528 # invalidate the cache
528 # invalidate the cache
529 raise ValueError('invalidating branch cache (tip differs)')
529 raise ValueError('invalidating branch cache (tip differs)')
530 for l in lines:
530 for l in lines:
531 if not l:
531 if not l:
532 continue
532 continue
533 node, label = l.split(" ", 1)
533 node, label = l.split(" ", 1)
534 label = encoding.tolocal(label.strip())
534 label = encoding.tolocal(label.strip())
535 partial.setdefault(label, []).append(bin(node))
535 partial.setdefault(label, []).append(bin(node))
536 except KeyboardInterrupt:
536 except KeyboardInterrupt:
537 raise
537 raise
538 except Exception, inst:
538 except Exception, inst:
539 if self.ui.debugflag:
539 if self.ui.debugflag:
540 self.ui.warn(str(inst), '\n')
540 self.ui.warn(str(inst), '\n')
541 partial, last, lrev = {}, nullid, nullrev
541 partial, last, lrev = {}, nullid, nullrev
542 return partial, last, lrev
542 return partial, last, lrev
543
543
544 def _writebranchcache(self, branches, tip, tiprev):
544 def _writebranchcache(self, branches, tip, tiprev):
545 try:
545 try:
546 f = self.opener("cache/branchheads", "w", atomictemp=True)
546 f = self.opener("cache/branchheads", "w", atomictemp=True)
547 f.write("%s %s\n" % (hex(tip), tiprev))
547 f.write("%s %s\n" % (hex(tip), tiprev))
548 for label, nodes in branches.iteritems():
548 for label, nodes in branches.iteritems():
549 for node in nodes:
549 for node in nodes:
550 f.write("%s %s\n" % (hex(node), encoding.fromlocal(label)))
550 f.write("%s %s\n" % (hex(node), encoding.fromlocal(label)))
551 f.close()
551 f.close()
552 except (IOError, OSError):
552 except (IOError, OSError):
553 pass
553 pass
554
554
555 def _updatebranchcache(self, partial, ctxgen):
555 def _updatebranchcache(self, partial, ctxgen):
556 # collect new branch entries
556 # collect new branch entries
557 newbranches = {}
557 newbranches = {}
558 for c in ctxgen:
558 for c in ctxgen:
559 newbranches.setdefault(c.branch(), []).append(c.node())
559 newbranches.setdefault(c.branch(), []).append(c.node())
560 # if older branchheads are reachable from new ones, they aren't
560 # if older branchheads are reachable from new ones, they aren't
561 # really branchheads. Note checking parents is insufficient:
561 # really branchheads. Note checking parents is insufficient:
562 # 1 (branch a) -> 2 (branch b) -> 3 (branch a)
562 # 1 (branch a) -> 2 (branch b) -> 3 (branch a)
563 for branch, newnodes in newbranches.iteritems():
563 for branch, newnodes in newbranches.iteritems():
564 bheads = partial.setdefault(branch, [])
564 bheads = partial.setdefault(branch, [])
565 bheads.extend(newnodes)
565 bheads.extend(newnodes)
566 if len(bheads) <= 1:
566 if len(bheads) <= 1:
567 continue
567 continue
568 bheads = sorted(bheads, key=lambda x: self[x].rev())
568 bheads = sorted(bheads, key=lambda x: self[x].rev())
569 # starting from tip means fewer passes over reachable
569 # starting from tip means fewer passes over reachable
570 while newnodes:
570 while newnodes:
571 latest = newnodes.pop()
571 latest = newnodes.pop()
572 if latest not in bheads:
572 if latest not in bheads:
573 continue
573 continue
574 minbhnode = self[bheads[0]].node()
574 minbhnode = self[bheads[0]].node()
575 reachable = self.changelog.reachable(latest, minbhnode)
575 reachable = self.changelog.reachable(latest, minbhnode)
576 reachable.remove(latest)
576 reachable.remove(latest)
577 if reachable:
577 if reachable:
578 bheads = [b for b in bheads if b not in reachable]
578 bheads = [b for b in bheads if b not in reachable]
579 partial[branch] = bheads
579 partial[branch] = bheads
580
580
581 def lookup(self, key):
581 def lookup(self, key):
582 return self[key].node()
582 return self[key].node()
583
583
584 def lookupbranch(self, key, remote=None):
584 def lookupbranch(self, key, remote=None):
585 repo = remote or self
585 repo = remote or self
586 if key in repo.branchmap():
586 if key in repo.branchmap():
587 return key
587 return key
588
588
589 repo = (remote and remote.local()) and remote or self
589 repo = (remote and remote.local()) and remote or self
590 return repo[key].branch()
590 return repo[key].branch()
591
591
592 def known(self, nodes):
592 def known(self, nodes):
593 nm = self.changelog.nodemap
593 nm = self.changelog.nodemap
594 pc = self._phasecache
594 pc = self._phasecache
595 result = []
595 result = []
596 for n in nodes:
596 for n in nodes:
597 r = nm.get(n)
597 r = nm.get(n)
598 resp = not (r is None or pc.phase(self, r) >= phases.secret)
598 resp = not (r is None or pc.phase(self, r) >= phases.secret)
599 result.append(resp)
599 result.append(resp)
600 return result
600 return result
601
601
602 def local(self):
602 def local(self):
603 return self
603 return self
604
604
605 def join(self, f):
605 def join(self, f):
606 return os.path.join(self.path, f)
606 return os.path.join(self.path, f)
607
607
608 def wjoin(self, f):
608 def wjoin(self, f):
609 return os.path.join(self.root, f)
609 return os.path.join(self.root, f)
610
610
611 def file(self, f):
611 def file(self, f):
612 if f[0] == '/':
612 if f[0] == '/':
613 f = f[1:]
613 f = f[1:]
614 return filelog.filelog(self.sopener, f)
614 return filelog.filelog(self.sopener, f)
615
615
616 def changectx(self, changeid):
616 def changectx(self, changeid):
617 return self[changeid]
617 return self[changeid]
618
618
619 def parents(self, changeid=None):
619 def parents(self, changeid=None):
620 '''get list of changectxs for parents of changeid'''
620 '''get list of changectxs for parents of changeid'''
621 return self[changeid].parents()
621 return self[changeid].parents()
622
622
623 def setparents(self, p1, p2=nullid):
623 def setparents(self, p1, p2=nullid):
624 copies = self.dirstate.setparents(p1, p2)
624 copies = self.dirstate.setparents(p1, p2)
625 if copies:
625 if copies:
626 # Adjust copy records, the dirstate cannot do it, it
626 # Adjust copy records, the dirstate cannot do it, it
627 # requires access to parents manifests. Preserve them
627 # requires access to parents manifests. Preserve them
628 # only for entries added to first parent.
628 # only for entries added to first parent.
629 pctx = self[p1]
629 pctx = self[p1]
630 for f in copies:
630 for f in copies:
631 if f not in pctx and copies[f] in pctx:
631 if f not in pctx and copies[f] in pctx:
632 self.dirstate.copy(copies[f], f)
632 self.dirstate.copy(copies[f], f)
633
633
634 def filectx(self, path, changeid=None, fileid=None):
634 def filectx(self, path, changeid=None, fileid=None):
635 """changeid can be a changeset revision, node, or tag.
635 """changeid can be a changeset revision, node, or tag.
636 fileid can be a file revision or node."""
636 fileid can be a file revision or node."""
637 return context.filectx(self, path, changeid, fileid)
637 return context.filectx(self, path, changeid, fileid)
638
638
639 def getcwd(self):
639 def getcwd(self):
640 return self.dirstate.getcwd()
640 return self.dirstate.getcwd()
641
641
642 def pathto(self, f, cwd=None):
642 def pathto(self, f, cwd=None):
643 return self.dirstate.pathto(f, cwd)
643 return self.dirstate.pathto(f, cwd)
644
644
645 def wfile(self, f, mode='r'):
645 def wfile(self, f, mode='r'):
646 return self.wopener(f, mode)
646 return self.wopener(f, mode)
647
647
648 def _link(self, f):
648 def _link(self, f):
649 return os.path.islink(self.wjoin(f))
649 return os.path.islink(self.wjoin(f))
650
650
651 def _loadfilter(self, filter):
651 def _loadfilter(self, filter):
652 if filter not in self.filterpats:
652 if filter not in self.filterpats:
653 l = []
653 l = []
654 for pat, cmd in self.ui.configitems(filter):
654 for pat, cmd in self.ui.configitems(filter):
655 if cmd == '!':
655 if cmd == '!':
656 continue
656 continue
657 mf = matchmod.match(self.root, '', [pat])
657 mf = matchmod.match(self.root, '', [pat])
658 fn = None
658 fn = None
659 params = cmd
659 params = cmd
660 for name, filterfn in self._datafilters.iteritems():
660 for name, filterfn in self._datafilters.iteritems():
661 if cmd.startswith(name):
661 if cmd.startswith(name):
662 fn = filterfn
662 fn = filterfn
663 params = cmd[len(name):].lstrip()
663 params = cmd[len(name):].lstrip()
664 break
664 break
665 if not fn:
665 if not fn:
666 fn = lambda s, c, **kwargs: util.filter(s, c)
666 fn = lambda s, c, **kwargs: util.filter(s, c)
667 # Wrap old filters not supporting keyword arguments
667 # Wrap old filters not supporting keyword arguments
668 if not inspect.getargspec(fn)[2]:
668 if not inspect.getargspec(fn)[2]:
669 oldfn = fn
669 oldfn = fn
670 fn = lambda s, c, **kwargs: oldfn(s, c)
670 fn = lambda s, c, **kwargs: oldfn(s, c)
671 l.append((mf, fn, params))
671 l.append((mf, fn, params))
672 self.filterpats[filter] = l
672 self.filterpats[filter] = l
673 return self.filterpats[filter]
673 return self.filterpats[filter]
674
674
675 def _filter(self, filterpats, filename, data):
675 def _filter(self, filterpats, filename, data):
676 for mf, fn, cmd in filterpats:
676 for mf, fn, cmd in filterpats:
677 if mf(filename):
677 if mf(filename):
678 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
678 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
679 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
679 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
680 break
680 break
681
681
682 return data
682 return data
683
683
684 @propertycache
684 @propertycache
685 def _encodefilterpats(self):
685 def _encodefilterpats(self):
686 return self._loadfilter('encode')
686 return self._loadfilter('encode')
687
687
688 @propertycache
688 @propertycache
689 def _decodefilterpats(self):
689 def _decodefilterpats(self):
690 return self._loadfilter('decode')
690 return self._loadfilter('decode')
691
691
692 def adddatafilter(self, name, filter):
692 def adddatafilter(self, name, filter):
693 self._datafilters[name] = filter
693 self._datafilters[name] = filter
694
694
695 def wread(self, filename):
695 def wread(self, filename):
696 if self._link(filename):
696 if self._link(filename):
697 data = os.readlink(self.wjoin(filename))
697 data = os.readlink(self.wjoin(filename))
698 else:
698 else:
699 data = self.wopener.read(filename)
699 data = self.wopener.read(filename)
700 return self._filter(self._encodefilterpats, filename, data)
700 return self._filter(self._encodefilterpats, filename, data)
701
701
702 def wwrite(self, filename, data, flags):
702 def wwrite(self, filename, data, flags):
703 data = self._filter(self._decodefilterpats, filename, data)
703 data = self._filter(self._decodefilterpats, filename, data)
704 if 'l' in flags:
704 if 'l' in flags:
705 self.wopener.symlink(data, filename)
705 self.wopener.symlink(data, filename)
706 else:
706 else:
707 self.wopener.write(filename, data)
707 self.wopener.write(filename, data)
708 if 'x' in flags:
708 if 'x' in flags:
709 util.setflags(self.wjoin(filename), False, True)
709 util.setflags(self.wjoin(filename), False, True)
710
710
711 def wwritedata(self, filename, data):
711 def wwritedata(self, filename, data):
712 return self._filter(self._decodefilterpats, filename, data)
712 return self._filter(self._decodefilterpats, filename, data)
713
713
714 def transaction(self, desc):
714 def transaction(self, desc):
715 tr = self._transref and self._transref() or None
715 tr = self._transref and self._transref() or None
716 if tr and tr.running():
716 if tr and tr.running():
717 return tr.nest()
717 return tr.nest()
718
718
719 # abort here if the journal already exists
719 # abort here if the journal already exists
720 if os.path.exists(self.sjoin("journal")):
720 if os.path.exists(self.sjoin("journal")):
721 raise error.RepoError(
721 raise error.RepoError(
722 _("abandoned transaction found - run hg recover"))
722 _("abandoned transaction found - run hg recover"))
723
723
724 self._writejournal(desc)
724 self._writejournal(desc)
725 renames = [(x, undoname(x)) for x in self._journalfiles()]
725 renames = [(x, undoname(x)) for x in self._journalfiles()]
726
726
727 tr = transaction.transaction(self.ui.warn, self.sopener,
727 tr = transaction.transaction(self.ui.warn, self.sopener,
728 self.sjoin("journal"),
728 self.sjoin("journal"),
729 aftertrans(renames),
729 aftertrans(renames),
730 self.store.createmode)
730 self.store.createmode)
731 self._transref = weakref.ref(tr)
731 self._transref = weakref.ref(tr)
732 return tr
732 return tr
733
733
734 def _journalfiles(self):
734 def _journalfiles(self):
735 return (self.sjoin('journal'), self.join('journal.dirstate'),
735 return (self.sjoin('journal'), self.join('journal.dirstate'),
736 self.join('journal.branch'), self.join('journal.desc'),
736 self.join('journal.branch'), self.join('journal.desc'),
737 self.join('journal.bookmarks'),
737 self.join('journal.bookmarks'),
738 self.sjoin('journal.phaseroots'))
738 self.sjoin('journal.phaseroots'))
739
739
740 def undofiles(self):
740 def undofiles(self):
741 return [undoname(x) for x in self._journalfiles()]
741 return [undoname(x) for x in self._journalfiles()]
742
742
743 def _writejournal(self, desc):
743 def _writejournal(self, desc):
744 self.opener.write("journal.dirstate",
744 self.opener.write("journal.dirstate",
745 self.opener.tryread("dirstate"))
745 self.opener.tryread("dirstate"))
746 self.opener.write("journal.branch",
746 self.opener.write("journal.branch",
747 encoding.fromlocal(self.dirstate.branch()))
747 encoding.fromlocal(self.dirstate.branch()))
748 self.opener.write("journal.desc",
748 self.opener.write("journal.desc",
749 "%d\n%s\n" % (len(self), desc))
749 "%d\n%s\n" % (len(self), desc))
750 self.opener.write("journal.bookmarks",
750 self.opener.write("journal.bookmarks",
751 self.opener.tryread("bookmarks"))
751 self.opener.tryread("bookmarks"))
752 self.sopener.write("journal.phaseroots",
752 self.sopener.write("journal.phaseroots",
753 self.sopener.tryread("phaseroots"))
753 self.sopener.tryread("phaseroots"))
754
754
755 def recover(self):
755 def recover(self):
756 lock = self.lock()
756 lock = self.lock()
757 try:
757 try:
758 if os.path.exists(self.sjoin("journal")):
758 if os.path.exists(self.sjoin("journal")):
759 self.ui.status(_("rolling back interrupted transaction\n"))
759 self.ui.status(_("rolling back interrupted transaction\n"))
760 transaction.rollback(self.sopener, self.sjoin("journal"),
760 transaction.rollback(self.sopener, self.sjoin("journal"),
761 self.ui.warn)
761 self.ui.warn)
762 self.invalidate()
762 self.invalidate()
763 return True
763 return True
764 else:
764 else:
765 self.ui.warn(_("no interrupted transaction available\n"))
765 self.ui.warn(_("no interrupted transaction available\n"))
766 return False
766 return False
767 finally:
767 finally:
768 lock.release()
768 lock.release()
769
769
770 def rollback(self, dryrun=False, force=False):
770 def rollback(self, dryrun=False, force=False):
771 wlock = lock = None
771 wlock = lock = None
772 try:
772 try:
773 wlock = self.wlock()
773 wlock = self.wlock()
774 lock = self.lock()
774 lock = self.lock()
775 if os.path.exists(self.sjoin("undo")):
775 if os.path.exists(self.sjoin("undo")):
776 return self._rollback(dryrun, force)
776 return self._rollback(dryrun, force)
777 else:
777 else:
778 self.ui.warn(_("no rollback information available\n"))
778 self.ui.warn(_("no rollback information available\n"))
779 return 1
779 return 1
780 finally:
780 finally:
781 release(lock, wlock)
781 release(lock, wlock)
782
782
783 def _rollback(self, dryrun, force):
783 def _rollback(self, dryrun, force):
784 ui = self.ui
784 ui = self.ui
785 try:
785 try:
786 args = self.opener.read('undo.desc').splitlines()
786 args = self.opener.read('undo.desc').splitlines()
787 (oldlen, desc, detail) = (int(args[0]), args[1], None)
787 (oldlen, desc, detail) = (int(args[0]), args[1], None)
788 if len(args) >= 3:
788 if len(args) >= 3:
789 detail = args[2]
789 detail = args[2]
790 oldtip = oldlen - 1
790 oldtip = oldlen - 1
791
791
792 if detail and ui.verbose:
792 if detail and ui.verbose:
793 msg = (_('repository tip rolled back to revision %s'
793 msg = (_('repository tip rolled back to revision %s'
794 ' (undo %s: %s)\n')
794 ' (undo %s: %s)\n')
795 % (oldtip, desc, detail))
795 % (oldtip, desc, detail))
796 else:
796 else:
797 msg = (_('repository tip rolled back to revision %s'
797 msg = (_('repository tip rolled back to revision %s'
798 ' (undo %s)\n')
798 ' (undo %s)\n')
799 % (oldtip, desc))
799 % (oldtip, desc))
800 except IOError:
800 except IOError:
801 msg = _('rolling back unknown transaction\n')
801 msg = _('rolling back unknown transaction\n')
802 desc = None
802 desc = None
803
803
804 if not force and self['.'] != self['tip'] and desc == 'commit':
804 if not force and self['.'] != self['tip'] and desc == 'commit':
805 raise util.Abort(
805 raise util.Abort(
806 _('rollback of last commit while not checked out '
806 _('rollback of last commit while not checked out '
807 'may lose data'), hint=_('use -f to force'))
807 'may lose data'), hint=_('use -f to force'))
808
808
809 ui.status(msg)
809 ui.status(msg)
810 if dryrun:
810 if dryrun:
811 return 0
811 return 0
812
812
813 parents = self.dirstate.parents()
813 parents = self.dirstate.parents()
814 transaction.rollback(self.sopener, self.sjoin('undo'), ui.warn)
814 transaction.rollback(self.sopener, self.sjoin('undo'), ui.warn)
815 if os.path.exists(self.join('undo.bookmarks')):
815 if os.path.exists(self.join('undo.bookmarks')):
816 util.rename(self.join('undo.bookmarks'),
816 util.rename(self.join('undo.bookmarks'),
817 self.join('bookmarks'))
817 self.join('bookmarks'))
818 if os.path.exists(self.sjoin('undo.phaseroots')):
818 if os.path.exists(self.sjoin('undo.phaseroots')):
819 util.rename(self.sjoin('undo.phaseroots'),
819 util.rename(self.sjoin('undo.phaseroots'),
820 self.sjoin('phaseroots'))
820 self.sjoin('phaseroots'))
821 self.invalidate()
821 self.invalidate()
822
822
823 parentgone = (parents[0] not in self.changelog.nodemap or
823 parentgone = (parents[0] not in self.changelog.nodemap or
824 parents[1] not in self.changelog.nodemap)
824 parents[1] not in self.changelog.nodemap)
825 if parentgone:
825 if parentgone:
826 util.rename(self.join('undo.dirstate'), self.join('dirstate'))
826 util.rename(self.join('undo.dirstate'), self.join('dirstate'))
827 try:
827 try:
828 branch = self.opener.read('undo.branch')
828 branch = self.opener.read('undo.branch')
829 self.dirstate.setbranch(branch)
829 self.dirstate.setbranch(branch)
830 except IOError:
830 except IOError:
831 ui.warn(_('named branch could not be reset: '
831 ui.warn(_('named branch could not be reset: '
832 'current branch is still \'%s\'\n')
832 'current branch is still \'%s\'\n')
833 % self.dirstate.branch())
833 % self.dirstate.branch())
834
834
835 self.dirstate.invalidate()
835 self.dirstate.invalidate()
836 parents = tuple([p.rev() for p in self.parents()])
836 parents = tuple([p.rev() for p in self.parents()])
837 if len(parents) > 1:
837 if len(parents) > 1:
838 ui.status(_('working directory now based on '
838 ui.status(_('working directory now based on '
839 'revisions %d and %d\n') % parents)
839 'revisions %d and %d\n') % parents)
840 else:
840 else:
841 ui.status(_('working directory now based on '
841 ui.status(_('working directory now based on '
842 'revision %d\n') % parents)
842 'revision %d\n') % parents)
843 self.destroyed()
843 self.destroyed()
844 return 0
844 return 0
845
845
846 def invalidatecaches(self):
846 def invalidatecaches(self):
847 def delcache(name):
847 def delcache(name):
848 try:
848 try:
849 delattr(self, name)
849 delattr(self, name)
850 except AttributeError:
850 except AttributeError:
851 pass
851 pass
852
852
853 delcache('_tagscache')
853 delcache('_tagscache')
854
854
855 self._branchcache = None # in UTF-8
855 self._branchcache = None # in UTF-8
856 self._branchcachetip = None
856 self._branchcachetip = None
857
857
858 def invalidatedirstate(self):
858 def invalidatedirstate(self):
859 '''Invalidates the dirstate, causing the next call to dirstate
859 '''Invalidates the dirstate, causing the next call to dirstate
860 to check if it was modified since the last time it was read,
860 to check if it was modified since the last time it was read,
861 rereading it if it has.
861 rereading it if it has.
862
862
863 This is different to dirstate.invalidate() that it doesn't always
863 This is different to dirstate.invalidate() that it doesn't always
864 rereads the dirstate. Use dirstate.invalidate() if you want to
864 rereads the dirstate. Use dirstate.invalidate() if you want to
865 explicitly read the dirstate again (i.e. restoring it to a previous
865 explicitly read the dirstate again (i.e. restoring it to a previous
866 known good state).'''
866 known good state).'''
867 if 'dirstate' in self.__dict__:
867 if 'dirstate' in self.__dict__:
868 for k in self.dirstate._filecache:
868 for k in self.dirstate._filecache:
869 try:
869 try:
870 delattr(self.dirstate, k)
870 delattr(self.dirstate, k)
871 except AttributeError:
871 except AttributeError:
872 pass
872 pass
873 delattr(self, 'dirstate')
873 delattr(self, 'dirstate')
874
874
875 def invalidate(self):
875 def invalidate(self):
876 for k in self._filecache:
876 for k in self._filecache:
877 # dirstate is invalidated separately in invalidatedirstate()
877 # dirstate is invalidated separately in invalidatedirstate()
878 if k == 'dirstate':
878 if k == 'dirstate':
879 continue
879 continue
880
880
881 try:
881 try:
882 delattr(self, k)
882 delattr(self, k)
883 except AttributeError:
883 except AttributeError:
884 pass
884 pass
885 self.invalidatecaches()
885 self.invalidatecaches()
886
886
887 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
887 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
888 try:
888 try:
889 l = lock.lock(lockname, 0, releasefn, desc=desc)
889 l = lock.lock(lockname, 0, releasefn, desc=desc)
890 except error.LockHeld, inst:
890 except error.LockHeld, inst:
891 if not wait:
891 if not wait:
892 raise
892 raise
893 self.ui.warn(_("waiting for lock on %s held by %r\n") %
893 self.ui.warn(_("waiting for lock on %s held by %r\n") %
894 (desc, inst.locker))
894 (desc, inst.locker))
895 # default to 600 seconds timeout
895 # default to 600 seconds timeout
896 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
896 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
897 releasefn, desc=desc)
897 releasefn, desc=desc)
898 if acquirefn:
898 if acquirefn:
899 acquirefn()
899 acquirefn()
900 return l
900 return l
901
901
902 def _afterlock(self, callback):
902 def _afterlock(self, callback):
903 """add a callback to the current repository lock.
903 """add a callback to the current repository lock.
904
904
905 The callback will be executed on lock release."""
905 The callback will be executed on lock release."""
906 l = self._lockref and self._lockref()
906 l = self._lockref and self._lockref()
907 if l:
907 if l:
908 l.postrelease.append(callback)
908 l.postrelease.append(callback)
909 else:
909 else:
910 callback()
910 callback()
911
911
912 def lock(self, wait=True):
912 def lock(self, wait=True):
913 '''Lock the repository store (.hg/store) and return a weak reference
913 '''Lock the repository store (.hg/store) and return a weak reference
914 to the lock. Use this before modifying the store (e.g. committing or
914 to the lock. Use this before modifying the store (e.g. committing or
915 stripping). If you are opening a transaction, get a lock as well.)'''
915 stripping). If you are opening a transaction, get a lock as well.)'''
916 l = self._lockref and self._lockref()
916 l = self._lockref and self._lockref()
917 if l is not None and l.held:
917 if l is not None and l.held:
918 l.lock()
918 l.lock()
919 return l
919 return l
920
920
921 def unlock():
921 def unlock():
922 self.store.write()
922 self.store.write()
923 if '_phasecache' in vars(self):
923 if '_phasecache' in vars(self):
924 self._phasecache.write()
924 self._phasecache.write()
925 for k, ce in self._filecache.items():
925 for k, ce in self._filecache.items():
926 if k == 'dirstate':
926 if k == 'dirstate':
927 continue
927 continue
928 ce.refresh()
928 ce.refresh()
929
929
930 l = self._lock(self.sjoin("lock"), wait, unlock,
930 l = self._lock(self.sjoin("lock"), wait, unlock,
931 self.invalidate, _('repository %s') % self.origroot)
931 self.invalidate, _('repository %s') % self.origroot)
932 self._lockref = weakref.ref(l)
932 self._lockref = weakref.ref(l)
933 return l
933 return l
934
934
935 def wlock(self, wait=True):
935 def wlock(self, wait=True):
936 '''Lock the non-store parts of the repository (everything under
936 '''Lock the non-store parts of the repository (everything under
937 .hg except .hg/store) and return a weak reference to the lock.
937 .hg except .hg/store) and return a weak reference to the lock.
938 Use this before modifying files in .hg.'''
938 Use this before modifying files in .hg.'''
939 l = self._wlockref and self._wlockref()
939 l = self._wlockref and self._wlockref()
940 if l is not None and l.held:
940 if l is not None and l.held:
941 l.lock()
941 l.lock()
942 return l
942 return l
943
943
944 def unlock():
944 def unlock():
945 self.dirstate.write()
945 self.dirstate.write()
946 ce = self._filecache.get('dirstate')
946 ce = self._filecache.get('dirstate')
947 if ce:
947 if ce:
948 ce.refresh()
948 ce.refresh()
949
949
950 l = self._lock(self.join("wlock"), wait, unlock,
950 l = self._lock(self.join("wlock"), wait, unlock,
951 self.invalidatedirstate, _('working directory of %s') %
951 self.invalidatedirstate, _('working directory of %s') %
952 self.origroot)
952 self.origroot)
953 self._wlockref = weakref.ref(l)
953 self._wlockref = weakref.ref(l)
954 return l
954 return l
955
955
956 def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
956 def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
957 """
957 """
958 commit an individual file as part of a larger transaction
958 commit an individual file as part of a larger transaction
959 """
959 """
960
960
961 fname = fctx.path()
961 fname = fctx.path()
962 text = fctx.data()
962 text = fctx.data()
963 flog = self.file(fname)
963 flog = self.file(fname)
964 fparent1 = manifest1.get(fname, nullid)
964 fparent1 = manifest1.get(fname, nullid)
965 fparent2 = fparent2o = manifest2.get(fname, nullid)
965 fparent2 = fparent2o = manifest2.get(fname, nullid)
966
966
967 meta = {}
967 meta = {}
968 copy = fctx.renamed()
968 copy = fctx.renamed()
969 if copy and copy[0] != fname:
969 if copy and copy[0] != fname:
970 # Mark the new revision of this file as a copy of another
970 # Mark the new revision of this file as a copy of another
971 # file. This copy data will effectively act as a parent
971 # file. This copy data will effectively act as a parent
972 # of this new revision. If this is a merge, the first
972 # of this new revision. If this is a merge, the first
973 # parent will be the nullid (meaning "look up the copy data")
973 # parent will be the nullid (meaning "look up the copy data")
974 # and the second one will be the other parent. For example:
974 # and the second one will be the other parent. For example:
975 #
975 #
976 # 0 --- 1 --- 3 rev1 changes file foo
976 # 0 --- 1 --- 3 rev1 changes file foo
977 # \ / rev2 renames foo to bar and changes it
977 # \ / rev2 renames foo to bar and changes it
978 # \- 2 -/ rev3 should have bar with all changes and
978 # \- 2 -/ rev3 should have bar with all changes and
979 # should record that bar descends from
979 # should record that bar descends from
980 # bar in rev2 and foo in rev1
980 # bar in rev2 and foo in rev1
981 #
981 #
982 # this allows this merge to succeed:
982 # this allows this merge to succeed:
983 #
983 #
984 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
984 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
985 # \ / merging rev3 and rev4 should use bar@rev2
985 # \ / merging rev3 and rev4 should use bar@rev2
986 # \- 2 --- 4 as the merge base
986 # \- 2 --- 4 as the merge base
987 #
987 #
988
988
989 cfname = copy[0]
989 cfname = copy[0]
990 crev = manifest1.get(cfname)
990 crev = manifest1.get(cfname)
991 newfparent = fparent2
991 newfparent = fparent2
992
992
993 if manifest2: # branch merge
993 if manifest2: # branch merge
994 if fparent2 == nullid or crev is None: # copied on remote side
994 if fparent2 == nullid or crev is None: # copied on remote side
995 if cfname in manifest2:
995 if cfname in manifest2:
996 crev = manifest2[cfname]
996 crev = manifest2[cfname]
997 newfparent = fparent1
997 newfparent = fparent1
998
998
999 # find source in nearest ancestor if we've lost track
999 # find source in nearest ancestor if we've lost track
1000 if not crev:
1000 if not crev:
1001 self.ui.debug(" %s: searching for copy revision for %s\n" %
1001 self.ui.debug(" %s: searching for copy revision for %s\n" %
1002 (fname, cfname))
1002 (fname, cfname))
1003 for ancestor in self[None].ancestors():
1003 for ancestor in self[None].ancestors():
1004 if cfname in ancestor:
1004 if cfname in ancestor:
1005 crev = ancestor[cfname].filenode()
1005 crev = ancestor[cfname].filenode()
1006 break
1006 break
1007
1007
1008 if crev:
1008 if crev:
1009 self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
1009 self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
1010 meta["copy"] = cfname
1010 meta["copy"] = cfname
1011 meta["copyrev"] = hex(crev)
1011 meta["copyrev"] = hex(crev)
1012 fparent1, fparent2 = nullid, newfparent
1012 fparent1, fparent2 = nullid, newfparent
1013 else:
1013 else:
1014 self.ui.warn(_("warning: can't find ancestor for '%s' "
1014 self.ui.warn(_("warning: can't find ancestor for '%s' "
1015 "copied from '%s'!\n") % (fname, cfname))
1015 "copied from '%s'!\n") % (fname, cfname))
1016
1016
1017 elif fparent2 != nullid:
1017 elif fparent2 != nullid:
1018 # is one parent an ancestor of the other?
1018 # is one parent an ancestor of the other?
1019 fparentancestor = flog.ancestor(fparent1, fparent2)
1019 fparentancestor = flog.ancestor(fparent1, fparent2)
1020 if fparentancestor == fparent1:
1020 if fparentancestor == fparent1:
1021 fparent1, fparent2 = fparent2, nullid
1021 fparent1, fparent2 = fparent2, nullid
1022 elif fparentancestor == fparent2:
1022 elif fparentancestor == fparent2:
1023 fparent2 = nullid
1023 fparent2 = nullid
1024
1024
1025 # is the file changed?
1025 # is the file changed?
1026 if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
1026 if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
1027 changelist.append(fname)
1027 changelist.append(fname)
1028 return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
1028 return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
1029
1029
1030 # are just the flags changed during merge?
1030 # are just the flags changed during merge?
1031 if fparent1 != fparent2o and manifest1.flags(fname) != fctx.flags():
1031 if fparent1 != fparent2o and manifest1.flags(fname) != fctx.flags():
1032 changelist.append(fname)
1032 changelist.append(fname)
1033
1033
1034 return fparent1
1034 return fparent1
1035
1035
1036 def commit(self, text="", user=None, date=None, match=None, force=False,
1036 def commit(self, text="", user=None, date=None, match=None, force=False,
1037 editor=False, extra={}):
1037 editor=False, extra={}):
1038 """Add a new revision to current repository.
1038 """Add a new revision to current repository.
1039
1039
1040 Revision information is gathered from the working directory,
1040 Revision information is gathered from the working directory,
1041 match can be used to filter the committed files. If editor is
1041 match can be used to filter the committed files. If editor is
1042 supplied, it is called to get a commit message.
1042 supplied, it is called to get a commit message.
1043 """
1043 """
1044
1044
1045 def fail(f, msg):
1045 def fail(f, msg):
1046 raise util.Abort('%s: %s' % (f, msg))
1046 raise util.Abort('%s: %s' % (f, msg))
1047
1047
1048 if not match:
1048 if not match:
1049 match = matchmod.always(self.root, '')
1049 match = matchmod.always(self.root, '')
1050
1050
1051 if not force:
1051 if not force:
1052 vdirs = []
1052 vdirs = []
1053 match.dir = vdirs.append
1053 match.dir = vdirs.append
1054 match.bad = fail
1054 match.bad = fail
1055
1055
1056 wlock = self.wlock()
1056 wlock = self.wlock()
1057 try:
1057 try:
1058 wctx = self[None]
1058 wctx = self[None]
1059 merge = len(wctx.parents()) > 1
1059 merge = len(wctx.parents()) > 1
1060
1060
1061 if (not force and merge and match and
1061 if (not force and merge and match and
1062 (match.files() or match.anypats())):
1062 (match.files() or match.anypats())):
1063 raise util.Abort(_('cannot partially commit a merge '
1063 raise util.Abort(_('cannot partially commit a merge '
1064 '(do not specify files or patterns)'))
1064 '(do not specify files or patterns)'))
1065
1065
1066 changes = self.status(match=match, clean=force)
1066 changes = self.status(match=match, clean=force)
1067 if force:
1067 if force:
1068 changes[0].extend(changes[6]) # mq may commit unchanged files
1068 changes[0].extend(changes[6]) # mq may commit unchanged files
1069
1069
1070 # check subrepos
1070 # check subrepos
1071 subs = []
1071 subs = []
1072 commitsubs = set()
1072 commitsubs = set()
1073 newstate = wctx.substate.copy()
1073 newstate = wctx.substate.copy()
1074 # only manage subrepos and .hgsubstate if .hgsub is present
1074 # only manage subrepos and .hgsubstate if .hgsub is present
1075 if '.hgsub' in wctx:
1075 if '.hgsub' in wctx:
1076 # we'll decide whether to track this ourselves, thanks
1076 # we'll decide whether to track this ourselves, thanks
1077 if '.hgsubstate' in changes[0]:
1077 if '.hgsubstate' in changes[0]:
1078 changes[0].remove('.hgsubstate')
1078 changes[0].remove('.hgsubstate')
1079 if '.hgsubstate' in changes[2]:
1079 if '.hgsubstate' in changes[2]:
1080 changes[2].remove('.hgsubstate')
1080 changes[2].remove('.hgsubstate')
1081
1081
1082 # compare current state to last committed state
1082 # compare current state to last committed state
1083 # build new substate based on last committed state
1083 # build new substate based on last committed state
1084 oldstate = wctx.p1().substate
1084 oldstate = wctx.p1().substate
1085 for s in sorted(newstate.keys()):
1085 for s in sorted(newstate.keys()):
1086 if not match(s):
1086 if not match(s):
1087 # ignore working copy, use old state if present
1087 # ignore working copy, use old state if present
1088 if s in oldstate:
1088 if s in oldstate:
1089 newstate[s] = oldstate[s]
1089 newstate[s] = oldstate[s]
1090 continue
1090 continue
1091 if not force:
1091 if not force:
1092 raise util.Abort(
1092 raise util.Abort(
1093 _("commit with new subrepo %s excluded") % s)
1093 _("commit with new subrepo %s excluded") % s)
1094 if wctx.sub(s).dirty(True):
1094 if wctx.sub(s).dirty(True):
1095 if not self.ui.configbool('ui', 'commitsubrepos'):
1095 if not self.ui.configbool('ui', 'commitsubrepos'):
1096 raise util.Abort(
1096 raise util.Abort(
1097 _("uncommitted changes in subrepo %s") % s,
1097 _("uncommitted changes in subrepo %s") % s,
1098 hint=_("use --subrepos for recursive commit"))
1098 hint=_("use --subrepos for recursive commit"))
1099 subs.append(s)
1099 subs.append(s)
1100 commitsubs.add(s)
1100 commitsubs.add(s)
1101 else:
1101 else:
1102 bs = wctx.sub(s).basestate()
1102 bs = wctx.sub(s).basestate()
1103 newstate[s] = (newstate[s][0], bs, newstate[s][2])
1103 newstate[s] = (newstate[s][0], bs, newstate[s][2])
1104 if oldstate.get(s, (None, None, None))[1] != bs:
1104 if oldstate.get(s, (None, None, None))[1] != bs:
1105 subs.append(s)
1105 subs.append(s)
1106
1106
1107 # check for removed subrepos
1107 # check for removed subrepos
1108 for p in wctx.parents():
1108 for p in wctx.parents():
1109 r = [s for s in p.substate if s not in newstate]
1109 r = [s for s in p.substate if s not in newstate]
1110 subs += [s for s in r if match(s)]
1110 subs += [s for s in r if match(s)]
1111 if subs:
1111 if subs:
1112 if (not match('.hgsub') and
1112 if (not match('.hgsub') and
1113 '.hgsub' in (wctx.modified() + wctx.added())):
1113 '.hgsub' in (wctx.modified() + wctx.added())):
1114 raise util.Abort(
1114 raise util.Abort(
1115 _("can't commit subrepos without .hgsub"))
1115 _("can't commit subrepos without .hgsub"))
1116 changes[0].insert(0, '.hgsubstate')
1116 changes[0].insert(0, '.hgsubstate')
1117
1117
1118 elif '.hgsub' in changes[2]:
1118 elif '.hgsub' in changes[2]:
1119 # clean up .hgsubstate when .hgsub is removed
1119 # clean up .hgsubstate when .hgsub is removed
1120 if ('.hgsubstate' in wctx and
1120 if ('.hgsubstate' in wctx and
1121 '.hgsubstate' not in changes[0] + changes[1] + changes[2]):
1121 '.hgsubstate' not in changes[0] + changes[1] + changes[2]):
1122 changes[2].insert(0, '.hgsubstate')
1122 changes[2].insert(0, '.hgsubstate')
1123
1123
1124 # make sure all explicit patterns are matched
1124 # make sure all explicit patterns are matched
1125 if not force and match.files():
1125 if not force and match.files():
1126 matched = set(changes[0] + changes[1] + changes[2])
1126 matched = set(changes[0] + changes[1] + changes[2])
1127
1127
1128 for f in match.files():
1128 for f in match.files():
1129 if f == '.' or f in matched or f in wctx.substate:
1129 if f == '.' or f in matched or f in wctx.substate:
1130 continue
1130 continue
1131 if f in changes[3]: # missing
1131 if f in changes[3]: # missing
1132 fail(f, _('file not found!'))
1132 fail(f, _('file not found!'))
1133 if f in vdirs: # visited directory
1133 if f in vdirs: # visited directory
1134 d = f + '/'
1134 d = f + '/'
1135 for mf in matched:
1135 for mf in matched:
1136 if mf.startswith(d):
1136 if mf.startswith(d):
1137 break
1137 break
1138 else:
1138 else:
1139 fail(f, _("no match under directory!"))
1139 fail(f, _("no match under directory!"))
1140 elif f not in self.dirstate:
1140 elif f not in self.dirstate:
1141 fail(f, _("file not tracked!"))
1141 fail(f, _("file not tracked!"))
1142
1142
1143 if (not force and not extra.get("close") and not merge
1143 if (not force and not extra.get("close") and not merge
1144 and not (changes[0] or changes[1] or changes[2])
1144 and not (changes[0] or changes[1] or changes[2])
1145 and wctx.branch() == wctx.p1().branch()):
1145 and wctx.branch() == wctx.p1().branch()):
1146 return None
1146 return None
1147
1147
1148 if merge and changes[3]:
1148 if merge and changes[3]:
1149 raise util.Abort(_("cannot commit merge with missing files"))
1149 raise util.Abort(_("cannot commit merge with missing files"))
1150
1150
1151 ms = mergemod.mergestate(self)
1151 ms = mergemod.mergestate(self)
1152 for f in changes[0]:
1152 for f in changes[0]:
1153 if f in ms and ms[f] == 'u':
1153 if f in ms and ms[f] == 'u':
1154 raise util.Abort(_("unresolved merge conflicts "
1154 raise util.Abort(_("unresolved merge conflicts "
1155 "(see hg help resolve)"))
1155 "(see hg help resolve)"))
1156
1156
1157 cctx = context.workingctx(self, text, user, date, extra, changes)
1157 cctx = context.workingctx(self, text, user, date, extra, changes)
1158 if editor:
1158 if editor:
1159 cctx._text = editor(self, cctx, subs)
1159 cctx._text = editor(self, cctx, subs)
1160 edited = (text != cctx._text)
1160 edited = (text != cctx._text)
1161
1161
1162 # commit subs and write new state
1162 # commit subs and write new state
1163 if subs:
1163 if subs:
1164 for s in sorted(commitsubs):
1164 for s in sorted(commitsubs):
1165 sub = wctx.sub(s)
1165 sub = wctx.sub(s)
1166 self.ui.status(_('committing subrepository %s\n') %
1166 self.ui.status(_('committing subrepository %s\n') %
1167 subrepo.subrelpath(sub))
1167 subrepo.subrelpath(sub))
1168 sr = sub.commit(cctx._text, user, date)
1168 sr = sub.commit(cctx._text, user, date)
1169 newstate[s] = (newstate[s][0], sr)
1169 newstate[s] = (newstate[s][0], sr)
1170 subrepo.writestate(self, newstate)
1170 subrepo.writestate(self, newstate)
1171
1171
1172 # Save commit message in case this transaction gets rolled back
1172 # Save commit message in case this transaction gets rolled back
1173 # (e.g. by a pretxncommit hook). Leave the content alone on
1173 # (e.g. by a pretxncommit hook). Leave the content alone on
1174 # the assumption that the user will use the same editor again.
1174 # the assumption that the user will use the same editor again.
1175 msgfn = self.savecommitmessage(cctx._text)
1175 msgfn = self.savecommitmessage(cctx._text)
1176
1176
1177 p1, p2 = self.dirstate.parents()
1177 p1, p2 = self.dirstate.parents()
1178 hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
1178 hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
1179 try:
1179 try:
1180 self.hook("precommit", throw=True, parent1=hookp1,
1180 self.hook("precommit", throw=True, parent1=hookp1,
1181 parent2=hookp2)
1181 parent2=hookp2)
1182 ret = self.commitctx(cctx, True)
1182 ret = self.commitctx(cctx, True)
1183 except:
1183 except: # re-raises
1184 if edited:
1184 if edited:
1185 self.ui.write(
1185 self.ui.write(
1186 _('note: commit message saved in %s\n') % msgfn)
1186 _('note: commit message saved in %s\n') % msgfn)
1187 raise
1187 raise
1188
1188
1189 # update bookmarks, dirstate and mergestate
1189 # update bookmarks, dirstate and mergestate
1190 bookmarks.update(self, p1, ret)
1190 bookmarks.update(self, p1, ret)
1191 for f in changes[0] + changes[1]:
1191 for f in changes[0] + changes[1]:
1192 self.dirstate.normal(f)
1192 self.dirstate.normal(f)
1193 for f in changes[2]:
1193 for f in changes[2]:
1194 self.dirstate.drop(f)
1194 self.dirstate.drop(f)
1195 self.dirstate.setparents(ret)
1195 self.dirstate.setparents(ret)
1196 ms.reset()
1196 ms.reset()
1197 finally:
1197 finally:
1198 wlock.release()
1198 wlock.release()
1199
1199
1200 def commithook(node=hex(ret), parent1=hookp1, parent2=hookp2):
1200 def commithook(node=hex(ret), parent1=hookp1, parent2=hookp2):
1201 self.hook("commit", node=node, parent1=parent1, parent2=parent2)
1201 self.hook("commit", node=node, parent1=parent1, parent2=parent2)
1202 self._afterlock(commithook)
1202 self._afterlock(commithook)
1203 return ret
1203 return ret
1204
1204
    def commitctx(self, ctx, error=False):
        """Add a new revision to current repository.
        Revision information is passed via the context argument.

        With error=False, a file that raises IOError with errno.ENOENT
        during filelog commit is recorded as removed instead of aborting;
        with error=True any IOError aborts the commit.

        Returns the node of the new changeset.
        """

        tr = lock = None
        removed = list(ctx.removed())
        p1, p2 = ctx.p1(), ctx.p2()
        user = ctx.user()

        lock = self.lock()
        try:
            tr = self.transaction("commit")
            # weak proxy so the transaction can be garbage collected
            # once released; callees hold trp, not tr itself
            trp = weakref.proxy(tr)

            if ctx.files():
                m1 = p1.manifest().copy()
                m2 = p2.manifest()

                # check in files
                new = {}
                changed = []
                linkrev = len(self)
                for f in sorted(ctx.modified() + ctx.added()):
                    self.ui.note(f + "\n")
                    try:
                        fctx = ctx[f]
                        new[f] = self._filecommit(fctx, m1, m2, linkrev, trp,
                                                  changed)
                        m1.set(f, fctx.flags())
                    except OSError, inst:
                        self.ui.warn(_("trouble committing %s!\n") % f)
                        raise
                    except IOError, inst:
                        errcode = getattr(inst, 'errno', errno.ENOENT)
                        if error or errcode and errcode != errno.ENOENT:
                            self.ui.warn(_("trouble committing %s!\n") % f)
                            raise
                        else:
                            # file vanished (ENOENT): treat it as removed
                            removed.append(f)

                # update manifest
                m1.update(new)
                removed = [f for f in sorted(removed) if f in m1 or f in m2]
                drop = [f for f in removed if f in m1]
                for f in drop:
                    del m1[f]
                mn = self.manifest.add(m1, trp, linkrev, p1.manifestnode(),
                                       p2.manifestnode(), (new, drop))
                files = changed + removed
            else:
                # no file changes: reuse the first parent's manifest
                mn = p1.manifestnode()
                files = []

            # update changelog
            self.changelog.delayupdate()
            n = self.changelog.add(mn, files, ctx.description(),
                                   trp, p1.node(), p2.node(),
                                   user, ctx.date(), ctx.extra().copy())
            # lazily flush pending changelog data so pretxncommit hooks
            # can see the new revision; returns repo root when written
            p = lambda: self.changelog.writepending() and self.root or ""
            xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
            self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
                      parent2=xp2, pending=p)
            self.changelog.finalize(trp)
            # set the new commit is proper phase
            targetphase = phases.newcommitphase(self.ui)
            if targetphase:
                # retract boundary do not alter parent changeset.
                # if a parent have higher the resulting phase will
                # be compliant anyway
                #
                # if minimal phase was 0 we don't need to retract anything
                phases.retractboundary(self, targetphase, [n])
            tr.close()
            self.updatebranchcache()
            return n
        finally:
            if tr:
                tr.release()
            lock.release()
1285
1285
1286 def destroyed(self):
1286 def destroyed(self):
1287 '''Inform the repository that nodes have been destroyed.
1287 '''Inform the repository that nodes have been destroyed.
1288 Intended for use by strip and rollback, so there's a common
1288 Intended for use by strip and rollback, so there's a common
1289 place for anything that has to be done after destroying history.'''
1289 place for anything that has to be done after destroying history.'''
1290 # XXX it might be nice if we could take the list of destroyed
1290 # XXX it might be nice if we could take the list of destroyed
1291 # nodes, but I don't see an easy way for rollback() to do that
1291 # nodes, but I don't see an easy way for rollback() to do that
1292
1292
1293 # Ensure the persistent tag cache is updated. Doing it now
1293 # Ensure the persistent tag cache is updated. Doing it now
1294 # means that the tag cache only has to worry about destroyed
1294 # means that the tag cache only has to worry about destroyed
1295 # heads immediately after a strip/rollback. That in turn
1295 # heads immediately after a strip/rollback. That in turn
1296 # guarantees that "cachetip == currenttip" (comparing both rev
1296 # guarantees that "cachetip == currenttip" (comparing both rev
1297 # and node) always means no nodes have been added or destroyed.
1297 # and node) always means no nodes have been added or destroyed.
1298
1298
1299 # XXX this is suboptimal when qrefresh'ing: we strip the current
1299 # XXX this is suboptimal when qrefresh'ing: we strip the current
1300 # head, refresh the tag cache, then immediately add a new head.
1300 # head, refresh the tag cache, then immediately add a new head.
1301 # But I think doing it this way is necessary for the "instant
1301 # But I think doing it this way is necessary for the "instant
1302 # tag cache retrieval" case to work.
1302 # tag cache retrieval" case to work.
1303 self.invalidatecaches()
1303 self.invalidatecaches()
1304
1304
1305 # Discard all cache entries to force reloading everything.
1305 # Discard all cache entries to force reloading everything.
1306 self._filecache.clear()
1306 self._filecache.clear()
1307
1307
1308 def walk(self, match, node=None):
1308 def walk(self, match, node=None):
1309 '''
1309 '''
1310 walk recursively through the directory tree or a given
1310 walk recursively through the directory tree or a given
1311 changeset, finding all files matched by the match
1311 changeset, finding all files matched by the match
1312 function
1312 function
1313 '''
1313 '''
1314 return self[node].walk(match)
1314 return self[node].walk(match)
1315
1315
    def status(self, node1='.', node2=None, match=None,
               ignored=False, clean=False, unknown=False,
               listsubrepos=False):
        """return status of files between two nodes or node and working
        directory.

        If node1 is None, use the first dirstate parent instead.
        If node2 is None, compare node1 with working directory.

        Returns a 7-tuple of sorted lists:
        (modified, added, removed, deleted, unknown, ignored, clean).
        The deleted/unknown/ignored lists are only populated when
        comparing against the working directory (and when the
        corresponding list* flag is set).
        """

        def mfmatches(ctx):
            # manifest of ctx restricted to files accepted by 'match'
            mf = ctx.manifest().copy()
            if match.always():
                return mf
            for fn in mf.keys():
                if not match(fn):
                    del mf[fn]
            return mf

        if isinstance(node1, context.changectx):
            ctx1 = node1
        else:
            ctx1 = self[node1]
        if isinstance(node2, context.changectx):
            ctx2 = node2
        else:
            ctx2 = self[node2]

        # rev() is None marks the working directory context
        working = ctx2.rev() is None
        parentworking = working and ctx1 == self['.']
        match = match or matchmod.always(self.root, self.getcwd())
        listignored, listclean, listunknown = ignored, clean, unknown

        # load earliest manifest first for caching reasons
        if not working and ctx2.rev() < ctx1.rev():
            ctx2.manifest()

        if not parentworking:
            def bad(f, msg):
                # 'f' may be a directory pattern from 'match.files()',
                # so 'f not in ctx1' is not enough
                if f not in ctx1 and f not in ctx1.dirs():
                    self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
            match.bad = bad

        if working: # we need to scan the working dir
            subrepos = []
            if '.hgsub' in self.dirstate:
                subrepos = ctx2.substate.keys()
            s = self.dirstate.status(match, subrepos, listignored,
                                     listclean, listunknown)
            # 'cmp' holds files the dirstate could not decide about
            cmp, modified, added, removed, deleted, unknown, ignored, clean = s

            # check for any possibly clean files
            if parentworking and cmp:
                fixup = []
                # do a full compare of any files that might have changed
                for f in sorted(cmp):
                    if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
                        or ctx1[f].cmp(ctx2[f])):
                        modified.append(f)
                    else:
                        fixup.append(f)

                # update dirstate for files that are actually clean
                if fixup:
                    if listclean:
                        clean += fixup

                    try:
                        # updating the dirstate is optional
                        # so we don't wait on the lock
                        wlock = self.wlock(False)
                        try:
                            for f in fixup:
                                self.dirstate.normal(f)
                        finally:
                            wlock.release()
                    except error.LockError:
                        pass

        if not parentworking:
            mf1 = mfmatches(ctx1)
            if working:
                # we are comparing working dir against non-parent
                # generate a pseudo-manifest for the working dir
                mf2 = mfmatches(self['.'])
                for f in cmp + modified + added:
                    mf2[f] = None
                    mf2.set(f, ctx2.flags(f))
                for f in removed:
                    if f in mf2:
                        del mf2[f]
            else:
                # we are comparing two revisions
                deleted, unknown, ignored = [], [], []
                mf2 = mfmatches(ctx2)

            # rebuild modified/added/clean by diffing the two manifests;
            # whatever remains in mf1 afterwards was removed
            modified, added, clean = [], [], []
            withflags = mf1.withflags() | mf2.withflags()
            for fn in mf2:
                if fn in mf1:
                    if (fn not in deleted and
                        ((fn in withflags and mf1.flags(fn) != mf2.flags(fn)) or
                         (mf1[fn] != mf2[fn] and
                          (mf2[fn] or ctx1[fn].cmp(ctx2[fn]))))):
                        modified.append(fn)
                    elif listclean:
                        clean.append(fn)
                    del mf1[fn]
                elif fn not in deleted:
                    added.append(fn)
            removed = mf1.keys()

        if working and modified and not self.dirstate._checklink:
            # Symlink placeholders may get non-symlink-like contents
            # via user error or dereferencing by NFS or Samba servers,
            # so we filter out any placeholders that don't look like a
            # symlink
            sane = []
            for f in modified:
                if ctx2.flags(f) == 'l':
                    d = ctx2[f].data()
                    if len(d) >= 1024 or '\n' in d or util.binary(d):
                        self.ui.debug('ignoring suspect symlink placeholder'
                                      ' "%s"\n' % f)
                        continue
                sane.append(f)
            modified = sane

        r = modified, added, removed, deleted, unknown, ignored, clean

        if listsubrepos:
            # fold each subrepo's status into the parent lists, with
            # paths prefixed by the subrepo path
            for subpath, sub in subrepo.itersubrepos(ctx1, ctx2):
                if working:
                    rev2 = None
                else:
                    rev2 = ctx2.substate[subpath][1]
                try:
                    submatch = matchmod.narrowmatcher(subpath, match)
                    s = sub.status(rev2, match=submatch, ignored=listignored,
                                   clean=listclean, unknown=listunknown,
                                   listsubrepos=True)
                    for rfiles, sfiles in zip(r, s):
                        rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
                except error.LookupError:
                    self.ui.status(_("skipping missing subrepository: %s\n")
                                   % subpath)

        for l in r:
            l.sort()
        return r
1468
1468
1469 def heads(self, start=None):
1469 def heads(self, start=None):
1470 heads = self.changelog.heads(start)
1470 heads = self.changelog.heads(start)
1471 # sort the output in rev descending order
1471 # sort the output in rev descending order
1472 return sorted(heads, key=self.changelog.rev, reverse=True)
1472 return sorted(heads, key=self.changelog.rev, reverse=True)
1473
1473
1474 def branchheads(self, branch=None, start=None, closed=False):
1474 def branchheads(self, branch=None, start=None, closed=False):
1475 '''return a (possibly filtered) list of heads for the given branch
1475 '''return a (possibly filtered) list of heads for the given branch
1476
1476
1477 Heads are returned in topological order, from newest to oldest.
1477 Heads are returned in topological order, from newest to oldest.
1478 If branch is None, use the dirstate branch.
1478 If branch is None, use the dirstate branch.
1479 If start is not None, return only heads reachable from start.
1479 If start is not None, return only heads reachable from start.
1480 If closed is True, return heads that are marked as closed as well.
1480 If closed is True, return heads that are marked as closed as well.
1481 '''
1481 '''
1482 if branch is None:
1482 if branch is None:
1483 branch = self[None].branch()
1483 branch = self[None].branch()
1484 branches = self.branchmap()
1484 branches = self.branchmap()
1485 if branch not in branches:
1485 if branch not in branches:
1486 return []
1486 return []
1487 # the cache returns heads ordered lowest to highest
1487 # the cache returns heads ordered lowest to highest
1488 bheads = list(reversed(branches[branch]))
1488 bheads = list(reversed(branches[branch]))
1489 if start is not None:
1489 if start is not None:
1490 # filter out the heads that cannot be reached from startrev
1490 # filter out the heads that cannot be reached from startrev
1491 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
1491 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
1492 bheads = [h for h in bheads if h in fbheads]
1492 bheads = [h for h in bheads if h in fbheads]
1493 if not closed:
1493 if not closed:
1494 bheads = [h for h in bheads if
1494 bheads = [h for h in bheads if
1495 ('close' not in self.changelog.read(h)[5])]
1495 ('close' not in self.changelog.read(h)[5])]
1496 return bheads
1496 return bheads
1497
1497
1498 def branches(self, nodes):
1498 def branches(self, nodes):
1499 if not nodes:
1499 if not nodes:
1500 nodes = [self.changelog.tip()]
1500 nodes = [self.changelog.tip()]
1501 b = []
1501 b = []
1502 for n in nodes:
1502 for n in nodes:
1503 t = n
1503 t = n
1504 while True:
1504 while True:
1505 p = self.changelog.parents(n)
1505 p = self.changelog.parents(n)
1506 if p[1] != nullid or p[0] == nullid:
1506 if p[1] != nullid or p[0] == nullid:
1507 b.append((t, n, p[0], p[1]))
1507 b.append((t, n, p[0], p[1]))
1508 break
1508 break
1509 n = p[0]
1509 n = p[0]
1510 return b
1510 return b
1511
1511
1512 def between(self, pairs):
1512 def between(self, pairs):
1513 r = []
1513 r = []
1514
1514
1515 for top, bottom in pairs:
1515 for top, bottom in pairs:
1516 n, l, i = top, [], 0
1516 n, l, i = top, [], 0
1517 f = 1
1517 f = 1
1518
1518
1519 while n != bottom and n != nullid:
1519 while n != bottom and n != nullid:
1520 p = self.changelog.parents(n)[0]
1520 p = self.changelog.parents(n)[0]
1521 if i == f:
1521 if i == f:
1522 l.append(n)
1522 l.append(n)
1523 f = f * 2
1523 f = f * 2
1524 n = p
1524 n = p
1525 i += 1
1525 i += 1
1526
1526
1527 r.append(l)
1527 r.append(l)
1528
1528
1529 return r
1529 return r
1530
1530
def pull(self, remote, heads=None, force=False):
    """Pull changesets and phase information from *remote* into this repo.

    heads: optional list of remote heads to limit the pull to; None
           means pull everything the remote advertises.
    force: forwarded to discovery to allow pulling from unrelated repos.
    Returns the integer result of addchangegroup() (0 when nothing was
    fetched).
    """
    # hold the local lock for the whole operation so changelog and
    # phase updates are applied atomically with respect to other writers
    lock = self.lock()
    try:
        tmp = discovery.findcommonincoming(self, remote, heads=heads,
                                           force=force)
        common, fetch, rheads = tmp
        if not fetch:
            self.ui.status(_("no changes found\n"))
            added = []
            result = 0
        else:
            if heads is None and list(common) == [nullid]:
                self.ui.status(_("requesting all changes\n"))
            elif heads is None and remote.capable('changegroupsubset'):
                # issue1320, avoid a race if remote changed after discovery
                heads = rheads

            # pick the richest transfer method the remote supports
            if remote.capable('getbundle'):
                cg = remote.getbundle('pull', common=common,
                                      heads=heads or rheads)
            elif heads is None:
                cg = remote.changegroup(fetch, 'pull')
            elif not remote.capable('changegroupsubset'):
                raise util.Abort(_("partial pull cannot be done because "
                                   "other repository doesn't support "
                                   "changegroupsubset."))
            else:
                cg = remote.changegroupsubset(fetch, heads, 'pull')
            # record which changelog revisions were added so the phase
            # boundary below can cover exactly the pulled nodes
            clstart = len(self.changelog)
            result = self.addchangegroup(cg, 'pull', remote.url())
            clend = len(self.changelog)
            added = [self.changelog.node(r) for r in xrange(clstart, clend)]

        # compute target subset
        if heads is None:
            # We pulled every thing possible
            # sync on everything common
            subset = common + added
        else:
            # We pulled a specific subset
            # sync on this subset
            subset = heads

        # Get remote phases data from remote
        remotephases = remote.listkeys('phases')
        publishing = bool(remotephases.get('publishing', False))
        if remotephases and not publishing:
            # remote is new and unpublishing
            pheads, _dr = phases.analyzeremotephases(self, subset,
                                                     remotephases)
            phases.advanceboundary(self, phases.public, pheads)
            phases.advanceboundary(self, phases.draft, subset)
        else:
            # Remote is old or publishing all common changesets
            # should be seen as public
            phases.advanceboundary(self, phases.public, subset)
    finally:
        lock.release()

    return result
1591
1591
def checkpush(self, force, revs):
    """Pre-push hook point; the base implementation does nothing.

    Extensions may override this to run additional checks before a
    push (based on *force* and *revs*), and extensions that override
    the push command should call it themselves.
    """
1598
1598
def push(self, remote, force=False, revs=None, newbranch=False):
    '''Push outgoing changesets (limited by revs) from the current
    repository to remote. Return an integer:
      - None means nothing to push
      - 0 means HTTP error
      - 1 means we pushed and remote head count is unchanged *or*
        we have outgoing changesets but refused to push
      - other values as described by addchangegroup()
    '''
    # there are two ways to push to remote repo:
    #
    # addchangegroup assumes local user can lock remote
    # repo (local filesystem, old ssh servers).
    #
    # unbundle assumes local user cannot lock remote repo (new ssh
    # servers, http servers).

    # get local lock as we might write phase data
    locallock = self.lock()
    try:
        # let extensions veto the push before any work is done
        self.checkpush(force, revs)
        lock = None
        unbundle = remote.capable('unbundle')
        if not unbundle:
            # old-style remote: we must take its lock ourselves
            lock = remote.lock()
        try:
            # discovery
            fci = discovery.findcommonincoming
            commoninc = fci(self, remote, force=force)
            common, inc, remoteheads = commoninc
            fco = discovery.findcommonoutgoing
            outgoing = fco(self, remote, onlyheads=revs,
                           commoninc=commoninc, force=force)

            if not outgoing.missing:
                # nothing to push
                scmutil.nochangesfound(self.ui, outgoing.excluded)
                ret = None
            else:
                # something to push
                if not force:
                    # refuse pushes that would create new remote heads
                    discovery.checkheads(self, remote, outgoing,
                                         remoteheads, newbranch,
                                         bool(inc))

                # create a changegroup from local
                if revs is None and not outgoing.excluded:
                    # push everything,
                    # use the fast path, no race possible on push
                    cg = self._changegroup(outgoing.missing, 'push')
                else:
                    cg = self.getlocalbundle('push', outgoing)

                # apply changegroup to remote
                if unbundle:
                    # local repo finds heads on server, finds out what
                    # revs it must push. once revs transferred, if server
                    # finds it has different heads (someone else won
                    # commit/push race), server aborts.
                    if force:
                        remoteheads = ['force']
                    # ssh: return remote's addchangegroup()
                    # http: return remote's addchangegroup() or 0 for error
                    ret = remote.unbundle(cg, remoteheads, 'push')
                else:
                    # we return an integer indicating remote head count
                    # change
                    ret = remote.addchangegroup(cg, 'push', self.url())

            # decide which heads the phase synchronization below covers
            if ret:
                # push succeed, synchonize target of the push
                cheads = outgoing.missingheads
            elif revs is None:
                # All out push fails. synchronize all common
                cheads = outgoing.commonheads
            else:
                # I want cheads = heads(::missingheads and ::commonheads)
                # (missingheads is revs with secret changeset filtered out)
                #
                # This can be expressed as:
                #     cheads = ( (missingheads and ::commonheads)
                #              + (commonheads and ::missingheads))"
                #              )
                #
                # while trying to push we already computed the following:
                #     common = (::commonheads)
                #     missing = ((commonheads::missingheads) - commonheads)
                #
                # We can pick:
                # * missingheads part of comon (::commonheads)
                common = set(outgoing.common)
                cheads = [node for node in revs if node in common]
                # and
                # * commonheads parents on missing
                revset = self.set('%ln and parents(roots(%ln))',
                                  outgoing.commonheads,
                                  outgoing.missing)
                cheads.extend(c.node() for c in revset)
            # even when we don't push, exchanging phase data is useful
            remotephases = remote.listkeys('phases')
            if not remotephases: # old server or public only repo
                phases.advanceboundary(self, phases.public, cheads)
                # don't push any phase data as there is nothing to push
            else:
                ana = phases.analyzeremotephases(self, cheads, remotephases)
                pheads, droots = ana
                ### Apply remote phase on local
                if remotephases.get('publishing', False):
                    phases.advanceboundary(self, phases.public, cheads)
                else: # publish = False
                    phases.advanceboundary(self, phases.public, pheads)
                    phases.advanceboundary(self, phases.draft, cheads)
                ### Apply local phase on remote

                # Get the list of all revs draft on remote by public here.
                # XXX Beware that revset break if droots is not strictly
                # XXX root we may want to ensure it is but it is costly
                outdated = self.set('heads((%ln::%ln) and public())',
                                    droots, cheads)
                for newremotehead in outdated:
                    r = remote.pushkey('phases',
                                       newremotehead.hex(),
                                       str(phases.draft),
                                       str(phases.public))
                    if not r:
                        self.ui.warn(_('updating %s to public failed!\n')
                                     % newremotehead)
        finally:
            if lock is not None:
                lock.release()
    finally:
        locallock.release()

    # after the locks are released, advance any remote bookmarks that
    # fast-forward to our local position (descendant check below)
    self.ui.debug("checking for updated bookmarks\n")
    rb = remote.listkeys('bookmarks')
    for k in rb.keys():
        if k in self._bookmarks:
            nr, nl = rb[k], hex(self._bookmarks[k])
            if nr in self:
                cr = self[nr]
                cl = self[nl]
                if cl in cr.descendants():
                    r = remote.pushkey('bookmarks', k, nr, nl)
                    if r:
                        self.ui.status(_("updating bookmark %s\n") % k)
                    else:
                        self.ui.warn(_('updating bookmark %s'
                                       ' failed!\n') % k)

    return ret
1750
1750
def changegroupinfo(self, nodes, source):
    """Report how many changesets are being bundled (only when verbose
    or bundling to a file) and, when debugging, list their hashes."""
    ui = self.ui
    if not (ui.verbose or source == 'bundle'):
        return
    ui.status(_("%d changesets found\n") % len(nodes))
    if ui.debugflag:
        ui.debug("list of changesets:\n")
        for node in nodes:
            ui.debug("%s\n" % hex(node))
1758
1758
def changegroupsubset(self, bases, heads, source):
    """Compute a changegroup of all nodes that are descendants of any
    of the bases and ancestors of any of the heads.  Return a
    chunkbuffer object whose read() method yields successive
    changegroup chunks.

    Determining which filenodes and manifest nodes must be included
    for the changesets to be complete is non-trivial, as is the
    reverse mapping from filenode/manifestnode back to changeset;
    that work is delegated to _changegroupsubset().
    """
    changelog = self.changelog
    roots = bases or [nullid]
    csets, roots, heads = changelog.nodesbetween(roots, heads)
    # every ancestor of the computed roots is assumed to be known
    baserevs = [changelog.rev(n) for n in roots]
    common = set(changelog.ancestors(*baserevs))
    return self._changegroupsubset(common, csets, heads, source)
1779
1779
def getlocalbundle(self, source, outgoing):
    """Like getbundle, but driven by a precomputed discovery.outgoing.

    Only implemented for local repos; reuses the potentially
    precomputed sets carried by *outgoing*.  Returns None when there
    is nothing to bundle.
    """
    if not outgoing.missing:
        return None
    return self._changegroupsubset(outgoing.common, outgoing.missing,
                                   outgoing.missingheads, source)
1791
1791
def getbundle(self, source, heads=None, common=None):
    """Like changegroupsubset, but bundle the set difference between
    the ancestors of *heads* and the ancestors of *common*.

    heads defaults to the local heads; common defaults to [nullid].
    Nodes in *common* that are unknown locally are silently dropped,
    since the current discovery protocol may report nodes we do not
    actually have.
    """
    cl = self.changelog
    if not common:
        known = [nullid]
    else:
        nodemap = cl.nodemap
        known = [n for n in common if n in nodemap]
    outgoing = discovery.outgoing(cl, known, heads or cl.heads())
    return self.getlocalbundle(source, outgoing)
1811
1811
def _changegroupsubset(self, commonrevs, csets, heads, source):
    """Build a changegroup for changesets *csets*, excluding anything
    whose linkrev falls in *commonrevs*.  Falls back to the fast path
    (_changegroup) when *heads* covers all local heads.  Returns an
    unbundle10 wrapping a lazily-generated chunk stream.
    """
    cl = self.changelog
    mf = self.manifest
    mfs = {} # needed manifests
    fnodes = {} # needed file nodes
    changedfiles = set()
    # mutable cells shared with the lookup() callback below:
    # fstate = [current filename, its node->changeset map]
    fstate = ['', {}]
    # count = [progress position, progress total]
    count = [0, 0]

    # can we go through the fast path ?
    heads.sort()
    if heads == sorted(self.heads()):
        return self._changegroup(csets, source)

    # slow path
    self.hook('preoutgoing', throw=True, source=source)
    self.changegroupinfo(csets, source)

    # filter any nodes that claim to be part of the known set
    def prune(revlog, missing):
        rr, rl = revlog.rev, revlog.linkrev
        return [n for n in missing
                if rl(rr(n)) not in commonrevs]

    progress = self.ui.progress
    _bundling = _('bundling')
    _changesets = _('changesets')
    _manifests = _('manifests')
    _files = _('files')

    # called back by the bundler for every node; besides reporting
    # progress it records which manifests/filenodes are needed and
    # returns the changeset node each entry links to
    def lookup(revlog, x):
        if revlog == cl:
            c = cl.read(x)
            changedfiles.update(c[3])
            mfs.setdefault(c[0], x)
            count[0] += 1
            progress(_bundling, count[0],
                     unit=_changesets, total=count[1])
            return x
        elif revlog == mf:
            clnode = mfs[x]
            mdata = mf.readfast(x)
            for f, n in mdata.iteritems():
                if f in changedfiles:
                    fnodes[f].setdefault(n, clnode)
            count[0] += 1
            progress(_bundling, count[0],
                     unit=_manifests, total=count[1])
            return clnode
        else:
            progress(_bundling, count[0], item=fstate[0],
                     unit=_files, total=count[1])
            return fstate[1][x]

    bundler = changegroup.bundle10(lookup)
    reorder = self.ui.config('bundle', 'reorder', 'auto')
    if reorder == 'auto':
        reorder = None
    else:
        reorder = util.parsebool(reorder)

    def gengroup():
        # Create a changenode group generator that will call our functions
        # back to lookup the owning changenode and collect information.
        count[:] = [0, len(csets)]
        for chunk in cl.group(csets, bundler, reorder=reorder):
            yield chunk
        progress(_bundling, None)

        # Create a generator for the manifestnodes that calls our lookup
        # and data collection functions back.
        for f in changedfiles:
            fnodes[f] = {}
        count[:] = [0, len(mfs)]
        for chunk in mf.group(prune(mf, mfs), bundler, reorder=reorder):
            yield chunk
        progress(_bundling, None)

        mfs.clear()

        # Go through all our files in order sorted by name.
        count[:] = [0, len(changedfiles)]
        for fname in sorted(changedfiles):
            filerevlog = self.file(fname)
            if not len(filerevlog):
                raise util.Abort(_("empty or missing revlog for %s")
                                 % fname)
            fstate[0] = fname
            fstate[1] = fnodes.pop(fname, {})

            nodelist = prune(filerevlog, fstate[1])
            if nodelist:
                count[0] += 1
                yield bundler.fileheader(fname)
                for chunk in filerevlog.group(nodelist, bundler, reorder):
                    yield chunk

        # Signal that no more groups are left.
        yield bundler.close()
        progress(_bundling, None)

        if csets:
            self.hook('outgoing', node=hex(csets[0]), source=source)

    return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN')
1918
1918
def changegroup(self, basenodes, source):
    """Bundle everything between *basenodes* and the current heads.

    Delegates to changegroupsubset() so that a concurrent push cannot
    race us (issue1320).
    """
    heads = self.heads()
    return self.changegroupsubset(basenodes, heads, source)
1922
1922
def _changegroup(self, nodes, source):
    """Compute the changegroup of all nodes that we have that a recipient
    doesn't. Return a chunkbuffer object whose read() method will return
    successive changegroup chunks.

    This is much easier than the previous function as we can assume that
    the recipient has any changenode we aren't sending them.

    nodes is the set of nodes to send"""

    cl = self.changelog
    mf = self.manifest
    mfs = {}   # manifest nodes referenced by the changesets
    changedfiles = set()
    # mutable cells shared with the lookup() callback below:
    # fstate = [current filename], count = [progress pos, total]
    fstate = ['']
    count = [0, 0]

    self.hook('preoutgoing', throw=True, source=source)
    self.changegroupinfo(nodes, source)

    revset = set([cl.rev(n) for n in nodes])

    # all nodes of a revlog whose linkrev is one of the sent changesets
    def gennodelst(log):
        ln, llr = log.node, log.linkrev
        return [ln(r) for r in log if llr(r) in revset]

    progress = self.ui.progress
    _bundling = _('bundling')
    _changesets = _('changesets')
    _manifests = _('manifests')
    _files = _('files')

    # called back by the bundler for every node; reports progress and
    # returns the changeset node each entry links to
    def lookup(revlog, x):
        if revlog == cl:
            c = cl.read(x)
            changedfiles.update(c[3])
            mfs.setdefault(c[0], x)
            count[0] += 1
            progress(_bundling, count[0],
                     unit=_changesets, total=count[1])
            return x
        elif revlog == mf:
            count[0] += 1
            progress(_bundling, count[0],
                     unit=_manifests, total=count[1])
            return cl.node(revlog.linkrev(revlog.rev(x)))
        else:
            progress(_bundling, count[0], item=fstate[0],
                     total=count[1], unit=_files)
            return cl.node(revlog.linkrev(revlog.rev(x)))

    bundler = changegroup.bundle10(lookup)
    reorder = self.ui.config('bundle', 'reorder', 'auto')
    if reorder == 'auto':
        reorder = None
    else:
        reorder = util.parsebool(reorder)

    def gengroup():
        '''yield a sequence of changegroup chunks (strings)'''
        # construct a list of all changed files

        count[:] = [0, len(nodes)]
        for chunk in cl.group(nodes, bundler, reorder=reorder):
            yield chunk
        progress(_bundling, None)

        count[:] = [0, len(mfs)]
        for chunk in mf.group(gennodelst(mf), bundler, reorder=reorder):
            yield chunk
        progress(_bundling, None)

        count[:] = [0, len(changedfiles)]
        for fname in sorted(changedfiles):
            filerevlog = self.file(fname)
            if not len(filerevlog):
                raise util.Abort(_("empty or missing revlog for %s")
                                 % fname)
            fstate[0] = fname
            nodelist = gennodelst(filerevlog)
            if nodelist:
                count[0] += 1
                yield bundler.fileheader(fname)
                for chunk in filerevlog.group(nodelist, bundler, reorder):
                    yield chunk
        yield bundler.close()
        progress(_bundling, None)

    if nodes:
        self.hook('outgoing', node=hex(nodes[0]), source=source)

    return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN')
2015
2015
2016 def addchangegroup(self, source, srctype, url, emptyok=False):
2016 def addchangegroup(self, source, srctype, url, emptyok=False):
2017 """Add the changegroup returned by source.read() to this repo.
2017 """Add the changegroup returned by source.read() to this repo.
2018 srctype is a string like 'push', 'pull', or 'unbundle'. url is
2018 srctype is a string like 'push', 'pull', or 'unbundle'. url is
2019 the URL of the repo where this changegroup is coming from.
2019 the URL of the repo where this changegroup is coming from.
2020
2020
2021 Return an integer summarizing the change to this repo:
2021 Return an integer summarizing the change to this repo:
2022 - nothing changed or no source: 0
2022 - nothing changed or no source: 0
2023 - more heads than before: 1+added heads (2..n)
2023 - more heads than before: 1+added heads (2..n)
2024 - fewer heads than before: -1-removed heads (-2..-n)
2024 - fewer heads than before: -1-removed heads (-2..-n)
2025 - number of heads stays the same: 1
2025 - number of heads stays the same: 1
2026 """
2026 """
2027 def csmap(x):
2027 def csmap(x):
2028 self.ui.debug("add changeset %s\n" % short(x))
2028 self.ui.debug("add changeset %s\n" % short(x))
2029 return len(cl)
2029 return len(cl)
2030
2030
2031 def revmap(x):
2031 def revmap(x):
2032 return cl.rev(x)
2032 return cl.rev(x)
2033
2033
2034 if not source:
2034 if not source:
2035 return 0
2035 return 0
2036
2036
2037 self.hook('prechangegroup', throw=True, source=srctype, url=url)
2037 self.hook('prechangegroup', throw=True, source=srctype, url=url)
2038
2038
2039 changesets = files = revisions = 0
2039 changesets = files = revisions = 0
2040 efiles = set()
2040 efiles = set()
2041
2041
2042 # write changelog data to temp files so concurrent readers will not see
2042 # write changelog data to temp files so concurrent readers will not see
2043 # inconsistent view
2043 # inconsistent view
2044 cl = self.changelog
2044 cl = self.changelog
2045 cl.delayupdate()
2045 cl.delayupdate()
2046 oldheads = cl.heads()
2046 oldheads = cl.heads()
2047
2047
2048 tr = self.transaction("\n".join([srctype, util.hidepassword(url)]))
2048 tr = self.transaction("\n".join([srctype, util.hidepassword(url)]))
2049 try:
2049 try:
2050 trp = weakref.proxy(tr)
2050 trp = weakref.proxy(tr)
2051 # pull off the changeset group
2051 # pull off the changeset group
2052 self.ui.status(_("adding changesets\n"))
2052 self.ui.status(_("adding changesets\n"))
2053 clstart = len(cl)
2053 clstart = len(cl)
2054 class prog(object):
2054 class prog(object):
2055 step = _('changesets')
2055 step = _('changesets')
2056 count = 1
2056 count = 1
2057 ui = self.ui
2057 ui = self.ui
2058 total = None
2058 total = None
2059 def __call__(self):
2059 def __call__(self):
2060 self.ui.progress(self.step, self.count, unit=_('chunks'),
2060 self.ui.progress(self.step, self.count, unit=_('chunks'),
2061 total=self.total)
2061 total=self.total)
2062 self.count += 1
2062 self.count += 1
2063 pr = prog()
2063 pr = prog()
2064 source.callback = pr
2064 source.callback = pr
2065
2065
2066 source.changelogheader()
2066 source.changelogheader()
2067 srccontent = cl.addgroup(source, csmap, trp)
2067 srccontent = cl.addgroup(source, csmap, trp)
2068 if not (srccontent or emptyok):
2068 if not (srccontent or emptyok):
2069 raise util.Abort(_("received changelog group is empty"))
2069 raise util.Abort(_("received changelog group is empty"))
2070 clend = len(cl)
2070 clend = len(cl)
2071 changesets = clend - clstart
2071 changesets = clend - clstart
2072 for c in xrange(clstart, clend):
2072 for c in xrange(clstart, clend):
2073 efiles.update(self[c].files())
2073 efiles.update(self[c].files())
2074 efiles = len(efiles)
2074 efiles = len(efiles)
2075 self.ui.progress(_('changesets'), None)
2075 self.ui.progress(_('changesets'), None)
2076
2076
2077 # pull off the manifest group
2077 # pull off the manifest group
2078 self.ui.status(_("adding manifests\n"))
2078 self.ui.status(_("adding manifests\n"))
2079 pr.step = _('manifests')
2079 pr.step = _('manifests')
2080 pr.count = 1
2080 pr.count = 1
2081 pr.total = changesets # manifests <= changesets
2081 pr.total = changesets # manifests <= changesets
2082 # no need to check for empty manifest group here:
2082 # no need to check for empty manifest group here:
2083 # if the result of the merge of 1 and 2 is the same in 3 and 4,
2083 # if the result of the merge of 1 and 2 is the same in 3 and 4,
2084 # no new manifest will be created and the manifest group will
2084 # no new manifest will be created and the manifest group will
2085 # be empty during the pull
2085 # be empty during the pull
2086 source.manifestheader()
2086 source.manifestheader()
2087 self.manifest.addgroup(source, revmap, trp)
2087 self.manifest.addgroup(source, revmap, trp)
2088 self.ui.progress(_('manifests'), None)
2088 self.ui.progress(_('manifests'), None)
2089
2089
2090 needfiles = {}
2090 needfiles = {}
2091 if self.ui.configbool('server', 'validate', default=False):
2091 if self.ui.configbool('server', 'validate', default=False):
2092 # validate incoming csets have their manifests
2092 # validate incoming csets have their manifests
2093 for cset in xrange(clstart, clend):
2093 for cset in xrange(clstart, clend):
2094 mfest = self.changelog.read(self.changelog.node(cset))[0]
2094 mfest = self.changelog.read(self.changelog.node(cset))[0]
2095 mfest = self.manifest.readdelta(mfest)
2095 mfest = self.manifest.readdelta(mfest)
2096 # store file nodes we must see
2096 # store file nodes we must see
2097 for f, n in mfest.iteritems():
2097 for f, n in mfest.iteritems():
2098 needfiles.setdefault(f, set()).add(n)
2098 needfiles.setdefault(f, set()).add(n)
2099
2099
2100 # process the files
2100 # process the files
2101 self.ui.status(_("adding file changes\n"))
2101 self.ui.status(_("adding file changes\n"))
2102 pr.step = _('files')
2102 pr.step = _('files')
2103 pr.count = 1
2103 pr.count = 1
2104 pr.total = efiles
2104 pr.total = efiles
2105 source.callback = None
2105 source.callback = None
2106
2106
2107 while True:
2107 while True:
2108 chunkdata = source.filelogheader()
2108 chunkdata = source.filelogheader()
2109 if not chunkdata:
2109 if not chunkdata:
2110 break
2110 break
2111 f = chunkdata["filename"]
2111 f = chunkdata["filename"]
2112 self.ui.debug("adding %s revisions\n" % f)
2112 self.ui.debug("adding %s revisions\n" % f)
2113 pr()
2113 pr()
2114 fl = self.file(f)
2114 fl = self.file(f)
2115 o = len(fl)
2115 o = len(fl)
2116 if not fl.addgroup(source, revmap, trp):
2116 if not fl.addgroup(source, revmap, trp):
2117 raise util.Abort(_("received file revlog group is empty"))
2117 raise util.Abort(_("received file revlog group is empty"))
2118 revisions += len(fl) - o
2118 revisions += len(fl) - o
2119 files += 1
2119 files += 1
2120 if f in needfiles:
2120 if f in needfiles:
2121 needs = needfiles[f]
2121 needs = needfiles[f]
2122 for new in xrange(o, len(fl)):
2122 for new in xrange(o, len(fl)):
2123 n = fl.node(new)
2123 n = fl.node(new)
2124 if n in needs:
2124 if n in needs:
2125 needs.remove(n)
2125 needs.remove(n)
2126 if not needs:
2126 if not needs:
2127 del needfiles[f]
2127 del needfiles[f]
2128 self.ui.progress(_('files'), None)
2128 self.ui.progress(_('files'), None)
2129
2129
2130 for f, needs in needfiles.iteritems():
2130 for f, needs in needfiles.iteritems():
2131 fl = self.file(f)
2131 fl = self.file(f)
2132 for n in needs:
2132 for n in needs:
2133 try:
2133 try:
2134 fl.rev(n)
2134 fl.rev(n)
2135 except error.LookupError:
2135 except error.LookupError:
2136 raise util.Abort(
2136 raise util.Abort(
2137 _('missing file data for %s:%s - run hg verify') %
2137 _('missing file data for %s:%s - run hg verify') %
2138 (f, hex(n)))
2138 (f, hex(n)))
2139
2139
2140 dh = 0
2140 dh = 0
2141 if oldheads:
2141 if oldheads:
2142 heads = cl.heads()
2142 heads = cl.heads()
2143 dh = len(heads) - len(oldheads)
2143 dh = len(heads) - len(oldheads)
2144 for h in heads:
2144 for h in heads:
2145 if h not in oldheads and 'close' in self[h].extra():
2145 if h not in oldheads and 'close' in self[h].extra():
2146 dh -= 1
2146 dh -= 1
2147 htext = ""
2147 htext = ""
2148 if dh:
2148 if dh:
2149 htext = _(" (%+d heads)") % dh
2149 htext = _(" (%+d heads)") % dh
2150
2150
2151 self.ui.status(_("added %d changesets"
2151 self.ui.status(_("added %d changesets"
2152 " with %d changes to %d files%s\n")
2152 " with %d changes to %d files%s\n")
2153 % (changesets, revisions, files, htext))
2153 % (changesets, revisions, files, htext))
2154
2154
2155 if changesets > 0:
2155 if changesets > 0:
2156 p = lambda: cl.writepending() and self.root or ""
2156 p = lambda: cl.writepending() and self.root or ""
2157 self.hook('pretxnchangegroup', throw=True,
2157 self.hook('pretxnchangegroup', throw=True,
2158 node=hex(cl.node(clstart)), source=srctype,
2158 node=hex(cl.node(clstart)), source=srctype,
2159 url=url, pending=p)
2159 url=url, pending=p)
2160
2160
2161 added = [cl.node(r) for r in xrange(clstart, clend)]
2161 added = [cl.node(r) for r in xrange(clstart, clend)]
2162 publishing = self.ui.configbool('phases', 'publish', True)
2162 publishing = self.ui.configbool('phases', 'publish', True)
2163 if srctype == 'push':
2163 if srctype == 'push':
2164 # Old server can not push the boundary themself.
2164 # Old server can not push the boundary themself.
2165 # New server won't push the boundary if changeset already
2165 # New server won't push the boundary if changeset already
2166 # existed locally as secrete
2166 # existed locally as secrete
2167 #
2167 #
2168 # We should not use added here but the list of all change in
2168 # We should not use added here but the list of all change in
2169 # the bundle
2169 # the bundle
2170 if publishing:
2170 if publishing:
2171 phases.advanceboundary(self, phases.public, srccontent)
2171 phases.advanceboundary(self, phases.public, srccontent)
2172 else:
2172 else:
2173 phases.advanceboundary(self, phases.draft, srccontent)
2173 phases.advanceboundary(self, phases.draft, srccontent)
2174 phases.retractboundary(self, phases.draft, added)
2174 phases.retractboundary(self, phases.draft, added)
2175 elif srctype != 'strip':
2175 elif srctype != 'strip':
2176 # publishing only alter behavior during push
2176 # publishing only alter behavior during push
2177 #
2177 #
2178 # strip should not touch boundary at all
2178 # strip should not touch boundary at all
2179 phases.retractboundary(self, phases.draft, added)
2179 phases.retractboundary(self, phases.draft, added)
2180
2180
2181 # make changelog see real files again
2181 # make changelog see real files again
2182 cl.finalize(trp)
2182 cl.finalize(trp)
2183
2183
2184 tr.close()
2184 tr.close()
2185
2185
2186 if changesets > 0:
2186 if changesets > 0:
2187 def runhooks():
2187 def runhooks():
2188 # forcefully update the on-disk branch cache
2188 # forcefully update the on-disk branch cache
2189 self.ui.debug("updating the branch cache\n")
2189 self.ui.debug("updating the branch cache\n")
2190 self.updatebranchcache()
2190 self.updatebranchcache()
2191 self.hook("changegroup", node=hex(cl.node(clstart)),
2191 self.hook("changegroup", node=hex(cl.node(clstart)),
2192 source=srctype, url=url)
2192 source=srctype, url=url)
2193
2193
2194 for n in added:
2194 for n in added:
2195 self.hook("incoming", node=hex(n), source=srctype,
2195 self.hook("incoming", node=hex(n), source=srctype,
2196 url=url)
2196 url=url)
2197 self._afterlock(runhooks)
2197 self._afterlock(runhooks)
2198
2198
2199 finally:
2199 finally:
2200 tr.release()
2200 tr.release()
2201 # never return 0 here:
2201 # never return 0 here:
2202 if dh < 0:
2202 if dh < 0:
2203 return dh - 1
2203 return dh - 1
2204 else:
2204 else:
2205 return dh + 1
2205 return dh + 1
2206
2206
2207 def stream_in(self, remote, requirements):
2207 def stream_in(self, remote, requirements):
2208 lock = self.lock()
2208 lock = self.lock()
2209 try:
2209 try:
2210 fp = remote.stream_out()
2210 fp = remote.stream_out()
2211 l = fp.readline()
2211 l = fp.readline()
2212 try:
2212 try:
2213 resp = int(l)
2213 resp = int(l)
2214 except ValueError:
2214 except ValueError:
2215 raise error.ResponseError(
2215 raise error.ResponseError(
2216 _('Unexpected response from remote server:'), l)
2216 _('Unexpected response from remote server:'), l)
2217 if resp == 1:
2217 if resp == 1:
2218 raise util.Abort(_('operation forbidden by server'))
2218 raise util.Abort(_('operation forbidden by server'))
2219 elif resp == 2:
2219 elif resp == 2:
2220 raise util.Abort(_('locking the remote repository failed'))
2220 raise util.Abort(_('locking the remote repository failed'))
2221 elif resp != 0:
2221 elif resp != 0:
2222 raise util.Abort(_('the server sent an unknown error code'))
2222 raise util.Abort(_('the server sent an unknown error code'))
2223 self.ui.status(_('streaming all changes\n'))
2223 self.ui.status(_('streaming all changes\n'))
2224 l = fp.readline()
2224 l = fp.readline()
2225 try:
2225 try:
2226 total_files, total_bytes = map(int, l.split(' ', 1))
2226 total_files, total_bytes = map(int, l.split(' ', 1))
2227 except (ValueError, TypeError):
2227 except (ValueError, TypeError):
2228 raise error.ResponseError(
2228 raise error.ResponseError(
2229 _('Unexpected response from remote server:'), l)
2229 _('Unexpected response from remote server:'), l)
2230 self.ui.status(_('%d files to transfer, %s of data\n') %
2230 self.ui.status(_('%d files to transfer, %s of data\n') %
2231 (total_files, util.bytecount(total_bytes)))
2231 (total_files, util.bytecount(total_bytes)))
2232 start = time.time()
2232 start = time.time()
2233 for i in xrange(total_files):
2233 for i in xrange(total_files):
2234 # XXX doesn't support '\n' or '\r' in filenames
2234 # XXX doesn't support '\n' or '\r' in filenames
2235 l = fp.readline()
2235 l = fp.readline()
2236 try:
2236 try:
2237 name, size = l.split('\0', 1)
2237 name, size = l.split('\0', 1)
2238 size = int(size)
2238 size = int(size)
2239 except (ValueError, TypeError):
2239 except (ValueError, TypeError):
2240 raise error.ResponseError(
2240 raise error.ResponseError(
2241 _('Unexpected response from remote server:'), l)
2241 _('Unexpected response from remote server:'), l)
2242 if self.ui.debugflag:
2242 if self.ui.debugflag:
2243 self.ui.debug('adding %s (%s)\n' %
2243 self.ui.debug('adding %s (%s)\n' %
2244 (name, util.bytecount(size)))
2244 (name, util.bytecount(size)))
2245 # for backwards compat, name was partially encoded
2245 # for backwards compat, name was partially encoded
2246 ofp = self.sopener(store.decodedir(name), 'w')
2246 ofp = self.sopener(store.decodedir(name), 'w')
2247 for chunk in util.filechunkiter(fp, limit=size):
2247 for chunk in util.filechunkiter(fp, limit=size):
2248 ofp.write(chunk)
2248 ofp.write(chunk)
2249 ofp.close()
2249 ofp.close()
2250 elapsed = time.time() - start
2250 elapsed = time.time() - start
2251 if elapsed <= 0:
2251 if elapsed <= 0:
2252 elapsed = 0.001
2252 elapsed = 0.001
2253 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2253 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2254 (util.bytecount(total_bytes), elapsed,
2254 (util.bytecount(total_bytes), elapsed,
2255 util.bytecount(total_bytes / elapsed)))
2255 util.bytecount(total_bytes / elapsed)))
2256
2256
2257 # new requirements = old non-format requirements +
2257 # new requirements = old non-format requirements +
2258 # new format-related
2258 # new format-related
2259 # requirements from the streamed-in repository
2259 # requirements from the streamed-in repository
2260 requirements.update(set(self.requirements) - self.supportedformats)
2260 requirements.update(set(self.requirements) - self.supportedformats)
2261 self._applyrequirements(requirements)
2261 self._applyrequirements(requirements)
2262 self._writerequirements()
2262 self._writerequirements()
2263
2263
2264 self.invalidate()
2264 self.invalidate()
2265 return len(self.heads()) + 1
2265 return len(self.heads()) + 1
2266 finally:
2266 finally:
2267 lock.release()
2267 lock.release()
2268
2268
2269 def clone(self, remote, heads=[], stream=False):
2269 def clone(self, remote, heads=[], stream=False):
2270 '''clone remote repository.
2270 '''clone remote repository.
2271
2271
2272 keyword arguments:
2272 keyword arguments:
2273 heads: list of revs to clone (forces use of pull)
2273 heads: list of revs to clone (forces use of pull)
2274 stream: use streaming clone if possible'''
2274 stream: use streaming clone if possible'''
2275
2275
2276 # now, all clients that can request uncompressed clones can
2276 # now, all clients that can request uncompressed clones can
2277 # read repo formats supported by all servers that can serve
2277 # read repo formats supported by all servers that can serve
2278 # them.
2278 # them.
2279
2279
2280 # if revlog format changes, client will have to check version
2280 # if revlog format changes, client will have to check version
2281 # and format flags on "stream" capability, and use
2281 # and format flags on "stream" capability, and use
2282 # uncompressed only if compatible.
2282 # uncompressed only if compatible.
2283
2283
2284 if not stream:
2284 if not stream:
2285 # if the server explicitely prefer to stream (for fast LANs)
2285 # if the server explicitely prefer to stream (for fast LANs)
2286 stream = remote.capable('stream-preferred')
2286 stream = remote.capable('stream-preferred')
2287
2287
2288 if stream and not heads:
2288 if stream and not heads:
2289 # 'stream' means remote revlog format is revlogv1 only
2289 # 'stream' means remote revlog format is revlogv1 only
2290 if remote.capable('stream'):
2290 if remote.capable('stream'):
2291 return self.stream_in(remote, set(('revlogv1',)))
2291 return self.stream_in(remote, set(('revlogv1',)))
2292 # otherwise, 'streamreqs' contains the remote revlog format
2292 # otherwise, 'streamreqs' contains the remote revlog format
2293 streamreqs = remote.capable('streamreqs')
2293 streamreqs = remote.capable('streamreqs')
2294 if streamreqs:
2294 if streamreqs:
2295 streamreqs = set(streamreqs.split(','))
2295 streamreqs = set(streamreqs.split(','))
2296 # if we support it, stream in and adjust our requirements
2296 # if we support it, stream in and adjust our requirements
2297 if not streamreqs - self.supportedformats:
2297 if not streamreqs - self.supportedformats:
2298 return self.stream_in(remote, streamreqs)
2298 return self.stream_in(remote, streamreqs)
2299 return self.pull(remote, heads)
2299 return self.pull(remote, heads)
2300
2300
2301 def pushkey(self, namespace, key, old, new):
2301 def pushkey(self, namespace, key, old, new):
2302 self.hook('prepushkey', throw=True, namespace=namespace, key=key,
2302 self.hook('prepushkey', throw=True, namespace=namespace, key=key,
2303 old=old, new=new)
2303 old=old, new=new)
2304 ret = pushkey.push(self, namespace, key, old, new)
2304 ret = pushkey.push(self, namespace, key, old, new)
2305 self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
2305 self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
2306 ret=ret)
2306 ret=ret)
2307 return ret
2307 return ret
2308
2308
2309 def listkeys(self, namespace):
2309 def listkeys(self, namespace):
2310 self.hook('prelistkeys', throw=True, namespace=namespace)
2310 self.hook('prelistkeys', throw=True, namespace=namespace)
2311 values = pushkey.list(self, namespace)
2311 values = pushkey.list(self, namespace)
2312 self.hook('listkeys', namespace=namespace, values=values)
2312 self.hook('listkeys', namespace=namespace, values=values)
2313 return values
2313 return values
2314
2314
2315 def debugwireargs(self, one, two, three=None, four=None, five=None):
2315 def debugwireargs(self, one, two, three=None, four=None, five=None):
2316 '''used to test argument passing over the wire'''
2316 '''used to test argument passing over the wire'''
2317 return "%s %s %s %s %s" % (one, two, three, four, five)
2317 return "%s %s %s %s %s" % (one, two, three, four, five)
2318
2318
2319 def savecommitmessage(self, text):
2319 def savecommitmessage(self, text):
2320 fp = self.opener('last-message.txt', 'wb')
2320 fp = self.opener('last-message.txt', 'wb')
2321 try:
2321 try:
2322 fp.write(text)
2322 fp.write(text)
2323 finally:
2323 finally:
2324 fp.close()
2324 fp.close()
2325 return self.pathto(fp.name[len(self.root)+1:])
2325 return self.pathto(fp.name[len(self.root)+1:])
2326
2326
2327 # used to avoid circular references so destructors work
2327 # used to avoid circular references so destructors work
2328 def aftertrans(files):
2328 def aftertrans(files):
2329 renamefiles = [tuple(t) for t in files]
2329 renamefiles = [tuple(t) for t in files]
2330 def a():
2330 def a():
2331 for src, dest in renamefiles:
2331 for src, dest in renamefiles:
2332 try:
2332 try:
2333 util.rename(src, dest)
2333 util.rename(src, dest)
2334 except OSError: # journal file does not yet exist
2334 except OSError: # journal file does not yet exist
2335 pass
2335 pass
2336 return a
2336 return a
2337
2337
2338 def undoname(fn):
2338 def undoname(fn):
2339 base, name = os.path.split(fn)
2339 base, name = os.path.split(fn)
2340 assert name.startswith('journal')
2340 assert name.startswith('journal')
2341 return os.path.join(base, name.replace('journal', 'undo', 1))
2341 return os.path.join(base, name.replace('journal', 'undo', 1))
2342
2342
2343 def instance(ui, path, create):
2343 def instance(ui, path, create):
2344 return localrepository(ui, util.urllocalpath(path), create)
2344 return localrepository(ui, util.urllocalpath(path), create)
2345
2345
2346 def islocal(path):
2346 def islocal(path):
2347 return True
2347 return True
@@ -1,1890 +1,1890 b''
1 # patch.py - patch file parsing routines
1 # patch.py - patch file parsing routines
2 #
2 #
3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
4 # Copyright 2007 Chris Mason <chris.mason@oracle.com>
4 # Copyright 2007 Chris Mason <chris.mason@oracle.com>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 import cStringIO, email.Parser, os, errno, re
9 import cStringIO, email.Parser, os, errno, re
10 import tempfile, zlib, shutil
10 import tempfile, zlib, shutil
11
11
12 from i18n import _
12 from i18n import _
13 from node import hex, nullid, short
13 from node import hex, nullid, short
14 import base85, mdiff, scmutil, util, diffhelpers, copies, encoding, error
14 import base85, mdiff, scmutil, util, diffhelpers, copies, encoding, error
15 import context
15 import context
16
16
17 gitre = re.compile('diff --git a/(.*) b/(.*)')
17 gitre = re.compile('diff --git a/(.*) b/(.*)')
18
18
19 class PatchError(Exception):
19 class PatchError(Exception):
20 pass
20 pass
21
21
22
22
23 # public functions
23 # public functions
24
24
25 def split(stream):
25 def split(stream):
26 '''return an iterator of individual patches from a stream'''
26 '''return an iterator of individual patches from a stream'''
27 def isheader(line, inheader):
27 def isheader(line, inheader):
28 if inheader and line[0] in (' ', '\t'):
28 if inheader and line[0] in (' ', '\t'):
29 # continuation
29 # continuation
30 return True
30 return True
31 if line[0] in (' ', '-', '+'):
31 if line[0] in (' ', '-', '+'):
32 # diff line - don't check for header pattern in there
32 # diff line - don't check for header pattern in there
33 return False
33 return False
34 l = line.split(': ', 1)
34 l = line.split(': ', 1)
35 return len(l) == 2 and ' ' not in l[0]
35 return len(l) == 2 and ' ' not in l[0]
36
36
37 def chunk(lines):
37 def chunk(lines):
38 return cStringIO.StringIO(''.join(lines))
38 return cStringIO.StringIO(''.join(lines))
39
39
40 def hgsplit(stream, cur):
40 def hgsplit(stream, cur):
41 inheader = True
41 inheader = True
42
42
43 for line in stream:
43 for line in stream:
44 if not line.strip():
44 if not line.strip():
45 inheader = False
45 inheader = False
46 if not inheader and line.startswith('# HG changeset patch'):
46 if not inheader and line.startswith('# HG changeset patch'):
47 yield chunk(cur)
47 yield chunk(cur)
48 cur = []
48 cur = []
49 inheader = True
49 inheader = True
50
50
51 cur.append(line)
51 cur.append(line)
52
52
53 if cur:
53 if cur:
54 yield chunk(cur)
54 yield chunk(cur)
55
55
56 def mboxsplit(stream, cur):
56 def mboxsplit(stream, cur):
57 for line in stream:
57 for line in stream:
58 if line.startswith('From '):
58 if line.startswith('From '):
59 for c in split(chunk(cur[1:])):
59 for c in split(chunk(cur[1:])):
60 yield c
60 yield c
61 cur = []
61 cur = []
62
62
63 cur.append(line)
63 cur.append(line)
64
64
65 if cur:
65 if cur:
66 for c in split(chunk(cur[1:])):
66 for c in split(chunk(cur[1:])):
67 yield c
67 yield c
68
68
69 def mimesplit(stream, cur):
69 def mimesplit(stream, cur):
70 def msgfp(m):
70 def msgfp(m):
71 fp = cStringIO.StringIO()
71 fp = cStringIO.StringIO()
72 g = email.Generator.Generator(fp, mangle_from_=False)
72 g = email.Generator.Generator(fp, mangle_from_=False)
73 g.flatten(m)
73 g.flatten(m)
74 fp.seek(0)
74 fp.seek(0)
75 return fp
75 return fp
76
76
77 for line in stream:
77 for line in stream:
78 cur.append(line)
78 cur.append(line)
79 c = chunk(cur)
79 c = chunk(cur)
80
80
81 m = email.Parser.Parser().parse(c)
81 m = email.Parser.Parser().parse(c)
82 if not m.is_multipart():
82 if not m.is_multipart():
83 yield msgfp(m)
83 yield msgfp(m)
84 else:
84 else:
85 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
85 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
86 for part in m.walk():
86 for part in m.walk():
87 ct = part.get_content_type()
87 ct = part.get_content_type()
88 if ct not in ok_types:
88 if ct not in ok_types:
89 continue
89 continue
90 yield msgfp(part)
90 yield msgfp(part)
91
91
92 def headersplit(stream, cur):
92 def headersplit(stream, cur):
93 inheader = False
93 inheader = False
94
94
95 for line in stream:
95 for line in stream:
96 if not inheader and isheader(line, inheader):
96 if not inheader and isheader(line, inheader):
97 yield chunk(cur)
97 yield chunk(cur)
98 cur = []
98 cur = []
99 inheader = True
99 inheader = True
100 if inheader and not isheader(line, inheader):
100 if inheader and not isheader(line, inheader):
101 inheader = False
101 inheader = False
102
102
103 cur.append(line)
103 cur.append(line)
104
104
105 if cur:
105 if cur:
106 yield chunk(cur)
106 yield chunk(cur)
107
107
108 def remainder(cur):
108 def remainder(cur):
109 yield chunk(cur)
109 yield chunk(cur)
110
110
111 class fiter(object):
111 class fiter(object):
112 def __init__(self, fp):
112 def __init__(self, fp):
113 self.fp = fp
113 self.fp = fp
114
114
115 def __iter__(self):
115 def __iter__(self):
116 return self
116 return self
117
117
118 def next(self):
118 def next(self):
119 l = self.fp.readline()
119 l = self.fp.readline()
120 if not l:
120 if not l:
121 raise StopIteration
121 raise StopIteration
122 return l
122 return l
123
123
124 inheader = False
124 inheader = False
125 cur = []
125 cur = []
126
126
127 mimeheaders = ['content-type']
127 mimeheaders = ['content-type']
128
128
129 if not util.safehasattr(stream, 'next'):
129 if not util.safehasattr(stream, 'next'):
130 # http responses, for example, have readline but not next
130 # http responses, for example, have readline but not next
131 stream = fiter(stream)
131 stream = fiter(stream)
132
132
133 for line in stream:
133 for line in stream:
134 cur.append(line)
134 cur.append(line)
135 if line.startswith('# HG changeset patch'):
135 if line.startswith('# HG changeset patch'):
136 return hgsplit(stream, cur)
136 return hgsplit(stream, cur)
137 elif line.startswith('From '):
137 elif line.startswith('From '):
138 return mboxsplit(stream, cur)
138 return mboxsplit(stream, cur)
139 elif isheader(line, inheader):
139 elif isheader(line, inheader):
140 inheader = True
140 inheader = True
141 if line.split(':', 1)[0].lower() in mimeheaders:
141 if line.split(':', 1)[0].lower() in mimeheaders:
142 # let email parser handle this
142 # let email parser handle this
143 return mimesplit(stream, cur)
143 return mimesplit(stream, cur)
144 elif line.startswith('--- ') and inheader:
144 elif line.startswith('--- ') and inheader:
145 # No evil headers seen by diff start, split by hand
145 # No evil headers seen by diff start, split by hand
146 return headersplit(stream, cur)
146 return headersplit(stream, cur)
147 # Not enough info, keep reading
147 # Not enough info, keep reading
148
148
149 # if we are here, we have a very plain patch
149 # if we are here, we have a very plain patch
150 return remainder(cur)
150 return remainder(cur)
151
151
152 def extract(ui, fileobj):
152 def extract(ui, fileobj):
153 '''extract patch from data read from fileobj.
153 '''extract patch from data read from fileobj.
154
154
155 patch can be a normal patch or contained in an email message.
155 patch can be a normal patch or contained in an email message.
156
156
157 return tuple (filename, message, user, date, branch, node, p1, p2).
157 return tuple (filename, message, user, date, branch, node, p1, p2).
158 Any item in the returned tuple can be None. If filename is None,
158 Any item in the returned tuple can be None. If filename is None,
159 fileobj did not contain a patch. Caller must unlink filename when done.'''
159 fileobj did not contain a patch. Caller must unlink filename when done.'''
160
160
161 # attempt to detect the start of a patch
161 # attempt to detect the start of a patch
162 # (this heuristic is borrowed from quilt)
162 # (this heuristic is borrowed from quilt)
163 diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |'
163 diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |'
164 r'retrieving revision [0-9]+(\.[0-9]+)*$|'
164 r'retrieving revision [0-9]+(\.[0-9]+)*$|'
165 r'---[ \t].*?^\+\+\+[ \t]|'
165 r'---[ \t].*?^\+\+\+[ \t]|'
166 r'\*\*\*[ \t].*?^---[ \t])', re.MULTILINE|re.DOTALL)
166 r'\*\*\*[ \t].*?^---[ \t])', re.MULTILINE|re.DOTALL)
167
167
168 fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
168 fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
169 tmpfp = os.fdopen(fd, 'w')
169 tmpfp = os.fdopen(fd, 'w')
170 try:
170 try:
171 msg = email.Parser.Parser().parse(fileobj)
171 msg = email.Parser.Parser().parse(fileobj)
172
172
173 subject = msg['Subject']
173 subject = msg['Subject']
174 user = msg['From']
174 user = msg['From']
175 if not subject and not user:
175 if not subject and not user:
176 # Not an email, restore parsed headers if any
176 # Not an email, restore parsed headers if any
177 subject = '\n'.join(': '.join(h) for h in msg.items()) + '\n'
177 subject = '\n'.join(': '.join(h) for h in msg.items()) + '\n'
178
178
179 gitsendmail = 'git-send-email' in msg.get('X-Mailer', '')
179 gitsendmail = 'git-send-email' in msg.get('X-Mailer', '')
180 # should try to parse msg['Date']
180 # should try to parse msg['Date']
181 date = None
181 date = None
182 nodeid = None
182 nodeid = None
183 branch = None
183 branch = None
184 parents = []
184 parents = []
185
185
186 if subject:
186 if subject:
187 if subject.startswith('[PATCH'):
187 if subject.startswith('[PATCH'):
188 pend = subject.find(']')
188 pend = subject.find(']')
189 if pend >= 0:
189 if pend >= 0:
190 subject = subject[pend + 1:].lstrip()
190 subject = subject[pend + 1:].lstrip()
191 subject = re.sub(r'\n[ \t]+', ' ', subject)
191 subject = re.sub(r'\n[ \t]+', ' ', subject)
192 ui.debug('Subject: %s\n' % subject)
192 ui.debug('Subject: %s\n' % subject)
193 if user:
193 if user:
194 ui.debug('From: %s\n' % user)
194 ui.debug('From: %s\n' % user)
195 diffs_seen = 0
195 diffs_seen = 0
196 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
196 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
197 message = ''
197 message = ''
198 for part in msg.walk():
198 for part in msg.walk():
199 content_type = part.get_content_type()
199 content_type = part.get_content_type()
200 ui.debug('Content-Type: %s\n' % content_type)
200 ui.debug('Content-Type: %s\n' % content_type)
201 if content_type not in ok_types:
201 if content_type not in ok_types:
202 continue
202 continue
203 payload = part.get_payload(decode=True)
203 payload = part.get_payload(decode=True)
204 m = diffre.search(payload)
204 m = diffre.search(payload)
205 if m:
205 if m:
206 hgpatch = False
206 hgpatch = False
207 hgpatchheader = False
207 hgpatchheader = False
208 ignoretext = False
208 ignoretext = False
209
209
210 ui.debug('found patch at byte %d\n' % m.start(0))
210 ui.debug('found patch at byte %d\n' % m.start(0))
211 diffs_seen += 1
211 diffs_seen += 1
212 cfp = cStringIO.StringIO()
212 cfp = cStringIO.StringIO()
213 for line in payload[:m.start(0)].splitlines():
213 for line in payload[:m.start(0)].splitlines():
214 if line.startswith('# HG changeset patch') and not hgpatch:
214 if line.startswith('# HG changeset patch') and not hgpatch:
215 ui.debug('patch generated by hg export\n')
215 ui.debug('patch generated by hg export\n')
216 hgpatch = True
216 hgpatch = True
217 hgpatchheader = True
217 hgpatchheader = True
218 # drop earlier commit message content
218 # drop earlier commit message content
219 cfp.seek(0)
219 cfp.seek(0)
220 cfp.truncate()
220 cfp.truncate()
221 subject = None
221 subject = None
222 elif hgpatchheader:
222 elif hgpatchheader:
223 if line.startswith('# User '):
223 if line.startswith('# User '):
224 user = line[7:]
224 user = line[7:]
225 ui.debug('From: %s\n' % user)
225 ui.debug('From: %s\n' % user)
226 elif line.startswith("# Date "):
226 elif line.startswith("# Date "):
227 date = line[7:]
227 date = line[7:]
228 elif line.startswith("# Branch "):
228 elif line.startswith("# Branch "):
229 branch = line[9:]
229 branch = line[9:]
230 elif line.startswith("# Node ID "):
230 elif line.startswith("# Node ID "):
231 nodeid = line[10:]
231 nodeid = line[10:]
232 elif line.startswith("# Parent "):
232 elif line.startswith("# Parent "):
233 parents.append(line[9:].lstrip())
233 parents.append(line[9:].lstrip())
234 elif not line.startswith("# "):
234 elif not line.startswith("# "):
235 hgpatchheader = False
235 hgpatchheader = False
236 elif line == '---' and gitsendmail:
236 elif line == '---' and gitsendmail:
237 ignoretext = True
237 ignoretext = True
238 if not hgpatchheader and not ignoretext:
238 if not hgpatchheader and not ignoretext:
239 cfp.write(line)
239 cfp.write(line)
240 cfp.write('\n')
240 cfp.write('\n')
241 message = cfp.getvalue()
241 message = cfp.getvalue()
242 if tmpfp:
242 if tmpfp:
243 tmpfp.write(payload)
243 tmpfp.write(payload)
244 if not payload.endswith('\n'):
244 if not payload.endswith('\n'):
245 tmpfp.write('\n')
245 tmpfp.write('\n')
246 elif not diffs_seen and message and content_type == 'text/plain':
246 elif not diffs_seen and message and content_type == 'text/plain':
247 message += '\n' + payload
247 message += '\n' + payload
248 except:
248 except: # re-raises
249 tmpfp.close()
249 tmpfp.close()
250 os.unlink(tmpname)
250 os.unlink(tmpname)
251 raise
251 raise
252
252
253 if subject and not message.startswith(subject):
253 if subject and not message.startswith(subject):
254 message = '%s\n%s' % (subject, message)
254 message = '%s\n%s' % (subject, message)
255 tmpfp.close()
255 tmpfp.close()
256 if not diffs_seen:
256 if not diffs_seen:
257 os.unlink(tmpname)
257 os.unlink(tmpname)
258 return None, message, user, date, branch, None, None, None
258 return None, message, user, date, branch, None, None, None
259 p1 = parents and parents.pop(0) or None
259 p1 = parents and parents.pop(0) or None
260 p2 = parents and parents.pop(0) or None
260 p2 = parents and parents.pop(0) or None
261 return tmpname, message, user, date, branch, nodeid, p1, p2
261 return tmpname, message, user, date, branch, nodeid, p1, p2
262
262
class patchmeta(object):
    """Metadata describing a single patched file.

    'op' is one of ADD, DELETE, RENAME, MODIFY or COPY. 'path' is the
    patched file path. 'oldpath' names the origin file when 'op' is
    COPY or RENAME and is None otherwise. When the file mode changes,
    'mode' is an (islink, isexec) pair; otherwise it is None.
    """
    def __init__(self, path):
        self.path = path
        self.oldpath = None
        self.mode = None
        self.op = 'MODIFY'
        self.binary = False

    def setmode(self, mode):
        # keep only the symlink and owner-exec bits of a git file mode
        self.mode = (mode & 0o20000, mode & 0o100)

    def copy(self):
        clone = patchmeta(self.path)
        clone.oldpath = self.oldpath
        clone.mode = self.mode
        clone.op = self.op
        clone.binary = self.binary
        return clone

    def _ispatchinga(self, afile):
        # /dev/null on the "a" side means the file is being created
        if afile == '/dev/null':
            return self.op == 'ADD'
        return afile == 'a/' + (self.oldpath or self.path)

    def _ispatchingb(self, bfile):
        # /dev/null on the "b" side means the file is being deleted
        if bfile == '/dev/null':
            return self.op == 'DELETE'
        return bfile == 'b/' + self.path

    def ispatching(self, afile, bfile):
        """Tell whether this metadata matches the a/ and b/ diff paths."""
        return self._ispatchinga(afile) and self._ispatchingb(bfile)

    def __repr__(self):
        return "<patchmeta %s %r>" % (self.op, self.path)
308
308
def readgitpatch(lr):
    """extract git-style metadata about patches from <patchname>"""

    # Walk the stream looking for "diff --git" headers; each header
    # opens a patchmeta whose fields are filled from the extended
    # header lines that follow it.
    current = None
    results = []
    for rawline in lr:
        rawline = rawline.rstrip(' \r\n')
        if rawline.startswith('diff --git'):
            match = gitre.match(rawline)
            if match:
                if current:
                    results.append(current)
                current = patchmeta(match.group(2))
        elif current:
            if rawline.startswith('--- '):
                # hunk data begins: this file's header block is over
                results.append(current)
                current = None
                continue
            if rawline.startswith('rename from '):
                current.op = 'RENAME'
                current.oldpath = rawline[12:]
            elif rawline.startswith('rename to '):
                current.path = rawline[10:]
            elif rawline.startswith('copy from '):
                current.op = 'COPY'
                current.oldpath = rawline[10:]
            elif rawline.startswith('copy to '):
                current.path = rawline[8:]
            elif rawline.startswith('deleted file'):
                current.op = 'DELETE'
            elif rawline.startswith('new file mode '):
                current.op = 'ADD'
                current.setmode(int(rawline[-6:], 8))
            elif rawline.startswith('new mode '):
                current.setmode(int(rawline[-6:], 8))
            elif rawline.startswith('GIT binary patch'):
                current.binary = True
    if current:
        results.append(current)

    return results
352
352
class linereader(object):
    """File-like wrapper allowing lines to be pushed back onto the stream.

    Pushed-back lines are served (FIFO) before anything further is read
    from the underlying file object.
    """
    def __init__(self, fp):
        self.fp = fp
        self.buf = []

    def push(self, line):
        # None is ignored so callers can push an "absent" lookahead
        if line is not None:
            self.buf.append(line)

    def readline(self):
        if self.buf:
            return self.buf.pop(0)
        return self.fp.readline()

    def __iter__(self):
        while True:
            line = self.readline()
            if not line:
                break
            yield line
376
376
class abstractbackend(object):
    """Interface that targets of patch application must implement."""

    def __init__(self, ui):
        self.ui = ui

    def getfile(self, fname):
        """Return target file data and flags as a (data, (islink,
        isexec)) tuple.
        """
        raise NotImplementedError

    def setfile(self, fname, data, mode, copysource):
        """Write data to target file fname and set its mode. mode is a
        (islink, isexec) tuple. If data is None, the file content should
        be left unchanged. If the file is modified after being copied,
        copysource is set to the original file name.
        """
        raise NotImplementedError

    def unlink(self, fname):
        """Unlink target file."""
        raise NotImplementedError

    def writerej(self, fname, failed, total, lines):
        """Write rejected lines for fname. total is the number of hunks
        which failed to apply and total the total number of hunks for this
        files.
        """
        # default: rejects are silently dropped by backends that have
        # nowhere to record them
        pass

    def exists(self, fname):
        raise NotImplementedError
408
408
class fsbackend(abstractbackend):
    """Backend applying patches directly to files under a base directory."""

    def __init__(self, ui, basedir):
        super(fsbackend, self).__init__(ui)
        self.opener = scmutil.opener(basedir)

    def _join(self, f):
        # absolute path of f inside the backend's base directory
        return os.path.join(self.opener.base, f)

    def getfile(self, fname):
        path = self._join(fname)
        if os.path.islink(path):
            # for symlinks the "data" is the link target
            return (os.readlink(path), (True, False))
        isexec = False
        try:
            isexec = os.lstat(path).st_mode & 0o100 != 0
        except OSError as e:
            # a missing file simply reads as non-executable
            if e.errno != errno.ENOENT:
                raise
        return (self.opener.read(fname), (False, isexec))

    def setfile(self, fname, data, mode, copysource):
        islink, isexec = mode
        if data is None:
            # mode-only change: leave the content untouched
            util.setflags(self._join(fname), islink, isexec)
            return
        if islink:
            self.opener.symlink(data, fname)
        else:
            self.opener.write(fname, data)
            if isexec:
                util.setflags(self._join(fname), False, True)

    def unlink(self, fname):
        try:
            util.unlinkpath(self._join(fname))
        except OSError as inst:
            # removing an already-missing file is not an error
            if inst.errno != errno.ENOENT:
                raise

    def writerej(self, fname, failed, total, lines):
        fname = fname + ".rej"
        self.ui.warn(
            _("%d out of %d hunks FAILED -- saving rejects to file %s\n") %
            (failed, total, fname))
        rejfile = self.opener(fname, 'w')
        rejfile.writelines(lines)
        rejfile.close()

    def exists(self, fname):
        return os.path.lexists(self._join(fname))
459
459
class workingbackend(fsbackend):
    """Backend patching the working directory and tracking the dirstate
    changes (adds, removes, copies) needed to record the result."""

    def __init__(self, ui, repo, similarity):
        super(workingbackend, self).__init__(ui, repo.root)
        self.repo = repo
        self.similarity = similarity
        self.removed = set()
        self.changed = set()
        self.copied = []

    def _checkknown(self, fname):
        # refuse to touch files present on disk but unknown to the dirstate
        if self.repo.dirstate[fname] == '?' and self.exists(fname):
            raise PatchError(_('cannot patch %s: file is not tracked') % fname)

    def setfile(self, fname, data, mode, copysource):
        self._checkknown(fname)
        super(workingbackend, self).setfile(fname, data, mode, copysource)
        if copysource is not None:
            self.copied.append((copysource, fname))
        self.changed.add(fname)

    def unlink(self, fname):
        self._checkknown(fname)
        super(workingbackend, self).unlink(fname)
        self.removed.add(fname)
        self.changed.add(fname)

    def close(self):
        wctx = self.repo[None]
        candidates = set(self.changed)
        for src, dst in self.copied:
            scmutil.dirstatecopy(self.ui, self.repo, wctx, src, dst)
        if self.removed:
            wctx.forget(sorted(self.removed))
            for f in self.removed:
                if f not in self.repo.dirstate:
                    # File was deleted and no longer belongs to the
                    # dirstate, it was probably marked added then
                    # deleted, and should not be considered by
                    # addremove().
                    candidates.discard(f)
        if candidates:
            cwd = self.repo.getcwd()
            if cwd:
                candidates = [util.pathto(self.repo.root, cwd, f)
                              for f in candidates]
            scmutil.addremove(self.repo, candidates,
                              similarity=self.similarity)
        return sorted(self.changed)
507
507
class filestore(object):
    """Temporary storage for patched file contents.

    Small files are kept in memory; once the in-memory budget
    (maxsize, default 4 MiB) would be exceeded, further files spill to
    a lazily-created temporary directory on disk.
    """
    def __init__(self, maxsize=None):
        self.opener = None      # created lazily on first spill to disk
        self.files = {}         # fname -> (diskname, mode, copied)
        self.created = 0        # counter generating spill file names
        if maxsize is None:
            maxsize = 4 * (2 ** 20)
        self.maxsize = maxsize
        self.size = 0           # bytes currently held in memory
        self.data = {}          # fname -> (data, mode, copied)

    def setfile(self, fname, data, mode, copied=None):
        if self.maxsize < 0 or (len(data) + self.size) <= self.maxsize:
            # fits in memory (a negative maxsize disables spilling)
            self.data[fname] = (data, mode, copied)
            self.size += len(data)
        else:
            if self.opener is None:
                root = tempfile.mkdtemp(prefix='hg-patch-')
                self.opener = scmutil.opener(root)
            # simple sequential names avoid filename issues
            diskname = str(self.created)
            self.opener.write(diskname, data)
            self.created += 1
            self.files[fname] = (diskname, mode, copied)

    def getfile(self, fname):
        try:
            return self.data[fname]
        except KeyError:
            pass
        if not self.opener or fname not in self.files:
            raise IOError
        diskname, mode, copied = self.files[fname]
        return self.opener.read(diskname), mode, copied

    def close(self):
        # remove the spill directory, if one was ever created
        if self.opener:
            shutil.rmtree(self.opener.base)
544
544
class repobackend(abstractbackend):
    """Backend collecting patch results into a filestore, relative to a
    given changectx, for building an in-memory commit."""

    def __init__(self, ui, repo, ctx, store):
        super(repobackend, self).__init__(ui)
        self.repo = repo
        self.ctx = ctx
        self.store = store
        self.changed = set()
        self.removed = set()
        self.copied = {}

    def _checkknown(self, fname):
        # every patched file must already exist in the base context
        if fname not in self.ctx:
            raise PatchError(_('cannot patch %s: file is not tracked') % fname)

    def getfile(self, fname):
        try:
            fctx = self.ctx[fname]
        except error.LookupError:
            # callers expect IOError for missing files
            raise IOError
        flags = fctx.flags()
        islink, isexec = 'l' in flags, 'x' in flags
        return fctx.data(), (islink, isexec)

    def setfile(self, fname, data, mode, copysource):
        if copysource:
            self._checkknown(copysource)
        if data is None:
            # mode-only change: carry the content over from the context
            data = self.ctx[fname].data()
        self.store.setfile(fname, data, mode, copysource)
        self.changed.add(fname)
        if copysource:
            self.copied[fname] = copysource

    def unlink(self, fname):
        self._checkknown(fname)
        self.removed.add(fname)

    def exists(self, fname):
        return fname in self.ctx

    def close(self):
        # report every file touched, whether rewritten or removed
        return self.changed | self.removed
586
586
# @@ -start,len +start,len @@ or @@ -start +start @@ if len is 1
# NOTE: raw strings here -- the patterns contain \d, \+ and \* which are
# invalid escape sequences in ordinary string literals on modern Python.
unidesc = re.compile(r'@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@')
contextdesc = re.compile(r'(?:---|\*\*\*) (\d+)(?:,(\d+))? (?:---|\*\*\*)')
# end-of-line normalization modes accepted by patching code
eolmodes = ['strict', 'crlf', 'lf', 'auto']
591
591
592 class patchfile(object):
592 class patchfile(object):
593 def __init__(self, ui, gp, backend, store, eolmode='strict'):
593 def __init__(self, ui, gp, backend, store, eolmode='strict'):
594 self.fname = gp.path
594 self.fname = gp.path
595 self.eolmode = eolmode
595 self.eolmode = eolmode
596 self.eol = None
596 self.eol = None
597 self.backend = backend
597 self.backend = backend
598 self.ui = ui
598 self.ui = ui
599 self.lines = []
599 self.lines = []
600 self.exists = False
600 self.exists = False
601 self.missing = True
601 self.missing = True
602 self.mode = gp.mode
602 self.mode = gp.mode
603 self.copysource = gp.oldpath
603 self.copysource = gp.oldpath
604 self.create = gp.op in ('ADD', 'COPY', 'RENAME')
604 self.create = gp.op in ('ADD', 'COPY', 'RENAME')
605 self.remove = gp.op == 'DELETE'
605 self.remove = gp.op == 'DELETE'
606 try:
606 try:
607 if self.copysource is None:
607 if self.copysource is None:
608 data, mode = backend.getfile(self.fname)
608 data, mode = backend.getfile(self.fname)
609 self.exists = True
609 self.exists = True
610 else:
610 else:
611 data, mode = store.getfile(self.copysource)[:2]
611 data, mode = store.getfile(self.copysource)[:2]
612 self.exists = backend.exists(self.fname)
612 self.exists = backend.exists(self.fname)
613 self.missing = False
613 self.missing = False
614 if data:
614 if data:
615 self.lines = mdiff.splitnewlines(data)
615 self.lines = mdiff.splitnewlines(data)
616 if self.mode is None:
616 if self.mode is None:
617 self.mode = mode
617 self.mode = mode
618 if self.lines:
618 if self.lines:
619 # Normalize line endings
619 # Normalize line endings
620 if self.lines[0].endswith('\r\n'):
620 if self.lines[0].endswith('\r\n'):
621 self.eol = '\r\n'
621 self.eol = '\r\n'
622 elif self.lines[0].endswith('\n'):
622 elif self.lines[0].endswith('\n'):
623 self.eol = '\n'
623 self.eol = '\n'
624 if eolmode != 'strict':
624 if eolmode != 'strict':
625 nlines = []
625 nlines = []
626 for l in self.lines:
626 for l in self.lines:
627 if l.endswith('\r\n'):
627 if l.endswith('\r\n'):
628 l = l[:-2] + '\n'
628 l = l[:-2] + '\n'
629 nlines.append(l)
629 nlines.append(l)
630 self.lines = nlines
630 self.lines = nlines
631 except IOError:
631 except IOError:
632 if self.create:
632 if self.create:
633 self.missing = False
633 self.missing = False
634 if self.mode is None:
634 if self.mode is None:
635 self.mode = (False, False)
635 self.mode = (False, False)
636 if self.missing:
636 if self.missing:
637 self.ui.warn(_("unable to find '%s' for patching\n") % self.fname)
637 self.ui.warn(_("unable to find '%s' for patching\n") % self.fname)
638
638
639 self.hash = {}
639 self.hash = {}
640 self.dirty = 0
640 self.dirty = 0
641 self.offset = 0
641 self.offset = 0
642 self.skew = 0
642 self.skew = 0
643 self.rej = []
643 self.rej = []
644 self.fileprinted = False
644 self.fileprinted = False
645 self.printfile(False)
645 self.printfile(False)
646 self.hunks = 0
646 self.hunks = 0
647
647
648 def writelines(self, fname, lines, mode):
648 def writelines(self, fname, lines, mode):
649 if self.eolmode == 'auto':
649 if self.eolmode == 'auto':
650 eol = self.eol
650 eol = self.eol
651 elif self.eolmode == 'crlf':
651 elif self.eolmode == 'crlf':
652 eol = '\r\n'
652 eol = '\r\n'
653 else:
653 else:
654 eol = '\n'
654 eol = '\n'
655
655
656 if self.eolmode != 'strict' and eol and eol != '\n':
656 if self.eolmode != 'strict' and eol and eol != '\n':
657 rawlines = []
657 rawlines = []
658 for l in lines:
658 for l in lines:
659 if l and l[-1] == '\n':
659 if l and l[-1] == '\n':
660 l = l[:-1] + eol
660 l = l[:-1] + eol
661 rawlines.append(l)
661 rawlines.append(l)
662 lines = rawlines
662 lines = rawlines
663
663
664 self.backend.setfile(fname, ''.join(lines), mode, self.copysource)
664 self.backend.setfile(fname, ''.join(lines), mode, self.copysource)
665
665
666 def printfile(self, warn):
666 def printfile(self, warn):
667 if self.fileprinted:
667 if self.fileprinted:
668 return
668 return
669 if warn or self.ui.verbose:
669 if warn or self.ui.verbose:
670 self.fileprinted = True
670 self.fileprinted = True
671 s = _("patching file %s\n") % self.fname
671 s = _("patching file %s\n") % self.fname
672 if warn:
672 if warn:
673 self.ui.warn(s)
673 self.ui.warn(s)
674 else:
674 else:
675 self.ui.note(s)
675 self.ui.note(s)
676
676
677
677
678 def findlines(self, l, linenum):
678 def findlines(self, l, linenum):
679 # looks through the hash and finds candidate lines. The
679 # looks through the hash and finds candidate lines. The
680 # result is a list of line numbers sorted based on distance
680 # result is a list of line numbers sorted based on distance
681 # from linenum
681 # from linenum
682
682
683 cand = self.hash.get(l, [])
683 cand = self.hash.get(l, [])
684 if len(cand) > 1:
684 if len(cand) > 1:
685 # resort our list of potentials forward then back.
685 # resort our list of potentials forward then back.
686 cand.sort(key=lambda x: abs(x - linenum))
686 cand.sort(key=lambda x: abs(x - linenum))
687 return cand
687 return cand
688
688
689 def write_rej(self):
689 def write_rej(self):
690 # our rejects are a little different from patch(1). This always
690 # our rejects are a little different from patch(1). This always
691 # creates rejects in the same form as the original patch. A file
691 # creates rejects in the same form as the original patch. A file
692 # header is inserted so that you can run the reject through patch again
692 # header is inserted so that you can run the reject through patch again
693 # without having to type the filename.
693 # without having to type the filename.
694 if not self.rej:
694 if not self.rej:
695 return
695 return
696 base = os.path.basename(self.fname)
696 base = os.path.basename(self.fname)
697 lines = ["--- %s\n+++ %s\n" % (base, base)]
697 lines = ["--- %s\n+++ %s\n" % (base, base)]
698 for x in self.rej:
698 for x in self.rej:
699 for l in x.hunk:
699 for l in x.hunk:
700 lines.append(l)
700 lines.append(l)
701 if l[-1] != '\n':
701 if l[-1] != '\n':
702 lines.append("\n\ No newline at end of file\n")
702 lines.append("\n\ No newline at end of file\n")
703 self.backend.writerej(self.fname, len(self.rej), self.hunks, lines)
703 self.backend.writerej(self.fname, len(self.rej), self.hunks, lines)
704
704
705 def apply(self, h):
705 def apply(self, h):
706 if not h.complete():
706 if not h.complete():
707 raise PatchError(_("bad hunk #%d %s (%d %d %d %d)") %
707 raise PatchError(_("bad hunk #%d %s (%d %d %d %d)") %
708 (h.number, h.desc, len(h.a), h.lena, len(h.b),
708 (h.number, h.desc, len(h.a), h.lena, len(h.b),
709 h.lenb))
709 h.lenb))
710
710
711 self.hunks += 1
711 self.hunks += 1
712
712
713 if self.missing:
713 if self.missing:
714 self.rej.append(h)
714 self.rej.append(h)
715 return -1
715 return -1
716
716
717 if self.exists and self.create:
717 if self.exists and self.create:
718 if self.copysource:
718 if self.copysource:
719 self.ui.warn(_("cannot create %s: destination already "
719 self.ui.warn(_("cannot create %s: destination already "
720 "exists\n" % self.fname))
720 "exists\n" % self.fname))
721 else:
721 else:
722 self.ui.warn(_("file %s already exists\n") % self.fname)
722 self.ui.warn(_("file %s already exists\n") % self.fname)
723 self.rej.append(h)
723 self.rej.append(h)
724 return -1
724 return -1
725
725
726 if isinstance(h, binhunk):
726 if isinstance(h, binhunk):
727 if self.remove:
727 if self.remove:
728 self.backend.unlink(self.fname)
728 self.backend.unlink(self.fname)
729 else:
729 else:
730 self.lines[:] = h.new()
730 self.lines[:] = h.new()
731 self.offset += len(h.new())
731 self.offset += len(h.new())
732 self.dirty = True
732 self.dirty = True
733 return 0
733 return 0
734
734
735 horig = h
735 horig = h
736 if (self.eolmode in ('crlf', 'lf')
736 if (self.eolmode in ('crlf', 'lf')
737 or self.eolmode == 'auto' and self.eol):
737 or self.eolmode == 'auto' and self.eol):
738 # If new eols are going to be normalized, then normalize
738 # If new eols are going to be normalized, then normalize
739 # hunk data before patching. Otherwise, preserve input
739 # hunk data before patching. Otherwise, preserve input
740 # line-endings.
740 # line-endings.
741 h = h.getnormalized()
741 h = h.getnormalized()
742
742
743 # fast case first, no offsets, no fuzz
743 # fast case first, no offsets, no fuzz
744 old, oldstart, new, newstart = h.fuzzit(0, False)
744 old, oldstart, new, newstart = h.fuzzit(0, False)
745 oldstart += self.offset
745 oldstart += self.offset
746 orig_start = oldstart
746 orig_start = oldstart
747 # if there's skew we want to emit the "(offset %d lines)" even
747 # if there's skew we want to emit the "(offset %d lines)" even
748 # when the hunk cleanly applies at start + skew, so skip the
748 # when the hunk cleanly applies at start + skew, so skip the
749 # fast case code
749 # fast case code
750 if (self.skew == 0 and
750 if (self.skew == 0 and
751 diffhelpers.testhunk(old, self.lines, oldstart) == 0):
751 diffhelpers.testhunk(old, self.lines, oldstart) == 0):
752 if self.remove:
752 if self.remove:
753 self.backend.unlink(self.fname)
753 self.backend.unlink(self.fname)
754 else:
754 else:
755 self.lines[oldstart:oldstart + len(old)] = new
755 self.lines[oldstart:oldstart + len(old)] = new
756 self.offset += len(new) - len(old)
756 self.offset += len(new) - len(old)
757 self.dirty = True
757 self.dirty = True
758 return 0
758 return 0
759
759
760 # ok, we couldn't match the hunk. Lets look for offsets and fuzz it
760 # ok, we couldn't match the hunk. Lets look for offsets and fuzz it
761 self.hash = {}
761 self.hash = {}
762 for x, s in enumerate(self.lines):
762 for x, s in enumerate(self.lines):
763 self.hash.setdefault(s, []).append(x)
763 self.hash.setdefault(s, []).append(x)
764
764
765 for fuzzlen in xrange(3):
765 for fuzzlen in xrange(3):
766 for toponly in [True, False]:
766 for toponly in [True, False]:
767 old, oldstart, new, newstart = h.fuzzit(fuzzlen, toponly)
767 old, oldstart, new, newstart = h.fuzzit(fuzzlen, toponly)
768 oldstart = oldstart + self.offset + self.skew
768 oldstart = oldstart + self.offset + self.skew
769 oldstart = min(oldstart, len(self.lines))
769 oldstart = min(oldstart, len(self.lines))
770 if old:
770 if old:
771 cand = self.findlines(old[0][1:], oldstart)
771 cand = self.findlines(old[0][1:], oldstart)
772 else:
772 else:
773 # Only adding lines with no or fuzzed context, just
773 # Only adding lines with no or fuzzed context, just
774 # take the skew in account
774 # take the skew in account
775 cand = [oldstart]
775 cand = [oldstart]
776
776
777 for l in cand:
777 for l in cand:
778 if not old or diffhelpers.testhunk(old, self.lines, l) == 0:
778 if not old or diffhelpers.testhunk(old, self.lines, l) == 0:
779 self.lines[l : l + len(old)] = new
779 self.lines[l : l + len(old)] = new
780 self.offset += len(new) - len(old)
780 self.offset += len(new) - len(old)
781 self.skew = l - orig_start
781 self.skew = l - orig_start
782 self.dirty = True
782 self.dirty = True
783 offset = l - orig_start - fuzzlen
783 offset = l - orig_start - fuzzlen
784 if fuzzlen:
784 if fuzzlen:
785 msg = _("Hunk #%d succeeded at %d "
785 msg = _("Hunk #%d succeeded at %d "
786 "with fuzz %d "
786 "with fuzz %d "
787 "(offset %d lines).\n")
787 "(offset %d lines).\n")
788 self.printfile(True)
788 self.printfile(True)
789 self.ui.warn(msg %
789 self.ui.warn(msg %
790 (h.number, l + 1, fuzzlen, offset))
790 (h.number, l + 1, fuzzlen, offset))
791 else:
791 else:
792 msg = _("Hunk #%d succeeded at %d "
792 msg = _("Hunk #%d succeeded at %d "
793 "(offset %d lines).\n")
793 "(offset %d lines).\n")
794 self.ui.note(msg % (h.number, l + 1, offset))
794 self.ui.note(msg % (h.number, l + 1, offset))
795 return fuzzlen
795 return fuzzlen
796 self.printfile(True)
796 self.printfile(True)
797 self.ui.warn(_("Hunk #%d FAILED at %d\n") % (h.number, orig_start))
797 self.ui.warn(_("Hunk #%d FAILED at %d\n") % (h.number, orig_start))
798 self.rej.append(horig)
798 self.rej.append(horig)
799 return -1
799 return -1
800
800
    def close(self):
        """Flush this file's patched result and rejects.

        Writes the (possibly modified) target lines back only when a
        hunk actually changed them, always emits the .rej file via
        write_rej(), and returns the number of rejected hunks.
        """
        # self.dirty is set by apply() when a hunk was applied in place.
        if self.dirty:
            self.writelines(self.fname, self.lines, self.mode)
        self.write_rej()
        return len(self.rej)
806
806
class hunk(object):
    """One hunk of a text patch, in unified or context format.

    Parses the hunk body from a linereader into parallel representations:
    self.a (old-side lines, '-'/' ' prefixed), self.b (new-side lines,
    unprefixed), and self.hunk (raw unified-style text, descriptor first).
    """

    def __init__(self, desc, num, lr, context):
        # desc: the '@@ ...' (unified) or '***************' (context)
        #       descriptor line
        # num: 1-based hunk number, used only in messages
        # lr: linereader to consume the hunk body from; None builds an
        #     empty shell (see getnormalized)
        # context: truthy for context diffs, falsy for unified diffs
        self.number = num
        self.desc = desc
        self.hunk = [desc]
        self.a = []
        self.b = []
        self.starta = self.lena = None
        self.startb = self.lenb = None
        if lr is not None:
            if context:
                self.read_context_hunk(lr)
            else:
                self.read_unified_hunk(lr)

    def getnormalized(self):
        """Return a copy with line endings normalized to LF."""

        def normalize(lines):
            # Convert CRLF terminators to bare LF; other lines unchanged.
            nlines = []
            for line in lines:
                if line.endswith('\r\n'):
                    line = line[:-2] + '\n'
                nlines.append(line)
            return nlines

        # Dummy object, it is rebuilt manually
        nh = hunk(self.desc, self.number, None, None)
        nh.number = self.number
        nh.desc = self.desc
        # NOTE(review): nh.hunk aliases self.hunk (not normalized/copied);
        # presumably only used for display -- confirm before mutating.
        nh.hunk = self.hunk
        nh.a = normalize(self.a)
        nh.b = normalize(self.b)
        nh.starta = self.starta
        nh.startb = self.startb
        nh.lena = self.lena
        nh.lenb = self.lenb
        return nh

    def read_unified_hunk(self, lr):
        """Parse a unified hunk body following an '@@ -s,l +s,l @@' line."""
        m = unidesc.match(self.desc)
        if not m:
            raise PatchError(_("bad hunk #%d") % self.number)
        self.starta, self.lena, self.startb, self.lenb = m.groups()
        # A missing length defaults to 1 per the unified diff format.
        if self.lena is None:
            self.lena = 1
        else:
            self.lena = int(self.lena)
        if self.lenb is None:
            self.lenb = 1
        else:
            self.lenb = int(self.lenb)
        self.starta = int(self.starta)
        self.startb = int(self.startb)
        diffhelpers.addlines(lr, self.hunk, self.lena, self.lenb, self.a,
                             self.b)
        # if we hit eof before finishing out the hunk, the last line will
        # be zero length. Lets try to fix it up.
        while len(self.hunk[-1]) == 0:
            del self.hunk[-1]
            del self.a[-1]
            del self.b[-1]
            self.lena -= 1
            self.lenb -= 1
        self._fixnewline(lr)

    def read_context_hunk(self, lr):
        """Parse a context hunk body, rebuilding it in unified form."""
        # Old-side header: '*** starta[,aend] ****'
        self.desc = lr.readline()
        m = contextdesc.match(self.desc)
        if not m:
            raise PatchError(_("bad hunk #%d") % self.number)
        self.starta, aend = m.groups()
        self.starta = int(self.starta)
        if aend is None:
            aend = self.starta
        self.lena = int(aend) - self.starta
        if self.starta:
            self.lena += 1
        # Read the old-side block; context lines carry a two-character
        # prefix ('- ', '! ', or '  ').
        for x in xrange(self.lena):
            l = lr.readline()
            if l.startswith('---'):
                # lines addition, old block is empty
                lr.push(l)
                break
            s = l[2:]
            if l.startswith('- ') or l.startswith('! '):
                u = '-' + s
            elif l.startswith('  '):
                u = ' ' + s
            else:
                raise PatchError(_("bad hunk #%d old text line %d") %
                                 (self.number, x))
            self.a.append(u)
            self.hunk.append(u)

        l = lr.readline()
        if l.startswith('\ '):
            # '\ No newline at end of file': strip the trailing newline
            # from the last old-side line.
            s = self.a[-1][:-1]
            self.a[-1] = s
            self.hunk[-1] = s
            l = lr.readline()
        # New-side header: '--- startb[,bend] ----'
        m = contextdesc.match(l)
        if not m:
            raise PatchError(_("bad hunk #%d") % self.number)
        self.startb, bend = m.groups()
        self.startb = int(self.startb)
        if bend is None:
            bend = self.startb
        self.lenb = int(bend) - self.startb
        if self.startb:
            self.lenb += 1
        # hunki walks self.hunk (index 0 is the descriptor) so new-side
        # lines can be merged in at the right positions.
        hunki = 1
        for x in xrange(self.lenb):
            l = lr.readline()
            if l.startswith('\ '):
                # XXX: the only way to hit this is with an invalid line range.
                # The no-eol marker is not counted in the line range, but I
                # guess there are diff(1) out there which behave differently.
                s = self.b[-1][:-1]
                self.b[-1] = s
                self.hunk[hunki - 1] = s
                continue
            if not l:
                # line deletions, new block is empty and we hit EOF
                lr.push(l)
                break
            s = l[2:]
            if l.startswith('+ ') or l.startswith('! '):
                u = '+' + s
            elif l.startswith('  '):
                u = ' ' + s
            elif len(self.b) == 0:
                # line deletions, new block is empty
                lr.push(l)
                break
            else:
                # NOTE(review): message says "old text line" although this
                # loop parses the new side -- looks copy/pasted from the
                # old-side loop above; confirm against upstream.
                raise PatchError(_("bad hunk #%d old text line %d") %
                                 (self.number, x))
            self.b.append(s)
            # Merge u into self.hunk: skip over '-' lines (old-only),
            # stop on an exact match, otherwise insert before the cursor.
            while True:
                if hunki >= len(self.hunk):
                    h = ""
                else:
                    h = self.hunk[hunki]
                hunki += 1
                if h == u:
                    break
                elif h.startswith('-'):
                    continue
                else:
                    self.hunk.insert(hunki - 1, u)
                    break

        if not self.a:
            # this happens when lines were only added to the hunk
            for x in self.hunk:
                if x.startswith('-') or x.startswith(' '):
                    self.a.append(x)
        if not self.b:
            # this happens when lines were only deleted from the hunk
            for x in self.hunk:
                if x.startswith('+') or x.startswith(' '):
                    self.b.append(x[1:])
        # @@ -start,len +start,len @@
        self.desc = "@@ -%d,%d +%d,%d @@\n" % (self.starta, self.lena,
                                               self.startb, self.lenb)
        self.hunk[0] = self.desc
        self._fixnewline(lr)

    def _fixnewline(self, lr):
        """Consume a trailing no-eol marker, or push the line back."""
        l = lr.readline()
        if l.startswith('\ '):
            diffhelpers.fix_newline(self.hunk, self.a, self.b)
        else:
            lr.push(l)

    def complete(self):
        # True when both sides were parsed to their declared lengths.
        return len(self.a) == self.lena and len(self.b) == self.lenb

    def _fuzzit(self, old, new, fuzz, toponly):
        # this removes context lines from the top and bottom of list 'l'. It
        # checks the hunk to make sure only context lines are removed, and then
        # returns a new shortened list of lines.
        fuzz = min(fuzz, len(old))
        if fuzz:
            top = 0
            bot = 0
            hlen = len(self.hunk)
            # Count leading context (' ') lines available for trimming.
            for x in xrange(hlen - 1):
                # the hunk starts with the @@ line, so use x+1
                if self.hunk[x + 1][0] == ' ':
                    top += 1
                else:
                    break
            if not toponly:
                # Count trailing context lines as well.
                for x in xrange(hlen - 1):
                    if self.hunk[hlen - bot - 1][0] == ' ':
                        bot += 1
                    else:
                        break

            # Never trim more than 'fuzz' lines from either end.
            bot = min(fuzz, bot)
            top = min(fuzz, top)
            return old[top:len(old)-bot], new[top:len(new)-bot], top
        return old, new, 0

    def fuzzit(self, fuzz, toponly):
        """Return (old, oldstart, new, newstart) trimmed by up to 'fuzz'
        context lines, with 0-based start offsets adjusted accordingly."""
        old, new, top = self._fuzzit(self.a, self.b, fuzz, toponly)
        oldstart = self.starta + top
        newstart = self.startb + top
        # zero length hunk ranges already have their start decremented
        if self.lena and oldstart > 0:
            oldstart -= 1
        if self.lenb and newstart > 0:
            newstart -= 1
        return old, oldstart, new, newstart
1023
1023
class binhunk(object):
    'A binary patch file. Only understands literals so far.'

    def __init__(self, lr, fname):
        # fname: target path, used only in error messages
        self.text = None
        self.hunk = ['GIT binary patch\n']
        self._fname = fname
        self._read(lr)

    def complete(self):
        # No length bookkeeping as in text hunks: the hunk is complete
        # iff _read successfully decoded the payload.
        return self.text is not None

    def new(self):
        # Whole-file replacement: the decoded payload is the new content.
        return [self.text]

    def _read(self, lr):
        def getline(lr, hunk):
            # Read one line, keep the raw text in 'hunk', return it
            # without the EOL for parsing.
            l = lr.readline()
            hunk.append(l)
            return l.rstrip('\r\n')

        # Skip ahead to the 'literal <size>' header; EOF (empty line from
        # readline) before finding it means the data is missing.
        while True:
            line = getline(lr, self.hunk)
            if not line:
                raise PatchError(_('could not extract "%s" binary data')
                                 % self._fname)
            if line.startswith('literal '):
                break
        size = int(line[8:].rstrip())
        dec = []
        line = getline(lr, self.hunk)
        # Data lines are base85 with a leading length character; a line of
        # length <= 1 terminates the payload.
        while len(line) > 1:
            # First char encodes the decoded byte count of the line:
            # 'A'-'Z' => 1..26, otherwise 'a'-'z' => 27..52 (git framing).
            l = line[0]
            if l <= 'Z' and l >= 'A':
                l = ord(l) - ord('A') + 1
            else:
                l = ord(l) - ord('a') + 27
            try:
                dec.append(base85.b85decode(line[1:])[:l])
            except ValueError, e:
                raise PatchError(_('could not decode "%s" binary patch: %s')
                                 % (self._fname, str(e)))
            line = getline(lr, self.hunk)
        # Payload is zlib-compressed; verify the declared size matches.
        text = zlib.decompress(''.join(dec))
        if len(text) != size:
            raise PatchError(_('"%s" length is %d bytes, should be %d')
                             % (self._fname, len(text), size))
        self.text = text
1071
1071
def parsefilename(str):
    """Extract the filename from a '--- name<TAB|SPACE>junk' header line.

    Drops the 4-character prefix ('--- ' / '+++ ' / '*** ') and any line
    terminator, then truncates at the first tab if present, otherwise at
    the first space. Returns the whole remainder when neither occurs.
    """
    # --- filename \t|space stuff
    name = str[4:].rstrip('\r\n')
    # Tab takes priority over space as the separator.
    for sep in ('\t', ' '):
        cut = name.find(sep)
        if cut >= 0:
            return name[:cut]
    return name
1081
1081
def pathstrip(path, strip):
    """Strip 'strip' leading directory components from 'path'.

    Returns a (stripped-prefix, remainder) pair. Runs of consecutive
    slashes count as a single separator. Raises PatchError when the path
    has fewer than 'strip' components.
    """
    if strip == 0:
        return '', path.rstrip()
    total = len(path)
    pos = 0
    remaining = strip
    while remaining > 0:
        pos = path.find('/', pos)
        if pos == -1:
            raise PatchError(_("unable to strip away %d of %d dirs from %s") %
                             (remaining, strip, path))
        pos += 1
        # consume '//' in the path
        while pos < total - 1 and path[pos] == '/':
            pos += 1
        remaining -= 1
    return path[:pos].lstrip(), path[pos:].rstrip()
1099
1099
def makepatchmeta(backend, afile_orig, bfile_orig, hunk, strip):
    """Build a patchmeta record for a plain (non-git) patch header.

    Decides which file the hunk targets from the '---'/'+++' names and
    the backend's view of what exists, and tags the operation as ADD,
    DELETE, or (default) an in-place modification.

    Raises PatchError when neither a source nor a destination file can
    be determined.
    """
    nulla = afile_orig == "/dev/null"
    nullb = bfile_orig == "/dev/null"
    # A 0,0 old range against /dev/null means file creation; the mirror
    # case on the new side means removal.
    create = nulla and hunk.starta == 0 and hunk.lena == 0
    remove = nullb and hunk.startb == 0 and hunk.lenb == 0
    abase, afile = pathstrip(afile_orig, strip)
    gooda = not nulla and backend.exists(afile)
    bbase, bfile = pathstrip(bfile_orig, strip)
    if afile == bfile:
        goodb = gooda
    else:
        goodb = not nullb and backend.exists(bfile)
    missing = not goodb and not gooda and not create

    # some diff programs apparently produce patches where the afile is
    # not /dev/null, but afile starts with bfile
    abasedir = afile[:afile.rfind('/') + 1]
    bbasedir = bfile[:bfile.rfind('/') + 1]
    if (missing and abasedir == bbasedir and afile.startswith(bfile)
        and hunk.starta == 0 and hunk.lena == 0):
        create = True
        missing = False

    # If afile is "a/b/foo" and bfile is "a/b/foo.orig" we assume the
    # diff is between a file and its backup. In this case, the original
    # file should be patched (see original mpatch code).
    isbackup = (abase == bbase and bfile.startswith(afile))
    fname = None
    if not missing:
        if gooda and goodb:
            # 'and/or' idiom: falls through to bfile if afile is falsy
            fname = isbackup and afile or bfile
        elif gooda:
            fname = afile

    if not fname:
        if not nullb:
            fname = isbackup and afile or bfile
        elif not nulla:
            fname = afile
        else:
            raise PatchError(_("undefined source and destination files"))

    gp = patchmeta(fname)
    if create:
        gp.op = 'ADD'
    elif remove:
        gp.op = 'DELETE'
    return gp
1148
1148
def scangitpatch(lr, firstline):
    """
    Git patches can emit:
    - rename a to b
    - change b
    - copy a to c
    - change c

    We cannot apply this sequence as-is, the renamed 'a' could not be
    found for it would have been renamed already. And we cannot copy
    from 'b' instead because 'b' would have been changed already. So
    we scan the git patch for copy and rename commands so we can
    perform the copies ahead of time.
    """
    pos = 0
    try:
        pos = lr.fp.tell()
        fp = lr.fp
    except IOError:
        # Underlying stream is not seekable (e.g. a pipe): slurp the rest
        # into memory so we can rewind to 'pos' (0) after scanning.
        fp = cStringIO.StringIO(lr.fp.read())
    # Scan with a fresh linereader, re-prepending the 'diff --git' line
    # that the caller already consumed.
    gitlr = linereader(fp)
    gitlr.push(firstline)
    gitpatches = readgitpatch(gitlr)
    # Rewind so the caller's reader sees the patch from where it left off.
    fp.seek(pos)
    return gitpatches
1174
1174
def iterhunks(fp):
    """Read a patch and yield the following events:
    - ("file", afile, bfile, firsthunk): select a new target file.
    - ("hunk", hunk): a new hunk is ready to be applied, follows a
    "file" event.
    - ("git", gitchanges): current diff is in git format, gitchanges
    maps filenames to gitpatch records. Unique event.
    """
    afile = ""
    bfile = ""
    state = None
    hunknum = 0
    # emitfile: a "file" event is pending and will accompany the next hunk;
    # newfile: a new file header was just parsed in this iteration.
    emitfile = newfile = False
    gitpatches = None

    # our states
    BFILE = 1
    # context: None until the diff flavor is known, then True for context
    # diffs, False for unified diffs.
    context = None
    lr = linereader(fp)

    while True:
        x = lr.readline()
        if not x:
            break
        # Inside a file, a hunk starts with '@' (unified), a row of stars
        # (context), or a git binary header.
        if state == BFILE and (
            (not context and x[0] == '@')
            or (context is not False and x.startswith('***************'))
            or x.startswith('GIT binary patch')):
            gp = None
            # Pair this hunk with its pending git metadata, if any.
            if (gitpatches and
                gitpatches[-1].ispatching(afile, bfile)):
                gp = gitpatches.pop()
            if x.startswith('GIT binary patch'):
                h = binhunk(lr, gp.path)
            else:
                if context is None and x.startswith('***************'):
                    context = True
                h = hunk(x, hunknum + 1, lr, context)
            hunknum += 1
            if emitfile:
                emitfile = False
                yield 'file', (afile, bfile, h, gp and gp.copy() or None)
            yield 'hunk', h
        elif x.startswith('diff --git'):
            m = gitre.match(x.rstrip(' \r\n'))
            if not m:
                continue
            if gitpatches is None:
                # scan whole input for git metadata
                gitpatches = scangitpatch(lr, x)
                yield 'git', [g.copy() for g in gitpatches
                              if g.op in ('COPY', 'RENAME')]
                # Reversed so metadata can be consumed with pop() in
                # patch order.
                gitpatches.reverse()
            afile = 'a/' + m.group(1)
            bfile = 'b/' + m.group(2)
            # Flush metadata-only entries (mode changes, renames without
            # hunks) that precede the current file.
            while gitpatches and not gitpatches[-1].ispatching(afile, bfile):
                gp = gitpatches.pop()
                yield 'file', ('a/' + gp.path, 'b/' + gp.path, None, gp.copy())
            if not gitpatches:
                raise PatchError(_('failed to synchronize metadata for "%s"')
                                 % afile[2:])
            gp = gitpatches[-1]
            newfile = True
        elif x.startswith('---'):
            # check for a unified diff
            l2 = lr.readline()
            if not l2.startswith('+++'):
                lr.push(l2)
                continue
            newfile = True
            context = False
            afile = parsefilename(x)
            bfile = parsefilename(l2)
        elif x.startswith('***'):
            # check for a context diff
            l2 = lr.readline()
            if not l2.startswith('---'):
                lr.push(l2)
                continue
            # Require the star separator on the following line to confirm
            # this is really a context diff header.
            l3 = lr.readline()
            lr.push(l3)
            if not l3.startswith("***************"):
                lr.push(l2)
                continue
            newfile = True
            context = True
            afile = parsefilename(x)
            bfile = parsefilename(l2)

        if newfile:
            newfile = False
            emitfile = True
            state = BFILE
            hunknum = 0

    # Emit any leftover git metadata entries that had no hunks at all.
    while gitpatches:
        gp = gitpatches.pop()
        yield 'file', ('a/' + gp.path, 'b/' + gp.path, None, gp.copy())
1273
1273
def applydiff(ui, fp, backend, store, strip=1, eolmode='strict'):
    """Reads a patch from fp and tries to apply it.

    Returns 0 for a clean patch, -1 if any rejects were found and 1 if
    there was any fuzz.

    If 'eolmode' is 'strict', the patch content and patched file are
    read in binary mode. Otherwise, line endings are ignored when
    patching then normalized according to 'eolmode'.
    """
    # Thin wrapper binding the default patchfile class as the patcher.
    return _applydiff(ui, fp, patchfile, backend, store, strip=strip,
                      eolmode=eolmode)
1286
1286
def _applydiff(ui, fp, patcher, backend, store, strip=1,
               eolmode='strict'):
    """Apply the iterhunks() event stream from fp through 'patcher'.

    Returns 0 on a clean apply, 1 when any hunk needed fuzz, and -1 when
    any hunk was rejected.
    """

    def pstrip(p):
        # git metadata paths carry an implicit leading component, so
        # strip one level less than plain patch paths.
        return pathstrip(p, strip - 1)[1]

    rejects = 0
    err = 0
    current_file = None

    for state, values in iterhunks(fp):
        if state == 'hunk':
            # Skip hunks for a file we failed to open (see PatchError
            # handling below).
            if not current_file:
                continue
            ret = current_file.apply(values)
            if ret > 0:
                err = 1
        elif state == 'file':
            # Finish the previous file before switching targets.
            if current_file:
                rejects += current_file.close()
            current_file = None
            afile, bfile, first_hunk, gp = values
            if gp:
                gp.path = pstrip(gp.path)
                if gp.oldpath:
                    gp.oldpath = pstrip(gp.oldpath)
            else:
                # Plain patch: derive the metadata from the header names.
                gp = makepatchmeta(backend, afile, bfile, first_hunk, strip)
            if gp.op == 'RENAME':
                backend.unlink(gp.oldpath)
            if not first_hunk:
                # Metadata-only entry: no hunks follow for this file.
                if gp.op == 'DELETE':
                    backend.unlink(gp.path)
                    continue
                data, mode = None, None
                if gp.op in ('RENAME', 'COPY'):
                    # Source content was stashed during the 'git' event.
                    data, mode = store.getfile(gp.oldpath)[:2]
                if gp.mode:
                    mode = gp.mode
                    if gp.op == 'ADD':
                        # Added files without content have no hunk and
                        # must be created
                        data = ''
                if data or mode:
                    if (gp.op in ('ADD', 'RENAME', 'COPY')
                        and backend.exists(gp.path)):
                        raise PatchError(_("cannot create %s: destination "
                                           "already exists") % gp.path)
                    backend.setfile(gp.path, data, mode, gp.oldpath)
                continue
            try:
                current_file = patcher(ui, gp, backend, store,
                                       eolmode=eolmode)
            except PatchError, inst:
                # Count the whole file as rejected but keep applying the
                # rest of the patch.
                ui.warn(str(inst) + '\n')
                current_file = None
                rejects += 1
                continue
        elif state == 'git':
            # Stash copy/rename sources before any of them get modified.
            for gp in values:
                path = pstrip(gp.oldpath)
                data, mode = backend.getfile(path)
                store.setfile(path, data, mode)
        else:
            raise util.Abort(_('unsupported parser state: %s') % state)

    if current_file:
        rejects += current_file.close()

    if rejects:
        return -1
    return err
1359
1359
def _externalpatch(ui, repo, patcher, patchname, strip, files,
                   similarity):
    """use <patcher> to apply <patchname> to the working directory.
    returns whether patch was applied with fuzz factor."""

    fuzz = False
    args = []
    cwd = repo.root
    if cwd:
        args.append('-d %s' % util.shellquote(cwd))
    fp = util.popen('%s %s -p%d < %s' % (patcher, ' '.join(args), strip,
                                         util.shellquote(patchname)))
    try:
        # scrape the external patch program's output for progress,
        # fuzz, rejects and failures
        for line in fp:
            line = line.rstrip()
            ui.note(line + '\n')
            if line.startswith('patching file '):
                pf = util.parsepatchoutput(line)
                printed_file = False
                files.add(pf)
            elif line.find('with fuzz') >= 0:
                # NOTE(review): 'pf'/'printed_file' are only bound once a
                # 'patching file' line has been seen — assumes the patch
                # program always emits that line first; verify
                fuzz = True
                if not printed_file:
                    ui.warn(pf + '\n')
                    printed_file = True
                ui.warn(line + '\n')
            elif line.find('saving rejects to file') >= 0:
                ui.warn(line + '\n')
            elif line.find('FAILED') >= 0:
                if not printed_file:
                    ui.warn(pf + '\n')
                    printed_file = True
                ui.warn(line + '\n')
    finally:
        # register touched files (as add/remove candidates) even if
        # reading the program's output was interrupted
        if files:
            cfiles = list(files)
            cwd = repo.getcwd()
            if cwd:
                cfiles = [util.pathto(repo.root, cwd, f)
                          for f in cfiles]
            scmutil.addremove(repo, cfiles, similarity=similarity)
    code = fp.close()
    if code:
        raise PatchError(_("patch command failed: %s") %
                         util.explainexit(code)[0])
    return fuzz
1406
1406
def patchbackend(ui, backend, patchobj, strip, files=None, eolmode='strict'):
    """Apply *patchobj* (a file path or an open file object) via *backend*.

    Raises PatchError when any hunks were rejected; otherwise returns
    whether applydiff reported a non-zero (fuzzy/erroneous) status.
    Paths touched by the patch are added to *files*.
    """
    if files is None:
        files = set()
    if eolmode is None:
        eolmode = ui.config('patch', 'eol', 'strict')
    if eolmode.lower() not in eolmodes:
        raise util.Abort(_('unsupported line endings type: %s') % eolmode)
    eolmode = eolmode.lower()

    store = filestore()
    try:
        # patchobj may be a path name; if open() rejects it, assume it
        # is already a file-like object
        patchfp = open(patchobj, 'rb')
    except TypeError:
        patchfp = patchobj
    try:
        status = applydiff(ui, patchfp, backend, store, strip=strip,
                           eolmode=eolmode)
    finally:
        if patchfp != patchobj:
            patchfp.close()
        files.update(backend.close())
        store.close()
    if status < 0:
        raise PatchError(_('patch failed to apply'))
    return status > 0
1432
1432
def internalpatch(ui, repo, patchobj, strip, files=None, eolmode='strict',
                  similarity=0):
    """use builtin patch to apply <patchobj> to the working directory.
    returns whether patch was applied with fuzz factor."""
    # patch straight into the working directory, recording adds/removes
    # with the given similarity threshold
    backend = workingbackend(ui, repo, similarity)
    return patchbackend(ui, backend, patchobj, strip, files, eolmode)
1439
1439
def patchrepo(ui, repo, ctx, store, patchobj, strip, files=None,
              eolmode='strict'):
    """Apply <patchobj> on top of changectx *ctx*, writing results into
    *store* instead of the working directory."""
    return patchbackend(ui, repobackend(ui, repo, ctx, store),
                        patchobj, strip, files, eolmode)
1444
1444
def makememctx(repo, parents, text, user, date, branch, files, store,
               editor=None):
    """Build an in-memory changeset whose file contents come from *store*.

    *editor*, when given, is invoked to (re)write the commit message.
    Returns the new memctx.
    """
    def getfilectx(repo, memctx, path):
        # pull data and flags for each file out of the backing store
        data, (islink, isexec), copied = store.getfile(path)
        return context.memfilectx(path, data, islink=islink, isexec=isexec,
                                  copied=copied)

    if branch:
        extra = {'branch': encoding.fromlocal(branch)}
    else:
        extra = {}
    ctx = context.memctx(repo, parents, text, files, getfilectx, user,
                         date, extra)
    if editor:
        ctx._text = editor(repo, ctx, [])
    return ctx
1459
1459
1460 def patch(ui, repo, patchname, strip=1, files=None, eolmode='strict',
1460 def patch(ui, repo, patchname, strip=1, files=None, eolmode='strict',
1461 similarity=0):
1461 similarity=0):
1462 """Apply <patchname> to the working directory.
1462 """Apply <patchname> to the working directory.
1463
1463
1464 'eolmode' specifies how end of lines should be handled. It can be:
1464 'eolmode' specifies how end of lines should be handled. It can be:
1465 - 'strict': inputs are read in binary mode, EOLs are preserved
1465 - 'strict': inputs are read in binary mode, EOLs are preserved
1466 - 'crlf': EOLs are ignored when patching and reset to CRLF
1466 - 'crlf': EOLs are ignored when patching and reset to CRLF
1467 - 'lf': EOLs are ignored when patching and reset to LF
1467 - 'lf': EOLs are ignored when patching and reset to LF
1468 - None: get it from user settings, default to 'strict'
1468 - None: get it from user settings, default to 'strict'
1469 'eolmode' is ignored when using an external patcher program.
1469 'eolmode' is ignored when using an external patcher program.
1470
1470
1471 Returns whether patch was applied with fuzz factor.
1471 Returns whether patch was applied with fuzz factor.
1472 """
1472 """
1473 patcher = ui.config('ui', 'patch')
1473 patcher = ui.config('ui', 'patch')
1474 if files is None:
1474 if files is None:
1475 files = set()
1475 files = set()
1476 try:
1476 try:
1477 if patcher:
1477 if patcher:
1478 return _externalpatch(ui, repo, patcher, patchname, strip,
1478 return _externalpatch(ui, repo, patcher, patchname, strip,
1479 files, similarity)
1479 files, similarity)
1480 return internalpatch(ui, repo, patchname, strip, files, eolmode,
1480 return internalpatch(ui, repo, patchname, strip, files, eolmode,
1481 similarity)
1481 similarity)
1482 except PatchError, err:
1482 except PatchError, err:
1483 raise util.Abort(str(err))
1483 raise util.Abort(str(err))
1484
1484
def changedfiles(ui, repo, patchpath, strip=1):
    """Return the set of repository paths touched by the patch at
    *patchpath*, without applying it.

    Rename sources are included.  Raises util.Abort on unexpected
    parser states.
    """
    backend = fsbackend(ui, repo.root)
    fp = open(patchpath, 'rb')
    try:
        changed = set()
        for state, values in iterhunks(fp):
            if state == 'file':
                afile, bfile, first_hunk, gp = values
                if gp:
                    # git metadata: strip paths in place
                    gp.path = pathstrip(gp.path, strip - 1)[1]
                    if gp.oldpath:
                        gp.oldpath = pathstrip(gp.oldpath, strip - 1)[1]
                else:
                    # plain patch: derive metadata from the file headers
                    gp = makepatchmeta(backend, afile, bfile, first_hunk, strip)
                changed.add(gp.path)
                if gp.op == 'RENAME':
                    changed.add(gp.oldpath)
            elif state not in ('hunk', 'git'):
                raise util.Abort(_('unsupported parser state: %s') % state)
        return changed
    finally:
        fp.close()
1507
1507
def b85diff(to, tn):
    '''print base85-encoded binary diff'''
    def gitindex(text):
        # git-style blob id: sha1 of "blob <len>\0<content>"
        if not text:
            return hex(nullid)
        h = util.sha1('blob %d\0' % len(text))
        h.update(text)
        return h.hexdigest()

    def fmtline(line):
        # base85 line length prefix: A-Z for 1..26 bytes, a-z above that
        n = len(line)
        if n <= 26:
            prefix = chr(ord('A') + n - 1)
        else:
            prefix = chr(n - 26 + ord('a') - 1)
        return '%c%s\n' % (prefix, base85.b85encode(line, True))

    def chunk(text, csize=52):
        # split text into csize-byte pieces
        offset = 0
        total = len(text)
        while offset < total:
            yield text[offset:offset + csize]
            offset += csize

    oldhash = gitindex(to)
    newhash = gitindex(tn)
    if oldhash == newhash:
        return ""

    # TODO: deltas
    pieces = ['index %s..%s\nGIT binary patch\nliteral %s\n' %
              (oldhash, newhash, len(tn))]
    for piece in chunk(zlib.compress(tn)):
        pieces.append(fmtline(piece))
    pieces.append('\n')
    return ''.join(pieces)
1545
1545
class GitDiffRequired(Exception):
    """Raised while generating a plain diff to signal that git patch
    format is required to represent the change."""
    pass
1548
1548
def diffopts(ui, opts=None, untrusted=False, section='diff'):
    """Build an mdiff.diffopts from command options and ui configuration.

    Command-line *opts* take precedence; unset options fall back to the
    named config *section*.
    """
    def get(key, name=None, getter=ui.configbool):
        # prefer an explicitly given command option over configuration
        opt = opts and opts.get(key)
        if opt:
            return opt
        return getter(section, name or key, None, untrusted=untrusted)

    return mdiff.diffopts(
        text=opts and opts.get('text'),
        git=get('git'),
        nodates=get('nodates'),
        showfunc=get('show_function', 'showfunc'),
        ignorews=get('ignore_all_space', 'ignorews'),
        ignorewsamount=get('ignore_space_change', 'ignorewsamount'),
        ignoreblanklines=get('ignore_blank_lines', 'ignoreblanklines'),
        context=get('unified', getter=ui.config))
1562
1562
def diff(repo, node1=None, node2=None, match=None, changes=None, opts=None,
         losedatafn=None, prefix=''):
    '''yields diff of changes to files between two nodes, or node and
    working directory.

    if node1 is None, use first dirstate parent instead.
    if node2 is None, compare node1 with working directory.

    losedatafn(**kwarg) is a callable run when opts.upgrade=True and
    every time some change cannot be represented with the current
    patch format. Return False to upgrade to git patch format, True to
    accept the loss or raise an exception to abort the diff. It is
    called with the name of current file being diffed as 'fn'. If set
    to None, patches will always be upgraded to git format when
    necessary.

    prefix is a filename prefix that is prepended to all filenames on
    display (used for subrepos).
    '''

    if opts is None:
        opts = mdiff.defaultopts

    if not node1 and not node2:
        node1 = repo.dirstate.p1()

    def lrugetfilectx():
        # small LRU cache of filelogs keyed by file name, so repeated
        # filectx lookups for the same file reuse one filelog
        cache = {}
        order = []
        def getfilectx(f, ctx):
            fctx = ctx.filectx(f, filelog=cache.get(f))
            if f not in cache:
                if len(cache) > 20:
                    # evict the least recently used entry
                    del cache[order.pop(0)]
                cache[f] = fctx.filelog()
            else:
                order.remove(f)
            order.append(f)
            return fctx
        return getfilectx
    getfilectx = lrugetfilectx()

    ctx1 = repo[node1]
    ctx2 = repo[node2]

    if not changes:
        changes = repo.status(ctx1, ctx2, match=match)
    modified, added, removed = changes[:3]

    if not modified and not added and not removed:
        return []

    # revision ids shown in plain "diff -r" header lines (suppressed in
    # quiet mode and for git-style diffs, see trydiff)
    revs = None
    if not repo.ui.quiet:
        hexfunc = repo.ui.debugflag and hex or short
        revs = [hexfunc(node) for node in [node1, node2] if node]

    # copy/rename information is only needed for git diffs or when
    # upgrade decisions have to be made
    copy = {}
    if opts.git or opts.upgrade:
        copy = copies.pathcopies(ctx1, ctx2)

    difffn = (lambda opts, losedata:
                  trydiff(repo, revs, ctx1, ctx2, modified, added, removed,
                          copy, getfilectx, opts, losedata, prefix))
    if opts.upgrade and not opts.git:
        try:
            def losedata(fn):
                # losedatafn decides whether losing data for fn is
                # acceptable; if not, retry in git format
                if not losedatafn or not losedatafn(fn=fn):
                    raise GitDiffRequired
            # Buffer the whole output until we are sure it can be generated
            return list(difffn(opts.copy(git=False), losedata))
        except GitDiffRequired:
            return difffn(opts.copy(git=True), None)
    else:
        return difffn(opts, None)
1638
1638
def difflabel(func, *args, **kw):
    '''yields 2-tuples of (output, label) based on the output of func()'''
    headprefixes = [('diff', 'diff.diffline'),
                    ('copy', 'diff.extended'),
                    ('rename', 'diff.extended'),
                    ('old', 'diff.extended'),
                    ('new', 'diff.extended'),
                    ('deleted', 'diff.extended'),
                    ('---', 'diff.file_a'),
                    ('+++', 'diff.file_b')]
    textprefixes = [('@', 'diff.hunk'),
                    ('-', 'diff.deleted'),
                    ('+', 'diff.inserted')]
    inheader = False
    for chunk in func(*args, **kw):
        for idx, line in enumerate(chunk.split('\n')):
            if idx:
                # split('\n') drops the separators; re-emit them unlabeled
                yield ('\n', '')
            # track whether we are inside an extended header section:
            # it starts at any line not looking like diff text and ends
            # at the next hunk marker
            if inheader:
                if line.startswith('@'):
                    inheader = False
            elif line and line[0] not in ' +-@\\':
                inheader = True
            shown = line
            if not inheader and line and line[0] in '+-':
                # highlight trailing whitespace, but only in changed lines
                shown = line.rstrip()
            if inheader:
                prefixes = headprefixes
            else:
                prefixes = textprefixes
            matched = False
            for prefix, label in prefixes:
                if shown.startswith(prefix):
                    yield (shown, label)
                    matched = True
                    break
            if not matched:
                yield (line, '')
            if shown != line:
                yield (line[len(shown):], 'diff.trailingwhitespace')
1679
1679
def diffui(*args, **kw):
    '''like diff(), but yields 2-tuples of (output, label) for ui.write()'''
    # delegate to difflabel with diff() as the chunk source
    return difflabel(diff, *args, **kw)
1683
1683
1684
1684
1685 def _addmodehdr(header, omode, nmode):
1685 def _addmodehdr(header, omode, nmode):
1686 if omode != nmode:
1686 if omode != nmode:
1687 header.append('old mode %s\n' % omode)
1687 header.append('old mode %s\n' % omode)
1688 header.append('new mode %s\n' % nmode)
1688 header.append('new mode %s\n' % nmode)
1689
1689
def trydiff(repo, revs, ctx1, ctx2, modified, added, removed,
            copy, getfilectx, opts, losedatafn, prefix):
    """Yield diff text chunks for the given file lists between ctx1 and
    ctx2.

    For each file, emits an optional git-style header chunk followed by
    the unified (or base85 binary) diff text.  losedatafn(f) is invoked
    whenever a change cannot be represented in plain (non-git) format.
    """

    def join(f):
        # prepend the display prefix (used for subrepos)
        return os.path.join(prefix, f)

    date1 = util.datestr(ctx1.date())
    man1 = ctx1.manifest()

    # rename sources already reported, to avoid emitting them twice
    gone = set()
    gitmode = {'l': '120000', 'x': '100755', '': '100644'}

    # reverse mapping of the copy dict: source -> destination
    copyto = dict([(v, k) for k, v in copy.items()])

    if opts.git:
        # git headers carry the revision info; drop the "-r" header ids
        revs = None

    for f in sorted(modified + added + removed):
        to = None
        tn = None
        dodiff = True
        header = []
        if f in man1:
            to = getfilectx(f, ctx1).data()
        if f not in removed:
            tn = getfilectx(f, ctx2).data()
        a, b = f, f
        if opts.git or losedatafn:
            if f in added:
                mode = gitmode[ctx2.flags(f)]
                if f in copy or f in copyto:
                    if opts.git:
                        if f in copy:
                            a = copy[f]
                        else:
                            a = copyto[f]
                        omode = gitmode[man1.flags(a)]
                        _addmodehdr(header, omode, mode)
                        # report as rename only the first time the
                        # source disappears; later copies of the same
                        # source are plain copies
                        if a in removed and a not in gone:
                            op = 'rename'
                            gone.add(a)
                        else:
                            op = 'copy'
                        header.append('%s from %s\n' % (op, join(a)))
                        header.append('%s to %s\n' % (op, join(f)))
                        to = getfilectx(a, ctx1).data()
                    else:
                        losedatafn(f)
                else:
                    if opts.git:
                        header.append('new file mode %s\n' % mode)
                    elif ctx2.flags(f):
                        # plain diffs cannot carry mode bits
                        losedatafn(f)
                # In theory, if tn was copied or renamed we should check
                # if the source is binary too but the copy record already
                # forces git mode.
                if util.binary(tn):
                    if opts.git:
                        dodiff = 'binary'
                    else:
                        losedatafn(f)
                if not opts.git and not tn:
                    # regular diffs cannot represent new empty file
                    losedatafn(f)
            elif f in removed:
                if opts.git:
                    # have we already reported a copy above?
                    if ((f in copy and copy[f] in added
                         and copyto[copy[f]] == f) or
                        (f in copyto and copyto[f] in added
                         and copy[copyto[f]] == f)):
                        dodiff = False
                    else:
                        header.append('deleted file mode %s\n' %
                                      gitmode[man1.flags(f)])
                elif not to or util.binary(to):
                    # regular diffs cannot represent empty file deletion
                    losedatafn(f)
            else:
                # modified file: check for flag changes and binariness
                oflag = man1.flags(f)
                nflag = ctx2.flags(f)
                binary = util.binary(to) or util.binary(tn)
                if opts.git:
                    _addmodehdr(header, gitmode[oflag], gitmode[nflag])
                    if binary:
                        dodiff = 'binary'
                elif binary or nflag != oflag:
                    losedatafn(f)
        if opts.git:
            header.insert(0, mdiff.diffline(revs, join(a), join(b), opts))

        if dodiff:
            if dodiff == 'binary':
                text = b85diff(to, tn)
            else:
                text = mdiff.unidiff(to, date1,
                                     # ctx2 date may be dynamic
                                     tn, util.datestr(ctx2.date()),
                                     join(a), join(b), revs, opts=opts)
            # emit the header only when there is diff text or the header
            # says more than just the diffline
            if header and (text or len(header) > 1):
                yield ''.join(header)
            if text:
                yield text
1793
1793
def diffstatsum(stats):
    """Aggregate (filename, adds, removes, isbinary) tuples.

    Returns (maxfile, maxtotal, addtotal, removetotal, binary): the
    widest display name, the largest per-file change count, the grand
    totals, and whether any file was binary.
    """
    maxfile = maxtotal = addtotal = removetotal = 0
    binary = False
    for filename, adds, removes, isbinary in stats:
        maxfile = max(maxfile, encoding.colwidth(filename))
        maxtotal = max(maxtotal, adds + removes)
        addtotal += adds
        removetotal += removes
        binary = binary or isbinary

    return maxfile, maxtotal, addtotal, removetotal, binary
1804
1804
def diffstatdata(lines):
    """Parse diff output *lines* into per-file statistics.

    Returns a list of (filename, adds, removes, isbinary) tuples, one
    per file section found in the diff.
    """
    # raw string: the original used a plain string whose '\s' is an
    # invalid string escape (deprecated in modern Pythons)
    diffre = re.compile(r'^diff .*-r [a-z0-9]+\s(.*)$')

    results = []
    filename, adds, removes, isbinary = None, 0, 0, False

    def addresult():
        # flush the stats for the file section currently being scanned
        if filename:
            results.append((filename, adds, removes, isbinary))

    for line in lines:
        if line.startswith('diff'):
            addresult()
            # set numbers to 0 anyway when starting new file
            adds, removes, isbinary = 0, 0, False
            if line.startswith('diff --git'):
                filename = gitre.search(line).group(1)
            elif line.startswith('diff -r'):
                # format: "diff -r ... -r ... filename"
                filename = diffre.search(line).group(1)
        elif line.startswith('+') and not line.startswith('+++ '):
            adds += 1
        elif line.startswith('-') and not line.startswith('--- '):
            removes += 1
        elif (line.startswith('GIT binary patch') or
              line.startswith('Binary file')):
            isbinary = True
    addresult()
    return results
1834
1834
def diffstat(lines, width=80, git=False):
    """Render a textual diffstat for diff output *lines*, fitting the
    histogram into *width* columns.  Returns the formatted string."""
    stats = diffstatdata(lines)
    maxname, maxtotal, totaladds, totalremoves, hasbinary = diffstatsum(stats)

    countwidth = len(str(maxtotal))
    if hasbinary and countwidth < 3:
        # leave room for the literal 'Bin' marker
        countwidth = 3
    graphwidth = width - countwidth - maxname - 6
    if graphwidth < 10:
        graphwidth = 10

    def scale(i):
        if maxtotal <= graphwidth:
            return i
        # If diffstat runs out of room it doesn't print anything,
        # which isn't very useful, so always print at least one + or -
        # if there were at least some changes.
        return max(i * graphwidth // maxtotal, int(bool(i)))

    output = []
    for filename, adds, removes, isbinary in stats:
        if isbinary:
            count = 'Bin'
        else:
            count = adds + removes
        pad = ' ' * (maxname - encoding.colwidth(filename))
        output.append(' %s%s | %*s %s%s\n' %
                      (filename, pad, countwidth, count,
                       '+' * scale(adds), '-' * scale(removes)))

    if stats:
        output.append(_(' %d files changed, %d insertions(+), '
                        '%d deletions(-)\n')
                      % (len(stats), totaladds, totalremoves))

    return ''.join(output)
1872
1872
def diffstatui(*args, **kw):
    '''like diffstat(), but yields 2-tuples of (output, label) for
    ui.write()
    '''

    for line in diffstat(*args, **kw).splitlines():
        if line and line[-1] in '+-':
            # histogram line: label the + and - runs separately
            name, graph = line.rsplit(' ', 1)
            yield (name + ' ', '')
            for pattern, label in ((r'\++', 'diffstat.inserted'),
                                   (r'-+', 'diffstat.deleted')):
                m = re.search(pattern, graph)
                if m:
                    yield (m.group(0), label)
        else:
            yield (line, '')
        yield ('\n', '')
@@ -1,172 +1,172 b''
1 # repair.py - functions for repository repair for mercurial
1 # repair.py - functions for repository repair for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 # Copyright 2007 Matt Mackall
4 # Copyright 2007 Matt Mackall
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 from mercurial import changegroup, bookmarks
9 from mercurial import changegroup, bookmarks
10 from mercurial.node import short
10 from mercurial.node import short
11 from mercurial.i18n import _
11 from mercurial.i18n import _
12 import os
12 import os
13 import errno
13 import errno
14
14
15 def _bundle(repo, bases, heads, node, suffix, compress=True):
15 def _bundle(repo, bases, heads, node, suffix, compress=True):
16 """create a bundle with the specified revisions as a backup"""
16 """create a bundle with the specified revisions as a backup"""
17 cg = repo.changegroupsubset(bases, heads, 'strip')
17 cg = repo.changegroupsubset(bases, heads, 'strip')
18 backupdir = repo.join("strip-backup")
18 backupdir = repo.join("strip-backup")
19 if not os.path.isdir(backupdir):
19 if not os.path.isdir(backupdir):
20 os.mkdir(backupdir)
20 os.mkdir(backupdir)
21 name = os.path.join(backupdir, "%s-%s.hg" % (short(node), suffix))
21 name = os.path.join(backupdir, "%s-%s.hg" % (short(node), suffix))
22 if compress:
22 if compress:
23 bundletype = "HG10BZ"
23 bundletype = "HG10BZ"
24 else:
24 else:
25 bundletype = "HG10UN"
25 bundletype = "HG10UN"
26 return changegroup.writebundle(cg, name, bundletype)
26 return changegroup.writebundle(cg, name, bundletype)
27
27
28 def _collectfiles(repo, striprev):
28 def _collectfiles(repo, striprev):
29 """find out the filelogs affected by the strip"""
29 """find out the filelogs affected by the strip"""
30 files = set()
30 files = set()
31
31
32 for x in xrange(striprev, len(repo)):
32 for x in xrange(striprev, len(repo)):
33 files.update(repo[x].files())
33 files.update(repo[x].files())
34
34
35 return sorted(files)
35 return sorted(files)
36
36
37 def _collectbrokencsets(repo, files, striprev):
37 def _collectbrokencsets(repo, files, striprev):
38 """return the changesets which will be broken by the truncation"""
38 """return the changesets which will be broken by the truncation"""
39 s = set()
39 s = set()
40 def collectone(revlog):
40 def collectone(revlog):
41 linkgen = (revlog.linkrev(i) for i in revlog)
41 linkgen = (revlog.linkrev(i) for i in revlog)
42 # find the truncation point of the revlog
42 # find the truncation point of the revlog
43 for lrev in linkgen:
43 for lrev in linkgen:
44 if lrev >= striprev:
44 if lrev >= striprev:
45 break
45 break
46 # see if any revision after this point has a linkrev
46 # see if any revision after this point has a linkrev
47 # less than striprev (those will be broken by strip)
47 # less than striprev (those will be broken by strip)
48 for lrev in linkgen:
48 for lrev in linkgen:
49 if lrev < striprev:
49 if lrev < striprev:
50 s.add(lrev)
50 s.add(lrev)
51
51
52 collectone(repo.manifest)
52 collectone(repo.manifest)
53 for fname in files:
53 for fname in files:
54 collectone(repo.file(fname))
54 collectone(repo.file(fname))
55
55
56 return s
56 return s
57
57
58 def strip(ui, repo, nodelist, backup="all", topic='backup'):
58 def strip(ui, repo, nodelist, backup="all", topic='backup'):
59 cl = repo.changelog
59 cl = repo.changelog
60 # TODO handle undo of merge sets
60 # TODO handle undo of merge sets
61 if isinstance(nodelist, str):
61 if isinstance(nodelist, str):
62 nodelist = [nodelist]
62 nodelist = [nodelist]
63 striplist = [cl.rev(node) for node in nodelist]
63 striplist = [cl.rev(node) for node in nodelist]
64 striprev = min(striplist)
64 striprev = min(striplist)
65
65
66 keeppartialbundle = backup == 'strip'
66 keeppartialbundle = backup == 'strip'
67
67
68 # Some revisions with rev > striprev may not be descendants of striprev.
68 # Some revisions with rev > striprev may not be descendants of striprev.
69 # We have to find these revisions and put them in a bundle, so that
69 # We have to find these revisions and put them in a bundle, so that
70 # we can restore them after the truncations.
70 # we can restore them after the truncations.
71 # To create the bundle we use repo.changegroupsubset which requires
71 # To create the bundle we use repo.changegroupsubset which requires
72 # the list of heads and bases of the set of interesting revisions.
72 # the list of heads and bases of the set of interesting revisions.
73 # (head = revision in the set that has no descendant in the set;
73 # (head = revision in the set that has no descendant in the set;
74 # base = revision in the set that has no ancestor in the set)
74 # base = revision in the set that has no ancestor in the set)
75 tostrip = set(striplist)
75 tostrip = set(striplist)
76 for rev in striplist:
76 for rev in striplist:
77 for desc in cl.descendants(rev):
77 for desc in cl.descendants(rev):
78 tostrip.add(desc)
78 tostrip.add(desc)
79
79
80 files = _collectfiles(repo, striprev)
80 files = _collectfiles(repo, striprev)
81 saverevs = _collectbrokencsets(repo, files, striprev)
81 saverevs = _collectbrokencsets(repo, files, striprev)
82
82
83 # compute heads
83 # compute heads
84 saveheads = set(saverevs)
84 saveheads = set(saverevs)
85 for r in xrange(striprev + 1, len(cl)):
85 for r in xrange(striprev + 1, len(cl)):
86 if r not in tostrip:
86 if r not in tostrip:
87 saverevs.add(r)
87 saverevs.add(r)
88 saveheads.difference_update(cl.parentrevs(r))
88 saveheads.difference_update(cl.parentrevs(r))
89 saveheads.add(r)
89 saveheads.add(r)
90 saveheads = [cl.node(r) for r in saveheads]
90 saveheads = [cl.node(r) for r in saveheads]
91
91
92 # compute base nodes
92 # compute base nodes
93 if saverevs:
93 if saverevs:
94 descendants = set(cl.descendants(*saverevs))
94 descendants = set(cl.descendants(*saverevs))
95 saverevs.difference_update(descendants)
95 saverevs.difference_update(descendants)
96 savebases = [cl.node(r) for r in saverevs]
96 savebases = [cl.node(r) for r in saverevs]
97 stripbases = [cl.node(r) for r in tostrip]
97 stripbases = [cl.node(r) for r in tostrip]
98
98
99 bm = repo._bookmarks
99 bm = repo._bookmarks
100 updatebm = []
100 updatebm = []
101 for m in bm:
101 for m in bm:
102 rev = repo[bm[m]].rev()
102 rev = repo[bm[m]].rev()
103 if rev in tostrip:
103 if rev in tostrip:
104 updatebm.append(m)
104 updatebm.append(m)
105
105
106 # create a changegroup for all the branches we need to keep
106 # create a changegroup for all the branches we need to keep
107 backupfile = None
107 backupfile = None
108 if backup == "all":
108 if backup == "all":
109 backupfile = _bundle(repo, stripbases, cl.heads(), node, topic)
109 backupfile = _bundle(repo, stripbases, cl.heads(), node, topic)
110 repo.ui.status(_("saved backup bundle to %s\n") % backupfile)
110 repo.ui.status(_("saved backup bundle to %s\n") % backupfile)
111 if saveheads or savebases:
111 if saveheads or savebases:
112 # do not compress partial bundle if we remove it from disk later
112 # do not compress partial bundle if we remove it from disk later
113 chgrpfile = _bundle(repo, savebases, saveheads, node, 'temp',
113 chgrpfile = _bundle(repo, savebases, saveheads, node, 'temp',
114 compress=keeppartialbundle)
114 compress=keeppartialbundle)
115
115
116 mfst = repo.manifest
116 mfst = repo.manifest
117
117
118 tr = repo.transaction("strip")
118 tr = repo.transaction("strip")
119 offset = len(tr.entries)
119 offset = len(tr.entries)
120
120
121 try:
121 try:
122 tr.startgroup()
122 tr.startgroup()
123 cl.strip(striprev, tr)
123 cl.strip(striprev, tr)
124 mfst.strip(striprev, tr)
124 mfst.strip(striprev, tr)
125 for fn in files:
125 for fn in files:
126 repo.file(fn).strip(striprev, tr)
126 repo.file(fn).strip(striprev, tr)
127 tr.endgroup()
127 tr.endgroup()
128
128
129 try:
129 try:
130 for i in xrange(offset, len(tr.entries)):
130 for i in xrange(offset, len(tr.entries)):
131 file, troffset, ignore = tr.entries[i]
131 file, troffset, ignore = tr.entries[i]
132 repo.sopener(file, 'a').truncate(troffset)
132 repo.sopener(file, 'a').truncate(troffset)
133 tr.close()
133 tr.close()
134 except:
134 except: # re-raises
135 tr.abort()
135 tr.abort()
136 raise
136 raise
137
137
138 if saveheads or savebases:
138 if saveheads or savebases:
139 ui.note(_("adding branch\n"))
139 ui.note(_("adding branch\n"))
140 f = open(chgrpfile, "rb")
140 f = open(chgrpfile, "rb")
141 gen = changegroup.readbundle(f, chgrpfile)
141 gen = changegroup.readbundle(f, chgrpfile)
142 if not repo.ui.verbose:
142 if not repo.ui.verbose:
143 # silence internal shuffling chatter
143 # silence internal shuffling chatter
144 repo.ui.pushbuffer()
144 repo.ui.pushbuffer()
145 repo.addchangegroup(gen, 'strip', 'bundle:' + chgrpfile, True)
145 repo.addchangegroup(gen, 'strip', 'bundle:' + chgrpfile, True)
146 if not repo.ui.verbose:
146 if not repo.ui.verbose:
147 repo.ui.popbuffer()
147 repo.ui.popbuffer()
148 f.close()
148 f.close()
149 if not keeppartialbundle:
149 if not keeppartialbundle:
150 os.unlink(chgrpfile)
150 os.unlink(chgrpfile)
151
151
152 # remove undo files
152 # remove undo files
153 for undofile in repo.undofiles():
153 for undofile in repo.undofiles():
154 try:
154 try:
155 os.unlink(undofile)
155 os.unlink(undofile)
156 except OSError, e:
156 except OSError, e:
157 if e.errno != errno.ENOENT:
157 if e.errno != errno.ENOENT:
158 ui.warn(_('error removing %s: %s\n') % (undofile, str(e)))
158 ui.warn(_('error removing %s: %s\n') % (undofile, str(e)))
159
159
160 for m in updatebm:
160 for m in updatebm:
161 bm[m] = repo['.'].node()
161 bm[m] = repo['.'].node()
162 bookmarks.write(repo)
162 bookmarks.write(repo)
163 except:
163 except: # re-raises
164 if backupfile:
164 if backupfile:
165 ui.warn(_("strip failed, full bundle stored in '%s'\n")
165 ui.warn(_("strip failed, full bundle stored in '%s'\n")
166 % backupfile)
166 % backupfile)
167 elif saveheads:
167 elif saveheads:
168 ui.warn(_("strip failed, partial bundle stored in '%s'\n")
168 ui.warn(_("strip failed, partial bundle stored in '%s'\n")
169 % chgrpfile)
169 % chgrpfile)
170 raise
170 raise
171
171
172 repo.destroyed()
172 repo.destroyed()
@@ -1,1766 +1,1766 b''
1 # util.py - Mercurial utility functions and platform specfic implementations
1 # util.py - Mercurial utility functions and platform specfic implementations
2 #
2 #
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 """Mercurial utility functions and platform specfic implementations.
10 """Mercurial utility functions and platform specfic implementations.
11
11
12 This contains helper routines that are independent of the SCM core and
12 This contains helper routines that are independent of the SCM core and
13 hide platform-specific details from the core.
13 hide platform-specific details from the core.
14 """
14 """
15
15
16 from i18n import _
16 from i18n import _
17 import error, osutil, encoding
17 import error, osutil, encoding
18 import errno, re, shutil, sys, tempfile, traceback
18 import errno, re, shutil, sys, tempfile, traceback
19 import os, time, datetime, calendar, textwrap, signal
19 import os, time, datetime, calendar, textwrap, signal
20 import imp, socket, urllib
20 import imp, socket, urllib
21
21
22 if os.name == 'nt':
22 if os.name == 'nt':
23 import windows as platform
23 import windows as platform
24 else:
24 else:
25 import posix as platform
25 import posix as platform
26
26
27 platform.encodinglower = encoding.lower
27 platform.encodinglower = encoding.lower
28 platform.encodingupper = encoding.upper
28 platform.encodingupper = encoding.upper
29
29
30 cachestat = platform.cachestat
30 cachestat = platform.cachestat
31 checkexec = platform.checkexec
31 checkexec = platform.checkexec
32 checklink = platform.checklink
32 checklink = platform.checklink
33 copymode = platform.copymode
33 copymode = platform.copymode
34 executablepath = platform.executablepath
34 executablepath = platform.executablepath
35 expandglobs = platform.expandglobs
35 expandglobs = platform.expandglobs
36 explainexit = platform.explainexit
36 explainexit = platform.explainexit
37 findexe = platform.findexe
37 findexe = platform.findexe
38 gethgcmd = platform.gethgcmd
38 gethgcmd = platform.gethgcmd
39 getuser = platform.getuser
39 getuser = platform.getuser
40 groupmembers = platform.groupmembers
40 groupmembers = platform.groupmembers
41 groupname = platform.groupname
41 groupname = platform.groupname
42 hidewindow = platform.hidewindow
42 hidewindow = platform.hidewindow
43 isexec = platform.isexec
43 isexec = platform.isexec
44 isowner = platform.isowner
44 isowner = platform.isowner
45 localpath = platform.localpath
45 localpath = platform.localpath
46 lookupreg = platform.lookupreg
46 lookupreg = platform.lookupreg
47 makedir = platform.makedir
47 makedir = platform.makedir
48 nlinks = platform.nlinks
48 nlinks = platform.nlinks
49 normpath = platform.normpath
49 normpath = platform.normpath
50 normcase = platform.normcase
50 normcase = platform.normcase
51 nulldev = platform.nulldev
51 nulldev = platform.nulldev
52 openhardlinks = platform.openhardlinks
52 openhardlinks = platform.openhardlinks
53 oslink = platform.oslink
53 oslink = platform.oslink
54 parsepatchoutput = platform.parsepatchoutput
54 parsepatchoutput = platform.parsepatchoutput
55 pconvert = platform.pconvert
55 pconvert = platform.pconvert
56 popen = platform.popen
56 popen = platform.popen
57 posixfile = platform.posixfile
57 posixfile = platform.posixfile
58 quotecommand = platform.quotecommand
58 quotecommand = platform.quotecommand
59 realpath = platform.realpath
59 realpath = platform.realpath
60 rename = platform.rename
60 rename = platform.rename
61 samedevice = platform.samedevice
61 samedevice = platform.samedevice
62 samefile = platform.samefile
62 samefile = platform.samefile
63 samestat = platform.samestat
63 samestat = platform.samestat
64 setbinary = platform.setbinary
64 setbinary = platform.setbinary
65 setflags = platform.setflags
65 setflags = platform.setflags
66 setsignalhandler = platform.setsignalhandler
66 setsignalhandler = platform.setsignalhandler
67 shellquote = platform.shellquote
67 shellquote = platform.shellquote
68 spawndetached = platform.spawndetached
68 spawndetached = platform.spawndetached
69 sshargs = platform.sshargs
69 sshargs = platform.sshargs
70 statfiles = platform.statfiles
70 statfiles = platform.statfiles
71 termwidth = platform.termwidth
71 termwidth = platform.termwidth
72 testpid = platform.testpid
72 testpid = platform.testpid
73 umask = platform.umask
73 umask = platform.umask
74 unlink = platform.unlink
74 unlink = platform.unlink
75 unlinkpath = platform.unlinkpath
75 unlinkpath = platform.unlinkpath
76 username = platform.username
76 username = platform.username
77
77
78 # Python compatibility
78 # Python compatibility
79
79
80 _notset = object()
80 _notset = object()
81
81
82 def safehasattr(thing, attr):
82 def safehasattr(thing, attr):
83 return getattr(thing, attr, _notset) is not _notset
83 return getattr(thing, attr, _notset) is not _notset
84
84
85 def sha1(s=''):
85 def sha1(s=''):
86 '''
86 '''
87 Low-overhead wrapper around Python's SHA support
87 Low-overhead wrapper around Python's SHA support
88
88
89 >>> f = _fastsha1
89 >>> f = _fastsha1
90 >>> a = sha1()
90 >>> a = sha1()
91 >>> a = f()
91 >>> a = f()
92 >>> a.hexdigest()
92 >>> a.hexdigest()
93 'da39a3ee5e6b4b0d3255bfef95601890afd80709'
93 'da39a3ee5e6b4b0d3255bfef95601890afd80709'
94 '''
94 '''
95
95
96 return _fastsha1(s)
96 return _fastsha1(s)
97
97
98 def _fastsha1(s=''):
98 def _fastsha1(s=''):
99 # This function will import sha1 from hashlib or sha (whichever is
99 # This function will import sha1 from hashlib or sha (whichever is
100 # available) and overwrite itself with it on the first call.
100 # available) and overwrite itself with it on the first call.
101 # Subsequent calls will go directly to the imported function.
101 # Subsequent calls will go directly to the imported function.
102 if sys.version_info >= (2, 5):
102 if sys.version_info >= (2, 5):
103 from hashlib import sha1 as _sha1
103 from hashlib import sha1 as _sha1
104 else:
104 else:
105 from sha import sha as _sha1
105 from sha import sha as _sha1
106 global _fastsha1, sha1
106 global _fastsha1, sha1
107 _fastsha1 = sha1 = _sha1
107 _fastsha1 = sha1 = _sha1
108 return _sha1(s)
108 return _sha1(s)
109
109
110 try:
110 try:
111 buffer = buffer
111 buffer = buffer
112 except NameError:
112 except NameError:
113 if sys.version_info[0] < 3:
113 if sys.version_info[0] < 3:
114 def buffer(sliceable, offset=0):
114 def buffer(sliceable, offset=0):
115 return sliceable[offset:]
115 return sliceable[offset:]
116 else:
116 else:
117 def buffer(sliceable, offset=0):
117 def buffer(sliceable, offset=0):
118 return memoryview(sliceable)[offset:]
118 return memoryview(sliceable)[offset:]
119
119
120 import subprocess
120 import subprocess
121 closefds = os.name == 'posix'
121 closefds = os.name == 'posix'
122
122
123 def popen2(cmd, env=None, newlines=False):
123 def popen2(cmd, env=None, newlines=False):
124 # Setting bufsize to -1 lets the system decide the buffer size.
124 # Setting bufsize to -1 lets the system decide the buffer size.
125 # The default for bufsize is 0, meaning unbuffered. This leads to
125 # The default for bufsize is 0, meaning unbuffered. This leads to
126 # poor performance on Mac OS X: http://bugs.python.org/issue4194
126 # poor performance on Mac OS X: http://bugs.python.org/issue4194
127 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
127 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
128 close_fds=closefds,
128 close_fds=closefds,
129 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
129 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
130 universal_newlines=newlines,
130 universal_newlines=newlines,
131 env=env)
131 env=env)
132 return p.stdin, p.stdout
132 return p.stdin, p.stdout
133
133
134 def popen3(cmd, env=None, newlines=False):
134 def popen3(cmd, env=None, newlines=False):
135 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
135 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
136 close_fds=closefds,
136 close_fds=closefds,
137 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
137 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
138 stderr=subprocess.PIPE,
138 stderr=subprocess.PIPE,
139 universal_newlines=newlines,
139 universal_newlines=newlines,
140 env=env)
140 env=env)
141 return p.stdin, p.stdout, p.stderr
141 return p.stdin, p.stdout, p.stderr
142
142
143 def version():
143 def version():
144 """Return version information if available."""
144 """Return version information if available."""
145 try:
145 try:
146 import __version__
146 import __version__
147 return __version__.version
147 return __version__.version
148 except ImportError:
148 except ImportError:
149 return 'unknown'
149 return 'unknown'
150
150
151 # used by parsedate
151 # used by parsedate
152 defaultdateformats = (
152 defaultdateformats = (
153 '%Y-%m-%d %H:%M:%S',
153 '%Y-%m-%d %H:%M:%S',
154 '%Y-%m-%d %I:%M:%S%p',
154 '%Y-%m-%d %I:%M:%S%p',
155 '%Y-%m-%d %H:%M',
155 '%Y-%m-%d %H:%M',
156 '%Y-%m-%d %I:%M%p',
156 '%Y-%m-%d %I:%M%p',
157 '%Y-%m-%d',
157 '%Y-%m-%d',
158 '%m-%d',
158 '%m-%d',
159 '%m/%d',
159 '%m/%d',
160 '%m/%d/%y',
160 '%m/%d/%y',
161 '%m/%d/%Y',
161 '%m/%d/%Y',
162 '%a %b %d %H:%M:%S %Y',
162 '%a %b %d %H:%M:%S %Y',
163 '%a %b %d %I:%M:%S%p %Y',
163 '%a %b %d %I:%M:%S%p %Y',
164 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
164 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
165 '%b %d %H:%M:%S %Y',
165 '%b %d %H:%M:%S %Y',
166 '%b %d %I:%M:%S%p %Y',
166 '%b %d %I:%M:%S%p %Y',
167 '%b %d %H:%M:%S',
167 '%b %d %H:%M:%S',
168 '%b %d %I:%M:%S%p',
168 '%b %d %I:%M:%S%p',
169 '%b %d %H:%M',
169 '%b %d %H:%M',
170 '%b %d %I:%M%p',
170 '%b %d %I:%M%p',
171 '%b %d %Y',
171 '%b %d %Y',
172 '%b %d',
172 '%b %d',
173 '%H:%M:%S',
173 '%H:%M:%S',
174 '%I:%M:%S%p',
174 '%I:%M:%S%p',
175 '%H:%M',
175 '%H:%M',
176 '%I:%M%p',
176 '%I:%M%p',
177 )
177 )
178
178
179 extendeddateformats = defaultdateformats + (
179 extendeddateformats = defaultdateformats + (
180 "%Y",
180 "%Y",
181 "%Y-%m",
181 "%Y-%m",
182 "%b",
182 "%b",
183 "%b %Y",
183 "%b %Y",
184 )
184 )
185
185
186 def cachefunc(func):
186 def cachefunc(func):
187 '''cache the result of function calls'''
187 '''cache the result of function calls'''
188 # XXX doesn't handle keywords args
188 # XXX doesn't handle keywords args
189 cache = {}
189 cache = {}
190 if func.func_code.co_argcount == 1:
190 if func.func_code.co_argcount == 1:
191 # we gain a small amount of time because
191 # we gain a small amount of time because
192 # we don't need to pack/unpack the list
192 # we don't need to pack/unpack the list
193 def f(arg):
193 def f(arg):
194 if arg not in cache:
194 if arg not in cache:
195 cache[arg] = func(arg)
195 cache[arg] = func(arg)
196 return cache[arg]
196 return cache[arg]
197 else:
197 else:
198 def f(*args):
198 def f(*args):
199 if args not in cache:
199 if args not in cache:
200 cache[args] = func(*args)
200 cache[args] = func(*args)
201 return cache[args]
201 return cache[args]
202
202
203 return f
203 return f
204
204
205 def lrucachefunc(func):
205 def lrucachefunc(func):
206 '''cache most recent results of function calls'''
206 '''cache most recent results of function calls'''
207 cache = {}
207 cache = {}
208 order = []
208 order = []
209 if func.func_code.co_argcount == 1:
209 if func.func_code.co_argcount == 1:
210 def f(arg):
210 def f(arg):
211 if arg not in cache:
211 if arg not in cache:
212 if len(cache) > 20:
212 if len(cache) > 20:
213 del cache[order.pop(0)]
213 del cache[order.pop(0)]
214 cache[arg] = func(arg)
214 cache[arg] = func(arg)
215 else:
215 else:
216 order.remove(arg)
216 order.remove(arg)
217 order.append(arg)
217 order.append(arg)
218 return cache[arg]
218 return cache[arg]
219 else:
219 else:
220 def f(*args):
220 def f(*args):
221 if args not in cache:
221 if args not in cache:
222 if len(cache) > 20:
222 if len(cache) > 20:
223 del cache[order.pop(0)]
223 del cache[order.pop(0)]
224 cache[args] = func(*args)
224 cache[args] = func(*args)
225 else:
225 else:
226 order.remove(args)
226 order.remove(args)
227 order.append(args)
227 order.append(args)
228 return cache[args]
228 return cache[args]
229
229
230 return f
230 return f
231
231
232 class propertycache(object):
232 class propertycache(object):
233 def __init__(self, func):
233 def __init__(self, func):
234 self.func = func
234 self.func = func
235 self.name = func.__name__
235 self.name = func.__name__
236 def __get__(self, obj, type=None):
236 def __get__(self, obj, type=None):
237 result = self.func(obj)
237 result = self.func(obj)
238 setattr(obj, self.name, result)
238 setattr(obj, self.name, result)
239 return result
239 return result
240
240
241 def pipefilter(s, cmd):
241 def pipefilter(s, cmd):
242 '''filter string S through command CMD, returning its output'''
242 '''filter string S through command CMD, returning its output'''
243 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
243 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
244 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
244 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
245 pout, perr = p.communicate(s)
245 pout, perr = p.communicate(s)
246 return pout
246 return pout
247
247
248 def tempfilter(s, cmd):
248 def tempfilter(s, cmd):
249 '''filter string S through a pair of temporary files with CMD.
249 '''filter string S through a pair of temporary files with CMD.
250 CMD is used as a template to create the real command to be run,
250 CMD is used as a template to create the real command to be run,
251 with the strings INFILE and OUTFILE replaced by the real names of
251 with the strings INFILE and OUTFILE replaced by the real names of
252 the temporary files generated.'''
252 the temporary files generated.'''
253 inname, outname = None, None
253 inname, outname = None, None
254 try:
254 try:
255 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
255 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
256 fp = os.fdopen(infd, 'wb')
256 fp = os.fdopen(infd, 'wb')
257 fp.write(s)
257 fp.write(s)
258 fp.close()
258 fp.close()
259 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
259 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
260 os.close(outfd)
260 os.close(outfd)
261 cmd = cmd.replace('INFILE', inname)
261 cmd = cmd.replace('INFILE', inname)
262 cmd = cmd.replace('OUTFILE', outname)
262 cmd = cmd.replace('OUTFILE', outname)
263 code = os.system(cmd)
263 code = os.system(cmd)
264 if sys.platform == 'OpenVMS' and code & 1:
264 if sys.platform == 'OpenVMS' and code & 1:
265 code = 0
265 code = 0
266 if code:
266 if code:
267 raise Abort(_("command '%s' failed: %s") %
267 raise Abort(_("command '%s' failed: %s") %
268 (cmd, explainexit(code)))
268 (cmd, explainexit(code)))
269 fp = open(outname, 'rb')
269 fp = open(outname, 'rb')
270 r = fp.read()
270 r = fp.read()
271 fp.close()
271 fp.close()
272 return r
272 return r
273 finally:
273 finally:
274 try:
274 try:
275 if inname:
275 if inname:
276 os.unlink(inname)
276 os.unlink(inname)
277 except OSError:
277 except OSError:
278 pass
278 pass
279 try:
279 try:
280 if outname:
280 if outname:
281 os.unlink(outname)
281 os.unlink(outname)
282 except OSError:
282 except OSError:
283 pass
283 pass
284
284
285 filtertable = {
285 filtertable = {
286 'tempfile:': tempfilter,
286 'tempfile:': tempfilter,
287 'pipe:': pipefilter,
287 'pipe:': pipefilter,
288 }
288 }
289
289
290 def filter(s, cmd):
290 def filter(s, cmd):
291 "filter a string through a command that transforms its input to its output"
291 "filter a string through a command that transforms its input to its output"
292 for name, fn in filtertable.iteritems():
292 for name, fn in filtertable.iteritems():
293 if cmd.startswith(name):
293 if cmd.startswith(name):
294 return fn(s, cmd[len(name):].lstrip())
294 return fn(s, cmd[len(name):].lstrip())
295 return pipefilter(s, cmd)
295 return pipefilter(s, cmd)
296
296
297 def binary(s):
297 def binary(s):
298 """return true if a string is binary data"""
298 """return true if a string is binary data"""
299 return bool(s and '\0' in s)
299 return bool(s and '\0' in s)
300
300
301 def increasingchunks(source, min=1024, max=65536):
301 def increasingchunks(source, min=1024, max=65536):
302 '''return no less than min bytes per chunk while data remains,
302 '''return no less than min bytes per chunk while data remains,
303 doubling min after each chunk until it reaches max'''
303 doubling min after each chunk until it reaches max'''
304 def log2(x):
304 def log2(x):
305 if not x:
305 if not x:
306 return 0
306 return 0
307 i = 0
307 i = 0
308 while x:
308 while x:
309 x >>= 1
309 x >>= 1
310 i += 1
310 i += 1
311 return i - 1
311 return i - 1
312
312
313 buf = []
313 buf = []
314 blen = 0
314 blen = 0
315 for chunk in source:
315 for chunk in source:
316 buf.append(chunk)
316 buf.append(chunk)
317 blen += len(chunk)
317 blen += len(chunk)
318 if blen >= min:
318 if blen >= min:
319 if min < max:
319 if min < max:
320 min = min << 1
320 min = min << 1
321 nmin = 1 << log2(blen)
321 nmin = 1 << log2(blen)
322 if nmin > min:
322 if nmin > min:
323 min = nmin
323 min = nmin
324 if min > max:
324 if min > max:
325 min = max
325 min = max
326 yield ''.join(buf)
326 yield ''.join(buf)
327 blen = 0
327 blen = 0
328 buf = []
328 buf = []
329 if buf:
329 if buf:
330 yield ''.join(buf)
330 yield ''.join(buf)
331
331
332 Abort = error.Abort
332 Abort = error.Abort
333
333
334 def always(fn):
334 def always(fn):
335 return True
335 return True
336
336
337 def never(fn):
337 def never(fn):
338 return False
338 return False
339
339
340 def pathto(root, n1, n2):
340 def pathto(root, n1, n2):
341 '''return the relative path from one place to another.
341 '''return the relative path from one place to another.
342 root should use os.sep to separate directories
342 root should use os.sep to separate directories
343 n1 should use os.sep to separate directories
343 n1 should use os.sep to separate directories
344 n2 should use "/" to separate directories
344 n2 should use "/" to separate directories
345 returns an os.sep-separated path.
345 returns an os.sep-separated path.
346
346
347 If n1 is a relative path, it's assumed it's
347 If n1 is a relative path, it's assumed it's
348 relative to root.
348 relative to root.
349 n2 should always be relative to root.
349 n2 should always be relative to root.
350 '''
350 '''
351 if not n1:
351 if not n1:
352 return localpath(n2)
352 return localpath(n2)
353 if os.path.isabs(n1):
353 if os.path.isabs(n1):
354 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
354 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
355 return os.path.join(root, localpath(n2))
355 return os.path.join(root, localpath(n2))
356 n2 = '/'.join((pconvert(root), n2))
356 n2 = '/'.join((pconvert(root), n2))
357 a, b = splitpath(n1), n2.split('/')
357 a, b = splitpath(n1), n2.split('/')
358 a.reverse()
358 a.reverse()
359 b.reverse()
359 b.reverse()
360 while a and b and a[-1] == b[-1]:
360 while a and b and a[-1] == b[-1]:
361 a.pop()
361 a.pop()
362 b.pop()
362 b.pop()
363 b.reverse()
363 b.reverse()
364 return os.sep.join((['..'] * len(a)) + b) or '.'
364 return os.sep.join((['..'] * len(a)) + b) or '.'
365
365
366 _hgexecutable = None
366 _hgexecutable = None
367
367
368 def mainfrozen():
368 def mainfrozen():
369 """return True if we are a frozen executable.
369 """return True if we are a frozen executable.
370
370
371 The code supports py2exe (most common, Windows only) and tools/freeze
371 The code supports py2exe (most common, Windows only) and tools/freeze
372 (portable, not much used).
372 (portable, not much used).
373 """
373 """
374 return (safehasattr(sys, "frozen") or # new py2exe
374 return (safehasattr(sys, "frozen") or # new py2exe
375 safehasattr(sys, "importers") or # old py2exe
375 safehasattr(sys, "importers") or # old py2exe
376 imp.is_frozen("__main__")) # tools/freeze
376 imp.is_frozen("__main__")) # tools/freeze
377
377
def hgexecutable():
    """return location of the 'hg' executable.

    Defaults to $HG or 'hg' in the search path.
    """
    if _hgexecutable is None:
        # first call: determine the path once and cache it
        hgenv = os.environ.get('HG')
        mainmod = sys.modules['__main__']
        if hgenv:
            _sethgexecutable(hgenv)
        elif mainfrozen():
            # frozen executables are their own 'hg'
            _sethgexecutable(sys.executable)
        elif os.path.basename(getattr(mainmod, '__file__', '')) == 'hg':
            _sethgexecutable(mainmod.__file__)
        else:
            _sethgexecutable(findexe('hg') or os.path.basename(sys.argv[0]))
    return _hgexecutable
396
396
397 def _sethgexecutable(path):
397 def _sethgexecutable(path):
398 """set location of the 'hg' executable"""
398 """set location of the 'hg' executable"""
399 global _hgexecutable
399 global _hgexecutable
400 _hgexecutable = path
400 _hgexecutable = path
401
401
def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None, out=None):
    '''enhanced shell command execution.
    run with environment maybe modified, maybe in different dir.

    if command fails and onerr is None, return status. if ui object,
    print error message and return status, else raise onerr object as
    exception.

    if out is specified, it is assumed to be a file-like object that has a
    write() method. stdout and stderr will be redirected to out.'''
    # flush our own stdout first so the child's output is ordered after
    # ours; best-effort, stdout may not be flushable
    try:
        sys.stdout.flush()
    except Exception:
        pass
    def py2shell(val):
        'convert python object into string that is useful to shell'
        if val is None or val is False:
            return '0'
        if val is True:
            return '1'
        return str(val)
    # keep the original command string for the error message below;
    # quotecommand() may rewrite cmd for the platform shell
    origcmd = cmd
    cmd = quotecommand(cmd)
    if sys.platform == 'plan9':
        # subprocess kludge to work around issues in half-baked Python
        # ports, notably bichued/python:
        if not cwd is None:
            os.chdir(cwd)
        rc = os.system(cmd)
    else:
        # note: the default environ={} is only read here, never mutated
        env = dict(os.environ)
        env.update((k, py2shell(v)) for k, v in environ.iteritems())
        env['HG'] = hgexecutable()
        if out is None or out == sys.__stdout__:
            # let the child write directly to our stdout/stderr
            rc = subprocess.call(cmd, shell=True, close_fds=closefds,
                                 env=env, cwd=cwd)
        else:
            # capture combined stdout+stderr and copy it line by line
            # into the caller-supplied file-like object
            proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
                                    env=env, cwd=cwd, stdout=subprocess.PIPE,
                                    stderr=subprocess.STDOUT)
            for line in proc.stdout:
                out.write(line)
            proc.wait()
            rc = proc.returncode
    if sys.platform == 'OpenVMS' and rc & 1:
        # OpenVMS status codes use odd values for success
        rc = 0
    if rc and onerr:
        errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
                            explainexit(rc)[0])
        if errprefix:
            errmsg = '%s: %s' % (errprefix, errmsg)
        # duck-typing: a ui-like onerr has warn(); otherwise onerr is
        # assumed to be an exception class to raise
        try:
            onerr.warn(errmsg + '\n')
        except AttributeError:
            raise onerr(errmsg)
    return rc
458
458
def checksignature(func):
    '''wrap a function with code to check for calling errors'''
    def check(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except TypeError:
            # a traceback only one frame deep means the TypeError came
            # from the call itself (bad argument list), not from code
            # inside func -- report it as a signature error
            frames = traceback.extract_tb(sys.exc_info()[2])
            if len(frames) == 1:
                raise error.SignatureError
            raise

    return check
470
470
def copyfile(src, dest):
    "copy a file, preserving mode and atime/mtime"
    if os.path.islink(src):
        # recreate the symlink (same target) instead of copying the
        # file it points at
        try:
            os.unlink(dest)
        except OSError:
            # dest may simply not exist yet
            pass
        os.symlink(os.readlink(src), dest)
    else:
        try:
            shutil.copyfile(src, dest)
            shutil.copymode(src, dest)
        except shutil.Error, inst:
            # surface shutil failures through the normal abort path
            raise Abort(str(inst))
485
485
def copyfiles(src, dst, hardlink=None):
    """Copy a directory tree using hardlinks if possible"""

    if hardlink is None:
        # hardlinks only work within one device; probe src and the
        # target's parent directory
        hardlink = (os.stat(src).st_dev ==
                    os.stat(os.path.dirname(dst)).st_dev)

    copied = 0
    if os.path.isdir(src):
        os.mkdir(dst)
        for name, kind in osutil.listdir(src):
            childsrc = os.path.join(src, name)
            childdst = os.path.join(dst, name)
            # a failed hardlink anywhere in the tree disables linking
            # for the remainder, so thread the flag through recursion
            hardlink, n = copyfiles(childsrc, childdst, hardlink)
            copied += n
    else:
        if hardlink:
            try:
                oslink(src, dst)
            except (IOError, OSError):
                # linking failed: fall back to plain copies from here on
                hardlink = False
                shutil.copy(src, dst)
        else:
            shutil.copy(src, dst)
        copied += 1

    return hardlink, copied
513
513
# names and characters that Windows refuses in path components
_winreservednames = '''con prn aux nul
com1 com2 com3 com4 com5 com6 com7 com8 com9
lpt1 lpt2 lpt3 lpt4 lpt5 lpt6 lpt7 lpt8 lpt9'''.split()
_winreservedchars = ':*?"<>|'
def checkwinfilename(path):
    '''Check that the base-relative path is a valid filename on Windows.
    Returns None if the path is ok, or a UI string describing the problem.

    >>> checkwinfilename("just/a/normal/path")
    >>> checkwinfilename("foo/bar/con.xml")
    "filename contains 'con', which is reserved on Windows"
    >>> checkwinfilename("foo/con.xml/bar")
    "filename contains 'con', which is reserved on Windows"
    >>> checkwinfilename("foo/bar/xml.con")
    >>> checkwinfilename("foo/bar/AUX/bla.txt")
    "filename contains 'AUX', which is reserved on Windows"
    >>> checkwinfilename("foo/bar/bla:.txt")
    "filename contains ':', which is reserved on Windows"
    >>> checkwinfilename("foo/bar/b\07la.txt")
    "filename contains '\\\\x07', which is invalid on Windows"
    >>> checkwinfilename("foo/bar/bla ")
    "filename ends with ' ', which is not allowed on Windows"
    >>> checkwinfilename("../bar")
    '''
    # accept both separator styles and examine each path component
    for part in path.replace('\\', '/').split('/'):
        if not part:
            continue
        for c in part:
            if c in _winreservedchars:
                return _("filename contains '%s', which is reserved "
                         "on Windows") % c
            if ord(c) <= 31:
                return _("filename contains %r, which is invalid "
                         "on Windows") % c
        # only the part before the first dot counts for reserved names
        # (e.g. "con.xml" is rejected)
        stem = part.split('.')[0]
        if stem and stem.lower() in _winreservednames:
            return _("filename contains '%s', which is reserved "
                     "on Windows") % stem
        last = part[-1]
        # note: "part not in '..'" is a substring test, which excludes
        # exactly the components "." and ".."
        if last in '. ' and part not in '..':
            return _("filename ends with '%s', which is not allowed "
                     "on Windows") % last
556
556
# pick the filename validator for the current OS: the Windows rules
# above on nt, otherwise whatever the platform module provides
if os.name == 'nt':
    checkosfilename = checkwinfilename
else:
    checkosfilename = platform.checkosfilename
561
561
def makelock(info, pathname):
    """create a lock file, storing info in it

    Preferred form is a symlink whose target is info (atomic creation,
    no separate write needed); falls back to an exclusively-created
    regular file on platforms without os.symlink.
    """
    try:
        return os.symlink(info, pathname)
    except OSError, why:
        if why.errno == errno.EEXIST:
            # lock already held -- let the caller deal with it
            raise
        # other symlink failures: fall through to the file fallback
    except AttributeError: # no symlink in os
        pass

    # O_EXCL makes creation fail if the lock file already exists
    ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
    os.write(ld, info)
    os.close(ld)
574
574
def readlock(pathname):
    """read the info stored in a lock file (see makelock)"""
    try:
        # preferred form: the info is the symlink's target
        return os.readlink(pathname)
    except OSError, why:
        if why.errno not in (errno.EINVAL, errno.ENOSYS):
            raise
        # EINVAL: not a symlink; ENOSYS: symlinks unsupported --
        # fall through and read it as a regular file
    except AttributeError: # no symlink in os
        pass
    fp = posixfile(pathname)
    r = fp.read()
    fp.close()
    return r
587
587
def fstat(fp):
    '''stat file object that may not have fileno method.'''
    try:
        fd = fp.fileno()
    except AttributeError:
        # no file descriptor available; fall back to stat by name
        return os.stat(fp.name)
    return os.fstat(fd)
594
594
595 # File system features
595 # File system features
596
596
def checkcase(path):
    """
    Check whether the given path is on a case-sensitive filesystem

    Requires a path (like /foo/.hg) ending with a foldable final
    directory component.
    """
    s1 = os.stat(path)
    d, b = os.path.split(path)
    # build a differently-cased sibling name to probe with
    folded = b.upper()
    if folded == b:
        folded = b.lower()
        if folded == b:
            return True # no evidence against case sensitivity
    try:
        s2 = os.stat(os.path.join(d, folded))
    except OSError:
        # the folded name does not resolve: case matters here
        return True
    # identical stat results mean both names hit the same file,
    # i.e. the filesystem folds case
    return s1 != s2
619
619
# per-directory cache of os.listdir() results used by fspath()
_fspathcache = {}
def fspath(name, root):
    '''Get name in the case stored in the filesystem

    The name should be relative to root, and be normcase-ed for efficiency.

    Note that this function is unnecessary, and should not be
    called, for case-sensitive filesystems (simply because it's expensive).

    The root should be normcase-ed, too.
    '''
    def find(p, contents):
        # return the directory entry whose normcase-ed form matches p
        for n in contents:
            if normcase(n) == p:
                return n
        return None

    seps = os.sep
    if os.altsep:
        seps = seps + os.altsep
    # Protect backslashes. This gets silly very quickly.
    # Fix: str.replace returns a new string; the previous code discarded
    # the result, which was harmless only because a lone backslash is
    # already a valid escape inside a regex character class.
    seps = seps.replace('\\', '\\\\')
    # alternately match runs of non-separator and separator characters
    pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
    dir = os.path.normpath(root)
    result = []
    for part, sep in pattern.findall(name):
        if sep:
            # separators are passed through unchanged
            result.append(sep)
            continue

        if dir not in _fspathcache:
            _fspathcache[dir] = os.listdir(dir)
        contents = _fspathcache[dir]

        found = find(part, contents)
        if not found:
            # retry "once per directory" per "dirstate.walk" which
            # may take place for each patches of "hg qpush", for example
            contents = os.listdir(dir)
            _fspathcache[dir] = contents
            found = find(part, contents)

        result.append(found or part)
        dir = os.path.join(dir, part)

    return ''.join(result)
666
666
def checknlink(testfile):
    '''check whether hardlink count reporting works properly'''

    # testfile may be open, so we need a separate file for checking to
    # work around issue2543 (or testfile may get lost on Samba shares)
    f1 = testfile + ".hgtmp1"
    if os.path.lexists(f1):
        # a leftover probe file is in the way; give up rather than clobber
        return False
    try:
        posixfile(f1, 'w').close()
    except IOError:
        return False

    f2 = testfile + ".hgtmp2"
    fd = None
    try:
        try:
            oslink(f1, f2)
        except OSError:
            # hardlinks are not supported here
            return False

        # nlinks() may behave differently for files on Windows shares if
        # the file is open.
        fd = posixfile(f2)
        return nlinks(f2) > 1
    finally:
        if fd is not None:
            fd.close()
        # best-effort removal of both probe files
        for f in (f1, f2):
            try:
                os.unlink(f)
            except OSError:
                pass

    # NOTE(review): every path above returns inside the try/except, so
    # this looks unreachable; kept as a conservative default
    return False
702
702
def endswithsep(path):
    '''Check path ends with os.sep or os.altsep.'''
    if path.endswith(os.sep):
        return True
    # on platforms without an altsep this evaluates to None (falsy),
    # matching the original short-circuit expression
    return os.altsep and path.endswith(os.altsep)
706
706
def splitpath(path):
    '''Split path by os.sep.
    Note that this function does not use os.altsep because this is
    an alternative of simple "xxx.split(os.sep)".
    It is recommended to use os.path.normpath() before using this
    function if need.'''
    components = path.split(os.sep)
    return components
714
714
def gui():
    '''Are we running in a GUI?'''
    if sys.platform != 'darwin':
        # Windows always counts; elsewhere an X display is the signal
        return os.name == "nt" or os.environ.get("DISPLAY")
    if 'SSH_CONNECTION' in os.environ:
        # handle SSH access to a box where the user is logged in
        return False
    isgui = getattr(osutil, 'isgui', None)
    if isgui:
        # check if a CoreGraphics session is available
        return isgui()
    # pure build; use a safe default
    return True
729
729
def mktempcopy(name, emptyok=False, createmode=None):
    """Create a temporary file with the same contents from name

    The permission bits are copied from the original file.

    If the temporary file is going to be truncated immediately, you
    can use emptyok=True as an optimization.

    Returns the name of the temporary file.
    """
    d, fn = os.path.split(name)
    fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
    os.close(fd)
    # Temporary files are created with mode 0600, which is usually not
    # what we want. If the original file already exists, just copy
    # its mode. Otherwise, manually obey umask.
    copymode(name, temp, createmode)
    if emptyok:
        return temp
    try:
        try:
            ifp = posixfile(name, "rb")
        except IOError, inst:
            if inst.errno == errno.ENOENT:
                # source does not exist: the empty temp file is a
                # faithful copy
                return temp
            # make the error message useful before re-raising
            if not getattr(inst, 'filename', None):
                inst.filename = name
            raise
        ofp = posixfile(temp, "wb")
        for chunk in filechunkiter(ifp):
            ofp.write(chunk)
        ifp.close()
        ofp.close()
    except: # re-raises
        # don't leave a half-written temp file behind on any failure
        try: os.unlink(temp)
        except OSError: pass
        raise
    return temp
768
768
class atomictempfile(object):
    '''writeable file object that atomically updates a file

    All writes will go to a temporary copy of the original file. Call
    close() when you are done writing, and atomictempfile will rename
    the temporary copy to the original name, making the changes
    visible. If the object is destroyed without being closed, all your
    writes are discarded.
    '''
    def __init__(self, name, mode='w+b', createmode=None):
        self.__name = name # permanent name
        # temp copy in the same directory; empty when opened for writing
        self._tempname = mktempcopy(name, emptyok=('w' in mode),
                                    createmode=createmode)
        self._fp = posixfile(self._tempname, mode)

        # delegated methods
        self.write = self._fp.write
        self.fileno = self._fp.fileno

    def close(self):
        # flush pending writes and atomically move the temp copy into
        # place under the permanent name
        if not self._fp.closed:
            self._fp.close()
            rename(self._tempname, localpath(self.__name))

    def discard(self):
        # drop all writes: remove the temp copy without renaming
        if not self._fp.closed:
            try:
                os.unlink(self._tempname)
            except OSError:
                pass
            self._fp.close()

    def __del__(self):
        if safehasattr(self, '_fp'): # constructor actually did something
            self.discard()
804
804
def makedirs(name, mode=None):
    """recursive directory creation with parent mode inheritance"""
    try:
        os.mkdir(name)
    except OSError, err:
        if err.errno == errno.EEXIST:
            # already there: nothing to do
            return
        if err.errno != errno.ENOENT or not name:
            raise
        # ENOENT: a parent is missing -- create it recursively, then retry
        parent = os.path.dirname(os.path.abspath(name))
        if parent == name:
            # reached the filesystem root without success
            raise
        makedirs(parent, mode)
        os.mkdir(name)
    if mode is not None:
        os.chmod(name, mode)
821
821
def readfile(path):
    """return the entire (binary) contents of the file at path"""
    fp = open(path, 'rb')
    try:
        data = fp.read()
    finally:
        fp.close()
    return data
828
828
def writefile(path, text):
    """replace the contents of the file at path with text (binary)"""
    fp = open(path, 'wb')
    try:
        fp.write(text)
    finally:
        fp.close()
835
835
def appendfile(path, text):
    """append text (binary) to the file at path"""
    fp = open(path, 'ab')
    try:
        fp.write(text)
    finally:
        fp.close()
842
842
class chunkbuffer(object):
    """Allow arbitrary sized chunks of data to be efficiently read from an
    iterator over chunks of arbitrary size."""

    def __init__(self, in_iter):
        """in_iter is the iterator that's iterating over the input chunks.
        targetsize is how big a buffer to try to maintain."""
        def splitbig(chunks):
            # re-slice oversized (>1M) chunks into 256k pieces to bound
            # the amount held at once
            for chunk in chunks:
                if len(chunk) <= 2**20:
                    yield chunk
                    continue
                start = 0
                while start < len(chunk):
                    yield chunk[start:start + 2**18]
                    start += 2**18
        self.iter = splitbig(in_iter)
        self._queue = []

    def read(self, l):
        """Read L bytes of data from the iterator of chunks of data.
        Returns less than L bytes if the iterator runs dry."""
        remaining = l
        pieces = []
        queue = self._queue
        while remaining > 0:
            if not queue:
                # refill the queue with roughly 256k worth of chunks
                budget = 2**18
                for chunk in self.iter:
                    queue.append(chunk)
                    budget -= len(chunk)
                    if budget <= 0:
                        break
                if not queue:
                    # iterator ran dry: return what we have
                    break

            chunk = queue.pop(0)
            remaining -= len(chunk)
            if remaining < 0:
                # chunk overshoots: push the unread tail back
                queue.insert(0, chunk[remaining:])
                pieces.append(chunk[:remaining])
            else:
                pieces.append(chunk)

        return ''.join(pieces)
890
890
def filechunkiter(f, size=65536, limit=None):
    """Create a generator that produces the data in the file size
    (default 65536) bytes at a time, up to optional limit (default is
    to read all data). Chunks may be less than size bytes if the
    chunk is the last chunk in the file, or the file is a socket or
    some other type of file that sometimes reads less data than is
    requested."""
    assert size >= 0
    assert limit is None or limit >= 0
    while True:
        nbytes = size
        if limit is not None:
            # never read past the remaining budget
            nbytes = min(limit, size)
        # nbytes == 0 short-circuits to 0 (falsy) without touching f
        s = nbytes and f.read(nbytes)
        if not s:
            break
        if limit:
            limit -= len(s)
        yield s
911
911
def makedate():
    # current time plus the local timezone's offset from UTC in seconds
    now = time.time()
    if now < 0:
        hint = _("check your clock")
        raise Abort(_("negative timestamp: %d") % now, hint=hint)
    # offset = UTC wall clock minus local wall clock for the same instant
    delta = (datetime.datetime.utcfromtimestamp(now) -
             datetime.datetime.fromtimestamp(now))
    offset = delta.days * 86400 + delta.seconds
    return now, offset
921
921
def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
    """represent a (unixtime, offset) tuple as a localized time.
    unixtime is seconds since the epoch, and offset is the time zone's
    number of seconds away from UTC. if timezone is false, do not
    append time zone to string."""
    t, tz = date or makedate()
    if t < 0:
        t = 0 # time.gmtime(lt) fails on Windows for lt < -43200
        tz = 0
    if "%1" in format or "%2" in format:
        # expand the custom %1/%2 escapes into +/-HH and MM
        if tz > 0:
            sign = "-"
        else:
            sign = "+"
        minutes = abs(tz) // 60
        format = format.replace("%1", "%c%02d" % (sign, minutes // 60))
        format = format.replace("%2", "%02d" % (minutes % 60))
    try:
        timetuple = time.gmtime(float(t) - tz)
    except ValueError:
        # time was out of range
        timetuple = time.gmtime(sys.maxint)
    return time.strftime(format, timetuple)
943
943
def shortdate(date=None):
    """turn (timestamp, tzoff) tuple into ISO 8601 date."""
    return datestr(date, format='%Y-%m-%d')
947
947
def strdate(string, format, defaults=[]):
    """Parse a localized time string into a (unixtime, offset) tuple.

    Raises ValueError if the string cannot be parsed.
    """
    def timezone(text):
        # the offset, if present, is the last whitespace-separated token
        tok = text.split()[-1]
        if len(tok) == 5 and tok[0] in "+-" and tok[1:].isdigit():
            if tok[0] == "+":
                sign = 1
            else:
                sign = -1
            hours = int(tok[1:3])
            minutes = int(tok[3:5])
            return -sign * (hours * 60 + minutes) * 60
        if tok in ("GMT", "UTC"):
            return 0
        return None

    # NOTE: unixtime = localunixtime + offset
    offset = timezone(string)
    date = string
    if offset is not None:
        # drop the recognized timezone token; it is handled separately
        date = " ".join(string.split()[:-1])

    # append defaults for any element the format does not mention
    usenow = False  # default to using biased defaults
    for part in ("S", "M", "HI", "d", "mb", "yY"):  # decreasing specificity
        if not any(("%" + p) in format for p in part):
            date += "@" + defaults[part][usenow]
            format += "@%" + part[0]
        else:
            # We've found a specific time element; less specific time
            # elements are relative to today
            usenow = True

    parsed = time.strptime(date, format)
    localunixtime = int(calendar.timegm(parsed))
    if offset is None:
        # no explicit zone: interpret the string in the local timezone
        unixtime = int(time.mktime(parsed))
        offset = unixtime - localunixtime
    else:
        unixtime = localunixtime + offset
    return unixtime, offset
988
988
def parsedate(date, formats=None, bias=None):
    """parse a localized date/time and return a (unixtime, offset) tuple.

    The date may be a "unixtime offset" string or in one of the specified
    formats. If the date already is a (unixtime, offset) tuple, it is returned.

    bias maps format elements ("d", "mb", "yY", "HI", "M", "S") to default
    string values for elements missing from the input. It now defaults to
    None instead of a shared mutable {} (the former mutable default was an
    anti-pattern, even though this code only ever read from it).
    """
    if bias is None:
        bias = {}
    if not date:
        return 0, 0
    if isinstance(date, tuple) and len(date) == 2:
        return date
    if not formats:
        formats = defaultdateformats
    date = date.strip()
    try:
        # fast path: a plain "unixtime offset" pair
        when, offset = map(int, date.split(' '))
    except ValueError:
        # fill out defaults
        now = makedate()
        defaults = {}
        for part in ("d", "mb", "yY", "HI", "M", "S"):
            # this piece is for rounding the specific end of unknowns
            b = bias.get(part)
            if b is None:
                if part[0] in "HMS":
                    b = "00"
                else:
                    b = "0"

            # this piece is for matching the generic end to today's date
            n = datestr(now, "%" + part[0])

            defaults[part] = (b, n)

        for format in formats:
            try:
                when, offset = strdate(date, format, defaults)
            except (ValueError, OverflowError):
                pass
            else:
                break
        else:
            raise Abort(_('invalid date: %r') % date)
    # validate explicit (probably user-specified) date and
    # time zone offset. values must fit in signed 32 bits for
    # current 32-bit linux runtimes. timezones go from UTC-12
    # to UTC+14
    if abs(when) > 0x7fffffff:
        raise Abort(_('date exceeds 32 bits: %d') % when)
    if when < 0:
        raise Abort(_('negative date value: %d') % when)
    if offset < -50400 or offset > 43200:
        raise Abort(_('impossible time zone offset: %d') % offset)
    return when, offset
1042
1042
def matchdate(date):
    """Return a function that matches a given date match specifier

    Formats include:

    '{date}' match a given date to the accuracy provided

    '<{date}' on or before a given date

    '>{date}' on or after a given date

    >>> p1 = parsedate("10:29:59")
    >>> p2 = parsedate("10:30:00")
    >>> p3 = parsedate("10:30:59")
    >>> p4 = parsedate("10:31:00")
    >>> p5 = parsedate("Sep 15 10:30:00 1999")
    >>> f = matchdate("10:30")
    >>> f(p1[0])
    False
    >>> f(p2[0])
    True
    >>> f(p3[0])
    True
    >>> f(p4[0])
    False
    >>> f(p5[0])
    False
    """

    def lower(date):
        # earliest timestamp covered by the spec: bias unspecified
        # calendar fields to January 1st (time fields default to 00:00:00)
        d = dict(mb="1", d="1")
        return parsedate(date, extendeddateformats, d)[0]

    def upper(date):
        # latest timestamp covered by the spec: bias unspecified fields
        # to December 31st 23:59:59; probe month lengths downward because
        # parsedate rejects impossible dates such as Feb 31
        d = dict(mb="12", HI="23", M="59", S="59")
        for days in ("31", "30", "29"):
            try:
                d["d"] = days
                return parsedate(date, extendeddateformats, d)[0]
            except Abort:
                pass
        d["d"] = "28"
        return parsedate(date, extendeddateformats, d)[0]

    date = date.strip()

    if not date:
        raise Abort(_("dates cannot consist entirely of whitespace"))
    elif date[0] == "<":
        # on or before: compare against the latest moment of the spec
        if not date[1:]:
            raise Abort(_("invalid day spec, use '<DATE'"))
        when = upper(date[1:])
        return lambda x: x <= when
    elif date[0] == ">":
        # on or after: compare against the earliest moment of the spec
        if not date[1:]:
            raise Abort(_("invalid day spec, use '>DATE'"))
        when = lower(date[1:])
        return lambda x: x >= when
    elif date[0] == "-":
        # '-N': within the last N days
        try:
            days = int(date[1:])
        except ValueError:
            raise Abort(_("invalid day spec: %s") % date[1:])
        if days < 0:
            raise Abort(_("%s must be nonnegative (see 'hg help dates')")
                        % date[1:])
        when = makedate()[0] - days * 3600 * 24
        return lambda x: x >= when
    elif " to " in date:
        # inclusive range between two specs
        a, b = date.split(" to ")
        start, stop = lower(a), upper(b)
        return lambda x: x >= start and x <= stop
    else:
        # bare date: match anywhere within its span of accuracy
        start, stop = lower(date), upper(date)
        return lambda x: x >= start and x <= stop
1118
1118
def shortuser(user):
    """Return a short representation of a user name or email address."""
    # keep only the part before any '@'
    user = user.partition('@')[0]
    # keep what follows a '<' (strips a "Real Name <" prefix)
    if '<' in user:
        user = user.partition('<')[2]
    # stop at the first space, then at the first dot
    user = user.partition(' ')[0]
    user = user.partition('.')[0]
    return user
1134
1134
def emailuser(user):
    """Return the user portion of an email address."""
    # drop the domain, then any "Real Name <" prefix
    user = user.partition('@')[0]
    if '<' in user:
        user = user.partition('<')[2]
    return user
1144
1144
def email(author):
    '''get email of author.'''
    # take the text between '<' and '>'; without brackets this
    # degrades to returning the whole string
    start = author.find('<') + 1
    stop = author.find('>')
    if stop == -1:
        return author[start:]
    return author[start:stop]
1151
1151
1152 def _ellipsis(text, maxlength):
1152 def _ellipsis(text, maxlength):
1153 if len(text) <= maxlength:
1153 if len(text) <= maxlength:
1154 return text, False
1154 return text, False
1155 else:
1155 else:
1156 return "%s..." % (text[:maxlength - 3]), True
1156 return "%s..." % (text[:maxlength - 3]), True
1157
1157
def ellipsis(text, maxlength=400):
    """Trim string to at most maxlength (default: 400) characters."""
    try:
        # decode first so truncation never lands inside a multi-byte
        # sequence of the local encoding
        trimmed, was_cut = _ellipsis(text.decode(encoding.encoding),
                                     maxlength)
        if was_cut:
            return trimmed.encode(encoding.encoding)
        return text
    except (UnicodeDecodeError, UnicodeEncodeError):
        # undecodable input: fall back to byte-wise truncation
        return _ellipsis(text, maxlength)[0]
1169
1169
# lookup table for bytecount(): (multiplier, divisor, format) triples,
# ordered from largest to smallest unit. The first row satisfying
# nbytes >= divisor * multiplier picks both the unit and the precision
# (bigger magnitudes get fewer decimals). Kept as literal _() calls so
# the strings remain visible to gettext extraction.
_byteunits = (
    (100, 1 << 30, _('%.0f GB')),
    (10, 1 << 30, _('%.1f GB')),
    (1, 1 << 30, _('%.2f GB')),
    (100, 1 << 20, _('%.0f MB')),
    (10, 1 << 20, _('%.1f MB')),
    (1, 1 << 20, _('%.2f MB')),
    (100, 1 << 10, _('%.0f KB')),
    (10, 1 << 10, _('%.1f KB')),
    (1, 1 << 10, _('%.2f KB')),
    (1, 1, _('%.0f bytes')),
    )
1182
1182
def bytecount(nbytes):
    '''return byte count formatted as readable string, with units'''

    for multiplier, divisor, format in _byteunits:
        if nbytes >= divisor * multiplier:
            return format % (nbytes / float(divisor))
    # nbytes < 1 falls through the table; format it with the last
    # (plain bytes) entry. The previous code referenced the undefined
    # name 'units' here, raising NameError for zero/negative counts.
    return _byteunits[-1][2] % nbytes
1190
1190
def uirepr(s):
    """repr() variant producing output friendlier for user display."""
    # Avoid double backslash in Windows path repr()
    raw = repr(s)
    return raw.replace('\\\\', '\\')
1194
1194
# delay import of textwrap
def MBTextWrapper(**kwargs):
    """Build a terminal-column-aware TextWrapper instance.

    The first call defines the real wrapper class, rebinds the
    module-level name MBTextWrapper to that class (so later calls skip
    the class creation entirely), and returns an instance.
    """
    class tw(textwrap.TextWrapper):
        """
        Extend TextWrapper for width-awareness.

        Neither number of 'bytes' in any encoding nor 'characters' is
        appropriate to calculate terminal columns for specified string.

        Original TextWrapper implementation uses built-in 'len()' directly,
        so overriding is needed to use width information of each characters.

        In addition, characters classified into 'ambiguous' width are
        treated as wide in east asian area, but as narrow in other.

        This requires use decision to determine width of such characters.
        """
        def __init__(self, **kwargs):
            textwrap.TextWrapper.__init__(self, **kwargs)

            # for compatibility between 2.4 and 2.6
            if getattr(self, 'drop_whitespace', None) is None:
                self.drop_whitespace = kwargs.get('drop_whitespace', True)

        def _cutdown(self, ucstr, space_left):
            # split ucstr in two so that the first piece occupies at
            # most space_left terminal columns
            l = 0
            colwidth = encoding.ucolwidth
            for i in xrange(len(ucstr)):
                l += colwidth(ucstr[i])
                if space_left < l:
                    return (ucstr[:i], ucstr[i:])
            return ucstr, ''

        # overriding of base class
        def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
            space_left = max(width - cur_len, 1)

            if self.break_long_words:
                cut, res = self._cutdown(reversed_chunks[-1], space_left)
                cur_line.append(cut)
                reversed_chunks[-1] = res
            elif not cur_line:
                cur_line.append(reversed_chunks.pop())

        # this overriding code is imported from TextWrapper of python 2.6
        # to calculate columns of string by 'encoding.ucolwidth()'
        def _wrap_chunks(self, chunks):
            colwidth = encoding.ucolwidth

            lines = []
            if self.width <= 0:
                raise ValueError("invalid width %r (must be > 0)" % self.width)

            # Arrange in reverse order so items can be efficiently popped
            # from a stack of chunks.
            chunks.reverse()

            while chunks:

                # Start the list of chunks that will make up the current line.
                # cur_len is just the length of all the chunks in cur_line.
                cur_line = []
                cur_len = 0

                # Figure out which static string will prefix this line.
                if lines:
                    indent = self.subsequent_indent
                else:
                    indent = self.initial_indent

                # Maximum width for this line.
                width = self.width - len(indent)

                # First chunk on line is whitespace -- drop it, unless this
                # is the very beginning of the text (ie. no lines started yet).
                if self.drop_whitespace and chunks[-1].strip() == '' and lines:
                    del chunks[-1]

                while chunks:
                    l = colwidth(chunks[-1])

                    # Can at least squeeze this chunk onto the current line.
                    if cur_len + l <= width:
                        cur_line.append(chunks.pop())
                        cur_len += l

                    # Nope, this line is full.
                    else:
                        break

                # The current line is full, and the next chunk is too big to
                # fit on *any* line (not just this one).
                if chunks and colwidth(chunks[-1]) > width:
                    self._handle_long_word(chunks, cur_line, cur_len, width)

                # If the last chunk on this line is all whitespace, drop it.
                if (self.drop_whitespace and
                    cur_line and cur_line[-1].strip() == ''):
                    del cur_line[-1]

                # Convert current line back to a string and store it in list
                # of all lines (return value).
                if cur_line:
                    lines.append(indent + ''.join(cur_line))

            return lines

    # self-replacing factory: after the first call, MBTextWrapper *is*
    # the class, so the class body is only executed once
    global MBTextWrapper
    MBTextWrapper = tw
    return tw(**kwargs)
1305
1305
def wrap(line, width, initindent='', hangindent=''):
    """Word-wrap line to width columns, honoring the given indents."""
    widest_indent = max(len(hangindent), len(initindent))
    if width <= widest_indent:
        # adjust for weird terminal size
        width = max(78, widest_indent + 1)
    enc = encoding.encoding
    mode = encoding.encodingmode
    # work in unicode so wrapping never splits a multi-byte sequence
    line = line.decode(enc, mode)
    initindent = initindent.decode(enc, mode)
    hangindent = hangindent.decode(enc, mode)
    wrapper = MBTextWrapper(width=width,
                            initial_indent=initindent,
                            subsequent_indent=hangindent)
    return wrapper.fill(line).encode(enc)
1318
1318
def iterlines(iterator):
    """Yield individual text lines from an iterable of string chunks."""
    for piece in iterator:
        for text_line in piece.splitlines():
            yield text_line
1323
1323
def expandpath(path):
    """Expand environment variables, then ~user constructs, in path."""
    with_vars = os.path.expandvars(path)
    return os.path.expanduser(with_vars)
1326
1326
def hgcmd():
    """Return the command used to execute current hg

    This is different from hgexecutable() because on Windows we want
    to avoid things opening new shell windows like batch files, so we
    get either the python call or current executable.
    """
    if not mainfrozen():
        return gethgcmd()
    # frozen binaries embed the interpreter: the executable itself is hg
    return [sys.executable]
1337
1337
def rundetached(args, condfn):
    """Execute the argument list in a detached process.

    condfn is a callable which is called repeatedly and should return
    True once the child process is known to have started successfully.
    At this point, the child process PID is returned. If the child
    process fails to start or finishes before condfn() evaluates to
    True, return -1.
    """
    # Windows case is easier because the child process is either
    # successfully starting and validating the condition or exiting
    # on failure. We just poll on its PID. On Unix, if the child
    # process fails to start, it will be left in a zombie state until
    # the parent wait on it, which we cannot do since we expect a long
    # running process on success. Instead we listen for SIGCHLD telling
    # us our child process terminated.
    terminated = set()
    def handler(signum, frame):
        # reaps the child; os.wait() returns a (pid, status) tuple
        terminated.add(os.wait())
    prevhandler = None
    # SIGCHLD does not exist on Windows; skip the handler there
    SIGCHLD = getattr(signal, 'SIGCHLD', None)
    if SIGCHLD is not None:
        prevhandler = signal.signal(SIGCHLD, handler)
    try:
        pid = spawndetached(args)
        while not condfn():
            # NOTE(review): terminated holds (pid, status) tuples, so
            # 'pid in terminated' compares an int against tuples and
            # appears to never match; termination detection effectively
            # relies on testpid() alone -- confirm intent.
            if ((pid in terminated or not testpid(pid))
                and not condfn()):
                return -1
            time.sleep(0.1)
        return pid
    finally:
        # restore the previous SIGCHLD disposition; prevhandler is only
        # set on platforms where SIGCHLD exists
        if prevhandler is not None:
            signal.signal(signal.SIGCHLD, prevhandler)
1372
1372
# Python < 2.5 compatibility: any()/all() were added in 2.5. The try
# branch rebinds the existing builtins as module globals; on older
# interpreters the lookup raises NameError and we define equivalents.
try:
    any, all = any, all
except NameError:
    def any(iterable):
        """Return True if any element of iterable is true."""
        for i in iterable:
            if i:
                return True
        return False

    def all(iterable):
        """Return True if every element of iterable is true."""
        for i in iterable:
            if not i:
                return False
        return True
1387
1387
1388 def interpolate(prefix, mapping, s, fn=None, escape_prefix=False):
1388 def interpolate(prefix, mapping, s, fn=None, escape_prefix=False):
1389 """Return the result of interpolating items in the mapping into string s.
1389 """Return the result of interpolating items in the mapping into string s.
1390
1390
1391 prefix is a single character string, or a two character string with
1391 prefix is a single character string, or a two character string with
1392 a backslash as the first character if the prefix needs to be escaped in
1392 a backslash as the first character if the prefix needs to be escaped in
1393 a regular expression.
1393 a regular expression.
1394
1394
1395 fn is an optional function that will be applied to the replacement text
1395 fn is an optional function that will be applied to the replacement text
1396 just before replacement.
1396 just before replacement.
1397
1397
1398 escape_prefix is an optional flag that allows using doubled prefix for
1398 escape_prefix is an optional flag that allows using doubled prefix for
1399 its escaping.
1399 its escaping.
1400 """
1400 """
1401 fn = fn or (lambda s: s)
1401 fn = fn or (lambda s: s)
1402 patterns = '|'.join(mapping.keys())
1402 patterns = '|'.join(mapping.keys())
1403 if escape_prefix:
1403 if escape_prefix:
1404 patterns += '|' + prefix
1404 patterns += '|' + prefix
1405 if len(prefix) > 1:
1405 if len(prefix) > 1:
1406 prefix_char = prefix[1:]
1406 prefix_char = prefix[1:]
1407 else:
1407 else:
1408 prefix_char = prefix
1408 prefix_char = prefix
1409 mapping[prefix_char] = prefix_char
1409 mapping[prefix_char] = prefix_char
1410 r = re.compile(r'%s(%s)' % (prefix, patterns))
1410 r = re.compile(r'%s(%s)' % (prefix, patterns))
1411 return r.sub(lambda x: fn(mapping[x.group()[1:]]), s)
1411 return r.sub(lambda x: fn(mapping[x.group()[1:]]), s)
1412
1412
def getport(port):
    """Return the port for a given network service.

    If port is an integer, it's returned as is. If it's a string, it's
    looked up using socket.getservbyname(). If there's no matching
    service, util.Abort is raised.
    """
    try:
        num = int(port)
    except ValueError:
        pass
    else:
        return num

    try:
        return socket.getservbyname(port)
    except socket.error:
        raise Abort(_("no port number associated with service '%s'") % port)
1429
1429
1430 _booleans = {'1': True, 'yes': True, 'true': True, 'on': True, 'always': True,
1430 _booleans = {'1': True, 'yes': True, 'true': True, 'on': True, 'always': True,
1431 '0': False, 'no': False, 'false': False, 'off': False,
1431 '0': False, 'no': False, 'false': False, 'off': False,
1432 'never': False}
1432 'never': False}
1433
1433
def parsebool(s):
    """Map a boolean-ish string to True/False.

    Returns None when s is not a recognized boolean spelling.
    """
    key = s.lower()
    return _booleans.get(key, None)
1440
1440
1441 _hexdig = '0123456789ABCDEFabcdef'
1441 _hexdig = '0123456789ABCDEFabcdef'
1442 _hextochr = dict((a + b, chr(int(a + b, 16)))
1442 _hextochr = dict((a + b, chr(int(a + b, 16)))
1443 for a in _hexdig for b in _hexdig)
1443 for a in _hexdig for b in _hexdig)
1444
1444
1445 def _urlunquote(s):
1445 def _urlunquote(s):
1446 """unquote('abc%20def') -> 'abc def'."""
1446 """unquote('abc%20def') -> 'abc def'."""
1447 res = s.split('%')
1447 res = s.split('%')
1448 # fastpath
1448 # fastpath
1449 if len(res) == 1:
1449 if len(res) == 1:
1450 return s
1450 return s
1451 s = res[0]
1451 s = res[0]
1452 for item in res[1:]:
1452 for item in res[1:]:
1453 try:
1453 try:
1454 s += _hextochr[item[:2]] + item[2:]
1454 s += _hextochr[item[:2]] + item[2:]
1455 except KeyError:
1455 except KeyError:
1456 s += '%' + item
1456 s += '%' + item
1457 except UnicodeDecodeError:
1457 except UnicodeDecodeError:
1458 s += unichr(int(item[:2], 16)) + item[2:]
1458 s += unichr(int(item[:2], 16)) + item[2:]
1459 return s
1459 return s
1460
1460
1461 class url(object):
1461 class url(object):
1462 r"""Reliable URL parser.
1462 r"""Reliable URL parser.
1463
1463
1464 This parses URLs and provides attributes for the following
1464 This parses URLs and provides attributes for the following
1465 components:
1465 components:
1466
1466
1467 <scheme>://<user>:<passwd>@<host>:<port>/<path>?<query>#<fragment>
1467 <scheme>://<user>:<passwd>@<host>:<port>/<path>?<query>#<fragment>
1468
1468
1469 Missing components are set to None. The only exception is
1469 Missing components are set to None. The only exception is
1470 fragment, which is set to '' if present but empty.
1470 fragment, which is set to '' if present but empty.
1471
1471
1472 If parsefragment is False, fragment is included in query. If
1472 If parsefragment is False, fragment is included in query. If
1473 parsequery is False, query is included in path. If both are
1473 parsequery is False, query is included in path. If both are
1474 False, both fragment and query are included in path.
1474 False, both fragment and query are included in path.
1475
1475
1476 See http://www.ietf.org/rfc/rfc2396.txt for more information.
1476 See http://www.ietf.org/rfc/rfc2396.txt for more information.
1477
1477
1478 Note that for backward compatibility reasons, bundle URLs do not
1478 Note that for backward compatibility reasons, bundle URLs do not
1479 take host names. That means 'bundle://../' has a path of '../'.
1479 take host names. That means 'bundle://../' has a path of '../'.
1480
1480
1481 Examples:
1481 Examples:
1482
1482
1483 >>> url('http://www.ietf.org/rfc/rfc2396.txt')
1483 >>> url('http://www.ietf.org/rfc/rfc2396.txt')
1484 <url scheme: 'http', host: 'www.ietf.org', path: 'rfc/rfc2396.txt'>
1484 <url scheme: 'http', host: 'www.ietf.org', path: 'rfc/rfc2396.txt'>
1485 >>> url('ssh://[::1]:2200//home/joe/repo')
1485 >>> url('ssh://[::1]:2200//home/joe/repo')
1486 <url scheme: 'ssh', host: '[::1]', port: '2200', path: '/home/joe/repo'>
1486 <url scheme: 'ssh', host: '[::1]', port: '2200', path: '/home/joe/repo'>
1487 >>> url('file:///home/joe/repo')
1487 >>> url('file:///home/joe/repo')
1488 <url scheme: 'file', path: '/home/joe/repo'>
1488 <url scheme: 'file', path: '/home/joe/repo'>
1489 >>> url('file:///c:/temp/foo/')
1489 >>> url('file:///c:/temp/foo/')
1490 <url scheme: 'file', path: 'c:/temp/foo/'>
1490 <url scheme: 'file', path: 'c:/temp/foo/'>
1491 >>> url('bundle:foo')
1491 >>> url('bundle:foo')
1492 <url scheme: 'bundle', path: 'foo'>
1492 <url scheme: 'bundle', path: 'foo'>
1493 >>> url('bundle://../foo')
1493 >>> url('bundle://../foo')
1494 <url scheme: 'bundle', path: '../foo'>
1494 <url scheme: 'bundle', path: '../foo'>
1495 >>> url(r'c:\foo\bar')
1495 >>> url(r'c:\foo\bar')
1496 <url path: 'c:\\foo\\bar'>
1496 <url path: 'c:\\foo\\bar'>
1497 >>> url(r'\\blah\blah\blah')
1497 >>> url(r'\\blah\blah\blah')
1498 <url path: '\\\\blah\\blah\\blah'>
1498 <url path: '\\\\blah\\blah\\blah'>
1499 >>> url(r'\\blah\blah\blah#baz')
1499 >>> url(r'\\blah\blah\blah#baz')
1500 <url path: '\\\\blah\\blah\\blah', fragment: 'baz'>
1500 <url path: '\\\\blah\\blah\\blah', fragment: 'baz'>
1501
1501
1502 Authentication credentials:
1502 Authentication credentials:
1503
1503
1504 >>> url('ssh://joe:xyz@x/repo')
1504 >>> url('ssh://joe:xyz@x/repo')
1505 <url scheme: 'ssh', user: 'joe', passwd: 'xyz', host: 'x', path: 'repo'>
1505 <url scheme: 'ssh', user: 'joe', passwd: 'xyz', host: 'x', path: 'repo'>
1506 >>> url('ssh://joe@x/repo')
1506 >>> url('ssh://joe@x/repo')
1507 <url scheme: 'ssh', user: 'joe', host: 'x', path: 'repo'>
1507 <url scheme: 'ssh', user: 'joe', host: 'x', path: 'repo'>
1508
1508
1509 Query strings and fragments:
1509 Query strings and fragments:
1510
1510
1511 >>> url('http://host/a?b#c')
1511 >>> url('http://host/a?b#c')
1512 <url scheme: 'http', host: 'host', path: 'a', query: 'b', fragment: 'c'>
1512 <url scheme: 'http', host: 'host', path: 'a', query: 'b', fragment: 'c'>
1513 >>> url('http://host/a?b#c', parsequery=False, parsefragment=False)
1513 >>> url('http://host/a?b#c', parsequery=False, parsefragment=False)
1514 <url scheme: 'http', host: 'host', path: 'a?b#c'>
1514 <url scheme: 'http', host: 'host', path: 'a?b#c'>
1515 """
1515 """
1516
1516
1517 _safechars = "!~*'()+"
1517 _safechars = "!~*'()+"
1518 _safepchars = "/!~*'()+:"
1518 _safepchars = "/!~*'()+:"
1519 _matchscheme = re.compile(r'^[a-zA-Z0-9+.\-]+:').match
1519 _matchscheme = re.compile(r'^[a-zA-Z0-9+.\-]+:').match
1520
1520
1521 def __init__(self, path, parsequery=True, parsefragment=True):
1521 def __init__(self, path, parsequery=True, parsefragment=True):
1522 # We slowly chomp away at path until we have only the path left
1522 # We slowly chomp away at path until we have only the path left
1523 self.scheme = self.user = self.passwd = self.host = None
1523 self.scheme = self.user = self.passwd = self.host = None
1524 self.port = self.path = self.query = self.fragment = None
1524 self.port = self.path = self.query = self.fragment = None
1525 self._localpath = True
1525 self._localpath = True
1526 self._hostport = ''
1526 self._hostport = ''
1527 self._origpath = path
1527 self._origpath = path
1528
1528
1529 if parsefragment and '#' in path:
1529 if parsefragment and '#' in path:
1530 path, self.fragment = path.split('#', 1)
1530 path, self.fragment = path.split('#', 1)
1531 if not path:
1531 if not path:
1532 path = None
1532 path = None
1533
1533
1534 # special case for Windows drive letters and UNC paths
1534 # special case for Windows drive letters and UNC paths
1535 if hasdriveletter(path) or path.startswith(r'\\'):
1535 if hasdriveletter(path) or path.startswith(r'\\'):
1536 self.path = path
1536 self.path = path
1537 return
1537 return
1538
1538
1539 # For compatibility reasons, we can't handle bundle paths as
1539 # For compatibility reasons, we can't handle bundle paths as
1540 # normal URLS
1540 # normal URLS
1541 if path.startswith('bundle:'):
1541 if path.startswith('bundle:'):
1542 self.scheme = 'bundle'
1542 self.scheme = 'bundle'
1543 path = path[7:]
1543 path = path[7:]
1544 if path.startswith('//'):
1544 if path.startswith('//'):
1545 path = path[2:]
1545 path = path[2:]
1546 self.path = path
1546 self.path = path
1547 return
1547 return
1548
1548
1549 if self._matchscheme(path):
1549 if self._matchscheme(path):
1550 parts = path.split(':', 1)
1550 parts = path.split(':', 1)
1551 if parts[0]:
1551 if parts[0]:
1552 self.scheme, path = parts
1552 self.scheme, path = parts
1553 self._localpath = False
1553 self._localpath = False
1554
1554
1555 if not path:
1555 if not path:
1556 path = None
1556 path = None
1557 if self._localpath:
1557 if self._localpath:
1558 self.path = ''
1558 self.path = ''
1559 return
1559 return
1560 else:
1560 else:
1561 if self._localpath:
1561 if self._localpath:
1562 self.path = path
1562 self.path = path
1563 return
1563 return
1564
1564
1565 if parsequery and '?' in path:
1565 if parsequery and '?' in path:
1566 path, self.query = path.split('?', 1)
1566 path, self.query = path.split('?', 1)
1567 if not path:
1567 if not path:
1568 path = None
1568 path = None
1569 if not self.query:
1569 if not self.query:
1570 self.query = None
1570 self.query = None
1571
1571
1572 # // is required to specify a host/authority
1572 # // is required to specify a host/authority
1573 if path and path.startswith('//'):
1573 if path and path.startswith('//'):
1574 parts = path[2:].split('/', 1)
1574 parts = path[2:].split('/', 1)
1575 if len(parts) > 1:
1575 if len(parts) > 1:
1576 self.host, path = parts
1576 self.host, path = parts
1577 path = path
1577 path = path
1578 else:
1578 else:
1579 self.host = parts[0]
1579 self.host = parts[0]
1580 path = None
1580 path = None
1581 if not self.host:
1581 if not self.host:
1582 self.host = None
1582 self.host = None
1583 # path of file:///d is /d
1583 # path of file:///d is /d
1584 # path of file:///d:/ is d:/, not /d:/
1584 # path of file:///d:/ is d:/, not /d:/
1585 if path and not hasdriveletter(path):
1585 if path and not hasdriveletter(path):
1586 path = '/' + path
1586 path = '/' + path
1587
1587
1588 if self.host and '@' in self.host:
1588 if self.host and '@' in self.host:
1589 self.user, self.host = self.host.rsplit('@', 1)
1589 self.user, self.host = self.host.rsplit('@', 1)
1590 if ':' in self.user:
1590 if ':' in self.user:
1591 self.user, self.passwd = self.user.split(':', 1)
1591 self.user, self.passwd = self.user.split(':', 1)
1592 if not self.host:
1592 if not self.host:
1593 self.host = None
1593 self.host = None
1594
1594
1595 # Don't split on colons in IPv6 addresses without ports
1595 # Don't split on colons in IPv6 addresses without ports
1596 if (self.host and ':' in self.host and
1596 if (self.host and ':' in self.host and
1597 not (self.host.startswith('[') and self.host.endswith(']'))):
1597 not (self.host.startswith('[') and self.host.endswith(']'))):
1598 self._hostport = self.host
1598 self._hostport = self.host
1599 self.host, self.port = self.host.rsplit(':', 1)
1599 self.host, self.port = self.host.rsplit(':', 1)
1600 if not self.host:
1600 if not self.host:
1601 self.host = None
1601 self.host = None
1602
1602
1603 if (self.host and self.scheme == 'file' and
1603 if (self.host and self.scheme == 'file' and
1604 self.host not in ('localhost', '127.0.0.1', '[::1]')):
1604 self.host not in ('localhost', '127.0.0.1', '[::1]')):
1605 raise Abort(_('file:// URLs can only refer to localhost'))
1605 raise Abort(_('file:// URLs can only refer to localhost'))
1606
1606
1607 self.path = path
1607 self.path = path
1608
1608
1609 # leave the query string escaped
1609 # leave the query string escaped
1610 for a in ('user', 'passwd', 'host', 'port',
1610 for a in ('user', 'passwd', 'host', 'port',
1611 'path', 'fragment'):
1611 'path', 'fragment'):
1612 v = getattr(self, a)
1612 v = getattr(self, a)
1613 if v is not None:
1613 if v is not None:
1614 setattr(self, a, _urlunquote(v))
1614 setattr(self, a, _urlunquote(v))
1615
1615
1616 def __repr__(self):
1616 def __repr__(self):
1617 attrs = []
1617 attrs = []
1618 for a in ('scheme', 'user', 'passwd', 'host', 'port', 'path',
1618 for a in ('scheme', 'user', 'passwd', 'host', 'port', 'path',
1619 'query', 'fragment'):
1619 'query', 'fragment'):
1620 v = getattr(self, a)
1620 v = getattr(self, a)
1621 if v is not None:
1621 if v is not None:
1622 attrs.append('%s: %r' % (a, v))
1622 attrs.append('%s: %r' % (a, v))
1623 return '<url %s>' % ', '.join(attrs)
1623 return '<url %s>' % ', '.join(attrs)
1624
1624
1625 def __str__(self):
1625 def __str__(self):
1626 r"""Join the URL's components back into a URL string.
1626 r"""Join the URL's components back into a URL string.
1627
1627
1628 Examples:
1628 Examples:
1629
1629
1630 >>> str(url('http://user:pw@host:80/c:/bob?fo:oo#ba:ar'))
1630 >>> str(url('http://user:pw@host:80/c:/bob?fo:oo#ba:ar'))
1631 'http://user:pw@host:80/c:/bob?fo:oo#ba:ar'
1631 'http://user:pw@host:80/c:/bob?fo:oo#ba:ar'
1632 >>> str(url('http://user:pw@host:80/?foo=bar&baz=42'))
1632 >>> str(url('http://user:pw@host:80/?foo=bar&baz=42'))
1633 'http://user:pw@host:80/?foo=bar&baz=42'
1633 'http://user:pw@host:80/?foo=bar&baz=42'
1634 >>> str(url('http://user:pw@host:80/?foo=bar%3dbaz'))
1634 >>> str(url('http://user:pw@host:80/?foo=bar%3dbaz'))
1635 'http://user:pw@host:80/?foo=bar%3dbaz'
1635 'http://user:pw@host:80/?foo=bar%3dbaz'
1636 >>> str(url('ssh://user:pw@[::1]:2200//home/joe#'))
1636 >>> str(url('ssh://user:pw@[::1]:2200//home/joe#'))
1637 'ssh://user:pw@[::1]:2200//home/joe#'
1637 'ssh://user:pw@[::1]:2200//home/joe#'
1638 >>> str(url('http://localhost:80//'))
1638 >>> str(url('http://localhost:80//'))
1639 'http://localhost:80//'
1639 'http://localhost:80//'
1640 >>> str(url('http://localhost:80/'))
1640 >>> str(url('http://localhost:80/'))
1641 'http://localhost:80/'
1641 'http://localhost:80/'
1642 >>> str(url('http://localhost:80'))
1642 >>> str(url('http://localhost:80'))
1643 'http://localhost:80/'
1643 'http://localhost:80/'
1644 >>> str(url('bundle:foo'))
1644 >>> str(url('bundle:foo'))
1645 'bundle:foo'
1645 'bundle:foo'
1646 >>> str(url('bundle://../foo'))
1646 >>> str(url('bundle://../foo'))
1647 'bundle:../foo'
1647 'bundle:../foo'
1648 >>> str(url('path'))
1648 >>> str(url('path'))
1649 'path'
1649 'path'
1650 >>> str(url('file:///tmp/foo/bar'))
1650 >>> str(url('file:///tmp/foo/bar'))
1651 'file:///tmp/foo/bar'
1651 'file:///tmp/foo/bar'
1652 >>> str(url('file:///c:/tmp/foo/bar'))
1652 >>> str(url('file:///c:/tmp/foo/bar'))
1653 'file:///c:/tmp/foo/bar'
1653 'file:///c:/tmp/foo/bar'
1654 >>> print url(r'bundle:foo\bar')
1654 >>> print url(r'bundle:foo\bar')
1655 bundle:foo\bar
1655 bundle:foo\bar
1656 """
1656 """
1657 if self._localpath:
1657 if self._localpath:
1658 s = self.path
1658 s = self.path
1659 if self.scheme == 'bundle':
1659 if self.scheme == 'bundle':
1660 s = 'bundle:' + s
1660 s = 'bundle:' + s
1661 if self.fragment:
1661 if self.fragment:
1662 s += '#' + self.fragment
1662 s += '#' + self.fragment
1663 return s
1663 return s
1664
1664
1665 s = self.scheme + ':'
1665 s = self.scheme + ':'
1666 if self.user or self.passwd or self.host:
1666 if self.user or self.passwd or self.host:
1667 s += '//'
1667 s += '//'
1668 elif self.scheme and (not self.path or self.path.startswith('/')
1668 elif self.scheme and (not self.path or self.path.startswith('/')
1669 or hasdriveletter(self.path)):
1669 or hasdriveletter(self.path)):
1670 s += '//'
1670 s += '//'
1671 if hasdriveletter(self.path):
1671 if hasdriveletter(self.path):
1672 s += '/'
1672 s += '/'
1673 if self.user:
1673 if self.user:
1674 s += urllib.quote(self.user, safe=self._safechars)
1674 s += urllib.quote(self.user, safe=self._safechars)
1675 if self.passwd:
1675 if self.passwd:
1676 s += ':' + urllib.quote(self.passwd, safe=self._safechars)
1676 s += ':' + urllib.quote(self.passwd, safe=self._safechars)
1677 if self.user or self.passwd:
1677 if self.user or self.passwd:
1678 s += '@'
1678 s += '@'
1679 if self.host:
1679 if self.host:
1680 if not (self.host.startswith('[') and self.host.endswith(']')):
1680 if not (self.host.startswith('[') and self.host.endswith(']')):
1681 s += urllib.quote(self.host)
1681 s += urllib.quote(self.host)
1682 else:
1682 else:
1683 s += self.host
1683 s += self.host
1684 if self.port:
1684 if self.port:
1685 s += ':' + urllib.quote(self.port)
1685 s += ':' + urllib.quote(self.port)
1686 if self.host:
1686 if self.host:
1687 s += '/'
1687 s += '/'
1688 if self.path:
1688 if self.path:
1689 # TODO: similar to the query string, we should not unescape the
1689 # TODO: similar to the query string, we should not unescape the
1690 # path when we store it, the path might contain '%2f' = '/',
1690 # path when we store it, the path might contain '%2f' = '/',
1691 # which we should *not* escape.
1691 # which we should *not* escape.
1692 s += urllib.quote(self.path, safe=self._safepchars)
1692 s += urllib.quote(self.path, safe=self._safepchars)
1693 if self.query:
1693 if self.query:
1694 # we store the query in escaped form.
1694 # we store the query in escaped form.
1695 s += '?' + self.query
1695 s += '?' + self.query
1696 if self.fragment is not None:
1696 if self.fragment is not None:
1697 s += '#' + urllib.quote(self.fragment, safe=self._safepchars)
1697 s += '#' + urllib.quote(self.fragment, safe=self._safepchars)
1698 return s
1698 return s
1699
1699
1700 def authinfo(self):
1700 def authinfo(self):
1701 user, passwd = self.user, self.passwd
1701 user, passwd = self.user, self.passwd
1702 try:
1702 try:
1703 self.user, self.passwd = None, None
1703 self.user, self.passwd = None, None
1704 s = str(self)
1704 s = str(self)
1705 finally:
1705 finally:
1706 self.user, self.passwd = user, passwd
1706 self.user, self.passwd = user, passwd
1707 if not self.user:
1707 if not self.user:
1708 return (s, None)
1708 return (s, None)
1709 # authinfo[1] is passed to urllib2 password manager, and its
1709 # authinfo[1] is passed to urllib2 password manager, and its
1710 # URIs must not contain credentials. The host is passed in the
1710 # URIs must not contain credentials. The host is passed in the
1711 # URIs list because Python < 2.4.3 uses only that to search for
1711 # URIs list because Python < 2.4.3 uses only that to search for
1712 # a password.
1712 # a password.
1713 return (s, (None, (s, self.host),
1713 return (s, (None, (s, self.host),
1714 self.user, self.passwd or ''))
1714 self.user, self.passwd or ''))
1715
1715
1716 def isabs(self):
1716 def isabs(self):
1717 if self.scheme and self.scheme != 'file':
1717 if self.scheme and self.scheme != 'file':
1718 return True # remote URL
1718 return True # remote URL
1719 if hasdriveletter(self.path):
1719 if hasdriveletter(self.path):
1720 return True # absolute for our purposes - can't be joined()
1720 return True # absolute for our purposes - can't be joined()
1721 if self.path.startswith(r'\\'):
1721 if self.path.startswith(r'\\'):
1722 return True # Windows UNC path
1722 return True # Windows UNC path
1723 if self.path.startswith('/'):
1723 if self.path.startswith('/'):
1724 return True # POSIX-style
1724 return True # POSIX-style
1725 return False
1725 return False
1726
1726
1727 def localpath(self):
1727 def localpath(self):
1728 if self.scheme == 'file' or self.scheme == 'bundle':
1728 if self.scheme == 'file' or self.scheme == 'bundle':
1729 path = self.path or '/'
1729 path = self.path or '/'
1730 # For Windows, we need to promote hosts containing drive
1730 # For Windows, we need to promote hosts containing drive
1731 # letters to paths with drive letters.
1731 # letters to paths with drive letters.
1732 if hasdriveletter(self._hostport):
1732 if hasdriveletter(self._hostport):
1733 path = self._hostport + '/' + self.path
1733 path = self._hostport + '/' + self.path
1734 elif (self.host is not None and self.path
1734 elif (self.host is not None and self.path
1735 and not hasdriveletter(path)):
1735 and not hasdriveletter(path)):
1736 path = '/' + path
1736 path = '/' + path
1737 return path
1737 return path
1738 return self._origpath
1738 return self._origpath
1739
1739
1740 def hasscheme(path):
1740 def hasscheme(path):
1741 return bool(url(path).scheme)
1741 return bool(url(path).scheme)
1742
1742
1743 def hasdriveletter(path):
1743 def hasdriveletter(path):
1744 return path and path[1:2] == ':' and path[0:1].isalpha()
1744 return path and path[1:2] == ':' and path[0:1].isalpha()
1745
1745
1746 def urllocalpath(path):
1746 def urllocalpath(path):
1747 return url(path, parsequery=False, parsefragment=False).localpath()
1747 return url(path, parsequery=False, parsefragment=False).localpath()
1748
1748
1749 def hidepassword(u):
1749 def hidepassword(u):
1750 '''hide user credential in a url string'''
1750 '''hide user credential in a url string'''
1751 u = url(u)
1751 u = url(u)
1752 if u.passwd:
1752 if u.passwd:
1753 u.passwd = '***'
1753 u.passwd = '***'
1754 return str(u)
1754 return str(u)
1755
1755
1756 def removeauth(u):
1756 def removeauth(u):
1757 '''remove all authentication information from a url string'''
1757 '''remove all authentication information from a url string'''
1758 u = url(u)
1758 u = url(u)
1759 u.user = u.passwd = None
1759 u.user = u.passwd = None
1760 return str(u)
1760 return str(u)
1761
1761
1762 def isatty(fd):
1762 def isatty(fd):
1763 try:
1763 try:
1764 return fd.isatty()
1764 return fd.isatty()
1765 except AttributeError:
1765 except AttributeError:
1766 return False
1766 return False
@@ -1,228 +1,186 b''
1 $ check_code="$TESTDIR"/../contrib/check-code.py
1 $ check_code="$TESTDIR"/../contrib/check-code.py
2 $ cd "$TESTDIR"/..
2 $ cd "$TESTDIR"/..
3 $ if hg identify -q > /dev/null; then :
3 $ if hg identify -q > /dev/null; then :
4 > else
4 > else
5 > echo "skipped: not a Mercurial working dir" >&2
5 > echo "skipped: not a Mercurial working dir" >&2
6 > exit 80
6 > exit 80
7 > fi
7 > fi
8 $ hg manifest | xargs "$check_code" || echo 'FAILURE IS NOT AN OPTION!!!'
8 $ hg manifest | xargs "$check_code" || echo 'FAILURE IS NOT AN OPTION!!!'
9
9
10 $ hg manifest | xargs "$check_code" --warnings --nolineno --per-file=0 || true
10 $ hg manifest | xargs "$check_code" --warnings --nolineno --per-file=0 || true
11 contrib/shrink-revlog.py:0:
12 > except:
13 warning: naked except clause
14 hgext/convert/cvsps.py:0:
11 hgext/convert/cvsps.py:0:
15 > ui.write('Ancestors: %s\n' % (','.join(r)))
12 > ui.write('Ancestors: %s\n' % (','.join(r)))
16 warning: unwrapped ui message
13 warning: unwrapped ui message
17 hgext/convert/cvsps.py:0:
14 hgext/convert/cvsps.py:0:
18 > ui.write('Parent: %d\n' % cs.parents[0].id)
15 > ui.write('Parent: %d\n' % cs.parents[0].id)
19 warning: unwrapped ui message
16 warning: unwrapped ui message
20 hgext/convert/cvsps.py:0:
17 hgext/convert/cvsps.py:0:
21 > ui.write('Parents: %s\n' %
18 > ui.write('Parents: %s\n' %
22 warning: unwrapped ui message
19 warning: unwrapped ui message
23 hgext/convert/cvsps.py:0:
20 hgext/convert/cvsps.py:0:
24 > ui.write('Branchpoints: %s \n' % ', '.join(branchpoints))
21 > ui.write('Branchpoints: %s \n' % ', '.join(branchpoints))
25 warning: unwrapped ui message
22 warning: unwrapped ui message
26 hgext/convert/cvsps.py:0:
23 hgext/convert/cvsps.py:0:
27 > ui.write('Author: %s\n' % cs.author)
24 > ui.write('Author: %s\n' % cs.author)
28 warning: unwrapped ui message
25 warning: unwrapped ui message
29 hgext/convert/cvsps.py:0:
26 hgext/convert/cvsps.py:0:
30 > ui.write('Branch: %s\n' % (cs.branch or 'HEAD'))
27 > ui.write('Branch: %s\n' % (cs.branch or 'HEAD'))
31 warning: unwrapped ui message
28 warning: unwrapped ui message
32 hgext/convert/cvsps.py:0:
29 hgext/convert/cvsps.py:0:
33 > ui.write('Date: %s\n' % util.datestr(cs.date,
30 > ui.write('Date: %s\n' % util.datestr(cs.date,
34 warning: unwrapped ui message
31 warning: unwrapped ui message
35 hgext/convert/cvsps.py:0:
32 hgext/convert/cvsps.py:0:
36 > ui.write('Log:\n')
33 > ui.write('Log:\n')
37 warning: unwrapped ui message
34 warning: unwrapped ui message
38 hgext/convert/cvsps.py:0:
35 hgext/convert/cvsps.py:0:
39 > ui.write('Members: \n')
36 > ui.write('Members: \n')
40 warning: unwrapped ui message
37 warning: unwrapped ui message
41 hgext/convert/cvsps.py:0:
38 hgext/convert/cvsps.py:0:
42 > ui.write('PatchSet %d \n' % cs.id)
39 > ui.write('PatchSet %d \n' % cs.id)
43 warning: unwrapped ui message
40 warning: unwrapped ui message
44 hgext/convert/cvsps.py:0:
41 hgext/convert/cvsps.py:0:
45 > ui.write('Tag%s: %s \n' % (['', 's'][len(cs.tags) > 1],
42 > ui.write('Tag%s: %s \n' % (['', 's'][len(cs.tags) > 1],
46 warning: unwrapped ui message
43 warning: unwrapped ui message
47 hgext/hgk.py:0:
44 hgext/hgk.py:0:
48 > ui.write("parent %s\n" % p)
45 > ui.write("parent %s\n" % p)
49 warning: unwrapped ui message
46 warning: unwrapped ui message
50 hgext/hgk.py:0:
47 hgext/hgk.py:0:
51 > ui.write('k=%s\nv=%s\n' % (name, value))
48 > ui.write('k=%s\nv=%s\n' % (name, value))
52 warning: unwrapped ui message
49 warning: unwrapped ui message
53 hgext/hgk.py:0:
50 hgext/hgk.py:0:
54 > ui.write("author %s %s %s\n" % (ctx.user(), int(date[0]), date[1]))
51 > ui.write("author %s %s %s\n" % (ctx.user(), int(date[0]), date[1]))
55 warning: unwrapped ui message
52 warning: unwrapped ui message
56 hgext/hgk.py:0:
53 hgext/hgk.py:0:
57 > ui.write("branch %s\n\n" % ctx.branch())
54 > ui.write("branch %s\n\n" % ctx.branch())
58 warning: unwrapped ui message
55 warning: unwrapped ui message
59 hgext/hgk.py:0:
56 hgext/hgk.py:0:
60 > ui.write("committer %s %s %s\n" % (committer, int(date[0]), date[1]))
57 > ui.write("committer %s %s %s\n" % (committer, int(date[0]), date[1]))
61 warning: unwrapped ui message
58 warning: unwrapped ui message
62 hgext/hgk.py:0:
59 hgext/hgk.py:0:
63 > ui.write("revision %d\n" % ctx.rev())
60 > ui.write("revision %d\n" % ctx.rev())
64 warning: unwrapped ui message
61 warning: unwrapped ui message
65 hgext/hgk.py:0:
62 hgext/hgk.py:0:
66 > ui.write("tree %s\n" % short(ctx.changeset()[0]))
63 > ui.write("tree %s\n" % short(ctx.changeset()[0]))
67 warning: unwrapped ui message
64 warning: unwrapped ui message
68 hgext/keyword.py:0:
65 hgext/keyword.py:0:
69 > ui.note("hg ci -m '%s'\n" % msg)
66 > ui.note("hg ci -m '%s'\n" % msg)
70 warning: unwrapped ui message
67 warning: unwrapped ui message
71 hgext/mq.py:0:
68 hgext/mq.py:0:
72 > except:
73 warning: naked except clause
74 hgext/mq.py:0:
75 > except:
76 warning: naked except clause
77 warning: naked except clause
78 warning: naked except clause
79 warning: naked except clause
80 hgext/mq.py:0:
81 > ui.write("mq: %s\n" % ', '.join(m))
69 > ui.write("mq: %s\n" % ', '.join(m))
82 warning: unwrapped ui message
70 warning: unwrapped ui message
83 hgext/patchbomb.py:0:
71 hgext/patchbomb.py:0:
84 > ui.write('Subject: %s\n' % subj)
72 > ui.write('Subject: %s\n' % subj)
85 warning: unwrapped ui message
73 warning: unwrapped ui message
86 hgext/patchbomb.py:0:
74 hgext/patchbomb.py:0:
87 > ui.write('From: %s\n' % sender)
75 > ui.write('From: %s\n' % sender)
88 warning: unwrapped ui message
76 warning: unwrapped ui message
89 mercurial/commands.py:0:
77 mercurial/commands.py:0:
90 > ui.note('branch %s\n' % data)
78 > ui.note('branch %s\n' % data)
91 warning: unwrapped ui message
79 warning: unwrapped ui message
92 mercurial/commands.py:0:
80 mercurial/commands.py:0:
93 > ui.note('node %s\n' % str(data))
81 > ui.note('node %s\n' % str(data))
94 warning: unwrapped ui message
82 warning: unwrapped ui message
95 mercurial/commands.py:0:
83 mercurial/commands.py:0:
96 > ui.note('tag %s\n' % name)
84 > ui.note('tag %s\n' % name)
97 warning: unwrapped ui message
85 warning: unwrapped ui message
98 mercurial/commands.py:0:
86 mercurial/commands.py:0:
99 > ui.write("unpruned common: %s\n" % " ".join([short(n)
87 > ui.write("unpruned common: %s\n" % " ".join([short(n)
100 warning: unwrapped ui message
88 warning: unwrapped ui message
101 mercurial/commands.py:0:
89 mercurial/commands.py:0:
102 > ui.write("format: id, p1, p2, cset, delta base, len(delta)\n")
90 > ui.write("format: id, p1, p2, cset, delta base, len(delta)\n")
103 warning: unwrapped ui message
91 warning: unwrapped ui message
104 mercurial/commands.py:0:
92 mercurial/commands.py:0:
105 > ui.write("local is subset\n")
93 > ui.write("local is subset\n")
106 warning: unwrapped ui message
94 warning: unwrapped ui message
107 mercurial/commands.py:0:
95 mercurial/commands.py:0:
108 > ui.write("remote is subset\n")
96 > ui.write("remote is subset\n")
109 warning: unwrapped ui message
97 warning: unwrapped ui message
110 mercurial/commands.py:0:
98 mercurial/commands.py:0:
111 > ui.write('deltas against other : ' + fmt % pcfmt(numother,
99 > ui.write('deltas against other : ' + fmt % pcfmt(numother,
112 warning: unwrapped ui message
100 warning: unwrapped ui message
113 mercurial/commands.py:0:
101 mercurial/commands.py:0:
114 > ui.write('deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas))
102 > ui.write('deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas))
115 warning: unwrapped ui message
103 warning: unwrapped ui message
116 mercurial/commands.py:0:
104 mercurial/commands.py:0:
117 > ui.write('deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas))
105 > ui.write('deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas))
118 warning: unwrapped ui message
106 warning: unwrapped ui message
119 mercurial/commands.py:0:
107 mercurial/commands.py:0:
120 > except:
121 warning: naked except clause
122 mercurial/commands.py:0:
123 > ui.write("common heads: %s\n" % " ".join([short(n) for n in common]))
108 > ui.write("common heads: %s\n" % " ".join([short(n) for n in common]))
124 warning: unwrapped ui message
109 warning: unwrapped ui message
125 mercurial/commands.py:0:
110 mercurial/commands.py:0:
126 > ui.write("match: %s\n" % m(d[0]))
111 > ui.write("match: %s\n" % m(d[0]))
127 warning: unwrapped ui message
112 warning: unwrapped ui message
128 mercurial/commands.py:0:
113 mercurial/commands.py:0:
129 > ui.write('deltas against prev : ' + fmt % pcfmt(numprev, numdeltas))
114 > ui.write('deltas against prev : ' + fmt % pcfmt(numprev, numdeltas))
130 warning: unwrapped ui message
115 warning: unwrapped ui message
131 mercurial/commands.py:0:
116 mercurial/commands.py:0:
132 > ui.write('path %s\n' % k)
117 > ui.write('path %s\n' % k)
133 warning: unwrapped ui message
118 warning: unwrapped ui message
134 mercurial/commands.py:0:
119 mercurial/commands.py:0:
135 > ui.write('uncompressed data size (min/max/avg) : %d / %d / %d\n'
120 > ui.write('uncompressed data size (min/max/avg) : %d / %d / %d\n'
136 warning: unwrapped ui message
121 warning: unwrapped ui message
137 mercurial/commands.py:0:
122 mercurial/commands.py:0:
138 > ui.write("digraph G {\n")
123 > ui.write("digraph G {\n")
139 warning: unwrapped ui message
124 warning: unwrapped ui message
140 mercurial/commands.py:0:
125 mercurial/commands.py:0:
141 > ui.write("internal: %s %s\n" % d)
126 > ui.write("internal: %s %s\n" % d)
142 warning: unwrapped ui message
127 warning: unwrapped ui message
143 mercurial/commands.py:0:
128 mercurial/commands.py:0:
144 > ui.write("standard: %s\n" % util.datestr(d))
129 > ui.write("standard: %s\n" % util.datestr(d))
145 warning: unwrapped ui message
130 warning: unwrapped ui message
146 mercurial/commands.py:0:
131 mercurial/commands.py:0:
147 > ui.write('avg chain length : ' + fmt % avgchainlen)
132 > ui.write('avg chain length : ' + fmt % avgchainlen)
148 warning: unwrapped ui message
133 warning: unwrapped ui message
149 mercurial/commands.py:0:
134 mercurial/commands.py:0:
150 > ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo')
135 > ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo')
151 warning: unwrapped ui message
136 warning: unwrapped ui message
152 mercurial/commands.py:0:
137 mercurial/commands.py:0:
153 > ui.write('compression ratio : ' + fmt % compratio)
138 > ui.write('compression ratio : ' + fmt % compratio)
154 warning: unwrapped ui message
139 warning: unwrapped ui message
155 mercurial/commands.py:0:
140 mercurial/commands.py:0:
156 > ui.write('delta size (min/max/avg) : %d / %d / %d\n'
141 > ui.write('delta size (min/max/avg) : %d / %d / %d\n'
157 warning: unwrapped ui message
142 warning: unwrapped ui message
158 mercurial/commands.py:0:
143 mercurial/commands.py:0:
159 > ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
144 > ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
160 warning: unwrapped ui message
145 warning: unwrapped ui message
161 mercurial/commands.py:0:
146 mercurial/commands.py:0:
162 > ui.write('flags : %s\n' % ', '.join(flags))
147 > ui.write('flags : %s\n' % ', '.join(flags))
163 warning: unwrapped ui message
148 warning: unwrapped ui message
164 mercurial/commands.py:0:
149 mercurial/commands.py:0:
165 > ui.write('format : %d\n' % format)
150 > ui.write('format : %d\n' % format)
166 warning: unwrapped ui message
151 warning: unwrapped ui message
167 mercurial/commands.py:0:
152 mercurial/commands.py:0:
168 > ui.write('full revision size (min/max/avg) : %d / %d / %d\n'
153 > ui.write('full revision size (min/max/avg) : %d / %d / %d\n'
169 warning: unwrapped ui message
154 warning: unwrapped ui message
170 mercurial/commands.py:0:
155 mercurial/commands.py:0:
171 > ui.write('revision size : ' + fmt2 % totalsize)
156 > ui.write('revision size : ' + fmt2 % totalsize)
172 warning: unwrapped ui message
157 warning: unwrapped ui message
173 mercurial/commands.py:0:
158 mercurial/commands.py:0:
174 > ui.write('revisions : ' + fmt2 % numrevs)
159 > ui.write('revisions : ' + fmt2 % numrevs)
175 warning: unwrapped ui message
160 warning: unwrapped ui message
176 warning: unwrapped ui message
161 warning: unwrapped ui message
177 mercurial/commands.py:0:
162 mercurial/commands.py:0:
178 > ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
163 > ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
179 warning: unwrapped ui message
164 warning: unwrapped ui message
180 mercurial/dispatch.py:0:
181 > except:
182 warning: naked except clause
183 mercurial/dispatch.py:0:
184 > except:
185 warning: naked except clause
186 mercurial/hg.py:0:
187 > except:
188 warning: naked except clause
189 mercurial/keepalive.py:0:
190 > except:
191 warning: naked except clause
192 mercurial/localrepo.py:0:
193 > except:
194 warning: naked except clause
195 mercurial/patch.py:0:
196 > except:
197 warning: naked except clause
198 mercurial/repair.py:0:
199 > except:
200 warning: naked except clause
201 mercurial/repair.py:0:
202 > except:
203 warning: naked except clause
204 mercurial/util.py:0:
205 > except:
206 warning: naked except clause
207 tests/autodiff.py:0:
165 tests/autodiff.py:0:
208 > ui.write('data lost for: %s\n' % fn)
166 > ui.write('data lost for: %s\n' % fn)
209 warning: unwrapped ui message
167 warning: unwrapped ui message
210 tests/test-convert-mtn.t:0:
168 tests/test-convert-mtn.t:0:
211 > > function get_passphrase(keypair_id)
169 > > function get_passphrase(keypair_id)
212 don't use 'function', use old style
170 don't use 'function', use old style
213 tests/test-import-git.t:0:
171 tests/test-import-git.t:0:
214 > > Mc\${NkU|\`?^000jF3jhEB
172 > > Mc\${NkU|\`?^000jF3jhEB
215 ^ must be quoted
173 ^ must be quoted
216 tests/test-import.t:0:
174 tests/test-import.t:0:
217 > > diff -Naur proj-orig/foo proj-new/foo
175 > > diff -Naur proj-orig/foo proj-new/foo
218 don't use 'diff -N'
176 don't use 'diff -N'
219 don't use 'diff -N'
177 don't use 'diff -N'
220 tests/test-schemes.t:0:
178 tests/test-schemes.t:0:
221 > > z = file:\$PWD/
179 > > z = file:\$PWD/
222 don't use $PWD, use `pwd`
180 don't use $PWD, use `pwd`
223 tests/test-ui-color.py:0:
181 tests/test-ui-color.py:0:
224 > testui.warn('warning\n')
182 > testui.warn('warning\n')
225 warning: unwrapped ui message
183 warning: unwrapped ui message
226 tests/test-ui-color.py:0:
184 tests/test-ui-color.py:0:
227 > testui.write('buffered\n')
185 > testui.write('buffered\n')
228 warning: unwrapped ui message
186 warning: unwrapped ui message
General Comments 0
You need to be logged in to leave comments. Login now