Show More
@@ -1,294 +1,295 | |||||
1 | #!/usr/bin/env python |
|
1 | #!/usr/bin/env python | |
2 | # |
|
2 | # | |
3 | # check-code - a style and portability checker for Mercurial |
|
3 | # check-code - a style and portability checker for Mercurial | |
4 | # |
|
4 | # | |
5 | # Copyright 2010 Matt Mackall <mpm@selenic.com> |
|
5 | # Copyright 2010 Matt Mackall <mpm@selenic.com> | |
6 | # |
|
6 | # | |
7 | # This software may be used and distributed according to the terms of the |
|
7 | # This software may be used and distributed according to the terms of the | |
8 | # GNU General Public License version 2 or any later version. |
|
8 | # GNU General Public License version 2 or any later version. | |
9 |
|
9 | |||
10 | import re, glob, os, sys |
|
10 | import re, glob, os, sys | |
11 | import optparse |
|
11 | import optparse | |
12 |
|
12 | |||
def repquote(m):
    """Blank out the body of a quoted string while keeping its length.

    Word characters become 'x' and every other non-space character 'o',
    so later checks never fire on string contents but column positions
    and line lengths are preserved.
    """
    quote = m.group('quote')
    body = re.sub(r"\w", "x", m.group('text'))
    body = re.sub(r"[^\sx]", "o", body)
    return "%s%s%s" % (quote, body, quote)
17 |
|
17 | |||
def reppython(m):
    """Blank out a Python comment or string literal matched by pyfilters.

    A comment becomes a same-length run of '#'; a string literal is
    delegated to repquote.
    """
    hash_run = m.group('comment')
    if hash_run:
        return len(hash_run) * "#"
    return repquote(m)
23 |
|
23 | |||
def repcomment(m):
    """Replace a comment's text with '#'s of the same length, keeping the
    leading indentation (group 1) intact."""
    indent, text = m.group(1), m.group(2)
    return indent + len(text) * "#"
26 |
|
26 | |||
def repccomment(m):
    """Blank out a C block-comment body with 'x'.

    Spaces immediately after a newline are also blanked so continuation
    alignment inside the comment cannot trip indentation checks.
    """
    blanked = re.sub(r"((?<=\n) )|\S", "x", m.group(2))
    return "%s%s*/" % (m.group(1), blanked)
30 |
|
30 | |||
def repcallspaces(m):
    """Strip leading whitespace from continuation lines inside a call."""
    args = re.sub(r"\n\s+", "\n", m.group(2))
    return m.group(1) + args
34 |
|
34 | |||
def repinclude(m):
    """Replace an #include target with the dummy '<foo>'."""
    return "%s<foo>" % m.group(1)
37 |
|
37 | |||
def rephere(m):
    """Blank out the body of a shell here-document with 'x'."""
    blanked = re.sub(r"\S", "x", m.group(2))
    return m.group(1) + blanked
41 |
|
41 | |||
42 |
|
42 | |||
# Patterns checked against shell test scripts.  Each entry is a
# (regex, message) pair: a match anywhere on a (filtered) line reports
# the message for that line.
testpats = [
    (r'(pushd|popd)', "don't use 'pushd' or 'popd', use 'cd'"),
    (r'\W\$?\(\([^\)]*\)\)', "don't use (()) or $(()), use 'expr'"),
    (r'^function', "don't use 'function', use old style"),
    (r'grep.*-q', "don't use 'grep -q', redirect to /dev/null"),
    (r'echo.*\\n', "don't use 'echo \\n', use printf"),
    (r'echo -n', "don't use 'echo -n', use printf"),
    (r'^diff.*-\w*N', "don't use 'diff -N'"),
    (r'(^| )wc[^|]*$', "filter wc output"),
    (r'head -c', "don't use 'head -c', use 'dd'"),
    (r'ls.*-\w*R', "don't use 'ls -R', use 'find'"),
    # Fix: the message read "...'printf \NNN'..." -- '\N' is an invalid
    # escape in Python 3 string literals; '\\NNN' produces the identical
    # runtime bytes on Python 2.
    (r'printf.*\\\d\d\d', "don't use 'printf \\NNN', use Python"),
    (r'printf.*\\x', "don't use printf \\x, use Python"),
    (r'\$\(.*\)', "don't use $(expr), use `expr`"),
    (r'rm -rf \*', "don't use naked rm -rf, target a directory"),
    (r'(^|\|\s*)grep (-\w\s+)*[^|]*[(|]\w',
     "use egrep for extended grep syntax"),
    (r'/bin/', "don't use explicit paths for tools"),
    (r'\$PWD', "don't use $PWD, use `pwd`"),
    (r'[^\n]\Z', "no trailing newline"),
    (r'export.*=', "don't export and assign at once"),
    ('^([^"\']|("[^"]*")|(\'[^\']*\'))*\\^', "^ must be quoted"),
    (r'^source\b', "don't use 'source', use '.'"),
    (r'touch -d', "don't use 'touch -d', use 'touch -t' instead"),
]
68 |
|
68 | |||
# Filters applied to shell test scripts before pattern matching: blank
# out comment bodies (repcomment) and here-document contents (rephere)
# so the checks do not fire on quoted/commented text.
testfilters = [
    (r"( *)(#([^\n]*\S)?)", repcomment),
    (r"<<(\S+)((.|\n)*?\n\1)", rephere),
]
73 |
|
73 | |||
# Prefix of a command line, and of a command continuation line, in a
# unified test (.t) file.
# NOTE(review): the rendering this was recovered from may have collapsed
# runs of spaces -- confirm the exact leading whitespace against the
# original file.
uprefix = r"^ \$ "
uprefixc = r"^ > "
# Patterns checked only against unified test files; the shell patterns
# from testpats are additionally appended below, anchored at uprefix.
utestpats = [
    (uprefix + r'.*\|\s*sed', "use regex test output patterns instead of sed"),
    (uprefix + r'(true|exit 0)', "explicit zero exit unnecessary"),
    (uprefix + r'.*\$\?', "explicit exit code checks unnecessary"),
    (uprefix + r'.*\|\| echo.*(fail|error)',
     "explicit exit code checks unnecessary"),
    (uprefix + r'set -e', "don't use set -e"),
    (uprefixc + r'( *)\t', "don't use tabs to indent"),
]
85 |
|
85 | |||
# Re-use every shell-script pattern inside unified tests by anchoring it
# at the command prompt; a pattern's own leading '^' is folded into the
# prefix so it keeps meaning "start of the command".
for p, m in testpats:
    if p.startswith('^'):
        anchored = uprefix + p[1:]
    else:
        anchored = uprefix + p
    utestpats.append((anchored, m))
92 |
|
92 | |||
# Filters for unified tests: blank out comment text before checking.
utestfilters = [
    (r"( *)(#([^\n]*\S)?)", repcomment),
]
96 |
|
96 | |||
# Patterns checked against Python sources: a mix of portability rules
# (code must run from Python 2.4 up and avoid constructs removed in 3.x)
# and style rules.  Entries whose message starts with "warning" are only
# reported when --warnings is given.
pypats = [
    (r'^\s*def\s*\w+\s*\(.*,\s*\(',
     "tuple parameter unpacking not available in Python 3+"),
    (r'lambda\s*\(.*,.*\)',
     "tuple parameter unpacking not available in Python 3+"),
    (r'(?<!def)\s+(cmp)\(', "cmp is not available in Python 3+"),
    (r'\breduce\s*\(.*', "reduce is not available in Python 3+"),
    (r'\.has_key\b', "dict.has_key is not available in Python 3+"),
    (r'^\s*\t', "don't use tabs"),
    (r'\S;\s*\n', "semicolon"),
    (r'\w,\w', "missing whitespace after ,"),
    (r'\w[+/*\-<>]\w', "missing whitespace in expression"),
    (r'^\s+\w+=\w+[^,)]$', "missing whitespace in assignment"),
    (r'.{85}', "line too long"),
    (r'.{81}', "warning: line over 80 characters"),
    (r'[^\n]\Z', "no trailing newline"),
    (r'(\S\s+|^\s+)\n', "trailing whitespace"),
    # (r'^\s+[^_ ][^_. ]+_[^_]+\s*=', "don't use underbars in identifiers"),
    # (r'\w*[a-z][A-Z]\w*\s*=', "don't use camelcase in identifiers"),
    (r'^\s*(if|while|def|class|except|try)\s[^[]*:\s*[^\]#\s]+',
     "linebreak after :"),
    (r'class\s[^(]:', "old-style class, use class foo(object)"),
    (r'^\s+del\(', "del isn't a function"),
    (r'^\s+except\(', "except isn't a function"),
    (r',]', "unneeded trailing ',' in list"),
    # (r'class\s[A-Z][^\(]*\((?!Exception)',
    #  "don't capitalize non-exception classes"),
    # (r'in range\(', "use xrange"),
    # (r'^\s*print\s+', "avoid using print in core and extensions"),
    (r'[\x80-\xff]', "non-ASCII character literal"),
    # Fix: the pattern previously read ("\') -- i.e. the literal two
    # characters "' -- and could never match real code; the intended
    # alternation "either quote character before .format(" is ("|\').
    (r'("|\')\.format\(', "str.format() not available in Python 2.4"),
    (r'^\s*with\s+', "with not available in Python 2.4"),
    (r'(?<!def)\s+(any|all|format)\(',
     "any/all/format not available in Python 2.4"),
    (r'(?<!def)\s+(callable)\(',
     "callable not available in Python 3, use hasattr(f, '__call__')"),
    (r'if\s.*\selse', "if ... else form not available in Python 2.4"),
    (r'([\(\[]\s\S)|(\S\s[\)\]])', "gratuitous whitespace in () or []"),
    # (r'\s\s=', "gratuitous whitespace before ="),
    (r'[^>< ](\+=|-=|!=|<>|<=|>=|<<=|>>=)\S',
     "missing whitespace around operator"),
    (r'[^>< ](\+=|-=|!=|<>|<=|>=|<<=|>>=)\s',
     "missing whitespace around operator"),
    (r'\s(\+=|-=|!=|<>|<=|>=|<<=|>>=)\S',
     "missing whitespace around operator"),
    (r'[^+=*!<>&| -](\s=|=\s)[^= ]',
     "wrong whitespace around ="),
    (r'raise Exception', "don't raise generic exceptions"),
    (r'ui\.(status|progress|write|note|warn)\([\'\"]x',
     "warning: unwrapped ui message"),
]
147 |
|
148 | |||
# Filter applied to Python sources before checking: comments are handed
# to reppython (becoming runs of '#') and string-literal bodies are
# blanked out, so the checks never fire on comment or string contents.
# The verbose (?x) regex distinguishes triple quotes from single quotes
# via lookaround so adjacent quote characters are not misparsed.
pyfilters = [
    (r"""(?msx)(?P<comment>\#.*?$)|
     ((?P<quote>('''|\"\"\"|(?<!')'(?!')|(?<!")"(?!")))
      (?P<text>(([^\\]|\\.)*?))
      (?P=quote))""", reppython),
]
154 |
|
155 | |||
# Patterns checked against C sources (Mercurial C style: tabs for
# indent, no //-comments, K&R-ish spacing).
cpats = [
    (r'//', "don't use //-style comments"),
    (r'^ ', "don't use spaces to indent"),
    (r'\S\t', "don't use tabs except for indent"),
    (r'(\S\s+|^\s+)\n', "trailing whitespace"),
    (r'.{85}', "line too long"),
    (r'(while|if|do|for)\(', "use space after while/if/do/for"),
    (r'return\(', "return is not a function"),
    (r' ;', "no space before ;"),
    (r'\w+\* \w+', "use int *foo, not int* foo"),
    (r'\([^\)]+\) \w+', "use (int)foo, not (int) foo"),
    (r'\S+ (\+\+|--)', "use foo++, not foo ++"),
    (r'\w,\w', "missing whitespace after ,"),
    (r'\w[+/*]\w', "missing whitespace in expression"),
    (r'^#\s+\w', "use #foo, not # foo"),
    (r'[^\n]\Z', "no trailing newline"),
]
172 |
|
173 | |||
# Filters for C sources: blank out block comments, string contents and
# #include targets, and strip continuation indentation inside calls, so
# the style checks only see real code.
cfilters = [
    (r'(/\*)(((\*(?!/))|[^*])*)\*/', repccomment),
    (r'''(?P<quote>(?<!")")(?P<text>([^"]|\\")+)"(?!")''', repquote),
    (r'''(#\s*include\s+<)([^>]+)>''', repinclude),
    (r'(\()([^)]+\))', repcallspaces),
]
179 |
|
180 | |||
# Registry of checkers: (name, filename regex, filters, patterns).
# checkfile() applies the first entry whose regex matches the file name.
checks = [
    ('python', r'.*\.(py|cgi)$', pyfilters, pypats),
    ('test script', r'(.*/)?test-[^.~]*$', testfilters, testpats),
    ('c', r'.*\.c$', cfilters, cpats),
    ('unified test', r'.*\.t$', utestfilters, utestpats),
]
186 |
|
187 | |||
class norepeatlogger(object):
    # Console logger that prints the "file:line" header and the offending
    # line only once when several messages target the same line.
    def __init__(self):
        # (fname, lineno, line) of the last reported problem, or None
        self._lastseen = None

    def log(self, fname, lineno, line, msg, blame):
        """print error related to a given line of a given file.

        The faulty line will also be printed but only once in the case
        of multiple errors.

        :fname: filename
        :lineno: line number
        :line: actual content of the line
        :msg: error message
        :blame: blame information ('user@rev' or 'working directory'),
                or an empty/false value to omit it
        """
        msgid = fname, lineno, line
        if msgid != self._lastseen:
            if blame:
                print "%s:%d (%s):" % (fname, lineno, blame)
            else:
                print "%s:%d:" % (fname, lineno)
            print " > %s" % line
            self._lastseen = msgid
        print " " + msg
211 |
|
212 | |||
# Shared logger instance: its bound .log method is the default logfunc
# for checkfile().
_defaultlogger = norepeatlogger()
213 |
|
214 | |||
def getblame(f):
    """Return a list of (line, user, rev) triples for file f.

    Shells out to 'hg annotate -un' and parses each output line of the
    form 'user rev: content'; the stored line content drops the
    separating space and the trailing newline.
    """
    annotated = []
    for raw in os.popen('hg annotate -un %s' % f):
        meta, content = raw.split(':', 1)
        user, rev = meta.split()
        annotated.append((content[1:-1], user, rev))
    return annotated
221 |
|
222 | |||
def checkfile(f, logfunc=_defaultlogger.log, maxerr=None, warnings=False,
              blame=False):
    """checks style and portability of a given file

    :f: filepath
    :logfunc: function used to report error
              logfunc(filename, linenumber, linecontent, errormessage)
    :maxerr: number of error to display before aborting.
             Set to None (default) to report all errors
    :warnings: also report patterns whose message starts with "warning"
    :blame: annotate each report with 'hg annotate' user@rev information

    return True if no error is found, False otherwise.
    """
    blamecache = None  # lazily-filled result of getblame(f)
    result = True
    # Find the first checker whose filename pattern matches f.
    for name, match, filters, pats in checks:
        fc = 0  # error count for this file
        if not re.match(match, f):
            continue
        pre = post = open(f).read()
        # Escape hatch: a file containing this marker is never checked.
        # The marker is spelled as a concatenation so this file's own
        # source does not contain it.
        if "no-" + "check-code" in pre:
            break
        # 'post' gets comments/strings blanked by the filters; patterns
        # run against it while 'pre' keeps the original text for display.
        for p, r in filters:
            post = re.sub(p, r, post)
        # print post # uncomment to show filtered version
        z = enumerate(zip(pre.splitlines(), post.splitlines(True)))
        for n, l in z:
            # per-line escape hatch, also spelled via concatenation
            if "check-code" + "-ignore" in l[0]:
                continue
            for p, msg in pats:
                if not warnings and msg.startswith("warning"):
                    continue
                if re.search(p, l[1]):
                    bd = ""
                    if blame:
                        bd = 'working directory'
                        if not blamecache:
                            blamecache = getblame(f)
                        if n < len(blamecache):
                            bl, bu, br = blamecache[n]
                            # only attribute the line if annotate output
                            # still matches the on-disk content
                            if bl == l[0]:
                                bd = '%s@%s' % (bu, br)
                    logfunc(f, n + 1, l[0], msg, bd)
                    fc += 1
                    result = False
            if maxerr is not None and fc >= maxerr:
                print " (too many errors, giving up)"
                break
        # each file is handled by at most one entry of 'checks'
        break
    return result
271 |
|
272 | |||
if __name__ == "__main__":
    # Command-line front end: check the named files (or everything in the
    # current directory) and exit with status 1 if any problem was found.
    parser = optparse.OptionParser("%prog [options] [files]")
    parser.add_option("-w", "--warnings", action="store_true",
                      help="include warning-level checks")
    parser.add_option("-p", "--per-file", type="int",
                      help="max warnings per file")
    parser.add_option("-b", "--blame", action="store_true",
                      help="use annotate to generate blame info")

    parser.set_defaults(per_file=15, warnings=False, blame=False)
    (options, args) = parser.parse_args()

    if len(args) == 0:
        check = glob.glob("*")
    else:
        check = args

    # Bug fix: 'ret = 0' used to sit inside the loop, resetting the exit
    # status on every iteration so only the LAST file's result survived.
    # Initialize once so a failure in any file is reported.
    ret = 0
    for f in check:
        if not checkfile(f, maxerr=options.per_file, warnings=options.warnings,
                         blame=options.blame):
            ret = 1
    sys.exit(ret)
@@ -1,1161 +1,1161 | |||||
1 | # Subversion 1.4/1.5 Python API backend |
|
1 | # Subversion 1.4/1.5 Python API backend | |
2 | # |
|
2 | # | |
3 | # Copyright(C) 2007 Daniel Holth et al |
|
3 | # Copyright(C) 2007 Daniel Holth et al | |
4 |
|
4 | |||
5 | import os |
|
5 | import os | |
6 | import re |
|
6 | import re | |
7 | import sys |
|
7 | import sys | |
8 | import cPickle as pickle |
|
8 | import cPickle as pickle | |
9 | import tempfile |
|
9 | import tempfile | |
10 | import urllib |
|
10 | import urllib | |
11 | import urllib2 |
|
11 | import urllib2 | |
12 |
|
12 | |||
13 | from mercurial import strutil, util, encoding |
|
13 | from mercurial import strutil, util, encoding | |
14 | from mercurial.i18n import _ |
|
14 | from mercurial.i18n import _ | |
15 |
|
15 | |||
16 | # Subversion stuff. Works best with very recent Python SVN bindings |
|
16 | # Subversion stuff. Works best with very recent Python SVN bindings | |
17 | # e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing |
|
17 | # e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing | |
18 | # these bindings. |
|
18 | # these bindings. | |
19 |
|
19 | |||
20 | from cStringIO import StringIO |
|
20 | from cStringIO import StringIO | |
21 |
|
21 | |||
22 | from common import NoRepo, MissingTool, commit, encodeargs, decodeargs |
|
22 | from common import NoRepo, MissingTool, commit, encodeargs, decodeargs | |
23 | from common import commandline, converter_source, converter_sink, mapfile |
|
23 | from common import commandline, converter_source, converter_sink, mapfile | |
24 |
|
24 | |||
# The Subversion bindings are optional: when they are missing, importing
# this module still succeeds and only code paths that actually touch the
# bindings fail later (the converter can then report a missing tool).
try:
    from svn.core import SubversionException, Pool
    import svn
    import svn.client
    import svn.core
    import svn.ra
    import svn.delta
    import transport
    import warnings
    # svn.core triggers DeprecationWarnings on recent Pythons; they are
    # not actionable here, so silence them.
    warnings.filterwarnings('ignore',
            module='svn.core',
            category=DeprecationWarning)

except ImportError:
    pass
40 |
|
40 | |||
class SvnPathNotFound(Exception):
    # Raised when a requested path cannot be located in the Subversion
    # repository at the revision being examined.
    pass
43 |
|
43 | |||
def geturl(path):
    """Normalize 'path' into a Subversion URL.

    The svn bindings are asked first; when they cannot resolve the path
    and it is a local directory, a file:// URL is built by hand.  Any
    other value is returned unchanged.
    """
    try:
        return svn.client.url_from_path(svn.core.svn_path_canonicalize(path))
    except SubversionException:
        # not resolvable through the bindings; fall through
        pass
    if not os.path.isdir(path):
        return path
    normed = os.path.normpath(os.path.abspath(path))
    if os.name == 'nt':
        normed = '/' + util.normpath(normed)
    # Module URL is later compared with the repository URL returned
    # by svn API, which is UTF-8.
    normed = encoding.tolocal(normed)
    return 'file://%s' % urllib.quote(normed)
58 |
|
58 | |||
def optrev(number):
    """Build an svn_opt_revision_t pinned to the given revision number."""
    # use a local name distinct from the function to avoid shadowing
    rev = svn.core.svn_opt_revision_t()
    rev.kind = svn.core.svn_opt_revision_number
    rev.value.number = number
    return rev
64 |
|
64 | |||
class changedpath(object):
    """Plain copy of the fields of an svn changed-path entry that the
    converter needs, detached from the binding objects (and the memory
    pools backing them)."""
    def __init__(self, p):
        for attr in ('copyfrom_path', 'copyfrom_rev', 'action'):
            setattr(self, attr, getattr(p, attr))
70 |
|
70 | |||
def get_log_child(fp, url, paths, start, end, limit=0, discover_changed_paths=True,
                  strict_node_history=False):
    # Child-process half of the log reader: fetch the svn log for 'paths'
    # between revisions 'start' and 'end' and stream every entry to 'fp'
    # as a pickled (orig_paths, revnum, author, date, message) tuple.
    # A pickled None marks a clean end of stream; on SubversionException
    # the svn error number is pickled instead.
    protocol = -1  # pickle protocol: -1 selects the highest available
    def receiver(orig_paths, revnum, author, date, message, pool):
        if orig_paths is not None:
            for k, v in orig_paths.iteritems():
                # copy into plain objects, detaching from svn memory pools
                orig_paths[k] = changedpath(v)
        pickle.dump((orig_paths, revnum, author, date, message),
                    fp, protocol)

    try:
        # Use an ra of our own so that our parent can consume
        # our results without confusing the server.
        t = transport.SvnRaTransport(url=url)
        svn.ra.get_log(t.ra, paths, start, end, limit,
                       discover_changed_paths,
                       strict_node_history,
                       receiver)
    except SubversionException, (inst, num):
        pickle.dump(num, fp, protocol)
    except IOError:
        # Caller may interrupt the iteration
        pickle.dump(None, fp, protocol)
    else:
        pickle.dump(None, fp, protocol)
    fp.close()
    # With large history, cleanup process goes crazy and suddenly
    # consumes *huge* amount of memory. The output file being closed,
    # there is no need for clean termination.
    os._exit(0)
101 |
|
101 | |||
def debugsvnlog(ui, **opts):
    """Fetch SVN log in a subprocess and channel them back to parent to
    avoid memory collection issues.
    """
    # Pickled data flows over stdin/stdout; force binary mode so
    # platforms that distinguish text/binary streams do not mangle it.
    for stream in (sys.stdin, sys.stdout):
        util.set_binary(stream)
    payload = decodeargs(sys.stdin.read())
    get_log_child(sys.stdout, *payload)
110 |
|
110 | |||
class logstream(object):
    """Interruptible revision log iterator.

    Wraps the stdout pipe of the 'debugsvnlog' child process and yields
    the (orig_paths, revnum, author, date, message) tuples it pickles.
    """
    def __init__(self, stdout):
        # pipe carrying pickled entries from the child process
        self._stdout = stdout

    def __iter__(self):
        while True:
            try:
                entry = pickle.load(self._stdout)
            except EOFError:
                # the child never got to pickle its end-of-stream marker
                raise util.Abort(_('Mercurial failed to run itself, check'
                                   ' hg executable is in PATH'))
            try:
                orig_paths, revnum, author, date, message = entry
            except (TypeError, ValueError):
                # Fix: this was a bare 'except:', which also swallowed
                # KeyboardInterrupt/SystemExit.  A value that does not
                # unpack into five fields is either None (clean end of
                # stream) or an svn error code sent by the child.
                if entry is None:
                    break
                raise SubversionException("child raised exception", entry)
            yield entry

    def close(self):
        if self._stdout:
            self._stdout.close()
            self._stdout = None
135 |
|
135 | |||
136 |
|
136 | |||
137 | # Check to see if the given path is a local Subversion repo. Verify this by |
|
137 | # Check to see if the given path is a local Subversion repo. Verify this by | |
138 | # looking for several svn-specific files and directories in the given |
|
138 | # looking for several svn-specific files and directories in the given | |
139 | # directory. |
|
139 | # directory. | |
def filecheck(ui, path, proto):
    """Return True if path contains the svn-specific files and
    directories marking the root of a local Subversion repository.
    """
    required = ('locks', 'hooks', 'format', 'db')
    missing = [entry for entry in required
               if not os.path.exists(os.path.join(path, entry))]
    return not missing
145 |
|
145 | |||
146 | # Check to see if a given path is the root of an svn repo over http. We verify |
|
146 | # Check to see if a given path is the root of an svn repo over http. We verify | |
147 | # this by requesting a version-controlled URL we know can't exist and looking |
|
147 | # this by requesting a version-controlled URL we know can't exist and looking | |
148 | # for the svn-specific "not found" XML. |
|
148 | # for the svn-specific "not found" XML. | |
def httpcheck(ui, path, proto):
    """Check whether proto://path serves a Subversion repository over
    HTTP(S), by requesting a versioned URL known not to exist and
    looking for the svn-specific "not found" XML answer.
    """
    try:
        opener = urllib2.build_opener()
        rsp = opener.open('%s://%s/!svn/ver/0/.svn' % (proto, path))
        data = rsp.read()
    except urllib2.HTTPError, inst:
        if inst.code != 404:
            # Except for 404 we cannot know for sure this is not an svn repo
            ui.warn(_('svn: cannot probe remote repository, assume it could '
                      'be a subversion repository. Use --source-type if you '
                      'know better.\n'))
            return True
        # A 404 carries the server's XML error body; inspect it below.
        data = inst.fp.read()
    except:
        # Could be urllib2.URLError if the URL is invalid or anything else.
        return False
    # mod_dav_svn answers the bogus URL with this error-code markup.
    return '<m:human-readable errcode="160013">' in data
166 |
|
166 | |||
# Map each supported URL scheme to the probe able to recognize an svn
# repository root for it.
protomap = {
    'http': httpcheck,
    'https': httpcheck,
    'file': filecheck,
}

def issvnurl(ui, url):
    """Return True if url points at, or inside, a Subversion repository."""
    try:
        proto, path = url.split('://', 1)
        if proto == 'file':
            path = urllib.url2pathname(path)
    except ValueError:
        # No scheme separator: treat url as a local filesystem path.
        proto = 'file'
        path = os.path.abspath(url)
    if proto == 'file':
        path = path.replace(os.sep, '/')

    def nocheck(*args):
        return False

    check = protomap.get(proto, nocheck)
    # Walk upwards one path component at a time until a repository root
    # is recognized or the path is exhausted.
    while '/' in path:
        if check(ui, path, proto):
            return True
        path = path.rsplit('/', 1)[0]
    return False
187 |
|
187 | |||
188 | # SVN conversion code stolen from bzr-svn and tailor |
|
188 | # SVN conversion code stolen from bzr-svn and tailor | |
189 | # |
|
189 | # | |
190 | # Subversion looks like a versioned filesystem, branches structures |
|
190 | # Subversion looks like a versioned filesystem, branches structures | |
191 | # are defined by conventions and not enforced by the tool. First, |
|
191 | # are defined by conventions and not enforced by the tool. First, | |
192 | # we define the potential branches (modules) as "trunk" and "branches" |
|
192 | # we define the potential branches (modules) as "trunk" and "branches" | |
193 | # children directories. Revisions are then identified by their |
|
193 | # children directories. Revisions are then identified by their | |
194 | # module and revision number (and a repository identifier). |
|
194 | # module and revision number (and a repository identifier). | |
195 | # |
|
195 | # | |
196 | # The revision graph is really a tree (or a forest). By default, a |
|
196 | # The revision graph is really a tree (or a forest). By default, a | |
197 | # revision parent is the previous revision in the same module. If the |
|
197 | # revision parent is the previous revision in the same module. If the | |
198 | # module directory is copied/moved from another module then the |
|
198 | # module directory is copied/moved from another module then the | |
199 | # revision is the module root and its parent the source revision in |
|
199 | # revision is the module root and its parent the source revision in | |
200 | # the parent module. A revision has at most one parent. |
|
200 | # the parent module. A revision has at most one parent. | |
201 | # |
|
201 | # | |
202 | class svn_source(converter_source): |
|
202 | class svn_source(converter_source): | |
    def __init__(self, ui, url, rev=None):
        """Probe url for a Subversion repository and open an RA session.

        Raises NoRepo when url does not look like a Subversion
        repository, MissingTool when usable Subversion python bindings
        are absent, and util.Abort when rev or convert.svn.startrev do
        not parse as integers or no revision exists in the module.
        """
        super(svn_source, self).__init__(ui, url, rev=rev)

        if not (url.startswith('svn://') or url.startswith('svn+ssh://') or
                (os.path.exists(url) and
                 os.path.exists(os.path.join(url, '.svn'))) or
                issvnurl(ui, url)):
            raise NoRepo(_("%s does not look like a Subversion repository")
                         % url)

        # SubversionException is only bound if the bindings imported
        # successfully at module load time; probe for it here.
        try:
            SubversionException
        except NameError:
            raise MissingTool(_('Subversion python bindings could not be loaded'))

        try:
            version = svn.core.SVN_VER_MAJOR, svn.core.SVN_VER_MINOR
            if version < (1, 4):
                raise MissingTool(_('Subversion python bindings %d.%d found, '
                                    '1.4 or later required') % version)
        except AttributeError:
            # Bindings so old they lack the version constants entirely.
            raise MissingTool(_('Subversion python bindings are too old, 1.4 '
                                'or later required'))

        self.lastrevs = {}

        latest = None
        try:
            # Support file://path@rev syntax. Useful e.g. to convert
            # deleted branches.
            at = url.rfind('@')
            if at >= 0:
                latest = int(url[at + 1:])
                url = url[:at]
        except ValueError:
            pass
        self.url = geturl(url)
        self.encoding = 'UTF-8' # Subversion is always nominal UTF-8
        try:
            self.transport = transport.SvnRaTransport(url=self.url)
            self.ra = self.transport.ra
            self.ctx = self.transport.client
            self.baseurl = svn.ra.get_repos_root(self.ra)
            # Module is either empty or a repository path starting with
            # a slash and not ending with a slash.
            self.module = urllib.unquote(self.url[len(self.baseurl):])
            self.prevmodule = None
            self.rootmodule = self.module
            self.commits = {}
            self.paths = {}
            self.uuid = svn.ra.get_uuid(self.ra)
        except SubversionException:
            ui.traceback()
            raise NoRepo(_("%s does not look like a Subversion repository")
                         % self.url)

        if rev:
            try:
                latest = int(rev)
            except ValueError:
                raise util.Abort(_('svn: revision %s is not an integer') % rev)

        self.startrev = self.ui.config('convert', 'svn.startrev', default=0)
        try:
            self.startrev = int(self.startrev)
            if self.startrev < 0:
                self.startrev = 0
        except ValueError:
            raise util.Abort(_('svn: start revision %s is not an integer')
                             % self.startrev)

        self.head = self.latest(self.module, latest)
        if not self.head:
            raise util.Abort(_('no revision found in module %s')
                             % self.module)
        self.last_changed = self.revnum(self.head)

        # Cache for getchangedfiles()/getchanges() round trips.
        self._changescache = None

        # Remember the working copy location (if converting from one) so
        # converted() can record the shamap there.
        if os.path.exists(os.path.join(url, '.svn/entries')):
            self.wc = url
        else:
            self.wc = None
        self.convertfp = None
287 |
|
287 | |||
288 | def setrevmap(self, revmap): |
|
288 | def setrevmap(self, revmap): | |
289 | lastrevs = {} |
|
289 | lastrevs = {} | |
290 | for revid in revmap.iterkeys(): |
|
290 | for revid in revmap.iterkeys(): | |
291 | uuid, module, revnum = self.revsplit(revid) |
|
291 | uuid, module, revnum = self.revsplit(revid) | |
292 | lastrevnum = lastrevs.setdefault(module, revnum) |
|
292 | lastrevnum = lastrevs.setdefault(module, revnum) | |
293 | if revnum > lastrevnum: |
|
293 | if revnum > lastrevnum: | |
294 | lastrevs[module] = revnum |
|
294 | lastrevs[module] = revnum | |
295 | self.lastrevs = lastrevs |
|
295 | self.lastrevs = lastrevs | |
296 |
|
296 | |||
297 | def exists(self, path, optrev): |
|
297 | def exists(self, path, optrev): | |
298 | try: |
|
298 | try: | |
299 | svn.client.ls(self.url.rstrip('/') + '/' + urllib.quote(path), |
|
299 | svn.client.ls(self.url.rstrip('/') + '/' + urllib.quote(path), | |
300 | optrev, False, self.ctx) |
|
300 | optrev, False, self.ctx) | |
301 | return True |
|
301 | return True | |
302 | except SubversionException: |
|
302 | except SubversionException: | |
303 | return False |
|
303 | return False | |
304 |
|
304 | |||
    def getheads(self):
        """Return the list of head revision ids: the module head first,
        followed by one head per non-empty branch directory.

        Also resolves the trunk/tags/branches layout (configurable via
        convert.svn.*) and updates self.module/self.head/self.tags
        accordingly.
        """

        def isdir(path, revnum):
            # True if path is a directory in the repository at revnum.
            kind = self._checkpath(path, revnum)
            return kind == svn.core.svn_node_dir

        def getcfgpath(name, rev):
            # Resolve the convert.svn.<name> layout path, falling back to
            # the conventional directory name. An explicitly empty config
            # value disables the lookup; returns None when absent.
            cfgpath = self.ui.config('convert', 'svn.' + name)
            if cfgpath is not None and cfgpath.strip() == '':
                return None
            path = (cfgpath or name).strip('/')
            if not self.exists(path, rev):
                if cfgpath:
                    # User-configured path must exist; default may not.
                    raise util.Abort(_('expected %s to be at %r, but not found')
                                     % (name, path))
                return None
            self.ui.note(_('found %s at %r\n') % (name, path))
            return path

        rev = optrev(self.last_changed)
        oldmodule = ''
        trunk = getcfgpath('trunk', rev)
        self.tags = getcfgpath('tags', rev)
        branches = getcfgpath('branches', rev)

        # If the project has a trunk or branches, we will extract heads
        # from them. We keep the project root otherwise.
        if trunk:
            oldmodule = self.module or ''
            self.module += '/' + trunk
            self.head = self.latest(self.module, self.last_changed)
            if not self.head:
                raise util.Abort(_('no revision found in module %s')
                                 % self.module)

        # First head in the list is the module's head
        self.heads = [self.head]
        if self.tags is not None:
            self.tags = '%s/%s' % (oldmodule , (self.tags or 'tags'))

        # Check if branches bring a few more heads to the list
        if branches:
            rpath = self.url.strip('/')
            branchnames = svn.client.ls(rpath + '/' + urllib.quote(branches),
                                        rev, False, self.ctx)
            for branch in branchnames.keys():
                module = '%s/%s/%s' % (oldmodule, branches, branch)
                if not isdir(module, self.last_changed):
                    # Skip stray files in the branches directory.
                    continue
                brevid = self.latest(module, self.last_changed)
                if not brevid:
                    self.ui.note(_('ignoring empty branch %s\n') % branch)
                    continue
                self.ui.note(_('found branch %s at %d\n') %
                             (branch, self.revnum(brevid)))
                self.heads.append(brevid)

        if self.startrev and self.heads:
            if len(self.heads) > 1:
                raise util.Abort(_('svn: start revision is not supported '
                                   'with more than one branch'))
            revnum = self.revnum(self.heads[0])
            if revnum < self.startrev:
                raise util.Abort(
                    _('svn: no revision found after start revision %d')
                    % self.startrev)

        return self.heads
373 |
|
373 | |||
374 | def getchanges(self, rev): |
|
374 | def getchanges(self, rev): | |
375 | if self._changescache and self._changescache[0] == rev: |
|
375 | if self._changescache and self._changescache[0] == rev: | |
376 | return self._changescache[1] |
|
376 | return self._changescache[1] | |
377 | self._changescache = None |
|
377 | self._changescache = None | |
378 | (paths, parents) = self.paths[rev] |
|
378 | (paths, parents) = self.paths[rev] | |
379 | if parents: |
|
379 | if parents: | |
380 | files, self.removed, copies = self.expandpaths(rev, paths, parents) |
|
380 | files, self.removed, copies = self.expandpaths(rev, paths, parents) | |
381 | else: |
|
381 | else: | |
382 | # Perform a full checkout on roots |
|
382 | # Perform a full checkout on roots | |
383 | uuid, module, revnum = self.revsplit(rev) |
|
383 | uuid, module, revnum = self.revsplit(rev) | |
384 | entries = svn.client.ls(self.baseurl + urllib.quote(module), |
|
384 | entries = svn.client.ls(self.baseurl + urllib.quote(module), | |
385 | optrev(revnum), True, self.ctx) |
|
385 | optrev(revnum), True, self.ctx) | |
386 | files = [n for n, e in entries.iteritems() |
|
386 | files = [n for n, e in entries.iteritems() | |
387 | if e.kind == svn.core.svn_node_file] |
|
387 | if e.kind == svn.core.svn_node_file] | |
388 | copies = {} |
|
388 | copies = {} | |
389 | self.removed = set() |
|
389 | self.removed = set() | |
390 |
|
390 | |||
391 | files.sort() |
|
391 | files.sort() | |
392 | files = zip(files, [rev] * len(files)) |
|
392 | files = zip(files, [rev] * len(files)) | |
393 |
|
393 | |||
394 | # caller caches the result, so free it here to release memory |
|
394 | # caller caches the result, so free it here to release memory | |
395 | del self.paths[rev] |
|
395 | del self.paths[rev] | |
396 | return (files, copies) |
|
396 | return (files, copies) | |
397 |
|
397 | |||
398 | def getchangedfiles(self, rev, i): |
|
398 | def getchangedfiles(self, rev, i): | |
399 | changes = self.getchanges(rev) |
|
399 | changes = self.getchanges(rev) | |
400 | self._changescache = (rev, changes) |
|
400 | self._changescache = (rev, changes) | |
401 | return [f[0] for f in changes[0]] |
|
401 | return [f[0] for f in changes[0]] | |
402 |
|
402 | |||
403 | def getcommit(self, rev): |
|
403 | def getcommit(self, rev): | |
404 | if rev not in self.commits: |
|
404 | if rev not in self.commits: | |
405 | uuid, module, revnum = self.revsplit(rev) |
|
405 | uuid, module, revnum = self.revsplit(rev) | |
406 | self.module = module |
|
406 | self.module = module | |
407 | self.reparent(module) |
|
407 | self.reparent(module) | |
408 | # We assume that: |
|
408 | # We assume that: | |
409 | # - requests for revisions after "stop" come from the |
|
409 | # - requests for revisions after "stop" come from the | |
410 | # revision graph backward traversal. Cache all of them |
|
410 | # revision graph backward traversal. Cache all of them | |
411 | # down to stop, they will be used eventually. |
|
411 | # down to stop, they will be used eventually. | |
412 | # - requests for revisions before "stop" come to get |
|
412 | # - requests for revisions before "stop" come to get | |
413 | # isolated branches parents. Just fetch what is needed. |
|
413 | # isolated branches parents. Just fetch what is needed. | |
414 | stop = self.lastrevs.get(module, 0) |
|
414 | stop = self.lastrevs.get(module, 0) | |
415 | if revnum < stop: |
|
415 | if revnum < stop: | |
416 | stop = revnum + 1 |
|
416 | stop = revnum + 1 | |
417 | self._fetch_revisions(revnum, stop) |
|
417 | self._fetch_revisions(revnum, stop) | |
418 | commit = self.commits[rev] |
|
418 | commit = self.commits[rev] | |
419 | # caller caches the result, so free it here to release memory |
|
419 | # caller caches the result, so free it here to release memory | |
420 | del self.commits[rev] |
|
420 | del self.commits[rev] | |
421 | return commit |
|
421 | return commit | |
422 |
|
422 | |||
    def gettags(self):
        """Return a mapping of tag name to revision id, computed with a
        single backward traversal of the tags directory history.
        Returns an empty dict when no tags directory was detected.
        """
        tags = {}
        if self.tags is None:
            return tags

        # svn tags are just a convention, project branches left in a
        # 'tags' directory. There is no other relationship than
        # ancestry, which is expensive to discover and makes them hard
        # to update incrementally. Worse, past revisions may be
        # referenced by tags far away in the future, requiring a deep
        # history traversal on every calculation. Current code
        # performs a single backward traversal, tracking moves within
        # the tags directory (tag renaming) and recording a new tag
        # everytime a project is copied from outside the tags
        # directory. It also lists deleted tags, this behaviour may
        # change in the future.
        pendings = []
        tagspath = self.tags
        start = svn.ra.get_latest_revnum(self.ra)
        stream = self._getlog([self.tags], start, self.startrev)
        try:
            for entry in stream:
                origpaths, revnum, author, date, message = entry
                # Collect (source, source rev, destination) copy records
                # for this revision.
                copies = [(e.copyfrom_path, e.copyfrom_rev, p) for p, e
                          in origpaths.iteritems() if e.copyfrom_path]
                # Apply moves/copies from more specific to general
                copies.sort(reverse=True)

                srctagspath = tagspath
                if copies and copies[-1][2] == tagspath:
                    # Track tags directory moves
                    srctagspath = copies.pop()[0]

                for source, sourcerev, dest in copies:
                    if not dest.startswith(tagspath + '/'):
                        continue
                    for tag in pendings:
                        if tag[0].startswith(dest):
                            # A pending tag was copied again: follow it
                            # back to its earlier location.
                            tagpath = source + tag[0][len(dest):]
                            tag[:2] = [tagpath, sourcerev]
                            break
                    else:
                        pendings.append([source, sourcerev, dest])

                # Filter out tags with children coming from different
                # parts of the repository like:
                # /tags/tag.1 (from /trunk:10)
                # /tags/tag.1/foo (from /branches/foo:12)
                # Here/tags/tag.1 discarded as well as its children.
                # It happens with tools like cvs2svn. Such tags cannot
                # be represented in mercurial.
                addeds = dict((p, e.copyfrom_path) for p, e
                              in origpaths.iteritems()
                              if e.action == 'A' and e.copyfrom_path)
                badroots = set()
                for destroot in addeds:
                    for source, sourcerev, dest in pendings:
                        if (not dest.startswith(destroot + '/')
                            or source.startswith(addeds[destroot] + '/')):
                            continue
                        badroots.add(destroot)
                        break

                for badroot in badroots:
                    pendings = [p for p in pendings if p[2] != badroot
                                and not p[2].startswith(badroot + '/')]

                # Tell tag renamings from tag creations
                remainings = []
                for source, sourcerev, dest in pendings:
                    tagname = dest.split('/')[-1]
                    if source.startswith(srctagspath):
                        # Copied from inside the tags directory: a rename,
                        # keep following it backwards.
                        remainings.append([source, sourcerev, tagname])
                        continue
                    if tagname in tags:
                        # Keep the latest tag value
                        continue
                    # From revision may be fake, get one with changes
                    try:
                        tagid = self.latest(source, sourcerev)
                        if tagid and tagname not in tags:
                            tags[tagname] = tagid
                    except SvnPathNotFound:
                        # It happens when we are following directories
                        # we assumed were copied with their parents
                        # but were really created in the tag
                        # directory.
                        pass
                pendings = remainings
                tagspath = srctagspath
        finally:
            stream.close()
        return tags
516 |
|
516 | |||
517 | def converted(self, rev, destrev): |
|
517 | def converted(self, rev, destrev): | |
518 | if not self.wc: |
|
518 | if not self.wc: | |
519 | return |
|
519 | return | |
520 | if self.convertfp is None: |
|
520 | if self.convertfp is None: | |
521 | self.convertfp = open(os.path.join(self.wc, '.svn', 'hg-shamap'), |
|
521 | self.convertfp = open(os.path.join(self.wc, '.svn', 'hg-shamap'), | |
522 | 'a') |
|
522 | 'a') | |
523 | self.convertfp.write('%s %d\n' % (destrev, self.revnum(rev))) |
|
523 | self.convertfp.write('%s %d\n' % (destrev, self.revnum(rev))) | |
524 | self.convertfp.flush() |
|
524 | self.convertfp.flush() | |
525 |
|
525 | |||
526 | def revid(self, revnum, module=None): |
|
526 | def revid(self, revnum, module=None): | |
527 | return 'svn:%s%s@%s' % (self.uuid, module or self.module, revnum) |
|
527 | return 'svn:%s%s@%s' % (self.uuid, module or self.module, revnum) | |
528 |
|
528 | |||
def revnum(self, rev):
    """Extract the integer svn revision number from a revision id.

    The id has the shape 'svn:<uuid><module>@<revnum>'; only the part
    after the last '@' matters.
    """
    tail = rev.rpartition('@')[2]
    return int(tail)
def revsplit(self, rev):
    """Split a 'svn:<uuid><module>@<revnum>' id into (uuid, module, revnum).

    module is '' when the id carries no path component, otherwise it
    keeps its leading '/'. revnum is returned as an int.
    """
    url, num = rev.rsplit('@', 1)
    head, sep, rest = url.partition('/')
    mod = sep + rest if sep else ''
    # head still carries the 'svn:' prefix; strip it to get the uuid
    return head[4:], mod, int(num)
def latest(self, path, stop=0):
    """Find the latest revid affecting path, up to stop. It may return
    a revision in a different module, since a branch may be moved without
    a change being reported. Return None if computed module does not
    belong to rootmodule subtree.
    """
    if not path.startswith(self.rootmodule):
        # Requests on foreign branches may be forbidden at server level
        self.ui.debug('ignoring foreign branch %r\n' % path)
        return None

    # 0 means "no bound": use the repository head instead
    if not stop:
        stop = svn.ra.get_latest_revnum(self.ra)
    try:
        prevmodule = self.reparent('')
        dirent = svn.ra.stat(self.ra, path.strip('/'), stop)
        self.reparent(prevmodule)
    except SubversionException:
        # any stat() failure is treated as "path absent at stop"
        dirent = None
    if not dirent:
        raise SvnPathNotFound(_('%s not found up to revision %d')
                              % (path, stop))

    # stat() gives us the previous revision on this line of
    # development, but it might be in *another module*. Fetch the
    # log and detect renames down to the latest revision.
    stream = self._getlog([path], stop, dirent.created_rev)
    try:
        for entry in stream:
            paths, revnum, author, date, message = entry
            if revnum <= dirent.created_rev:
                break

            for p in paths:
                # follow copyfrom links so renames map path back to
                # its name at dirent.created_rev
                if not path.startswith(p) or not paths[p].copyfrom_path:
                    continue
                newpath = paths[p].copyfrom_path + path[len(p):]
                self.ui.debug("branch renamed from %s to %s at %d\n" %
                              (path, newpath, revnum))
                path = newpath
                break
    finally:
        stream.close()

    # the rename chase may have left rootmodule entirely
    if not path.startswith(self.rootmodule):
        self.ui.debug('ignoring foreign branch %r\n' % path)
        return None
    return self.revid(dirent.created_rev, path)
def reparent(self, module):
    """Reparent the svn transport and return the previous parent.

    A no-op (returning module itself) when the transport is already
    parented there.
    """
    if self.prevmodule == module:
        return module
    target = self.baseurl + urllib.quote(module)
    previous = self.prevmodule
    if previous is None:
        previous = ''
    self.ui.debug("reparent to %s\n" % target)
    svn.ra.reparent(self.ra, target)
    self.prevmodule = module
    return previous
def expandpaths(self, rev, paths, parents):
    """Expand the changed-path entries of rev into file-level events.

    Returns (changed, removed, copies): changed is a list of touched
    files (removals included, see the final update), removed a set of
    deleted files, copies a {dest: source} dict. Directory events are
    expanded into the individual files they contain; copy sources not
    present in the first parent are ignored.
    """
    changed, removed = set(), set()
    copies = {}

    # make sure the svn session is parented at rev's module
    new_module, revnum = self.revsplit(rev)[1:]
    if new_module != self.module:
        self.module = new_module
        self.reparent(self.module)

    for i, (path, ent) in enumerate(paths):
        self.ui.progress(_('scanning paths'), i, item=path,
                         total=len(paths))
        entrypath = self.getrelpath(path)

        kind = self._checkpath(entrypath, revnum)
        if kind == svn.core.svn_node_file:
            changed.add(self.recode(entrypath))
            if not ent.copyfrom_path or not parents:
                continue
            # Copy sources not in parent revisions cannot be
            # represented, ignore their origin for now
            pmodule, prevnum = self.revsplit(parents[0])[1:]
            if ent.copyfrom_rev < prevnum:
                continue
            copyfrom_path = self.getrelpath(ent.copyfrom_path, pmodule)
            if not copyfrom_path:
                continue
            self.ui.debug("copied to %s from %s@%s\n" %
                          (entrypath, copyfrom_path, ent.copyfrom_rev))
            copies[self.recode(entrypath)] = self.recode(copyfrom_path)
        elif kind == 0: # gone, but had better be a deleted *file*
            # NOTE(review): this logs ent.copyfrom_rev rather than the
            # path -- looks odd, confirm against upstream history.
            self.ui.debug("gone from %s\n" % ent.copyfrom_rev)
            pmodule, prevnum = self.revsplit(parents[0])[1:]
            parentpath = pmodule + "/" + entrypath
            fromkind = self._checkpath(entrypath, prevnum, pmodule)

            if fromkind == svn.core.svn_node_file:
                removed.add(self.recode(entrypath))
            elif fromkind == svn.core.svn_node_dir:
                # a whole directory was deleted: remove every file it
                # contained in the parent revision
                oroot = parentpath.strip('/')
                nroot = path.strip('/')
                children = self._iterfiles(oroot, prevnum)
                for childpath in children:
                    childpath = childpath.replace(oroot, nroot)
                    childpath = self.getrelpath("/" + childpath, pmodule)
                    if childpath:
                        removed.add(self.recode(childpath))
            else:
                self.ui.debug('unknown path in revision %d: %s\n' % \
                              (revnum, path))
        elif kind == svn.core.svn_node_dir:
            if ent.action == 'M':
                # If the directory just had a prop change,
                # then we shouldn't need to look for its children.
                continue
            elif ent.action == 'R' and parents:
                # If a directory is replacing a file, mark the previous
                # file as deleted
                pmodule, prevnum = self.revsplit(parents[0])[1:]
                pkind = self._checkpath(entrypath, prevnum, pmodule)
                if pkind == svn.core.svn_node_file:
                    removed.add(self.recode(entrypath))

            # an added/replaced directory: every file below it changed
            for childpath in self._iterfiles(path, revnum):
                childpath = self.getrelpath("/" + childpath)
                if childpath:
                    changed.add(self.recode(childpath))

            # Handle directory copies
            if not ent.copyfrom_path or not parents:
                continue
            # Copy sources not in parent revisions cannot be
            # represented, ignore their origin for now
            pmodule, prevnum = self.revsplit(parents[0])[1:]
            if ent.copyfrom_rev < prevnum:
                continue
            copyfrompath = self.getrelpath(ent.copyfrom_path, pmodule)
            if not copyfrompath:
                continue
            self.ui.debug("mark %s came from %s:%d\n"
                          % (path, copyfrompath, ent.copyfrom_rev))
            children = self._iterfiles(ent.copyfrom_path, ent.copyfrom_rev)
            for childpath in children:
                childpath = self.getrelpath("/" + childpath, pmodule)
                if not childpath:
                    continue
                copytopath = path + childpath[len(copyfrompath):]
                copytopath = self.getrelpath(copytopath)
                copies[self.recode(copytopath)] = self.recode(childpath)

    self.ui.progress(_('scanning paths'), None)
    # removals still count as "changed" for the caller
    changed.update(removed)
    return (list(changed), removed, copies)
def _fetch_revisions(self, from_revnum, to_revnum):
    """Walk the svn log from from_revnum down to to_revnum, filling
    self.commits and self.paths with parsed revisions.

    The bounds are normalized so from_revnum >= to_revnum. The scan
    stops early when revisions drop below self.startrev or when the
    branch root is reached.
    """
    if from_revnum < to_revnum:
        from_revnum, to_revnum = to_revnum, from_revnum

    self.child_cset = None

    def parselogentry(orig_paths, revnum, author, date, message):
        """Return the parsed commit object or None, and True if
        the revision is a branch root.
        """
        self.ui.debug("parsing revision %d (%d changes)\n" %
                      (revnum, len(orig_paths)))

        branched = False
        rev = self.revid(revnum)
        # branch log might return entries for a parent we already have

        if rev in self.commits or revnum < to_revnum:
            return None, branched

        parents = []
        # check whether this revision is the start of a branch or part
        # of a branch renaming
        orig_paths = sorted(orig_paths.iteritems())
        root_paths = [(p, e) for p, e in orig_paths
                      if self.module.startswith(p)]
        if root_paths:
            path, ent = root_paths[-1]
            if ent.copyfrom_path:
                branched = True
                newpath = ent.copyfrom_path + self.module[len(path):]
                # ent.copyfrom_rev may not be the actual last revision
                previd = self.latest(newpath, ent.copyfrom_rev)
                if previd is not None:
                    prevmodule, prevnum = self.revsplit(previd)[1:]
                    if prevnum >= self.startrev:
                        parents = [previd]
                        self.ui.note(
                            _('found parent of branch %s at %d: %s\n') %
                            (self.module, prevnum, prevmodule))
            else:
                self.ui.debug("no copyfrom path, don't know what to do.\n")

        paths = []
        # filter out unrelated paths
        for path, ent in orig_paths:
            if self.getrelpath(path) is None:
                continue
            paths.append((path, ent))

        # Example SVN datetime. Includes microseconds.
        # ISO-8601 conformant
        # '2007-01-04T17:35:00.902377Z'
        date = util.parsedate(date[:19] + " UTC", ["%Y-%m-%dT%H:%M:%S"])

        log = message and self.recode(message) or ''
        author = author and self.recode(author) or ''
        try:
            branch = self.module.split("/")[-1]
            if branch == 'trunk':
                branch = ''
        except IndexError:
            # NOTE(review): split()[-1] on a str cannot raise
            # IndexError, so this fallback looks unreachable -- confirm.
            branch = None

        cset = commit(author=author,
                      date=util.datestr(date),
                      desc=log,
                      parents=parents,
                      branch=branch,
                      rev=rev)

        self.commits[rev] = cset
        # The parents list is *shared* among self.paths and the
        # commit object. Both will be updated below.
        self.paths[rev] = (paths, cset.parents)
        if self.child_cset and not self.child_cset.parents:
            self.child_cset.parents[:] = [rev]
        self.child_cset = cset
        return cset, branched

    self.ui.note(_('fetching revision log for "%s" from %d to %d\n') %
                 (self.module, from_revnum, to_revnum))

    try:
        firstcset = None
        lastonbranch = False
        stream = self._getlog([self.module], from_revnum, to_revnum)
        try:
            for entry in stream:
                paths, revnum, author, date, message = entry
                if revnum < self.startrev:
                    lastonbranch = True
                    break
                if not paths:
                    self.ui.debug('revision %d has no entries\n' % revnum)
                    # If we ever leave the loop on an empty
                    # revision, do not try to get a parent branch
                    lastonbranch = lastonbranch or revnum == 0
                    continue
                cset, lastonbranch = parselogentry(paths, revnum, author,
                                                   date, message)
                if cset:
                    firstcset = cset
                if lastonbranch:
                    break
        finally:
            stream.close()

        if not lastonbranch and firstcset and not firstcset.parents:
            # The first revision of the sequence (the last fetched one)
            # has invalid parents if not a branch root. Find the parent
            # revision now, if any.
            try:
                firstrevnum = self.revnum(firstcset.rev)
                if firstrevnum > 1:
                    latest = self.latest(self.module, firstrevnum - 1)
                    if latest:
                        firstcset.parents.append(latest)
            except SvnPathNotFound:
                pass
    except SubversionException, (inst, num):
        if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION:
            raise util.Abort(_('svn: branch has no revision %s') % to_revnum)
        raise
def getfile(self, file, rev):
    """Return (data, mode) for file at rev.

    mode is 'x' when svn:executable is set, 'l' when svn:special is
    set (the "link " payload prefix is stripped from data), '' else.
    Raises IOError when the file was removed in this revision or svn
    reports it as not found.
    """
    # TODO: ra.get_file transmits the whole file instead of diffs.
    if file in self.removed:
        raise IOError()
    mode = ''
    try:
        # reparent if rev lives in another module than the current one
        new_module, revnum = self.revsplit(rev)[1:]
        if self.module != new_module:
            self.module = new_module
            self.reparent(self.module)
        io = StringIO()
        info = svn.ra.get_file(self.ra, file, revnum, io)
        data = io.getvalue()
        # ra.get_files() seems to keep a reference on the input buffer
        # preventing collection. Release it explicitely.
        io.close()
        if isinstance(info, list):
            info = info[-1]
        mode = ("svn:executable" in info) and 'x' or ''
        mode = ("svn:special" in info) and 'l' or mode
    except SubversionException, e:
        notfound = (svn.core.SVN_ERR_FS_NOT_FOUND,
                    svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND)
        if e.apr_err in notfound: # File not found
            raise IOError()
        raise
    if mode == 'l':
        # svn stores special files as "link <target>"; drop the marker
        link_prefix = "link "
        if data.startswith(link_prefix):
            data = data[len(link_prefix):]
    return data, mode
def _iterfiles(self, path, revnum):
    """Enumerate all files in path at revnum, recursively.

    Yields repository paths of the form '<path>/<entry>' for every
    file node below path.
    """
    relpath = path.strip('/')
    pool = Pool()
    url = '/'.join([self.baseurl, urllib.quote(relpath)]).strip('/')
    listing = svn.client.ls(url, optrev(revnum), True, self.ctx, pool)
    return ((relpath + '/' + name)
            for name, ent in listing.iteritems()
            if ent.kind == svn.core.svn_node_file)
def getrelpath(self, path, module=None):
    """Return path relative to module, or None when path is outside it.

    Given the repository url of this wc, say
    "http://server/plone/CMFPlone/branches/Plone-2_0-branch", extract
    the "entry" portion (a relative path) from what svn log --xml
    says, ie "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py"
    becomes "tests/PloneTestCase.py". The module itself maps to ''.
    """
    if module is None:
        module = self.module
    if path.startswith(module):
        rel = path.rstrip('/')[len(module):]
        if rel == '':
            return rel
        if rel.startswith('/'):
            return rel[1:]
    # The path is outside our tracked tree...
    self.ui.debug('%r is not under %r, ignoring\n' % (path, module))
    return None
def _checkpath(self, path, revnum, module=None):
    """Return the svn node kind of path at revnum.

    When module is given, the transport is temporarily reparented to
    the repository root and restored afterwards.
    """
    reparented = module is not None
    if reparented:
        prevmodule = self.reparent('')
        path = module + '/' + path
    try:
        # ra.check_path does not like leading slashes very much, it leads
        # to PROPFIND subversion errors
        return svn.ra.check_path(self.ra, path.strip('/'), revnum)
    finally:
        if reparented:
            self.reparent(prevmodule)
def _getlog(self, paths, start, end, limit=0, discover_changed_paths=True,
            strict_node_history=False):
    """Run 'hg debugsvnlog' in a child process and return a log stream
    over its decoded output.

    Raises util.Abort when the hg executable cannot be spawned.
    """
    # Normalize path names: svn >= 1.5 only wants paths relative to
    # the supplied URL.
    relpaths = []
    for p in paths:
        full = p if p.startswith('/') else self.module + '/' + p
        relpaths.append(full.strip('/'))
    args = [self.baseurl, relpaths, start, end, limit,
            discover_changed_paths, strict_node_history]
    arg = encodeargs(args)
    hgexe = util.hgexecutable()
    cmd = '%s debugsvnlog' % util.shellquote(hgexe)
    stdin, stdout = util.popen2(cmd)
    stdin.write(arg)
    try:
        stdin.close()
    except IOError:
        raise util.Abort(_('Mercurial failed to run itself, check'
                           ' hg executable is in PATH'))
    return logstream(stdout)
# Subversion pre-revprop-change hook source installed into target
# repositories: it permits only svn:log edits and the addition of the
# hg:convert-branch / hg:convert-rev bookkeeping properties, and rejects
# every other revision-property change.
pre_revprop_change = '''#!/bin/sh

REPOS="$1"
REV="$2"
USER="$3"
PROPNAME="$4"
ACTION="$5"

if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi
if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-branch" ]; then exit 0; fi
if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-rev" ]; then exit 0; fi

echo "Changing prohibited revision property" >&2
exit 1
'''
935 | class svn_sink(converter_sink, commandline): |
|
935 | class svn_sink(converter_sink, commandline): | |
936 | commit_re = re.compile(r'Committed revision (\d+).', re.M) |
|
936 | commit_re = re.compile(r'Committed revision (\d+).', re.M) | |
937 |
|
937 | |||
938 | def prerun(self): |
|
938 | def prerun(self): | |
939 | if self.wc: |
|
939 | if self.wc: | |
940 | os.chdir(self.wc) |
|
940 | os.chdir(self.wc) | |
941 |
|
941 | |||
942 | def postrun(self): |
|
942 | def postrun(self): | |
943 | if self.wc: |
|
943 | if self.wc: | |
944 | os.chdir(self.cwd) |
|
944 | os.chdir(self.cwd) | |
945 |
|
945 | |||
946 | def join(self, name): |
|
946 | def join(self, name): | |
947 | return os.path.join(self.wc, '.svn', name) |
|
947 | return os.path.join(self.wc, '.svn', name) | |
948 |
|
948 | |||
949 | def revmapfile(self): |
|
949 | def revmapfile(self): | |
950 | return self.join('hg-shamap') |
|
950 | return self.join('hg-shamap') | |
951 |
|
951 | |||
952 | def authorfile(self): |
|
952 | def authorfile(self): | |
953 | return self.join('hg-authormap') |
|
953 | return self.join('hg-authormap') | |
954 |
|
954 | |||
955 | def __init__(self, ui, path): |
|
955 | def __init__(self, ui, path): | |
956 | converter_sink.__init__(self, ui, path) |
|
956 | converter_sink.__init__(self, ui, path) | |
957 | commandline.__init__(self, ui, 'svn') |
|
957 | commandline.__init__(self, ui, 'svn') | |
958 | self.delete = [] |
|
958 | self.delete = [] | |
959 | self.setexec = [] |
|
959 | self.setexec = [] | |
960 | self.delexec = [] |
|
960 | self.delexec = [] | |
961 | self.copies = [] |
|
961 | self.copies = [] | |
962 | self.wc = None |
|
962 | self.wc = None | |
963 | self.cwd = os.getcwd() |
|
963 | self.cwd = os.getcwd() | |
964 |
|
964 | |||
965 | path = os.path.realpath(path) |
|
965 | path = os.path.realpath(path) | |
966 |
|
966 | |||
967 | created = False |
|
967 | created = False | |
968 | if os.path.isfile(os.path.join(path, '.svn', 'entries')): |
|
968 | if os.path.isfile(os.path.join(path, '.svn', 'entries')): | |
969 | self.wc = path |
|
969 | self.wc = path | |
970 | self.run0('update') |
|
970 | self.run0('update') | |
971 | else: |
|
971 | else: | |
972 | wcpath = os.path.join(os.getcwd(), os.path.basename(path) + '-wc') |
|
972 | wcpath = os.path.join(os.getcwd(), os.path.basename(path) + '-wc') | |
973 |
|
973 | |||
974 | if os.path.isdir(os.path.dirname(path)): |
|
974 | if os.path.isdir(os.path.dirname(path)): | |
975 | if not os.path.exists(os.path.join(path, 'db', 'fs-type')): |
|
975 | if not os.path.exists(os.path.join(path, 'db', 'fs-type')): | |
976 | ui.status(_('initializing svn repository %r\n') % |
|
976 | ui.status(_('initializing svn repository %r\n') % | |
977 | os.path.basename(path)) |
|
977 | os.path.basename(path)) | |
978 | commandline(ui, 'svnadmin').run0('create', path) |
|
978 | commandline(ui, 'svnadmin').run0('create', path) | |
979 | created = path |
|
979 | created = path | |
980 | path = util.normpath(path) |
|
980 | path = util.normpath(path) | |
981 | if not path.startswith('/'): |
|
981 | if not path.startswith('/'): | |
982 | path = '/' + path |
|
982 | path = '/' + path | |
983 | path = 'file://' + path |
|
983 | path = 'file://' + path | |
984 |
|
984 | |||
985 | ui.status(_('initializing svn working copy %r\n') |
|
985 | ui.status(_('initializing svn working copy %r\n') | |
986 | % os.path.basename(wcpath)) |
|
986 | % os.path.basename(wcpath)) | |
987 | self.run0('checkout', path, wcpath) |
|
987 | self.run0('checkout', path, wcpath) | |
988 |
|
988 | |||
989 | self.wc = wcpath |
|
989 | self.wc = wcpath | |
990 | self.opener = util.opener(self.wc) |
|
990 | self.opener = util.opener(self.wc) | |
991 | self.wopener = util.opener(self.wc) |
|
991 | self.wopener = util.opener(self.wc) | |
992 | self.childmap = mapfile(ui, self.join('hg-childmap')) |
|
992 | self.childmap = mapfile(ui, self.join('hg-childmap')) | |
993 | self.is_exec = util.checkexec(self.wc) and util.is_exec or None |
|
993 | self.is_exec = util.checkexec(self.wc) and util.is_exec or None | |
994 |
|
994 | |||
995 | if created: |
|
995 | if created: | |
996 | hook = os.path.join(created, 'hooks', 'pre-revprop-change') |
|
996 | hook = os.path.join(created, 'hooks', 'pre-revprop-change') | |
997 | fp = open(hook, 'w') |
|
997 | fp = open(hook, 'w') | |
998 | fp.write(pre_revprop_change) |
|
998 | fp.write(pre_revprop_change) | |
999 | fp.close() |
|
999 | fp.close() | |
1000 | util.set_flags(hook, False, True) |
|
1000 | util.set_flags(hook, False, True) | |
1001 |
|
1001 | |||
1002 | xport = transport.SvnRaTransport(url=geturl(path)) |
|
1002 | xport = transport.SvnRaTransport(url=geturl(path)) | |
1003 | self.uuid = svn.ra.get_uuid(xport.ra) |
|
1003 | self.uuid = svn.ra.get_uuid(xport.ra) | |
1004 |
|
1004 | |||
1005 | def wjoin(self, *names): |
|
1005 | def wjoin(self, *names): | |
1006 | return os.path.join(self.wc, *names) |
|
1006 | return os.path.join(self.wc, *names) | |
1007 |
|
1007 | |||
1008 | def putfile(self, filename, flags, data): |
|
1008 | def putfile(self, filename, flags, data): | |
1009 | if 'l' in flags: |
|
1009 | if 'l' in flags: | |
1010 | self.wopener.symlink(data, filename) |
|
1010 | self.wopener.symlink(data, filename) | |
1011 | else: |
|
1011 | else: | |
1012 | try: |
|
1012 | try: | |
1013 | if os.path.islink(self.wjoin(filename)): |
|
1013 | if os.path.islink(self.wjoin(filename)): | |
1014 | os.unlink(filename) |
|
1014 | os.unlink(filename) | |
1015 | except OSError: |
|
1015 | except OSError: | |
1016 | pass |
|
1016 | pass | |
1017 | self.wopener(filename, 'w').write(data) |
|
1017 | self.wopener(filename, 'w').write(data) | |
1018 |
|
1018 | |||
1019 | if self.is_exec: |
|
1019 | if self.is_exec: | |
1020 | was_exec = self.is_exec(self.wjoin(filename)) |
|
1020 | was_exec = self.is_exec(self.wjoin(filename)) | |
1021 | else: |
|
1021 | else: | |
1022 | # On filesystems not supporting execute-bit, there is no way |
|
1022 | # On filesystems not supporting execute-bit, there is no way | |
1023 | # to know if it is set but asking subversion. Setting it |
|
1023 | # to know if it is set but asking subversion. Setting it | |
1024 | # systematically is just as expensive and much simpler. |
|
1024 | # systematically is just as expensive and much simpler. | |
1025 | was_exec = 'x' not in flags |
|
1025 | was_exec = 'x' not in flags | |
1026 |
|
1026 | |||
1027 | util.set_flags(self.wjoin(filename), False, 'x' in flags) |
|
1027 | util.set_flags(self.wjoin(filename), False, 'x' in flags) | |
1028 | if was_exec: |
|
1028 | if was_exec: | |
1029 | if 'x' not in flags: |
|
1029 | if 'x' not in flags: | |
1030 | self.delexec.append(filename) |
|
1030 | self.delexec.append(filename) | |
1031 | else: |
|
1031 | else: | |
1032 | if 'x' in flags: |
|
1032 | if 'x' in flags: | |
1033 | self.setexec.append(filename) |
|
1033 | self.setexec.append(filename) | |
1034 |
|
1034 | |||
1035 | def _copyfile(self, source, dest): |
|
1035 | def _copyfile(self, source, dest): | |
1036 | # SVN's copy command pukes if the destination file exists, but |
|
1036 | # SVN's copy command pukes if the destination file exists, but | |
1037 | # our copyfile method expects to record a copy that has |
|
1037 | # our copyfile method expects to record a copy that has | |
1038 | # already occurred. Cross the semantic gap. |
|
1038 | # already occurred. Cross the semantic gap. | |
1039 | wdest = self.wjoin(dest) |
|
1039 | wdest = self.wjoin(dest) | |
1040 | exists = os.path.lexists(wdest) |
|
1040 | exists = os.path.lexists(wdest) | |
1041 | if exists: |
|
1041 | if exists: | |
1042 | fd, tempname = tempfile.mkstemp( |
|
1042 | fd, tempname = tempfile.mkstemp( | |
1043 | prefix='hg-copy-', dir=os.path.dirname(wdest)) |
|
1043 | prefix='hg-copy-', dir=os.path.dirname(wdest)) | |
1044 | os.close(fd) |
|
1044 | os.close(fd) | |
1045 | os.unlink(tempname) |
|
1045 | os.unlink(tempname) | |
1046 | os.rename(wdest, tempname) |
|
1046 | os.rename(wdest, tempname) | |
1047 | try: |
|
1047 | try: | |
1048 | self.run0('copy', source, dest) |
|
1048 | self.run0('copy', source, dest) | |
1049 | finally: |
|
1049 | finally: | |
1050 | if exists: |
|
1050 | if exists: | |
1051 | try: |
|
1051 | try: | |
1052 | os.unlink(wdest) |
|
1052 | os.unlink(wdest) | |
1053 | except OSError: |
|
1053 | except OSError: | |
1054 | pass |
|
1054 | pass | |
1055 | os.rename(tempname, wdest) |
|
1055 | os.rename(tempname, wdest) | |
1056 |
|
1056 | |||
1057 | def dirs_of(self, files): |
|
1057 | def dirs_of(self, files): | |
1058 | dirs = set() |
|
1058 | dirs = set() | |
1059 | for f in files: |
|
1059 | for f in files: | |
1060 | if os.path.isdir(self.wjoin(f)): |
|
1060 | if os.path.isdir(self.wjoin(f)): | |
1061 | dirs.add(f) |
|
1061 | dirs.add(f) | |
1062 | for i in strutil.rfindall(f, '/'): |
|
1062 | for i in strutil.rfindall(f, '/'): | |
1063 | dirs.add(f[:i]) |
|
1063 | dirs.add(f[:i]) | |
1064 | return dirs |
|
1064 | return dirs | |
1065 |
|
1065 | |||
1066 | def add_dirs(self, files): |
|
1066 | def add_dirs(self, files): | |
1067 | add_dirs = [d for d in sorted(self.dirs_of(files)) |
|
1067 | add_dirs = [d for d in sorted(self.dirs_of(files)) | |
1068 | if not os.path.exists(self.wjoin(d, '.svn', 'entries'))] |
|
1068 | if not os.path.exists(self.wjoin(d, '.svn', 'entries'))] | |
1069 | if add_dirs: |
|
1069 | if add_dirs: | |
1070 | self.xargs(add_dirs, 'add', non_recursive=True, quiet=True) |
|
1070 | self.xargs(add_dirs, 'add', non_recursive=True, quiet=True) | |
1071 | return add_dirs |
|
1071 | return add_dirs | |
1072 |
|
1072 | |||
1073 | def add_files(self, files): |
|
1073 | def add_files(self, files): | |
1074 | if files: |
|
1074 | if files: | |
1075 | self.xargs(files, 'add', quiet=True) |
|
1075 | self.xargs(files, 'add', quiet=True) | |
1076 | return files |
|
1076 | return files | |
1077 |
|
1077 | |||
1078 | def tidy_dirs(self, names): |
|
1078 | def tidy_dirs(self, names): | |
1079 | deleted = [] |
|
1079 | deleted = [] | |
1080 | for d in sorted(self.dirs_of(names), reverse=True): |
|
1080 | for d in sorted(self.dirs_of(names), reverse=True): | |
1081 | wd = self.wjoin(d) |
|
1081 | wd = self.wjoin(d) | |
1082 | if os.listdir(wd) == '.svn': |
|
1082 | if os.listdir(wd) == '.svn': | |
1083 | self.run0('delete', d) |
|
1083 | self.run0('delete', d) | |
1084 | deleted.append(d) |
|
1084 | deleted.append(d) | |
1085 | return deleted |
|
1085 | return deleted | |
1086 |
|
1086 | |||
1087 | def addchild(self, parent, child): |
|
1087 | def addchild(self, parent, child): | |
1088 | self.childmap[parent] = child |
|
1088 | self.childmap[parent] = child | |
1089 |
|
1089 | |||
1090 | def revid(self, rev): |
|
1090 | def revid(self, rev): | |
1091 | return u"svn:%s@%s" % (self.uuid, rev) |
|
1091 | return u"svn:%s@%s" % (self.uuid, rev) | |
1092 |
|
1092 | |||
1093 | def putcommit(self, files, copies, parents, commit, source, revmap): |
|
1093 | def putcommit(self, files, copies, parents, commit, source, revmap): | |
1094 | # Apply changes to working copy |
|
1094 | # Apply changes to working copy | |
1095 | for f, v in files: |
|
1095 | for f, v in files: | |
1096 | try: |
|
1096 | try: | |
1097 | data, mode = source.getfile(f, v) |
|
1097 | data, mode = source.getfile(f, v) | |
1098 | except IOError: |
|
1098 | except IOError: | |
1099 | self.delete.append(f) |
|
1099 | self.delete.append(f) | |
1100 | else: |
|
1100 | else: | |
1101 | self.putfile(f, mode, data) |
|
1101 | self.putfile(f, mode, data) | |
1102 | if f in copies: |
|
1102 | if f in copies: | |
1103 | self.copies.append([copies[f], f]) |
|
1103 | self.copies.append([copies[f], f]) | |
1104 | files = [f[0] for f in files] |
|
1104 | files = [f[0] for f in files] | |
1105 |
|
1105 | |||
1106 | for parent in parents: |
|
1106 | for parent in parents: | |
1107 | try: |
|
1107 | try: | |
1108 | return self.revid(self.childmap[parent]) |
|
1108 | return self.revid(self.childmap[parent]) | |
1109 | except KeyError: |
|
1109 | except KeyError: | |
1110 | pass |
|
1110 | pass | |
1111 | entries = set(self.delete) |
|
1111 | entries = set(self.delete) | |
1112 | files = frozenset(files) |
|
1112 | files = frozenset(files) | |
1113 | entries.update(self.add_dirs(files.difference(entries))) |
|
1113 | entries.update(self.add_dirs(files.difference(entries))) | |
1114 | if self.copies: |
|
1114 | if self.copies: | |
1115 | for s, d in self.copies: |
|
1115 | for s, d in self.copies: | |
1116 | self._copyfile(s, d) |
|
1116 | self._copyfile(s, d) | |
1117 | self.copies = [] |
|
1117 | self.copies = [] | |
1118 | if self.delete: |
|
1118 | if self.delete: | |
1119 | self.xargs(self.delete, 'delete') |
|
1119 | self.xargs(self.delete, 'delete') | |
1120 | self.delete = [] |
|
1120 | self.delete = [] | |
1121 | entries.update(self.add_files(files.difference(entries))) |
|
1121 | entries.update(self.add_files(files.difference(entries))) | |
1122 | entries.update(self.tidy_dirs(entries)) |
|
1122 | entries.update(self.tidy_dirs(entries)) | |
1123 | if self.delexec: |
|
1123 | if self.delexec: | |
1124 | self.xargs(self.delexec, 'propdel', 'svn:executable') |
|
1124 | self.xargs(self.delexec, 'propdel', 'svn:executable') | |
1125 | self.delexec = [] |
|
1125 | self.delexec = [] | |
1126 | if self.setexec: |
|
1126 | if self.setexec: | |
1127 | self.xargs(self.setexec, 'propset', 'svn:executable', '*') |
|
1127 | self.xargs(self.setexec, 'propset', 'svn:executable', '*') | |
1128 | self.setexec = [] |
|
1128 | self.setexec = [] | |
1129 |
|
1129 | |||
1130 | fd, messagefile = tempfile.mkstemp(prefix='hg-convert-') |
|
1130 | fd, messagefile = tempfile.mkstemp(prefix='hg-convert-') | |
1131 | fp = os.fdopen(fd, 'w') |
|
1131 | fp = os.fdopen(fd, 'w') | |
1132 | fp.write(commit.desc) |
|
1132 | fp.write(commit.desc) | |
1133 | fp.close() |
|
1133 | fp.close() | |
1134 | try: |
|
1134 | try: | |
1135 | output = self.run0('commit', |
|
1135 | output = self.run0('commit', | |
1136 | username=util.shortuser(commit.author), |
|
1136 | username=util.shortuser(commit.author), | |
1137 | file=messagefile, |
|
1137 | file=messagefile, | |
1138 | encoding='utf-8') |
|
1138 | encoding='utf-8') | |
1139 | try: |
|
1139 | try: | |
1140 | rev = self.commit_re.search(output).group(1) |
|
1140 | rev = self.commit_re.search(output).group(1) | |
1141 | except AttributeError: |
|
1141 | except AttributeError: | |
1142 | if not files: |
|
1142 | if not files: | |
1143 | return parents[0] |
|
1143 | return parents[0] | |
1144 | self.ui.warn(_('unexpected svn output:\n')) |
|
1144 | self.ui.warn(_('unexpected svn output:\n')) | |
1145 | self.ui.warn(output) |
|
1145 | self.ui.warn(output) | |
1146 | raise util.Abort(_('unable to cope with svn output')) |
|
1146 | raise util.Abort(_('unable to cope with svn output')) | |
1147 | if commit.rev: |
|
1147 | if commit.rev: | |
1148 | self.run('propset', 'hg:convert-rev', commit.rev, |
|
1148 | self.run('propset', 'hg:convert-rev', commit.rev, | |
1149 | revprop=True, revision=rev) |
|
1149 | revprop=True, revision=rev) | |
1150 | if commit.branch and commit.branch != 'default': |
|
1150 | if commit.branch and commit.branch != 'default': | |
1151 | self.run('propset', 'hg:convert-branch', commit.branch, |
|
1151 | self.run('propset', 'hg:convert-branch', commit.branch, | |
1152 | revprop=True, revision=rev) |
|
1152 | revprop=True, revision=rev) | |
1153 | for parent in parents: |
|
1153 | for parent in parents: | |
1154 | self.addchild(parent, rev) |
|
1154 | self.addchild(parent, rev) | |
1155 | return self.revid(rev) |
|
1155 | return self.revid(rev) | |
1156 | finally: |
|
1156 | finally: | |
1157 | os.unlink(messagefile) |
|
1157 | os.unlink(messagefile) | |
1158 |
|
1158 | |||
1159 | def puttags(self, tags): |
|
1159 | def puttags(self, tags): | |
1160 | self.ui.warn(_('writing Subversion tags is not yet implemented\n')) |
|
1160 | self.ui.warn(_('writing Subversion tags is not yet implemented\n')) | |
1161 | return None, None |
|
1161 | return None, None |
@@ -1,642 +1,642 | |||||
1 | # dispatch.py - command dispatching for mercurial |
|
1 | # dispatch.py - command dispatching for mercurial | |
2 | # |
|
2 | # | |
3 | # Copyright 2005-2007 Matt Mackall <mpm@selenic.com> |
|
3 | # Copyright 2005-2007 Matt Mackall <mpm@selenic.com> | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms of the |
|
5 | # This software may be used and distributed according to the terms of the | |
6 | # GNU General Public License version 2 or any later version. |
|
6 | # GNU General Public License version 2 or any later version. | |
7 |
|
7 | |||
8 | from i18n import _ |
|
8 | from i18n import _ | |
9 | import os, sys, atexit, signal, pdb, socket, errno, shlex, time, traceback, re |
|
9 | import os, sys, atexit, signal, pdb, socket, errno, shlex, time, traceback, re | |
10 | import util, commands, hg, fancyopts, extensions, hook, error |
|
10 | import util, commands, hg, fancyopts, extensions, hook, error | |
11 | import cmdutil, encoding |
|
11 | import cmdutil, encoding | |
12 | import ui as uimod |
|
12 | import ui as uimod | |
13 |
|
13 | |||
14 | def run(): |
|
14 | def run(): | |
15 | "run the command in sys.argv" |
|
15 | "run the command in sys.argv" | |
16 | sys.exit(dispatch(sys.argv[1:])) |
|
16 | sys.exit(dispatch(sys.argv[1:])) | |
17 |
|
17 | |||
18 | def dispatch(args): |
|
18 | def dispatch(args): | |
19 | "run the command specified in args" |
|
19 | "run the command specified in args" | |
20 | try: |
|
20 | try: | |
21 | u = uimod.ui() |
|
21 | u = uimod.ui() | |
22 | if '--traceback' in args: |
|
22 | if '--traceback' in args: | |
23 | u.setconfig('ui', 'traceback', 'on') |
|
23 | u.setconfig('ui', 'traceback', 'on') | |
24 | except util.Abort, inst: |
|
24 | except util.Abort, inst: | |
25 | sys.stderr.write(_("abort: %s\n") % inst) |
|
25 | sys.stderr.write(_("abort: %s\n") % inst) | |
26 | if inst.hint: |
|
26 | if inst.hint: | |
27 | sys.stderr.write("(%s)\n" % inst.hint) |
|
27 | sys.stderr.write("(%s)\n" % inst.hint) | |
28 | return -1 |
|
28 | return -1 | |
29 | except error.ParseError, inst: |
|
29 | except error.ParseError, inst: | |
30 | if len(inst.args) > 1: |
|
30 | if len(inst.args) > 1: | |
31 | sys.stderr.write(_("hg: parse error at %s: %s\n") % |
|
31 | sys.stderr.write(_("hg: parse error at %s: %s\n") % | |
32 | (inst.args[1], inst.args[0])) |
|
32 | (inst.args[1], inst.args[0])) | |
33 | else: |
|
33 | else: | |
34 | sys.stderr.write(_("hg: parse error: %s\n") % inst.args[0]) |
|
34 | sys.stderr.write(_("hg: parse error: %s\n") % inst.args[0]) | |
35 | return -1 |
|
35 | return -1 | |
36 | return _runcatch(u, args) |
|
36 | return _runcatch(u, args) | |
37 |
|
37 | |||
38 | def _runcatch(ui, args): |
|
38 | def _runcatch(ui, args): | |
39 | def catchterm(*args): |
|
39 | def catchterm(*args): | |
40 | raise error.SignalInterrupt |
|
40 | raise error.SignalInterrupt | |
41 |
|
41 | |||
42 | try: |
|
42 | try: | |
43 | for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM': |
|
43 | for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM': | |
44 | num = getattr(signal, name, None) |
|
44 | num = getattr(signal, name, None) | |
45 | if num: |
|
45 | if num: | |
46 | signal.signal(num, catchterm) |
|
46 | signal.signal(num, catchterm) | |
47 | except ValueError: |
|
47 | except ValueError: | |
48 | pass # happens if called in a thread |
|
48 | pass # happens if called in a thread | |
49 |
|
49 | |||
50 | try: |
|
50 | try: | |
51 | try: |
|
51 | try: | |
52 | # enter the debugger before command execution |
|
52 | # enter the debugger before command execution | |
53 | if '--debugger' in args: |
|
53 | if '--debugger' in args: | |
54 | ui.warn(_("entering debugger - " |
|
54 | ui.warn(_("entering debugger - " | |
55 | "type c to continue starting hg or h for help\n")) |
|
55 | "type c to continue starting hg or h for help\n")) | |
56 | pdb.set_trace() |
|
56 | pdb.set_trace() | |
57 | try: |
|
57 | try: | |
58 | return _dispatch(ui, args) |
|
58 | return _dispatch(ui, args) | |
59 | finally: |
|
59 | finally: | |
60 | ui.flush() |
|
60 | ui.flush() | |
61 | except: |
|
61 | except: | |
62 | # enter the debugger when we hit an exception |
|
62 | # enter the debugger when we hit an exception | |
63 | if '--debugger' in args: |
|
63 | if '--debugger' in args: | |
64 | traceback.print_exc() |
|
64 | traceback.print_exc() | |
65 | pdb.post_mortem(sys.exc_info()[2]) |
|
65 | pdb.post_mortem(sys.exc_info()[2]) | |
66 | ui.traceback() |
|
66 | ui.traceback() | |
67 | raise |
|
67 | raise | |
68 |
|
68 | |||
69 | # Global exception handling, alphabetically |
|
69 | # Global exception handling, alphabetically | |
70 | # Mercurial-specific first, followed by built-in and library exceptions |
|
70 | # Mercurial-specific first, followed by built-in and library exceptions | |
71 | except error.AmbiguousCommand, inst: |
|
71 | except error.AmbiguousCommand, inst: | |
72 | ui.warn(_("hg: command '%s' is ambiguous:\n %s\n") % |
|
72 | ui.warn(_("hg: command '%s' is ambiguous:\n %s\n") % | |
73 | (inst.args[0], " ".join(inst.args[1]))) |
|
73 | (inst.args[0], " ".join(inst.args[1]))) | |
74 | except error.ParseError, inst: |
|
74 | except error.ParseError, inst: | |
75 | if len(inst.args) > 1: |
|
75 | if len(inst.args) > 1: | |
76 | ui.warn(_("hg: parse error at %s: %s\n") % |
|
76 | ui.warn(_("hg: parse error at %s: %s\n") % | |
77 | (inst.args[1], inst.args[0])) |
|
77 | (inst.args[1], inst.args[0])) | |
78 | else: |
|
78 | else: | |
79 | ui.warn(_("hg: parse error: %s\n") % inst.args[0]) |
|
79 | ui.warn(_("hg: parse error: %s\n") % inst.args[0]) | |
80 | return -1 |
|
80 | return -1 | |
81 | except error.LockHeld, inst: |
|
81 | except error.LockHeld, inst: | |
82 | if inst.errno == errno.ETIMEDOUT: |
|
82 | if inst.errno == errno.ETIMEDOUT: | |
83 | reason = _('timed out waiting for lock held by %s') % inst.locker |
|
83 | reason = _('timed out waiting for lock held by %s') % inst.locker | |
84 | else: |
|
84 | else: | |
85 | reason = _('lock held by %s') % inst.locker |
|
85 | reason = _('lock held by %s') % inst.locker | |
86 | ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason)) |
|
86 | ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason)) | |
87 | except error.LockUnavailable, inst: |
|
87 | except error.LockUnavailable, inst: | |
88 | ui.warn(_("abort: could not lock %s: %s\n") % |
|
88 | ui.warn(_("abort: could not lock %s: %s\n") % | |
89 | (inst.desc or inst.filename, inst.strerror)) |
|
89 | (inst.desc or inst.filename, inst.strerror)) | |
90 | except error.CommandError, inst: |
|
90 | except error.CommandError, inst: | |
91 | if inst.args[0]: |
|
91 | if inst.args[0]: | |
92 | ui.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1])) |
|
92 | ui.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1])) | |
93 | commands.help_(ui, inst.args[0]) |
|
93 | commands.help_(ui, inst.args[0]) | |
94 | else: |
|
94 | else: | |
95 | ui.warn(_("hg: %s\n") % inst.args[1]) |
|
95 | ui.warn(_("hg: %s\n") % inst.args[1]) | |
96 | commands.help_(ui, 'shortlist') |
|
96 | commands.help_(ui, 'shortlist') | |
97 | except error.RepoError, inst: |
|
97 | except error.RepoError, inst: | |
98 | ui.warn(_("abort: %s!\n") % inst) |
|
98 | ui.warn(_("abort: %s!\n") % inst) | |
99 | except error.ResponseError, inst: |
|
99 | except error.ResponseError, inst: | |
100 | ui.warn(_("abort: %s") % inst.args[0]) |
|
100 | ui.warn(_("abort: %s") % inst.args[0]) | |
101 | if not isinstance(inst.args[1], basestring): |
|
101 | if not isinstance(inst.args[1], basestring): | |
102 | ui.warn(" %r\n" % (inst.args[1],)) |
|
102 | ui.warn(" %r\n" % (inst.args[1],)) | |
103 | elif not inst.args[1]: |
|
103 | elif not inst.args[1]: | |
104 | ui.warn(_(" empty string\n")) |
|
104 | ui.warn(_(" empty string\n")) | |
105 | else: |
|
105 | else: | |
106 | ui.warn("\n%r\n" % util.ellipsis(inst.args[1])) |
|
106 | ui.warn("\n%r\n" % util.ellipsis(inst.args[1])) | |
107 | except error.RevlogError, inst: |
|
107 | except error.RevlogError, inst: | |
108 | ui.warn(_("abort: %s!\n") % inst) |
|
108 | ui.warn(_("abort: %s!\n") % inst) | |
109 | except error.SignalInterrupt: |
|
109 | except error.SignalInterrupt: | |
110 | ui.warn(_("killed!\n")) |
|
110 | ui.warn(_("killed!\n")) | |
111 | except error.UnknownCommand, inst: |
|
111 | except error.UnknownCommand, inst: | |
112 | ui.warn(_("hg: unknown command '%s'\n") % inst.args[0]) |
|
112 | ui.warn(_("hg: unknown command '%s'\n") % inst.args[0]) | |
113 | try: |
|
113 | try: | |
114 | # check if the command is in a disabled extension |
|
114 | # check if the command is in a disabled extension | |
115 | # (but don't check for extensions themselves) |
|
115 | # (but don't check for extensions themselves) | |
116 | commands.help_(ui, inst.args[0], unknowncmd=True) |
|
116 | commands.help_(ui, inst.args[0], unknowncmd=True) | |
117 | except error.UnknownCommand: |
|
117 | except error.UnknownCommand: | |
118 | commands.help_(ui, 'shortlist') |
|
118 | commands.help_(ui, 'shortlist') | |
119 | except util.Abort, inst: |
|
119 | except util.Abort, inst: | |
120 | ui.warn(_("abort: %s\n") % inst) |
|
120 | ui.warn(_("abort: %s\n") % inst) | |
121 | if inst.hint: |
|
121 | if inst.hint: | |
122 | ui.warn(_("(%s)\n") % inst.hint) |
|
122 | ui.warn(_("(%s)\n") % inst.hint) | |
123 | except ImportError, inst: |
|
123 | except ImportError, inst: | |
124 | ui.warn(_("abort: %s!\n") % inst) |
|
124 | ui.warn(_("abort: %s!\n") % inst) | |
125 | m = str(inst).split()[-1] |
|
125 | m = str(inst).split()[-1] | |
126 | if m in "mpatch bdiff".split(): |
|
126 | if m in "mpatch bdiff".split(): | |
127 | ui.warn(_("(did you forget to compile extensions?)\n")) |
|
127 | ui.warn(_("(did you forget to compile extensions?)\n")) | |
128 | elif m in "zlib".split(): |
|
128 | elif m in "zlib".split(): | |
129 | ui.warn(_("(is your Python install correct?)\n")) |
|
129 | ui.warn(_("(is your Python install correct?)\n")) | |
130 | except IOError, inst: |
|
130 | except IOError, inst: | |
131 | if hasattr(inst, "code"): |
|
131 | if hasattr(inst, "code"): | |
132 | ui.warn(_("abort: %s\n") % inst) |
|
132 | ui.warn(_("abort: %s\n") % inst) | |
133 | elif hasattr(inst, "reason"): |
|
133 | elif hasattr(inst, "reason"): | |
134 | try: # usually it is in the form (errno, strerror) |
|
134 | try: # usually it is in the form (errno, strerror) | |
135 | reason = inst.reason.args[1] |
|
135 | reason = inst.reason.args[1] | |
136 | except: # it might be anything, for example a string |
|
136 | except: # it might be anything, for example a string | |
137 | reason = inst.reason |
|
137 | reason = inst.reason | |
138 | ui.warn(_("abort: error: %s\n") % reason) |
|
138 | ui.warn(_("abort: error: %s\n") % reason) | |
139 | elif hasattr(inst, "args") and inst.args[0] == errno.EPIPE: |
|
139 | elif hasattr(inst, "args") and inst.args[0] == errno.EPIPE: | |
140 | if ui.debugflag: |
|
140 | if ui.debugflag: | |
141 | ui.warn(_("broken pipe\n")) |
|
141 | ui.warn(_("broken pipe\n")) | |
142 | elif getattr(inst, "strerror", None): |
|
142 | elif getattr(inst, "strerror", None): | |
143 | if getattr(inst, "filename", None): |
|
143 | if getattr(inst, "filename", None): | |
144 | ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename)) |
|
144 | ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename)) | |
145 | else: |
|
145 | else: | |
146 | ui.warn(_("abort: %s\n") % inst.strerror) |
|
146 | ui.warn(_("abort: %s\n") % inst.strerror) | |
147 | else: |
|
147 | else: | |
148 | raise |
|
148 | raise | |
149 | except OSError, inst: |
|
149 | except OSError, inst: | |
150 | if getattr(inst, "filename", None): |
|
150 | if getattr(inst, "filename", None): | |
151 | ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename)) |
|
151 | ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename)) | |
152 | else: |
|
152 | else: | |
153 | ui.warn(_("abort: %s\n") % inst.strerror) |
|
153 | ui.warn(_("abort: %s\n") % inst.strerror) | |
154 | except KeyboardInterrupt: |
|
154 | except KeyboardInterrupt: | |
155 | try: |
|
155 | try: | |
156 | ui.warn(_("interrupted!\n")) |
|
156 | ui.warn(_("interrupted!\n")) | |
157 | except IOError, inst: |
|
157 | except IOError, inst: | |
158 | if inst.errno == errno.EPIPE: |
|
158 | if inst.errno == errno.EPIPE: | |
159 | if ui.debugflag: |
|
159 | if ui.debugflag: | |
160 | ui.warn(_("\nbroken pipe\n")) |
|
160 | ui.warn(_("\nbroken pipe\n")) | |
161 | else: |
|
161 | else: | |
162 | raise |
|
162 | raise | |
163 | except MemoryError: |
|
163 | except MemoryError: | |
164 | ui.warn(_("abort: out of memory\n")) |
|
164 | ui.warn(_("abort: out of memory\n")) | |
165 | except SystemExit, inst: |
|
165 | except SystemExit, inst: | |
166 | # Commands shouldn't sys.exit directly, but give a return code. |
|
166 | # Commands shouldn't sys.exit directly, but give a return code. | |
167 | # Just in case catch this and and pass exit code to caller. |
|
167 | # Just in case catch this and and pass exit code to caller. | |
168 | return inst.code |
|
168 | return inst.code | |
169 | except socket.error, inst: |
|
169 | except socket.error, inst: | |
170 | ui.warn(_("abort: %s\n") % inst.args[-1]) |
|
170 | ui.warn(_("abort: %s\n") % inst.args[-1]) | |
171 | except: |
|
171 | except: | |
172 | ui.warn(_("** unknown exception encountered, details follow\n")) |
|
172 | ui.warn(_("** unknown exception encountered, details follow\n")) | |
173 | ui.warn(_("** report bug details to " |
|
173 | ui.warn(_("** report bug details to " | |
174 | "http://mercurial.selenic.com/bts/\n")) |
|
174 | "http://mercurial.selenic.com/bts/\n")) | |
175 | ui.warn(_("** or mercurial@selenic.com\n")) |
|
175 | ui.warn(_("** or mercurial@selenic.com\n")) | |
176 | ui.warn(_("** Python %s\n") % sys.version.replace('\n', '')) |
|
176 | ui.warn(_("** Python %s\n") % sys.version.replace('\n', '')) | |
177 | ui.warn(_("** Mercurial Distributed SCM (version %s)\n") |
|
177 | ui.warn(_("** Mercurial Distributed SCM (version %s)\n") | |
178 | % util.version()) |
|
178 | % util.version()) | |
179 | ui.warn(_("** Extensions loaded: %s\n") |
|
179 | ui.warn(_("** Extensions loaded: %s\n") | |
180 | % ", ".join([x[0] for x in extensions.extensions()])) |
|
180 | % ", ".join([x[0] for x in extensions.extensions()])) | |
181 | raise |
|
181 | raise | |
182 |
|
182 | |||
183 | return -1 |
|
183 | return -1 | |
184 |
|
184 | |||
185 | def aliasargs(fn): |
|
185 | def aliasargs(fn): | |
186 | if hasattr(fn, 'args'): |
|
186 | if hasattr(fn, 'args'): | |
187 | return fn.args |
|
187 | return fn.args | |
188 | return [] |
|
188 | return [] | |
189 |
|
189 | |||
class cmdalias(object):
    """A callable command built from a ``[alias]`` config definition.

    An alias either redefines/extends an existing command (definition is
    "cmd args..."), runs a shell command (definition starts with "!"),
    or is broken (``badalias`` is set and calling it prints an error and
    returns 1).
    """
    def __init__(self, name, definition, cmdtable):
        # name: the alias name; cmd starts out equal to it and is
        # replaced below by the canonical table key when shadowing.
        self.name = self.cmd = name
        self.cmdname = ''
        self.definition = definition
        self.args = []
        self.opts = []
        self.help = ''
        self.norepo = True
        self.badalias = False

        # Does this alias shadow an existing command?  If so, remember
        # the exact cmdtable key so the table entry can be replaced.
        try:
            aliases, entry = cmdutil.findcmd(self.name, cmdtable)
            for alias, e in cmdtable.iteritems():
                if e is entry:
                    self.cmd = alias
                    break
            self.shadows = True
        except error.UnknownCommand:
            self.shadows = False

        # Empty definition: keep a stub that reports the problem.
        if not self.definition:
            def fn(ui, *args):
                ui.warn(_("no definition for alias '%s'\n") % self.name)
                return 1
            self.fn = fn
            self.badalias = True

            return

        # "!..." definition: run as a shell command with positional
        # substitution ($1..$n, $0 = alias name, $@ = all args).
        if self.definition.startswith('!'):
            self.shell = True
            def fn(ui, *args):
                env = {'HG_ARGS': ' '.join((self.name,) + args)}
                def _checkvar(m):
                    # keep $N only if an Nth argument was supplied;
                    # unmatched placeholders expand to nothing
                    if int(m.groups()[0]) <= len(args):
                        return m.group()
                    else:
                        return ''
                cmd = re.sub(r'\$(\d+)', _checkvar, self.definition[1:])
                replace = dict((str(i + 1), arg) for i, arg in enumerate(args))
                replace['0'] = self.name
                replace['@'] = ' '.join(args)
                cmd = util.interpolate(r'\$', replace, cmd)
                return util.system(cmd, environ=env)
            self.fn = fn
            return

        # Normal definition: "cmd arg arg..." split shell-style.
        args = shlex.split(self.definition)
        self.cmdname = cmd = args.pop(0)
        args = map(util.expandpath, args)

        # Early options that affect repo/cwd selection must come from
        # the real command line, not from an alias definition.
        for invalidarg in ("--cwd", "-R", "--repository", "--repo"):
            if _earlygetopt([invalidarg], args):
                def fn(ui, *args):
                    ui.warn(_("error in definition for alias '%s': %s may only "
                              "be given on the command line\n")
                            % (self.name, invalidarg))
                    return 1

                self.fn = fn
                self.badalias = True
                return

        try:
            # table entries are (fn, opts) or (fn, opts, help)
            tableentry = cmdutil.findcmd(cmd, cmdtable, False)[1]
            if len(tableentry) > 2:
                self.fn, self.opts, self.help = tableentry
            else:
                self.fn, self.opts = tableentry

            # chain onto any args the target itself pre-binds
            self.args = aliasargs(self.fn) + args
            if cmd not in commands.norepo.split(' '):
                self.norepo = False
            if self.help.startswith("hg " + cmd):
                # drop prefix in old-style help lines so hg shows the alias
                self.help = self.help[4 + len(cmd):]
            self.__doc__ = self.fn.__doc__

        except error.UnknownCommand:
            def fn(ui, *args):
                ui.warn(_("alias '%s' resolves to unknown command '%s'\n") \
                        % (self.name, cmd))
                try:
                    # check if the command is in a disabled extension
                    commands.help_(ui, cmd, unknowncmd=True)
                except error.UnknownCommand:
                    pass
                return 1
            self.fn = fn
            self.badalias = True
        except error.AmbiguousCommand:
            def fn(ui, *args):
                ui.warn(_("alias '%s' resolves to ambiguous command '%s'\n") \
                        % (self.name, cmd))
                return 1
            self.fn = fn
            self.badalias = True

    def __call__(self, ui, *args, **opts):
        if self.shadows:
            ui.debug("alias '%s' shadows command '%s'\n" %
                     (self.name, self.cmdname))

        # shell aliases are invoked directly; command aliases go through
        # signature checking so argument errors are reported cleanly
        if self.definition.startswith('!'):
            return self.fn(ui, *args, **opts)
        else:
            try:
                # NOTE(review): the wrapped call's return value is
                # discarded here, so non-shell aliases always return
                # None on success — confirm this is intended.
                util.checksignature(self.fn)(ui, *args, **opts)
            except error.SignatureError:
                args = ' '.join([self.cmdname] + self.args)
                ui.debug("alias '%s' expands to '%s'\n" % (self.name, args))
                raise
303 |
|
303 | |||
def addaliases(ui, cmdtable):
    """Register every ``[alias]`` config entry as a command in *cmdtable*."""
    # This runs after extensions have been loaded, so aliases may target
    # extension commands.  An alias may also build on another alias, but
    # only on one defined earlier in the configuration.
    for name, spec in ui.configitems('alias'):
        entry = cmdalias(name, spec, cmdtable)
        cmdtable[entry.cmd] = (entry, entry.opts, entry.help)
        if entry.norepo:
            commands.norepo += ' %s' % name
313 |
|
313 | |||
def _parse(ui, args):
    """Parse the global options and the command from *args*.

    Returns ``(cmd, func, args, options, cmdoptions)`` where *options*
    holds the global flags, *cmdoptions* the command-specific ones, and
    *args* the remaining positional arguments.  Raises
    error.CommandError on malformed options.
    """
    options = {}
    cmdoptions = {}

    try:
        args = fancyopts.fancyopts(args, commands.globalopts, options)
    except fancyopts.getopt.GetoptError, inst:
        raise error.CommandError(None, inst)

    if args:
        cmd, args = args[0], args[1:]
        aliases, entry = cmdutil.findcmd(cmd, commands.table,
                                         ui.config("ui", "strict"))
        # canonicalize abbreviations/aliases to the primary command name
        cmd = aliases[0]
        args = aliasargs(entry[0]) + args
        # [defaults] config entries are prepended to the argument list
        defaults = ui.config("defaults", cmd)
        if defaults:
            args = map(util.expandpath, shlex.split(defaults)) + args
        c = list(entry[1])
    else:
        cmd = None
        c = []

    # combine global options into local
    for o in commands.globalopts:
        c.append((o[0], o[1], options[o[1]], o[3]))

    try:
        args = fancyopts.fancyopts(args, c, cmdoptions, True)
    except fancyopts.getopt.GetoptError, inst:
        raise error.CommandError(cmd, inst)

    # separate global options back out
    for o in commands.globalopts:
        n = o[1]
        options[n] = cmdoptions[n]
        del cmdoptions[n]

    return (cmd, cmd and entry[0] or None, args, options, cmdoptions)
353 |
|
353 | |||
354 | def _parseconfig(ui, config): |
|
354 | def _parseconfig(ui, config): | |
355 | """parse the --config options from the command line""" |
|
355 | """parse the --config options from the command line""" | |
356 | for cfg in config: |
|
356 | for cfg in config: | |
357 | try: |
|
357 | try: | |
358 | name, value = cfg.split('=', 1) |
|
358 | name, value = cfg.split('=', 1) | |
359 | section, name = name.split('.', 1) |
|
359 | section, name = name.split('.', 1) | |
360 | if not section or not name: |
|
360 | if not section or not name: | |
361 | raise IndexError |
|
361 | raise IndexError | |
362 | ui.setconfig(section, name, value) |
|
362 | ui.setconfig(section, name, value) | |
363 | except (IndexError, ValueError): |
|
363 | except (IndexError, ValueError): | |
364 | raise util.Abort(_('malformed --config option: %r ' |
|
364 | raise util.Abort(_('malformed --config option: %r ' | |
365 | '(use --config section.name=value)') % cfg) |
|
365 | '(use --config section.name=value)') % cfg) | |
366 |
|
366 | |||
367 | def _earlygetopt(aliases, args): |
|
367 | def _earlygetopt(aliases, args): | |
368 | """Return list of values for an option (or aliases). |
|
368 | """Return list of values for an option (or aliases). | |
369 |
|
369 | |||
370 | The values are listed in the order they appear in args. |
|
370 | The values are listed in the order they appear in args. | |
371 | The options and values are removed from args. |
|
371 | The options and values are removed from args. | |
372 | """ |
|
372 | """ | |
373 | try: |
|
373 | try: | |
374 | argcount = args.index("--") |
|
374 | argcount = args.index("--") | |
375 | except ValueError: |
|
375 | except ValueError: | |
376 | argcount = len(args) |
|
376 | argcount = len(args) | |
377 | shortopts = [opt for opt in aliases if len(opt) == 2] |
|
377 | shortopts = [opt for opt in aliases if len(opt) == 2] | |
378 | values = [] |
|
378 | values = [] | |
379 | pos = 0 |
|
379 | pos = 0 | |
380 | while pos < argcount: |
|
380 | while pos < argcount: | |
381 | if args[pos] in aliases: |
|
381 | if args[pos] in aliases: | |
382 | if pos + 1 >= argcount: |
|
382 | if pos + 1 >= argcount: | |
383 | # ignore and let getopt report an error if there is no value |
|
383 | # ignore and let getopt report an error if there is no value | |
384 | break |
|
384 | break | |
385 | del args[pos] |
|
385 | del args[pos] | |
386 | values.append(args.pop(pos)) |
|
386 | values.append(args.pop(pos)) | |
387 | argcount -= 2 |
|
387 | argcount -= 2 | |
388 | elif args[pos][:2] in shortopts: |
|
388 | elif args[pos][:2] in shortopts: | |
389 | # short option can have no following space, e.g. hg log -Rfoo |
|
389 | # short option can have no following space, e.g. hg log -Rfoo | |
390 | values.append(args.pop(pos)[2:]) |
|
390 | values.append(args.pop(pos)[2:]) | |
391 | argcount -= 1 |
|
391 | argcount -= 1 | |
392 | else: |
|
392 | else: | |
393 | pos += 1 |
|
393 | pos += 1 | |
394 | return values |
|
394 | return values | |
395 |
|
395 | |||
def runcommand(lui, repo, cmd, fullargs, ui, options, d, cmdpats, cmdoptions):
    """Run the command thunk *d*, wrapped in its pre-/post- hooks.

    A failing pre-<cmd> hook aborts the command and its status is
    returned; otherwise the command result is passed to the post-<cmd>
    hook and returned.
    """
    argtext = " ".join(fullargs)
    status = hook.hook(lui, repo, "pre-%s" % cmd, False, args=argtext,
                       pats=cmdpats, opts=cmdoptions)
    if status:
        return status
    status = _runcommand(ui, options, cmd, d)
    # the post-hook observes the command result but cannot change it
    hook.hook(lui, repo, "post-%s" % cmd, False, args=argtext,
              result=status, pats=cmdpats, opts=cmdoptions)
    return status
407 |
|
407 | |||
def _getlocal(ui, rpath):
    """Return (path, local ui object) for the given target path.

    Takes paths in [cwd]/.hg/hgrc into account.  *rpath* is the list of
    -R/--repository values from the command line; the last one wins.
    """
    try:
        wd = os.getcwd()
    except OSError, e:
        raise util.Abort(_("error getting current working directory: %s") %
                         e.strerror)
    # walk up from cwd looking for a repository root
    path = cmdutil.findrepo(wd) or ""
    if not path:
        lui = ui
    else:
        lui = ui.copy()
        lui.readconfig(os.path.join(path, ".hg", "hgrc"), path)

    if rpath:
        # an explicit -R/--repository overrides the inferred repo
        path = lui.expandpath(rpath[-1])
        lui = ui.copy()
        lui.readconfig(os.path.join(path, ".hg", "hgrc"), path)

    return path, lui
431 |
|
431 | |||
def _checkshellalias(ui, args):
    """Return a zero-argument runner if *args* invoke a shell ('!') alias.

    Returns None when the first command is not a shell alias; in that
    case the global state touched while probing (current directory and
    commands.norepo) is restored before returning.
    """
    # snapshot state that alias probing may modify
    cwd = os.getcwd()
    norepo = commands.norepo
    options = {}

    try:
        args = fancyopts.fancyopts(args, commands.globalopts, options)
    except fancyopts.getopt.GetoptError:
        # let the normal dispatch path report the option error
        return

    if not args:
        return

    # honor --config/--cwd/-R early so aliases resolve against the
    # right configuration and repository
    _parseconfig(ui, options['config'])
    if options['cwd']:
        os.chdir(options['cwd'])

    path, lui = _getlocal(ui, [options['repository']])

    # resolve against a private copy of the table plus config aliases
    cmdtable = commands.table.copy()
    addaliases(lui, cmdtable)

    cmd = args[0]
    try:
        aliases, entry = cmdutil.findcmd(cmd, cmdtable, lui.config("ui", "strict"))
    except error.UnknownCommand:
        commands.norepo = norepo
        os.chdir(cwd)
        return

    cmd = aliases[0]
    fn = entry[0]

    # only cmdalias shell aliases carry a 'shell' attribute
    if cmd and hasattr(fn, 'shell'):
        d = lambda: fn(ui, *args[1:])
        return lambda: runcommand(lui, None, cmd, args[:1], ui, options, d, [], {})

    # not a shell alias: undo probing side effects
    commands.norepo = norepo
    os.chdir(cwd)
471 |
|
471 | |||
# names of extensions already configured in this process (dispatch may
# be entered several times, e.g. by TortoiseHg)
_loaded = set()
def _dispatch(ui, args):
    """Parse *args*, set up config/extensions/repo, and run the command.

    Returns the command's exit status.  Raises util.Abort and
    error.RepoError/CommandError for the caller to report.
    """
    # shell ('!') aliases short-circuit normal dispatch entirely
    shellaliasfn = _checkshellalias(ui, args)
    if shellaliasfn:
        return shellaliasfn()

    # read --config before doing anything else
    # (e.g. to change trust settings for reading .hg/hgrc)
    _parseconfig(ui, _earlygetopt(['--config'], args))

    # check for cwd
    cwd = _earlygetopt(['--cwd'], args)
    if cwd:
        os.chdir(cwd[-1])

    rpath = _earlygetopt(["-R", "--repository", "--repo"], args)
    path, lui = _getlocal(ui, rpath)

    # Configure extensions in phases: uisetup, extsetup, cmdtable, and
    # reposetup. Programs like TortoiseHg will call _dispatch several
    # times so we keep track of configured extensions in _loaded.
    extensions.loadall(lui)
    exts = [ext for ext in extensions.extensions() if ext[0] not in _loaded]
    # Propagate any changes to lui.__class__ by extensions
    ui.__class__ = lui.__class__

    # (uisetup and extsetup are handled in extensions.loadall)

    for name, module in exts:
        cmdtable = getattr(module, 'cmdtable', {})
        overrides = [cmd for cmd in cmdtable if cmd in commands.table]
        if overrides:
            ui.warn(_("extension '%s' overrides commands: %s\n")
                    % (name, " ".join(overrides)))
        commands.table.update(cmdtable)
        _loaded.add(name)

    # (reposetup is handled in hg.repository)

    addaliases(lui, commands.table)

    # check for fallback encoding
    fallback = lui.config('ui', 'fallbackencoding')
    if fallback:
        encoding.fallbackencoding = fallback

    fullargs = args
    cmd, func, args, options, cmdoptions = _parse(lui, args)

    # these were consumed earlier by _earlygetopt; anything left over
    # means they were abbreviated or attached, which is ambiguous
    if options["config"]:
        raise util.Abort(_("option --config may not be abbreviated!"))
    if options["cwd"]:
        raise util.Abort(_("option --cwd may not be abbreviated!"))
    if options["repository"]:
        raise util.Abort(_(
            "Option -R has to be separated from other options (e.g. not -qR) "
            "and --repository may only be abbreviated as --repo!"))

    if options["encoding"]:
        encoding.encoding = options["encoding"]
    if options["encodingmode"]:
        encoding.encodingmode = options["encodingmode"]
    if options["time"]:
        def get_times():
            t = os.times()
            if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
                t = (t[0], t[1], t[2], t[3], time.clock())
            return t
        s = get_times()
        def print_time():
            t = get_times()
            ui.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
                    (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
        atexit.register(print_time)

    if options['verbose'] or options['debug'] or options['quiet']:
        ui.setconfig('ui', 'verbose', str(bool(options['verbose'])))
        ui.setconfig('ui', 'debug', str(bool(options['debug'])))
        ui.setconfig('ui', 'quiet', str(bool(options['quiet'])))
    if options['traceback']:
        ui.setconfig('ui', 'traceback', 'on')
    if options['noninteractive']:
        ui.setconfig('ui', 'interactive', 'off')

    if options['help']:
        return commands.help_(ui, cmd, options['version'])
    elif options['version']:
        return commands.version_(ui)
    elif not cmd:
        return commands.help_(ui, 'shortlist')

    repo = None
    cmdpats = args[:]
    if cmd not in commands.norepo.split():
        try:
            repo = hg.repository(ui, path=path)
            ui = repo.ui
            if not repo.local():
                raise util.Abort(_("repository '%s' is not local") % path)
            ui.setconfig("bundle", "mainreporoot", repo.root)
        except error.RepoError:
            if cmd not in commands.optionalrepo.split():
                if args and not path: # try to infer -R from command args
                    repos = map(cmdutil.findrepo, args)
                    guess = repos[0]
                    if guess and repos.count(guess) == len(repos):
                        return _dispatch(ui, ['--repository', guess] + fullargs)
                if not path:
                    raise error.RepoError(_("There is no Mercurial repository"
                                            " here (.hg not found)"))
                raise
        args.insert(0, repo)
    elif rpath:
        ui.warn(_("warning: --repository ignored\n"))

    # log the exact invocation (quoting args containing spaces)
    msg = ' '.join(' ' in a and repr(a) or a for a in fullargs)
    ui.log("command", msg + "\n")
    d = lambda: util.checksignature(func)(ui, *args, **cmdoptions)
    return runcommand(lui, repo, cmd, fullargs, ui, options, d,
                      cmdpats, cmdoptions)
592 |
|
592 | |||
def _runcommand(ui, options, cmd, cmdfunc):
    """Run *cmdfunc*, optionally under the lsprof profiler.

    Translates a SignatureError from the command into a user-facing
    CommandError ("invalid arguments").
    """
    def checkargs():
        try:
            return cmdfunc()
        except error.SignatureError:
            raise error.CommandError(cmd, _("invalid arguments"))

    if options['profile']:
        format = ui.config('profiling', 'format', default='text')

        if not format in ['text', 'kcachegrind']:
            ui.warn(_("unrecognized profiling format '%s'"
                      " - Ignored\n") % format)
            format = 'text'

        # profile report goes to the configured file, else stderr
        output = ui.config('profiling', 'output')

        if output:
            path = ui.expandpath(output)
            ostream = open(path, 'wb')
        else:
            ostream = sys.stderr

        try:
            from mercurial import lsprof
        except ImportError:
            raise util.Abort(_(
                'lsprof not available - install from '
                'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
        p = lsprof.Profiler()
        p.enable(subcalls=True)
        try:
            return checkargs()
        finally:
            # report even if the command raised
            p.disable()

            if format == 'kcachegrind':
                import lsprofcalltree
                calltree = lsprofcalltree.KCacheGrind(p)
                calltree.output(ostream)
            else:
                # format == 'text'
                stats = lsprof.Stats(p.getstats())
                stats.sort()
                stats.pprint(top=10, file=ostream, climit=5)

            if output:
                ostream.close()
    else:
        return checkargs()
@@ -1,97 +1,97 | |||||
1 | # encoding.py - character transcoding support for Mercurial |
|
1 | # encoding.py - character transcoding support for Mercurial | |
2 | # |
|
2 | # | |
3 | # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others |
|
3 | # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms of the |
|
5 | # This software may be used and distributed according to the terms of the | |
6 | # GNU General Public License version 2 or any later version. |
|
6 | # GNU General Public License version 2 or any later version. | |
7 |
|
7 | |||
8 | import error |
|
8 | import error | |
9 | import unicodedata, locale, os |
|
9 | import unicodedata, locale, os | |
10 |
|
10 | |||
def _getpreferredencoding():
    '''
    On darwin, getpreferredencoding ignores the locale environment and
    always returns mac-roman. http://bugs.python.org/issue6202 fixes this
    for Python 2.7 and up. This is the same corrected code for earlier
    Python versions.

    However, we can't use a version check for this method, as some distributions
    patch Python to fix this. Instead, we use it as a 'fixer' for the mac-roman
    encoding, as it is unlikely that this encoding is the actually expected.
    '''
    try:
        locale.CODESET
    except AttributeError:
        # Fall back to parsing environment variables :-(
        return locale.getdefaultlocale()[1]

    # Temporarily switch LC_CTYPE to the user's environment default so that
    # nl_langinfo reports the configured codeset, then restore the previous
    # locale so the change does not leak into the rest of the process.
    oldloc = locale.setlocale(locale.LC_CTYPE)
    locale.setlocale(locale.LC_CTYPE, "")
    result = locale.nl_langinfo(locale.CODESET)
    locale.setlocale(locale.LC_CTYPE, oldloc)

    return result
34 |
|
34 | |||
# Map of encoding names that locale.getpreferredencoding() may report but
# that we want to correct: '646' and 'ANSI_X3.4-1968' are aliases for
# ASCII; 'mac-roman' is the buggy darwin answer fixed by
# _getpreferredencoding() above.
_encodingfixers = {
    '646': lambda: 'ascii',
    'ANSI_X3.4-1968': lambda: 'ascii',
    'mac-roman': _getpreferredencoding
}

# Module-level encoding configuration, resolved once at import time.
# HGENCODING overrides the locale; otherwise fall back to the locale's
# preferred encoding (or ASCII), then run the result through the fixer
# table above.
try:
    encoding = os.environ.get("HGENCODING")
    if not encoding:
        encoding = locale.getpreferredencoding() or 'ascii'
    encoding = _encodingfixers.get(encoding, lambda: encoding)()
except locale.Error:
    # locale machinery itself is broken; be conservative
    encoding = 'ascii'
# How decode errors are handled when converting local strings to UTF-8:
# 'strict' (default), 'replace' or 'ignore' (see fromlocal()).
encodingmode = os.environ.get("HGENCODINGMODE", "strict")
# Encoding tried for legacy repo data that is not valid UTF-8 (see tolocal()).
fallbackencoding = 'ISO-8859-1'
50 |
|
50 | |||
def tolocal(s):
    """
    Convert a string from internal UTF-8 to local encoding

    All internal strings should be UTF-8 but some repos before the
    implementation of locale support may contain latin1 or possibly
    other character sets. We attempt to decode everything strictly
    using UTF-8, then Latin-1, and failing that, we use UTF-8 and
    replace unknown characters.
    """
    for e in ('UTF-8', fallbackencoding):
        try:
            u = s.decode(e) # attempt strict decoding
            return u.encode(encoding, "replace")
        except LookupError, k:
            # unknown encoding name: a configuration problem, not bad data
            raise error.Abort("%s, please check your locale settings" % k)
        except UnicodeDecodeError:
            # not valid in this encoding; try the next candidate
            pass
    u = s.decode("utf-8", "replace") # last ditch
    return u.encode(encoding, "replace")
71 |
|
71 | |||
def fromlocal(s):
    """
    Convert a string from the local character encoding to UTF-8

    We attempt to decode strings using the encoding mode set by
    HGENCODINGMODE, which defaults to 'strict'. In this mode, unknown
    characters will cause an error message. Other modes include
    'replace', which replaces unknown characters with a special
    Unicode character, and 'ignore', which drops the character.
    """
    try:
        return s.decode(encoding, encodingmode).encode("utf-8")
    except UnicodeDecodeError, inst:
        # show a little context around the offending byte to help the user
        sub = s[max(0, inst.start - 10):inst.start + 10]
        raise error.Abort("decoding near '%s': %s!" % (sub, inst))
    except LookupError, k:
        # unknown encoding name: a configuration problem, not bad data
        raise error.Abort("%s, please check your locale settings" % k)
89 |
|
89 | |||
def colwidth(s):
    "Find the column width of a UTF-8 string for display"
    decoded = s.decode(encoding, 'replace')
    if not hasattr(unicodedata, 'east_asian_width'):
        # no east-asian width data available: assume one column per char
        return len(decoded)
    eaw = unicodedata.east_asian_width
    width = 0
    for ch in decoded:
        # wide (W), fullwidth (F) and ambiguous (A) characters take two
        # columns on the terminal; everything else takes one
        if eaw(ch) in 'WFA':
            width += 2
        else:
            width += 1
    return width
97 |
|
97 |
@@ -1,580 +1,580 | |||||
1 | # subrepo.py - sub-repository handling for Mercurial |
|
1 | # subrepo.py - sub-repository handling for Mercurial | |
2 | # |
|
2 | # | |
3 | # Copyright 2009-2010 Matt Mackall <mpm@selenic.com> |
|
3 | # Copyright 2009-2010 Matt Mackall <mpm@selenic.com> | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms of the |
|
5 | # This software may be used and distributed according to the terms of the | |
6 | # GNU General Public License version 2 or any later version. |
|
6 | # GNU General Public License version 2 or any later version. | |
7 |
|
7 | |||
8 | import errno, os, re, xml.dom.minidom, shutil, urlparse, posixpath |
|
8 | import errno, os, re, xml.dom.minidom, shutil, urlparse, posixpath | |
9 | from i18n import _ |
|
9 | from i18n import _ | |
10 | import config, util, node, error, cmdutil |
|
10 | import config, util, node, error, cmdutil | |
11 | hg = None |
|
11 | hg = None | |
12 |
|
12 | |||
# State tuple used for a subrepo path with no recorded state:
# (source, revision, kind)
nullstate = ('', '', 'empty')
14 |
|
14 | |||
def state(ctx, ui):
    """return a state dict, mapping subrepo paths configured in .hgsub
    to tuple: (source from .hgsub, revision from .hgsubstate, kind
    (key in types dict))
    """
    p = config.config()
    def read(f, sections=None, remap=None):
        # parse a spec file from the changectx; passing 'read' back in as
        # the include callback lets %include directives recurse through ctx
        if f in ctx:
            p.parse(f, ctx[f].data(), sections, remap, read)
        else:
            raise util.Abort(_("subrepo spec file %s not found") % f)

    if '.hgsub' in ctx:
        read('.hgsub')

    # [subpaths] entries from the user's configuration participate in the
    # source-rewriting pass below, alongside any set in .hgsub itself
    for path, src in ui.configitems('subpaths'):
        p.set('subpaths', path, src, ui.configsource('subpaths', path))

    # map of subrepo path -> pinned revision, from .hgsubstate
    rev = {}
    if '.hgsubstate' in ctx:
        try:
            for l in ctx['.hgsubstate'].data().splitlines():
                revision, path = l.split(" ", 1)
                rev[path] = revision
        except IOError, err:
            # a missing .hgsubstate just means no pinned revisions yet
            if err.errno != errno.ENOENT:
                raise

    state = {}
    for path, src in p[''].items():
        # sources may carry a '[kind]' prefix, e.g. '[git]url'; default 'hg'
        kind = 'hg'
        if src.startswith('['):
            if ']' not in src:
                raise util.Abort(_('missing ] in subrepo source'))
            kind, src = src.split(']', 1)
            kind = kind[1:]

        # apply [subpaths] rewrite rules (regex pattern -> replacement)
        for pattern, repl in p.items('subpaths'):
            # Turn r'C:\foo\bar' into r'C:\\foo\\bar' since re.sub
            # does a string decode.
            repl = repl.encode('string-escape')
            # However, we still want to allow back references to go
            # through unharmed, so we turn r'\\1' into r'\1'. Again,
            # extra escapes are needed because re.sub string decodes.
            repl = re.sub(r'\\\\([0-9]+)', r'\\\1', repl)
            try:
                src = re.sub(pattern, repl, src, 1)
            except re.error, e:
                raise util.Abort(_("bad subrepository pattern in %s: %s")
                                 % (p.source('subpaths', pattern), e))

        state[path] = (src.strip(), rev.get(path, ''), kind)

    return state
69 |
|
69 | |||
def writestate(repo, state):
    """rewrite .hgsubstate in (outer) repo with these subrepo states"""
    # one "<revision> <path>" line per subrepo, sorted by path for a
    # deterministic file
    lines = []
    for path in sorted(state):
        lines.append('%s %s\n' % (state[path][1], path))
    repo.wwrite('.hgsubstate', ''.join(lines), '')
75 |
|
75 | |||
def submerge(repo, wctx, mctx, actx):
    """delegated from merge.applyupdates: merging of .hgsubstate file
    in working context, merging context and ancestor context"""
    if mctx == actx: # backwards?
        actx = wctx.p1()
    s1 = wctx.substate   # local (working) subrepo states
    s2 = mctx.substate   # other (merged-in) subrepo states
    sa = actx.substate   # ancestor subrepo states
    sm = {}              # resulting merged states, written out at the end

    repo.ui.debug("subrepo merge %s %s %s\n" % (wctx, mctx, actx))

    def debug(s, msg, r=""):
        # r, when given, is a (source, revision, kind) state tuple
        if r:
            r = "%s:%s:%s" % r
        repo.ui.debug(" subrepo %s: %s %s\n" % (s, msg, r))

    # first pass: every subrepo present locally
    for s, l in s1.items():
        a = sa.get(s, nullstate)
        ld = l # local state with possible dirty flag for compares
        if wctx.sub(s).dirty():
            # mark uncommitted local changes so comparisons below see them
            ld = (l[0], l[1] + "+")
        if wctx == actx: # overwrite
            a = ld

        if s in s2:
            r = s2[s]
            if ld == r or r == a: # no change or local is newer
                sm[s] = l
                continue
            elif ld == a: # other side changed
                debug(s, "other changed, get", r)
                wctx.sub(s).get(r)
                sm[s] = r
            elif ld[0] != r[0]: # sources differ
                # conflicting sources: let the user pick one
                if repo.ui.promptchoice(
                    _(' subrepository sources for %s differ\n'
                      'use (l)ocal source (%s) or (r)emote source (%s)?')
                      % (s, l[0], r[0]),
                    (_('&Local'), _('&Remote')), 0):
                    debug(s, "prompt changed, get", r)
                    wctx.sub(s).get(r)
                    sm[s] = r
            elif ld[1] == a[1]: # local side is unchanged
                debug(s, "other side changed, get", r)
                wctx.sub(s).get(r)
                sm[s] = r
            else:
                # both sides changed: recurse into the subrepo's own merge
                debug(s, "both sides changed, merge with", r)
                wctx.sub(s).merge(r)
                sm[s] = l
        elif ld == a: # remote removed, local unchanged
            debug(s, "remote removed, remove")
            wctx.sub(s).remove()
        else:
            # remote removed a subrepo that has local changes: ask the user
            if repo.ui.promptchoice(
                _(' local changed subrepository %s which remote removed\n'
                  'use (c)hanged version or (d)elete?') % s,
                (_('&Changed'), _('&Delete')), 0):
                debug(s, "prompt remove")
                wctx.sub(s).remove()

    # second pass: subrepos only present on the remote side
    for s, r in s2.items():
        if s in s1:
            continue
        elif s not in sa:
            debug(s, "remote added, get", r)
            mctx.sub(s).get(r)
            sm[s] = r
        elif r != sa[s]:
            # remote changed a subrepo that was removed locally: ask the user
            if repo.ui.promptchoice(
                _(' remote changed subrepository %s which local removed\n'
                  'use (c)hanged version or (d)elete?') % s,
                (_('&Changed'), _('&Delete')), 0) == 0:
                debug(s, "prompt recreate", r)
                wctx.sub(s).get(r)
                sm[s] = r

    # record merged .hgsubstate
    writestate(repo, sm)
156 |
|
156 | |||
def reporelpath(repo):
    """return path to this (sub)repo as seen from outermost repo"""
    # walk the _subparent chain up to the outermost repository
    outer = repo
    while hasattr(outer, '_subparent'):
        outer = outer._subparent
    # strip the outer root (plus the path separator) from our own root
    return repo.root[len(outer.root) + 1:]
163 |
|
163 | |||
def subrelpath(sub):
    """return path to this subrepo as seen from outermost repo"""
    if hasattr(sub, '_repo'):
        # hg subrepos carry a full repo object; derive the path from it
        return reporelpath(sub._repo)
    # non-hg subrepos only know their configured path
    return sub._path
169 |
|
169 | |||
def _abssource(repo, push=False, abort=True):
    """return pull/push path of repo - either based on parent repo .hgsub info
    or on the top repo config. Abort or return None if no source found."""
    if hasattr(repo, '_subparent'):
        # this is a subrepo: resolve its source relative to the parent
        source = repo._subsource
        if source.startswith('/') or '://' in source:
            # already absolute (filesystem path or URL)
            return source
        parent = _abssource(repo._subparent, push, abort=False)
        if parent:
            if '://' in parent:
                if parent[-1] == '/':
                    parent = parent[:-1]
                # join and normalize only the path component of the URL,
                # leaving scheme, host, query etc. untouched
                r = urlparse.urlparse(parent + '/' + source)
                r = urlparse.urlunparse((r[0], r[1],
                                         posixpath.normpath(r[2]),
                                         r[3], r[4], r[5]))
                return r
            else: # plain file system path
                return posixpath.normpath(os.path.join(parent, repo._subsource))
    else: # recursion reached top repo
        if push and repo.ui.config('paths', 'default-push'):
            return repo.ui.config('paths', 'default-push')
        if repo.ui.config('paths', 'default'):
            return repo.ui.config('paths', 'default')
    # no source could be determined; either abort or return None implicitly
    if abort:
        raise util.Abort(_("default path for subrepository %s not found") %
                         reporelpath(repo))
197 |
|
197 | |||
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Build a (subpath -> ctx) mapping, preferring entries from ctx1.
    # The subpaths from ctx2 matter when the .hgsub file has been
    # modified (in ctx2) but not yet committed (in ctx1).
    owner = {}
    for path in ctx2.substate:
        owner[path] = ctx2
    for path in ctx1.substate:
        owner[path] = ctx1
    for path, ctx in sorted(owner.iteritems()):
        yield path, ctx.sub(path)
207 |
|
207 | |||
def subrepo(ctx, path):
    """return instance of the right subrepo class for subrepo in path"""
    # subrepo inherently violates our import layering rules
    # because it wants to make repo objects from deep inside the stack
    # so we manually delay the circular imports to not break
    # scripts that don't use our demand-loading
    global hg
    import hg as h
    hg = h

    # reject paths escaping the repository (symlinks, '..', nested .hg)
    util.path_auditor(ctx._repo.root)(path)
    state = ctx.substate.get(path, nullstate)
    if state[2] not in types:
        raise util.Abort(_('unknown subrepo type %s') % state[2])
    # instantiate the matching subrepo class with (source, revision)
    return types[state[2]](ctx, path, state[:2])
223 |
|
223 | |||
224 | # subrepo classes need to implement the following abstract class: |
|
224 | # subrepo classes need to implement the following abstract class: | |
225 |
|
225 | |||
class abstractsubrepo(object):
    """Abstract base class defining the interface subrepo classes implement.

    Methods that raise NotImplementedError are mandatory; the others
    provide neutral defaults for subrepo kinds that do not support the
    operation.
    """

    def dirty(self):
        """returns true if the dirstate of the subrepo does not match
        current stored state
        """
        raise NotImplementedError

    def checknested(self, path):
        """check if path is a subrepository within this repository"""
        return False

    def commit(self, text, user, date):
        """commit the current changes to the subrepo with the given
        log message. Use given user and date if possible. Return the
        new state of the subrepo.
        """
        raise NotImplementedError

    def remove(self):
        """remove the subrepo

        (should verify the dirstate is not dirty first)
        """
        raise NotImplementedError

    def get(self, state):
        """run whatever commands are needed to put the subrepo into
        this state
        """
        raise NotImplementedError

    def merge(self, state):
        """merge currently-saved state with the new state."""
        raise NotImplementedError

    def push(self, force):
        """perform whatever action is analogous to 'hg push'

        This may be a no-op on some systems.
        """
        raise NotImplementedError

    def add(self, ui, match, dryrun, prefix):
        """add matched files; default: nothing added"""
        return []

    def status(self, rev2, **opts):
        """return status tuple; default: seven empty lists"""
        return [], [], [], [], [], [], []

    def diff(self, diffopts, node2, match, prefix, **opts):
        """show a diff of the subrepo; default: no output"""
        pass

    def outgoing(self, ui, dest, opts):
        """show outgoing changesets; default return code 1 (none found)"""
        return 1

    def incoming(self, ui, source, opts):
        """show incoming changesets; default return code 1 (none found)"""
        return 1

    def files(self):
        """return filename iterator"""
        raise NotImplementedError

    def filedata(self, name):
        """return file data"""
        raise NotImplementedError

    def fileflags(self, name):
        """return file flags"""
        return ''

    def archive(self, archiver, prefix):
        """add this subrepo's files to an archive under the given prefix"""
        for name in self.files():
            flags = self.fileflags(name)
            # 'x' flag means executable; 'l' means symlink
            mode = 'x' in flags and 0755 or 0644
            symlink = 'l' in flags
            archiver.addfile(os.path.join(prefix, self._path, name),
                             mode, symlink, self.filedata(name))
303 |
|
303 | |||
304 |
|
304 | |||
305 | class hgsubrepo(abstractsubrepo): |
|
305 | class hgsubrepo(abstractsubrepo): | |
    def __init__(self, ctx, path, state):
        """Set up (and, if needed, create) the nested hg repository.

        ctx: parent changectx; path: subrepo path relative to the parent
        root; state: (source, revision) pair from .hgsub/.hgsubstate.
        """
        self._path = path
        self._state = state
        r = ctx._repo
        root = r.wjoin(path)
        create = False
        if not os.path.exists(os.path.join(root, '.hg')):
            # no repository here yet: make one on disk below
            create = True
            util.makedirs(root)
        self._repo = hg.repository(r.ui, root, create=create)
        # link back to the parent so reporelpath/_abssource can walk up
        self._repo._subparent = r
        self._repo._subsource = state[0]

        if create:
            # seed the fresh subrepo's hgrc with default pull/push paths
            fp = self._repo.opener("hgrc", "w", text=True)
            fp.write('[paths]\n')

            def addpathconfig(key, value):
                if value:
                    fp.write('%s = %s\n' % (key, value))
                    self._repo.ui.setconfig('paths', key, value)

            defpath = _abssource(self._repo, abort=False)
            defpushpath = _abssource(self._repo, True, abort=False)
            addpathconfig('default', defpath)
            # only record default-push when it differs from default
            if defpath != defpushpath:
                addpathconfig('default-push', defpushpath)
            fp.close()
334 |
|
334 | |||
335 | def add(self, ui, match, dryrun, prefix): |
|
335 | def add(self, ui, match, dryrun, prefix): | |
336 | return cmdutil.add(ui, self._repo, match, dryrun, True, |
|
336 | return cmdutil.add(ui, self._repo, match, dryrun, True, | |
337 | os.path.join(prefix, self._path)) |
|
337 | os.path.join(prefix, self._path)) | |
338 |
|
338 | |||
339 | def status(self, rev2, **opts): |
|
339 | def status(self, rev2, **opts): | |
340 | try: |
|
340 | try: | |
341 | rev1 = self._state[1] |
|
341 | rev1 = self._state[1] | |
342 | ctx1 = self._repo[rev1] |
|
342 | ctx1 = self._repo[rev1] | |
343 | ctx2 = self._repo[rev2] |
|
343 | ctx2 = self._repo[rev2] | |
344 | return self._repo.status(ctx1, ctx2, **opts) |
|
344 | return self._repo.status(ctx1, ctx2, **opts) | |
345 | except error.RepoLookupError, inst: |
|
345 | except error.RepoLookupError, inst: | |
346 | self._repo.ui.warn(_('warning: error "%s" in subrepository "%s"\n') |
|
346 | self._repo.ui.warn(_('warning: error "%s" in subrepository "%s"\n') | |
347 | % (inst, subrelpath(self))) |
|
347 | % (inst, subrelpath(self))) | |
348 | return [], [], [], [], [], [], [] |
|
348 | return [], [], [], [], [], [], [] | |
349 |
|
349 | |||
350 | def diff(self, diffopts, node2, match, prefix, **opts): |
|
350 | def diff(self, diffopts, node2, match, prefix, **opts): | |
351 | try: |
|
351 | try: | |
352 | node1 = node.bin(self._state[1]) |
|
352 | node1 = node.bin(self._state[1]) | |
353 | # We currently expect node2 to come from substate and be |
|
353 | # We currently expect node2 to come from substate and be | |
354 | # in hex format |
|
354 | # in hex format | |
355 | if node2 is not None: |
|
355 | if node2 is not None: | |
356 | node2 = node.bin(node2) |
|
356 | node2 = node.bin(node2) | |
357 | cmdutil.diffordiffstat(self._repo.ui, self._repo, diffopts, |
|
357 | cmdutil.diffordiffstat(self._repo.ui, self._repo, diffopts, | |
358 | node1, node2, match, |
|
358 | node1, node2, match, | |
359 | prefix=os.path.join(prefix, self._path), |
|
359 | prefix=os.path.join(prefix, self._path), | |
360 | listsubrepos=True, **opts) |
|
360 | listsubrepos=True, **opts) | |
361 | except error.RepoLookupError, inst: |
|
361 | except error.RepoLookupError, inst: | |
362 | self._repo.ui.warn(_('warning: error "%s" in subrepository "%s"\n') |
|
362 | self._repo.ui.warn(_('warning: error "%s" in subrepository "%s"\n') | |
363 | % (inst, subrelpath(self))) |
|
363 | % (inst, subrelpath(self))) | |
364 |
|
364 | |||
365 | def archive(self, archiver, prefix): |
|
365 | def archive(self, archiver, prefix): | |
366 | abstractsubrepo.archive(self, archiver, prefix) |
|
366 | abstractsubrepo.archive(self, archiver, prefix) | |
367 |
|
367 | |||
368 | rev = self._state[1] |
|
368 | rev = self._state[1] | |
369 | ctx = self._repo[rev] |
|
369 | ctx = self._repo[rev] | |
370 | for subpath in ctx.substate: |
|
370 | for subpath in ctx.substate: | |
371 | s = subrepo(ctx, subpath) |
|
371 | s = subrepo(ctx, subpath) | |
372 | s.archive(archiver, os.path.join(prefix, self._path)) |
|
372 | s.archive(archiver, os.path.join(prefix, self._path)) | |
373 |
|
373 | |||
374 | def dirty(self): |
|
374 | def dirty(self): | |
375 | r = self._state[1] |
|
375 | r = self._state[1] | |
376 | if r == '': |
|
376 | if r == '': | |
377 | return True |
|
377 | return True | |
378 | w = self._repo[None] |
|
378 | w = self._repo[None] | |
379 | if w.p1() != self._repo[r]: # version checked out change |
|
379 | if w.p1() != self._repo[r]: # version checked out change | |
380 | return True |
|
380 | return True | |
381 | return w.dirty() # working directory changed |
|
381 | return w.dirty() # working directory changed | |
382 |
|
382 | |||
383 | def checknested(self, path): |
|
383 | def checknested(self, path): | |
384 | return self._repo._checknested(self._repo.wjoin(path)) |
|
384 | return self._repo._checknested(self._repo.wjoin(path)) | |
385 |
|
385 | |||
386 | def commit(self, text, user, date): |
|
386 | def commit(self, text, user, date): | |
387 | self._repo.ui.debug("committing subrepo %s\n" % subrelpath(self)) |
|
387 | self._repo.ui.debug("committing subrepo %s\n" % subrelpath(self)) | |
388 | n = self._repo.commit(text, user, date) |
|
388 | n = self._repo.commit(text, user, date) | |
389 | if not n: |
|
389 | if not n: | |
390 | return self._repo['.'].hex() # different version checked out |
|
390 | return self._repo['.'].hex() # different version checked out | |
391 | return node.hex(n) |
|
391 | return node.hex(n) | |
392 |
|
392 | |||
393 | def remove(self): |
|
393 | def remove(self): | |
394 | # we can't fully delete the repository as it may contain |
|
394 | # we can't fully delete the repository as it may contain | |
395 | # local-only history |
|
395 | # local-only history | |
396 | self._repo.ui.note(_('removing subrepo %s\n') % subrelpath(self)) |
|
396 | self._repo.ui.note(_('removing subrepo %s\n') % subrelpath(self)) | |
397 | hg.clean(self._repo, node.nullid, False) |
|
397 | hg.clean(self._repo, node.nullid, False) | |
398 |
|
398 | |||
399 | def _get(self, state): |
|
399 | def _get(self, state): | |
400 | source, revision, kind = state |
|
400 | source, revision, kind = state | |
401 | try: |
|
401 | try: | |
402 | self._repo.lookup(revision) |
|
402 | self._repo.lookup(revision) | |
403 | except error.RepoError: |
|
403 | except error.RepoError: | |
404 | self._repo._subsource = source |
|
404 | self._repo._subsource = source | |
405 | srcurl = _abssource(self._repo) |
|
405 | srcurl = _abssource(self._repo) | |
406 | self._repo.ui.status(_('pulling subrepo %s from %s\n') |
|
406 | self._repo.ui.status(_('pulling subrepo %s from %s\n') | |
407 | % (subrelpath(self), srcurl)) |
|
407 | % (subrelpath(self), srcurl)) | |
408 | other = hg.repository(self._repo.ui, srcurl) |
|
408 | other = hg.repository(self._repo.ui, srcurl) | |
409 | self._repo.pull(other) |
|
409 | self._repo.pull(other) | |
410 |
|
410 | |||
411 | def get(self, state): |
|
411 | def get(self, state): | |
412 | self._get(state) |
|
412 | self._get(state) | |
413 | source, revision, kind = state |
|
413 | source, revision, kind = state | |
414 | self._repo.ui.debug("getting subrepo %s\n" % self._path) |
|
414 | self._repo.ui.debug("getting subrepo %s\n" % self._path) | |
415 | hg.clean(self._repo, revision, False) |
|
415 | hg.clean(self._repo, revision, False) | |
416 |
|
416 | |||
417 | def merge(self, state): |
|
417 | def merge(self, state): | |
418 | self._get(state) |
|
418 | self._get(state) | |
419 | cur = self._repo['.'] |
|
419 | cur = self._repo['.'] | |
420 | dst = self._repo[state[1]] |
|
420 | dst = self._repo[state[1]] | |
421 | anc = dst.ancestor(cur) |
|
421 | anc = dst.ancestor(cur) | |
422 | if anc == cur: |
|
422 | if anc == cur: | |
423 | self._repo.ui.debug("updating subrepo %s\n" % subrelpath(self)) |
|
423 | self._repo.ui.debug("updating subrepo %s\n" % subrelpath(self)) | |
424 | hg.update(self._repo, state[1]) |
|
424 | hg.update(self._repo, state[1]) | |
425 | elif anc == dst: |
|
425 | elif anc == dst: | |
426 | self._repo.ui.debug("skipping subrepo %s\n" % subrelpath(self)) |
|
426 | self._repo.ui.debug("skipping subrepo %s\n" % subrelpath(self)) | |
427 | else: |
|
427 | else: | |
428 | self._repo.ui.debug("merging subrepo %s\n" % subrelpath(self)) |
|
428 | self._repo.ui.debug("merging subrepo %s\n" % subrelpath(self)) | |
429 | hg.merge(self._repo, state[1], remind=False) |
|
429 | hg.merge(self._repo, state[1], remind=False) | |
430 |
|
430 | |||
431 | def push(self, force): |
|
431 | def push(self, force): | |
432 | # push subrepos depth-first for coherent ordering |
|
432 | # push subrepos depth-first for coherent ordering | |
433 | c = self._repo[''] |
|
433 | c = self._repo[''] | |
434 | subs = c.substate # only repos that are committed |
|
434 | subs = c.substate # only repos that are committed | |
435 | for s in sorted(subs): |
|
435 | for s in sorted(subs): | |
436 | if not c.sub(s).push(force): |
|
436 | if not c.sub(s).push(force): | |
437 | return False |
|
437 | return False | |
438 |
|
438 | |||
439 | dsturl = _abssource(self._repo, True) |
|
439 | dsturl = _abssource(self._repo, True) | |
440 | self._repo.ui.status(_('pushing subrepo %s to %s\n') % |
|
440 | self._repo.ui.status(_('pushing subrepo %s to %s\n') % | |
441 | (subrelpath(self), dsturl)) |
|
441 | (subrelpath(self), dsturl)) | |
442 | other = hg.repository(self._repo.ui, dsturl) |
|
442 | other = hg.repository(self._repo.ui, dsturl) | |
443 | return self._repo.push(other, force) |
|
443 | return self._repo.push(other, force) | |
444 |
|
444 | |||
445 | def outgoing(self, ui, dest, opts): |
|
445 | def outgoing(self, ui, dest, opts): | |
446 | return hg.outgoing(ui, self._repo, _abssource(self._repo, True), opts) |
|
446 | return hg.outgoing(ui, self._repo, _abssource(self._repo, True), opts) | |
447 |
|
447 | |||
448 | def incoming(self, ui, source, opts): |
|
448 | def incoming(self, ui, source, opts): | |
449 | return hg.incoming(ui, self._repo, _abssource(self._repo, False), opts) |
|
449 | return hg.incoming(ui, self._repo, _abssource(self._repo, False), opts) | |
450 |
|
450 | |||
451 | def files(self): |
|
451 | def files(self): | |
452 | rev = self._state[1] |
|
452 | rev = self._state[1] | |
453 | ctx = self._repo[rev] |
|
453 | ctx = self._repo[rev] | |
454 | return ctx.manifest() |
|
454 | return ctx.manifest() | |
455 |
|
455 | |||
456 | def filedata(self, name): |
|
456 | def filedata(self, name): | |
457 | rev = self._state[1] |
|
457 | rev = self._state[1] | |
458 | return self._repo[rev][name].data() |
|
458 | return self._repo[rev][name].data() | |
459 |
|
459 | |||
460 | def fileflags(self, name): |
|
460 | def fileflags(self, name): | |
461 | rev = self._state[1] |
|
461 | rev = self._state[1] | |
462 | ctx = self._repo[rev] |
|
462 | ctx = self._repo[rev] | |
463 | return ctx.flags(name) |
|
463 | return ctx.flags(name) | |
464 |
|
464 | |||
465 |
|
465 | |||
466 | class svnsubrepo(abstractsubrepo): |
|
466 | class svnsubrepo(abstractsubrepo): | |
467 | def __init__(self, ctx, path, state): |
|
467 | def __init__(self, ctx, path, state): | |
468 | self._path = path |
|
468 | self._path = path | |
469 | self._state = state |
|
469 | self._state = state | |
470 | self._ctx = ctx |
|
470 | self._ctx = ctx | |
471 | self._ui = ctx._repo.ui |
|
471 | self._ui = ctx._repo.ui | |
472 |
|
472 | |||
473 | def _svncommand(self, commands, filename=''): |
|
473 | def _svncommand(self, commands, filename=''): | |
474 | path = os.path.join(self._ctx._repo.origroot, self._path, filename) |
|
474 | path = os.path.join(self._ctx._repo.origroot, self._path, filename) | |
475 | cmd = ['svn'] + commands + [path] |
|
475 | cmd = ['svn'] + commands + [path] | |
476 | cmd = [util.shellquote(arg) for arg in cmd] |
|
476 | cmd = [util.shellquote(arg) for arg in cmd] | |
477 | cmd = util.quotecommand(' '.join(cmd)) |
|
477 | cmd = util.quotecommand(' '.join(cmd)) | |
478 | env = dict(os.environ) |
|
478 | env = dict(os.environ) | |
479 | # Avoid localized output, preserve current locale for everything else. |
|
479 | # Avoid localized output, preserve current locale for everything else. | |
480 | env['LC_MESSAGES'] = 'C' |
|
480 | env['LC_MESSAGES'] = 'C' | |
481 | write, read, err = util.popen3(cmd, env=env, newlines=True) |
|
481 | write, read, err = util.popen3(cmd, env=env, newlines=True) | |
482 | retdata = read.read() |
|
482 | retdata = read.read() | |
483 | err = err.read().strip() |
|
483 | err = err.read().strip() | |
484 | if err: |
|
484 | if err: | |
485 | raise util.Abort(err) |
|
485 | raise util.Abort(err) | |
486 | return retdata |
|
486 | return retdata | |
487 |
|
487 | |||
488 | def _wcrev(self): |
|
488 | def _wcrev(self): | |
489 | output = self._svncommand(['info', '--xml']) |
|
489 | output = self._svncommand(['info', '--xml']) | |
490 | doc = xml.dom.minidom.parseString(output) |
|
490 | doc = xml.dom.minidom.parseString(output) | |
491 | entries = doc.getElementsByTagName('entry') |
|
491 | entries = doc.getElementsByTagName('entry') | |
492 | if not entries: |
|
492 | if not entries: | |
493 | return 0 |
|
493 | return 0 | |
494 | return int(entries[0].getAttribute('revision') or 0) |
|
494 | return int(entries[0].getAttribute('revision') or 0) | |
495 |
|
495 | |||
496 | def _wcchanged(self): |
|
496 | def _wcchanged(self): | |
497 | """Return (changes, extchanges) where changes is True |
|
497 | """Return (changes, extchanges) where changes is True | |
498 | if the working directory was changed, and extchanges is |
|
498 | if the working directory was changed, and extchanges is | |
499 | True if any of these changes concern an external entry. |
|
499 | True if any of these changes concern an external entry. | |
500 | """ |
|
500 | """ | |
501 | output = self._svncommand(['status', '--xml']) |
|
501 | output = self._svncommand(['status', '--xml']) | |
502 | externals, changes = [], [] |
|
502 | externals, changes = [], [] | |
503 | doc = xml.dom.minidom.parseString(output) |
|
503 | doc = xml.dom.minidom.parseString(output) | |
504 | for e in doc.getElementsByTagName('entry'): |
|
504 | for e in doc.getElementsByTagName('entry'): | |
505 | s = e.getElementsByTagName('wc-status') |
|
505 | s = e.getElementsByTagName('wc-status') | |
506 | if not s: |
|
506 | if not s: | |
507 | continue |
|
507 | continue | |
508 | item = s[0].getAttribute('item') |
|
508 | item = s[0].getAttribute('item') | |
509 | props = s[0].getAttribute('props') |
|
509 | props = s[0].getAttribute('props') | |
510 | path = e.getAttribute('path') |
|
510 | path = e.getAttribute('path') | |
511 | if item == 'external': |
|
511 | if item == 'external': | |
512 | externals.append(path) |
|
512 | externals.append(path) | |
513 | if (item not in ('', 'normal', 'unversioned', 'external') |
|
513 | if (item not in ('', 'normal', 'unversioned', 'external') | |
514 | or props not in ('', 'none')): |
|
514 | or props not in ('', 'none')): | |
515 | changes.append(path) |
|
515 | changes.append(path) | |
516 | for path in changes: |
|
516 | for path in changes: | |
517 | for ext in externals: |
|
517 | for ext in externals: | |
518 | if path == ext or path.startswith(ext + os.sep): |
|
518 | if path == ext or path.startswith(ext + os.sep): | |
519 | return True, True |
|
519 | return True, True | |
520 | return bool(changes), False |
|
520 | return bool(changes), False | |
521 |
|
521 | |||
522 | def dirty(self): |
|
522 | def dirty(self): | |
523 | if self._wcrev() == self._state[1] and not self._wcchanged()[0]: |
|
523 | if self._wcrev() == self._state[1] and not self._wcchanged()[0]: | |
524 | return False |
|
524 | return False | |
525 | return True |
|
525 | return True | |
526 |
|
526 | |||
527 | def commit(self, text, user, date): |
|
527 | def commit(self, text, user, date): | |
528 | # user and date are out of our hands since svn is centralized |
|
528 | # user and date are out of our hands since svn is centralized | |
529 | changed, extchanged = self._wcchanged() |
|
529 | changed, extchanged = self._wcchanged() | |
530 | if not changed: |
|
530 | if not changed: | |
531 | return self._wcrev() |
|
531 | return self._wcrev() | |
532 | if extchanged: |
|
532 | if extchanged: | |
533 | # Do not try to commit externals |
|
533 | # Do not try to commit externals | |
534 | raise util.Abort(_('cannot commit svn externals')) |
|
534 | raise util.Abort(_('cannot commit svn externals')) | |
535 | commitinfo = self._svncommand(['commit', '-m', text]) |
|
535 | commitinfo = self._svncommand(['commit', '-m', text]) | |
536 | self._ui.status(commitinfo) |
|
536 | self._ui.status(commitinfo) | |
537 | newrev = re.search('Committed revision ([0-9]+).', commitinfo) |
|
537 | newrev = re.search('Committed revision ([0-9]+).', commitinfo) | |
538 | if not newrev: |
|
538 | if not newrev: | |
539 | raise util.Abort(commitinfo.splitlines()[-1]) |
|
539 | raise util.Abort(commitinfo.splitlines()[-1]) | |
540 | newrev = newrev.groups()[0] |
|
540 | newrev = newrev.groups()[0] | |
541 | self._ui.status(self._svncommand(['update', '-r', newrev])) |
|
541 | self._ui.status(self._svncommand(['update', '-r', newrev])) | |
542 | return newrev |
|
542 | return newrev | |
543 |
|
543 | |||
544 | def remove(self): |
|
544 | def remove(self): | |
545 | if self.dirty(): |
|
545 | if self.dirty(): | |
546 | self._ui.warn(_('not removing repo %s because ' |
|
546 | self._ui.warn(_('not removing repo %s because ' | |
547 | 'it has changes.\n' % self._path)) |
|
547 | 'it has changes.\n' % self._path)) | |
548 | return |
|
548 | return | |
549 | self._ui.note(_('removing subrepo %s\n') % self._path) |
|
549 | self._ui.note(_('removing subrepo %s\n') % self._path) | |
550 | shutil.rmtree(self._ctx.repo.join(self._path)) |
|
550 | shutil.rmtree(self._ctx.repo.join(self._path)) | |
551 |
|
551 | |||
552 | def get(self, state): |
|
552 | def get(self, state): | |
553 | status = self._svncommand(['checkout', state[0], '--revision', state[1]]) |
|
553 | status = self._svncommand(['checkout', state[0], '--revision', state[1]]) | |
554 | if not re.search('Checked out revision [0-9]+.', status): |
|
554 | if not re.search('Checked out revision [0-9]+.', status): | |
555 | raise util.Abort(status.splitlines()[-1]) |
|
555 | raise util.Abort(status.splitlines()[-1]) | |
556 | self._ui.status(status) |
|
556 | self._ui.status(status) | |
557 |
|
557 | |||
558 | def merge(self, state): |
|
558 | def merge(self, state): | |
559 | old = int(self._state[1]) |
|
559 | old = int(self._state[1]) | |
560 | new = int(state[1]) |
|
560 | new = int(state[1]) | |
561 | if new > old: |
|
561 | if new > old: | |
562 | self.get(state) |
|
562 | self.get(state) | |
563 |
|
563 | |||
564 | def push(self, force): |
|
564 | def push(self, force): | |
565 | # push is a no-op for SVN |
|
565 | # push is a no-op for SVN | |
566 | return True |
|
566 | return True | |
567 |
|
567 | |||
568 | def files(self): |
|
568 | def files(self): | |
569 | output = self._svncommand(['list']) |
|
569 | output = self._svncommand(['list']) | |
570 | # This works because svn forbids \n in filenames. |
|
570 | # This works because svn forbids \n in filenames. | |
571 | return output.splitlines() |
|
571 | return output.splitlines() | |
572 |
|
572 | |||
573 | def filedata(self, name): |
|
573 | def filedata(self, name): | |
574 | return self._svncommand(['cat'], name) |
|
574 | return self._svncommand(['cat'], name) | |
575 |
|
575 | |||
576 |
|
576 | |||
577 | types = { |
|
577 | types = { | |
578 | 'hg': hgsubrepo, |
|
578 | 'hg': hgsubrepo, | |
579 | 'svn': svnsubrepo, |
|
579 | 'svn': svnsubrepo, | |
580 | } |
|
580 | } |
@@ -1,698 +1,698 | |||||
1 | # url.py - HTTP handling for mercurial |
|
1 | # url.py - HTTP handling for mercurial | |
2 | # |
|
2 | # | |
3 | # Copyright 2005, 2006, 2007, 2008 Matt Mackall <mpm@selenic.com> |
|
3 | # Copyright 2005, 2006, 2007, 2008 Matt Mackall <mpm@selenic.com> | |
4 | # Copyright 2006, 2007 Alexis S. L. Carvalho <alexis@cecm.usp.br> |
|
4 | # Copyright 2006, 2007 Alexis S. L. Carvalho <alexis@cecm.usp.br> | |
5 | # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com> |
|
5 | # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com> | |
6 | # |
|
6 | # | |
7 | # This software may be used and distributed according to the terms of the |
|
7 | # This software may be used and distributed according to the terms of the | |
8 | # GNU General Public License version 2 or any later version. |
|
8 | # GNU General Public License version 2 or any later version. | |
9 |
|
9 | |||
10 | import urllib, urllib2, urlparse, httplib, os, re, socket, cStringIO |
|
10 | import urllib, urllib2, urlparse, httplib, os, re, socket, cStringIO | |
11 | import __builtin__ |
|
11 | import __builtin__ | |
12 | from i18n import _ |
|
12 | from i18n import _ | |
13 | import keepalive, util |
|
13 | import keepalive, util | |
14 |
|
14 | |||
15 | def _urlunparse(scheme, netloc, path, params, query, fragment, url): |
|
15 | def _urlunparse(scheme, netloc, path, params, query, fragment, url): | |
16 | '''Handle cases where urlunparse(urlparse(x://)) doesn't preserve the "//"''' |
|
16 | '''Handle cases where urlunparse(urlparse(x://)) doesn't preserve the "//"''' | |
17 | result = urlparse.urlunparse((scheme, netloc, path, params, query, fragment)) |
|
17 | result = urlparse.urlunparse((scheme, netloc, path, params, query, fragment)) | |
18 | if (scheme and |
|
18 | if (scheme and | |
19 | result.startswith(scheme + ':') and |
|
19 | result.startswith(scheme + ':') and | |
20 | not result.startswith(scheme + '://') and |
|
20 | not result.startswith(scheme + '://') and | |
21 | url.startswith(scheme + '://') |
|
21 | url.startswith(scheme + '://') | |
22 | ): |
|
22 | ): | |
23 | result = scheme + '://' + result[len(scheme + ':'):] |
|
23 | result = scheme + '://' + result[len(scheme + ':'):] | |
24 | return result |
|
24 | return result | |
25 |
|
25 | |||
26 | def hidepassword(url): |
|
26 | def hidepassword(url): | |
27 | '''hide user credential in a url string''' |
|
27 | '''hide user credential in a url string''' | |
28 | scheme, netloc, path, params, query, fragment = urlparse.urlparse(url) |
|
28 | scheme, netloc, path, params, query, fragment = urlparse.urlparse(url) | |
29 | netloc = re.sub('([^:]*):([^@]*)@(.*)', r'\1:***@\3', netloc) |
|
29 | netloc = re.sub('([^:]*):([^@]*)@(.*)', r'\1:***@\3', netloc) | |
30 | return _urlunparse(scheme, netloc, path, params, query, fragment, url) |
|
30 | return _urlunparse(scheme, netloc, path, params, query, fragment, url) | |
31 |
|
31 | |||
32 | def removeauth(url): |
|
32 | def removeauth(url): | |
33 | '''remove all authentication information from a url string''' |
|
33 | '''remove all authentication information from a url string''' | |
34 | scheme, netloc, path, params, query, fragment = urlparse.urlparse(url) |
|
34 | scheme, netloc, path, params, query, fragment = urlparse.urlparse(url) | |
35 | netloc = netloc[netloc.find('@')+1:] |
|
35 | netloc = netloc[netloc.find('@')+1:] | |
36 | return _urlunparse(scheme, netloc, path, params, query, fragment, url) |
|
36 | return _urlunparse(scheme, netloc, path, params, query, fragment, url) | |
37 |
|
37 | |||
38 | def netlocsplit(netloc): |
|
38 | def netlocsplit(netloc): | |
39 | '''split [user[:passwd]@]host[:port] into 4-tuple.''' |
|
39 | '''split [user[:passwd]@]host[:port] into 4-tuple.''' | |
40 |
|
40 | |||
41 | a = netloc.find('@') |
|
41 | a = netloc.find('@') | |
42 | if a == -1: |
|
42 | if a == -1: | |
43 | user, passwd = None, None |
|
43 | user, passwd = None, None | |
44 | else: |
|
44 | else: | |
45 | userpass, netloc = netloc[:a], netloc[a + 1:] |
|
45 | userpass, netloc = netloc[:a], netloc[a + 1:] | |
46 | c = userpass.find(':') |
|
46 | c = userpass.find(':') | |
47 | if c == -1: |
|
47 | if c == -1: | |
48 | user, passwd = urllib.unquote(userpass), None |
|
48 | user, passwd = urllib.unquote(userpass), None | |
49 | else: |
|
49 | else: | |
50 | user = urllib.unquote(userpass[:c]) |
|
50 | user = urllib.unquote(userpass[:c]) | |
51 | passwd = urllib.unquote(userpass[c + 1:]) |
|
51 | passwd = urllib.unquote(userpass[c + 1:]) | |
52 | c = netloc.find(':') |
|
52 | c = netloc.find(':') | |
53 | if c == -1: |
|
53 | if c == -1: | |
54 | host, port = netloc, None |
|
54 | host, port = netloc, None | |
55 | else: |
|
55 | else: | |
56 | host, port = netloc[:c], netloc[c + 1:] |
|
56 | host, port = netloc[:c], netloc[c + 1:] | |
57 | return host, port, user, passwd |
|
57 | return host, port, user, passwd | |
58 |
|
58 | |||
59 | def netlocunsplit(host, port, user=None, passwd=None): |
|
59 | def netlocunsplit(host, port, user=None, passwd=None): | |
60 | '''turn host, port, user, passwd into [user[:passwd]@]host[:port].''' |
|
60 | '''turn host, port, user, passwd into [user[:passwd]@]host[:port].''' | |
61 | if port: |
|
61 | if port: | |
62 | hostport = host + ':' + port |
|
62 | hostport = host + ':' + port | |
63 | else: |
|
63 | else: | |
64 | hostport = host |
|
64 | hostport = host | |
65 | if user: |
|
65 | if user: | |
66 | quote = lambda s: urllib.quote(s, safe='') |
|
66 | quote = lambda s: urllib.quote(s, safe='') | |
67 | if passwd: |
|
67 | if passwd: | |
68 | userpass = quote(user) + ':' + quote(passwd) |
|
68 | userpass = quote(user) + ':' + quote(passwd) | |
69 | else: |
|
69 | else: | |
70 | userpass = quote(user) |
|
70 | userpass = quote(user) | |
71 | return userpass + '@' + hostport |
|
71 | return userpass + '@' + hostport | |
72 | return hostport |
|
72 | return hostport | |
73 |
|
73 | |||
74 | _safe = ('abcdefghijklmnopqrstuvwxyz' |
|
74 | _safe = ('abcdefghijklmnopqrstuvwxyz' | |
75 | 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' |
|
75 | 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' | |
76 | '0123456789' '_.-/') |
|
76 | '0123456789' '_.-/') | |
77 | _safeset = None |
|
77 | _safeset = None | |
78 | _hex = None |
|
78 | _hex = None | |
79 | def quotepath(path): |
|
79 | def quotepath(path): | |
80 | '''quote the path part of a URL |
|
80 | '''quote the path part of a URL | |
81 |
|
81 | |||
82 | This is similar to urllib.quote, but it also tries to avoid |
|
82 | This is similar to urllib.quote, but it also tries to avoid | |
83 | quoting things twice (inspired by wget): |
|
83 | quoting things twice (inspired by wget): | |
84 |
|
84 | |||
85 | >>> quotepath('abc def') |
|
85 | >>> quotepath('abc def') | |
86 | 'abc%20def' |
|
86 | 'abc%20def' | |
87 | >>> quotepath('abc%20def') |
|
87 | >>> quotepath('abc%20def') | |
88 | 'abc%20def' |
|
88 | 'abc%20def' | |
89 | >>> quotepath('abc%20 def') |
|
89 | >>> quotepath('abc%20 def') | |
90 | 'abc%20%20def' |
|
90 | 'abc%20%20def' | |
91 | >>> quotepath('abc def%20') |
|
91 | >>> quotepath('abc def%20') | |
92 | 'abc%20def%20' |
|
92 | 'abc%20def%20' | |
93 | >>> quotepath('abc def%2') |
|
93 | >>> quotepath('abc def%2') | |
94 | 'abc%20def%252' |
|
94 | 'abc%20def%252' | |
95 | >>> quotepath('abc def%') |
|
95 | >>> quotepath('abc def%') | |
96 | 'abc%20def%25' |
|
96 | 'abc%20def%25' | |
97 | ''' |
|
97 | ''' | |
98 | global _safeset, _hex |
|
98 | global _safeset, _hex | |
99 | if _safeset is None: |
|
99 | if _safeset is None: | |
100 | _safeset = set(_safe) |
|
100 | _safeset = set(_safe) | |
101 | _hex = set('abcdefABCDEF0123456789') |
|
101 | _hex = set('abcdefABCDEF0123456789') | |
102 | l = list(path) |
|
102 | l = list(path) | |
103 | for i in xrange(len(l)): |
|
103 | for i in xrange(len(l)): | |
104 | c = l[i] |
|
104 | c = l[i] | |
105 | if (c == '%' and i + 2 < len(l) and |
|
105 | if (c == '%' and i + 2 < len(l) and | |
106 | l[i + 1] in _hex and l[i + 2] in _hex): |
|
106 | l[i + 1] in _hex and l[i + 2] in _hex): | |
107 | pass |
|
107 | pass | |
108 | elif c not in _safeset: |
|
108 | elif c not in _safeset: | |
109 | l[i] = '%%%02X' % ord(c) |
|
109 | l[i] = '%%%02X' % ord(c) | |
110 | return ''.join(l) |
|
110 | return ''.join(l) | |
111 |
|
111 | |||
112 | class passwordmgr(urllib2.HTTPPasswordMgrWithDefaultRealm): |
|
112 | class passwordmgr(urllib2.HTTPPasswordMgrWithDefaultRealm): | |
113 | def __init__(self, ui): |
|
113 | def __init__(self, ui): | |
114 | urllib2.HTTPPasswordMgrWithDefaultRealm.__init__(self) |
|
114 | urllib2.HTTPPasswordMgrWithDefaultRealm.__init__(self) | |
115 | self.ui = ui |
|
115 | self.ui = ui | |
116 |
|
116 | |||
117 | def find_user_password(self, realm, authuri): |
|
117 | def find_user_password(self, realm, authuri): | |
118 | authinfo = urllib2.HTTPPasswordMgrWithDefaultRealm.find_user_password( |
|
118 | authinfo = urllib2.HTTPPasswordMgrWithDefaultRealm.find_user_password( | |
119 | self, realm, authuri) |
|
119 | self, realm, authuri) | |
120 | user, passwd = authinfo |
|
120 | user, passwd = authinfo | |
121 | if user and passwd: |
|
121 | if user and passwd: | |
122 | self._writedebug(user, passwd) |
|
122 | self._writedebug(user, passwd) | |
123 | return (user, passwd) |
|
123 | return (user, passwd) | |
124 |
|
124 | |||
125 | if not user: |
|
125 | if not user: | |
126 | auth = self.readauthtoken(authuri) |
|
126 | auth = self.readauthtoken(authuri) | |
127 | if auth: |
|
127 | if auth: | |
128 | user, passwd = auth.get('username'), auth.get('password') |
|
128 | user, passwd = auth.get('username'), auth.get('password') | |
129 | if not user or not passwd: |
|
129 | if not user or not passwd: | |
130 | if not self.ui.interactive(): |
|
130 | if not self.ui.interactive(): | |
131 | raise util.Abort(_('http authorization required')) |
|
131 | raise util.Abort(_('http authorization required')) | |
132 |
|
132 | |||
133 | self.ui.write(_("http authorization required\n")) |
|
133 | self.ui.write(_("http authorization required\n")) | |
134 | self.ui.status(_("realm: %s\n") % realm) |
|
134 | self.ui.status(_("realm: %s\n") % realm) | |
135 | if user: |
|
135 | if user: | |
136 | self.ui.status(_("user: %s\n") % user) |
|
136 | self.ui.status(_("user: %s\n") % user) | |
137 | else: |
|
137 | else: | |
138 | user = self.ui.prompt(_("user:"), default=None) |
|
138 | user = self.ui.prompt(_("user:"), default=None) | |
139 |
|
139 | |||
140 | if not passwd: |
|
140 | if not passwd: | |
141 | passwd = self.ui.getpass() |
|
141 | passwd = self.ui.getpass() | |
142 |
|
142 | |||
143 | self.add_password(realm, authuri, user, passwd) |
|
143 | self.add_password(realm, authuri, user, passwd) | |
144 | self._writedebug(user, passwd) |
|
144 | self._writedebug(user, passwd) | |
145 | return (user, passwd) |
|
145 | return (user, passwd) | |
146 |
|
146 | |||
147 | def _writedebug(self, user, passwd): |
|
147 | def _writedebug(self, user, passwd): | |
148 | msg = _('http auth: user %s, password %s\n') |
|
148 | msg = _('http auth: user %s, password %s\n') | |
149 | self.ui.debug(msg % (user, passwd and '*' * len(passwd) or 'not set')) |
|
149 | self.ui.debug(msg % (user, passwd and '*' * len(passwd) or 'not set')) | |
150 |
|
150 | |||
151 | def readauthtoken(self, uri): |
|
151 | def readauthtoken(self, uri): | |
152 | # Read configuration |
|
152 | # Read configuration | |
153 | config = dict() |
|
153 | config = dict() | |
154 | for key, val in self.ui.configitems('auth'): |
|
154 | for key, val in self.ui.configitems('auth'): | |
155 | if '.' not in key: |
|
155 | if '.' not in key: | |
156 | self.ui.warn(_("ignoring invalid [auth] key '%s'\n") % key) |
|
156 | self.ui.warn(_("ignoring invalid [auth] key '%s'\n") % key) | |
157 | continue |
|
157 | continue | |
158 | group, setting = key.split('.', 1) |
|
158 | group, setting = key.split('.', 1) | |
159 | gdict = config.setdefault(group, dict()) |
|
159 | gdict = config.setdefault(group, dict()) | |
160 | if setting in ('username', 'cert', 'key'): |
|
160 | if setting in ('username', 'cert', 'key'): | |
161 | val = util.expandpath(val) |
|
161 | val = util.expandpath(val) | |
162 | gdict[setting] = val |
|
162 | gdict[setting] = val | |
163 |
|
163 | |||
164 | # Find the best match |
|
164 | # Find the best match | |
165 | scheme, hostpath = uri.split('://', 1) |
|
165 | scheme, hostpath = uri.split('://', 1) | |
166 | bestlen = 0 |
|
166 | bestlen = 0 | |
167 | bestauth = None |
|
167 | bestauth = None | |
168 | for auth in config.itervalues(): |
|
168 | for auth in config.itervalues(): | |
169 | prefix = auth.get('prefix') |
|
169 | prefix = auth.get('prefix') | |
170 | if not prefix: |
|
170 | if not prefix: | |
171 | continue |
|
171 | continue | |
172 | p = prefix.split('://', 1) |
|
172 | p = prefix.split('://', 1) | |
173 | if len(p) > 1: |
|
173 | if len(p) > 1: | |
174 | schemes, prefix = [p[0]], p[1] |
|
174 | schemes, prefix = [p[0]], p[1] | |
175 | else: |
|
175 | else: | |
176 | schemes = (auth.get('schemes') or 'https').split() |
|
176 | schemes = (auth.get('schemes') or 'https').split() | |
177 | if (prefix == '*' or hostpath.startswith(prefix)) and \ |
|
177 | if (prefix == '*' or hostpath.startswith(prefix)) and \ | |
178 | len(prefix) > bestlen and scheme in schemes: |
|
178 | len(prefix) > bestlen and scheme in schemes: | |
179 | bestlen = len(prefix) |
|
179 | bestlen = len(prefix) | |
180 | bestauth = auth |
|
180 | bestauth = auth | |
181 | return bestauth |
|
181 | return bestauth | |
182 |
|
182 | |||
class proxyhandler(urllib2.ProxyHandler):
    """urllib2 proxy handler driven by Mercurial's [http_proxy] config
    section, falling back to the ``http_proxy`` environment variable."""

    def __init__(self, ui):
        proxyurl = ui.config("http_proxy", "host") or os.getenv('http_proxy')
        # XXX proxyauthinfo = None

        if proxyurl:
            # proxy can be proper url or host[:port]
            if not (proxyurl.startswith('http:') or
                    proxyurl.startswith('https:')):
                proxyurl = 'http://' + proxyurl + '/'
            snpqf = urlparse.urlsplit(proxyurl)
            proxyscheme, proxynetloc, proxypath, proxyquery, proxyfrag = snpqf
            hpup = netlocsplit(proxynetloc)

            proxyhost, proxyport, proxyuser, proxypasswd = hpup
            # Credentials embedded in the proxy URL win over the
            # separately configured user/passwd settings.
            if not proxyuser:
                proxyuser = ui.config("http_proxy", "user")
                proxypasswd = ui.config("http_proxy", "passwd")

            # see if we should use a proxy for this url
            no_list = ["localhost", "127.0.0.1"]
            no_list.extend([p.lower() for
                            p in ui.configlist("http_proxy", "no")])
            no_list.extend([p.strip().lower() for
                            p in os.getenv("no_proxy", '').split(',')
                            if p.strip()])
            # "http_proxy.always" config is for running tests on localhost
            if ui.configbool("http_proxy", "always"):
                self.no_list = []
            else:
                self.no_list = no_list

            # Rebuild the proxy URL with the (possibly updated)
            # credentials folded back into the netloc.
            proxyurl = urlparse.urlunsplit((
                proxyscheme, netlocunsplit(proxyhost, proxyport,
                                           proxyuser, proxypasswd or ''),
                proxypath, proxyquery, proxyfrag))
            proxies = {'http': proxyurl, 'https': proxyurl}
            ui.debug('proxying through http://%s:%s\n' %
                      (proxyhost, proxyport))
        else:
            # NOTE(review): self.no_list is only assigned when a proxy is
            # configured; proxy_open appears unreachable otherwise since
            # the proxies map is empty — confirm.
            proxies = {}

        # urllib2 takes proxy values from the environment and those
        # will take precedence if found, so drop them
        for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
            try:
                if env in os.environ:
                    del os.environ[env]
            except OSError:
                pass

        urllib2.ProxyHandler.__init__(self, proxies)
        self.ui = ui

    def proxy_open(self, req, proxy, type_):
        # Hosts on the no-proxy list connect directly; returning None
        # lets urllib2 fall through to the next handler.
        host = req.get_host().split(':')[0]
        if host in self.no_list:
            return None

        # work around a bug in Python < 2.4.2
        # (it leaves a "\n" at the end of Proxy-authorization headers)
        baseclass = req.__class__
        class _request(baseclass):
            def add_header(self, key, val):
                if key.lower() == 'proxy-authorization':
                    val = val.strip()
                return baseclass.add_header(self, key, val)
        req.__class__ = _request

        return urllib2.ProxyHandler.proxy_open(self, req, proxy, type_)
253 |
|
253 | |||
class httpsendfile(object):
    """File-object wrapper that knows its own length.

    Wraps the object returned by the builtin ``open`` so it can serve as
    an HTTP request body: ``__len__`` feeds the Content-Length header.
    """

    def __init__(self, *args, **kwargs):
        # This module defines its own "open" function, which shadows the
        # builtin, so reach for the builtin explicitly.
        self._data = __builtin__.open(*args, **kwargs)
        # Delegate the usual file operations straight to the real file.
        for meth in ('read', 'seek', 'close', 'write'):
            setattr(self, meth, getattr(self._data, meth))

    def __len__(self):
        # Current size on disk, via the underlying file descriptor.
        return os.fstat(self._data.fileno()).st_size
273 |
|
273 | |||
def _gen_sendfile(connection):
    """Build a ``send`` method for *connection* that can stream files.

    The returned function transmits ``httpsendfile`` instances chunk by
    chunk; any other payload is passed through unchanged.
    """
    def _sendfile(self, data):
        if not isinstance(data, httpsendfile):
            connection.send(self, data)
            return
        # If auth is required the body may be transmitted a second
        # time, so always rewind before streaming.
        data.seek(0)
        for piece in util.filechunkiter(data):
            connection.send(self, piece)
    return _sendfile
285 |
|
285 | |||
# True when this Python can speak HTTPS at all.
has_https = hasattr(urllib2, 'HTTPSHandler')
if has_https:
    try:
        # avoid using deprecated/broken FakeSocket in python 2.6
        import ssl
        _ssl_wrap_socket = ssl.wrap_socket
        CERT_REQUIRED = ssl.CERT_REQUIRED
    except ImportError:
        # Pre-2.6 fallback: no ssl module, so emulate the small part of
        # its API we use.  The value 2 mirrors ssl.CERT_REQUIRED.
        CERT_REQUIRED = 2

        def _ssl_wrap_socket(sock, key_file, cert_file,
                             cert_reqs=CERT_REQUIRED, ca_certs=None):
            # Certificate validation needs the real ssl module; refuse
            # rather than silently skip checking.
            if ca_certs:
                raise util.Abort(_(
                    'certificate checking requires Python 2.6'))

            ssl = socket.ssl(sock, key_file, cert_file)
            return httplib.FakeSocket(sock, ssl)

try:
    _create_connection = socket.create_connection
except AttributeError:
    # socket.create_connection appeared in Python 2.6; provide a
    # compatible substitute for older versions.
    _GLOBAL_DEFAULT_TIMEOUT = object()  # sentinel: "use default timeout"

    def _create_connection(address, timeout=_GLOBAL_DEFAULT_TIMEOUT,
                           source_address=None):
        # lifted from Python 2.6

        msg = "getaddrinfo returns an empty list"
        host, port = address
        # Try each resolved address until one connects.
        for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM):
            af, socktype, proto, canonname, sa = res
            sock = None
            try:
                sock = socket.socket(af, socktype, proto)
                if timeout is not _GLOBAL_DEFAULT_TIMEOUT:
                    sock.settimeout(timeout)
                if source_address:
                    sock.bind(source_address)
                sock.connect(sa)
                return sock

            except socket.error, msg:
                # Remember the last error, close, try the next address.
                if sock is not None:
                    sock.close()

        # Every address failed (or none resolved): raise the last error.
        raise socket.error, msg
333 |
|
333 | |||
class httpconnection(keepalive.HTTPConnection):
    """Keep-alive HTTP connection that can stream large request bodies
    and tunnel through a CONNECT proxy."""

    # must be able to send big bundle as stream.
    send = _gen_sendfile(keepalive.HTTPConnection)

    def connect(self):
        if not (has_https and self.realhostport):
            # Plain connection; no CONNECT tunnelling required.
            keepalive.HTTPConnection.connect(self)
            return
        # Open a raw socket to the proxy and ask it to tunnel; on
        # success, wrap the socket in SSL.
        self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.sock.connect((self.host, self.port))
        if _generic_proxytunnel(self):
            # we do not support client x509 certificates
            self.sock = _ssl_wrap_socket(self.sock, None, None)

    def getresponse(self):
        # A response may have been captured while setting up the proxy
        # tunnel; hand it out exactly once.
        pending = getattr(self, 'proxyres', None)
        if not pending:
            return keepalive.HTTPConnection.getresponse(self)
        if pending.will_close:
            self.close()
        self.proxyres = None
        return pending
356 |
|
356 | |||
# general transaction handler to support different ways to handle
# HTTPS proxying before and after Python 2.6.3.
def _generic_start_transaction(handler, h, req):
    """Decide whether connection *h* must CONNECT-tunnel via a proxy.

    Sets ``h.realhostport`` and ``h.headers`` when tunnelling is needed
    and clears both otherwise.
    """
    new_tunnel = bool(getattr(req, '_tunnel_host', None))
    if new_tunnel:
        # Python >= 2.6.3 records the tunnel target on the request.
        target = req._tunnel_host
        if target[:7] not in ['http://', 'https:/']:
            target = 'https://' + target
    else:
        # Older pythons: the selector is the full URL when proxied.
        target = req.get_selector()

    if new_tunnel or target == req.get_full_url(): # has proxy
        parts = urlparse.urlparse(target)
        if new_tunnel or parts[0] == 'https': # only use CONNECT for HTTPS
            hostport = parts[1]
            # Default port for bare hosts and bracketed IPv6 literals.
            if hostport[-1] == ']' or ':' not in hostport:
                hostport += ':443'

            h.realhostport = hostport
            h.headers = req.headers.copy()
            h.headers.update(handler.parent.addheaders)
            return

    h.realhostport = None
    h.headers = None
383 |
|
383 | |||
def _generic_proxytunnel(self):
    """Establish a CONNECT tunnel through the proxy for connection *self*.

    Sends the CONNECT request (forwarding any Proxy-* headers) and parses
    the proxy's reply by hand.  Returns True when the tunnel is up (the
    caller may then wrap the socket in SSL); on failure the parsed error
    response is stashed on ``self.proxyres`` and False is returned so the
    caller can surface it via getresponse().
    """
    # Only the Proxy-* headers are meant for the proxy itself.
    proxyheaders = dict(
            [(x, self.headers[x]) for x in self.headers
             if x.lower().startswith('proxy-')])
    self._set_hostport(self.host, self.port)
    self.send('CONNECT %s HTTP/1.0\r\n' % self.realhostport)
    for header in proxyheaders.iteritems():
        self.send('%s: %s\r\n' % header)
    self.send('\r\n')

    # majority of the following code is duplicated from
    # httplib.HTTPConnection as there are no adequate places to
    # override functions to provide the needed functionality
    res = self.response_class(self.sock,
                              strict=self.strict,
                              method=self._method)

    # Skip any interim 100-continue responses and their header blocks.
    while True:
        version, status, reason = res._read_status()
        if status != httplib.CONTINUE:
            break
        while True:
            skip = res.fp.readline().strip()
            if not skip:
                break
    res.status = status
    res.reason = reason.strip()

    if res.status == 200:
        # Tunnel established; drain the proxy's (empty) header block.
        while True:
            line = res.fp.readline()
            if line == '\r\n':
                break
        return True

    if version == 'HTTP/1.0':
        res.version = 10
    elif version.startswith('HTTP/1.'):
        res.version = 11
    elif version == 'HTTP/0.9':
        res.version = 9
    else:
        raise httplib.UnknownProtocol(version)

    if res.version == 9:
        # HTTP/0.9 responses carry no headers at all.
        res.length = None
        res.chunked = 0
        res.will_close = 1
        res.msg = httplib.HTTPMessage(cStringIO.StringIO())
        return False

    res.msg = httplib.HTTPMessage(res.fp)
    res.msg.fp = None

    # are we using the chunked-style of transfer encoding?
    trenc = res.msg.getheader('transfer-encoding')
    if trenc and trenc.lower() == "chunked":
        res.chunked = 1
        res.chunk_left = None
    else:
        res.chunked = 0

    # will the connection close at the end of the response?
    res.will_close = res._check_close()

    # do we have a Content-Length?
    # NOTE: RFC 2616, S4.4, #3 says we ignore this if tr_enc is "chunked"
    length = res.msg.getheader('content-length')
    if length and not res.chunked:
        try:
            res.length = int(length)
        except ValueError:
            res.length = None
        else:
            if res.length < 0: # ignore nonsensical negative lengths
                res.length = None
    else:
        res.length = None

    # does the body have a fixed length? (of zero)
    if (status == httplib.NO_CONTENT or status == httplib.NOT_MODIFIED or
        100 <= status < 200 or # 1xx codes
        res._method == 'HEAD'):
        res.length = 0

    # if the connection remains open, and we aren't using chunked, and
    # a content-length was not provided, then assume that the connection
    # WILL close.
    if (not res.will_close and
       not res.chunked and
       res.length is None):
        res.will_close = 1

    # Save the failed response for getresponse() to hand back.
    self.proxyres = res

    return False
480 |
|
480 | |||
class httphandler(keepalive.HTTPHandler):
    """Keep-alive HTTP handler using Mercurial's streaming connection."""

    def http_open(self, req):
        return self.do_open(httpconnection, req)

    def _start_transaction(self, h, req):
        # Configure CONNECT tunnelling (if needed) before delegating.
        _generic_start_transaction(self, h, req)
        return keepalive.HTTPHandler._start_transaction(self, h, req)
488 |
|
488 | |||
def _verifycert(cert, hostname):
    '''Verify that cert (in socket.getpeercert() format) matches hostname.
    CRLs and subjectAltName are not handled.

    Returns error message if any problems are found and None on success.
    '''
    if not cert:
        return _('no certificate received')
    dnsname = hostname.lower()
    for component in cert.get('subject', []):
        key, value = component[0]
        if key != 'commonName':
            continue
        certname = value.lower()
        if certname == dnsname:
            return None
        # Accept a single-level wildcard: *.example.com matches
        # host.example.com.
        if '.' in dnsname and certname == '*.' + dnsname.split('.', 1)[1]:
            return None
        return _('certificate is for %s') % certname
    return _('no commonName found in certificate')
507 |
|
507 | |||
if has_https:
    class BetterHTTPS(httplib.HTTPSConnection):
        """HTTPS connection that can verify the server certificate
        against the CA bundle configured as [web] cacerts."""
        send = keepalive.safesend

        def connect(self):
            # The ui is attached by the handler after construction, so
            # it may be missing on bare instances.
            if hasattr(self, 'ui'):
                cacerts = self.ui.config('web', 'cacerts')
            else:
                cacerts = None

            if cacerts:
                # Validate the peer certificate and check that it is
                # actually for the host we are talking to.
                sock = _create_connection((self.host, self.port))
                self.sock = _ssl_wrap_socket(sock, self.key_file,
                        self.cert_file, cert_reqs=CERT_REQUIRED,
                        ca_certs=cacerts)
                msg = _verifycert(self.sock.getpeercert(), self.host)
                if msg:
                    raise util.Abort(_('%s certificate error: %s') %
                                     (self.host, msg))
                self.ui.debug('%s certificate successfully verified\n' %
                              self.host)
            else:
                httplib.HTTPSConnection.connect(self)

    class httpsconnection(BetterHTTPS):
        response_class = keepalive.HTTPResponse
        # must be able to send big bundle as stream.
        send = _gen_sendfile(BetterHTTPS)
        getresponse = keepalive.wrapgetresponse(httplib.HTTPSConnection)

        def connect(self):
            if self.realhostport: # use CONNECT proxy
                self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                self.sock.connect((self.host, self.port))
                if _generic_proxytunnel(self):
                    # BUG FIX: _ssl_wrap_socket takes (sock, key_file,
                    # cert_file); the arguments were previously passed as
                    # (sock, cert_file, key_file), breaking client
                    # certificates when tunnelling through a proxy.
                    self.sock = _ssl_wrap_socket(self.sock, self.key_file,
                            self.cert_file)
            else:
                BetterHTTPS.connect(self)

    class httpshandler(keepalive.KeepAliveHandler, urllib2.HTTPSHandler):
        """Keep-alive HTTPS handler that picks up per-site credentials
        and key/cert files from the [auth] configuration."""

        def __init__(self, ui):
            keepalive.KeepAliveHandler.__init__(self)
            urllib2.HTTPSHandler.__init__(self)
            self.ui = ui
            self.pwmgr = passwordmgr(self.ui)

        def _start_transaction(self, h, req):
            # Configure CONNECT tunnelling (if needed) before delegating.
            _generic_start_transaction(self, h, req)
            return keepalive.KeepAliveHandler._start_transaction(self, h, req)

        def https_open(self, req):
            # Remember the matching [auth] group (if any) so that
            # _makeconnection can honour configured key/cert files.
            self.auth = self.pwmgr.readauthtoken(req.get_full_url())
            return self.do_open(self._makeconnection, req)

        def _makeconnection(self, host, port=None, *args, **kwargs):
            keyfile = None
            certfile = None

            if len(args) >= 1: # key_file
                keyfile = args[0]
            if len(args) >= 2: # cert_file
                certfile = args[1]
            args = args[2:]

            # if the user has specified different key/cert files in
            # hgrc, we prefer these
            if self.auth and 'key' in self.auth and 'cert' in self.auth:
                keyfile = self.auth['key']
                certfile = self.auth['cert']

            conn = httpsconnection(host, port, keyfile, certfile, *args, **kwargs)
            conn.ui = self.ui
            return conn
582 |
|
582 | |||
class httpdigestauthhandler(urllib2.HTTPDigestAuthHandler):
    """Digest auth handler with a per-request retry counter and
    tolerance for auth schemes the base class does not understand."""

    def __init__(self, *args, **kwargs):
        urllib2.HTTPDigestAuthHandler.__init__(self, *args, **kwargs)
        # Last request the retry counter was reset for; see
        # http_error_auth_reqed below.
        self.retried_req = None

    def reset_retry_count(self):
        # Python 2.6.5 will call this on 401 or 407 errors and thus loop
        # forever. We disable reset_retry_count completely and reset in
        # http_error_auth_reqed instead.
        pass

    def http_error_auth_reqed(self, auth_header, host, req, headers):
        # Reset the retry counter once for each request.
        if req is not self.retried_req:
            self.retried_req = req
            self.retried = 0
        # In python < 2.5 AbstractDigestAuthHandler raises a ValueError if
        # it doesn't know about the auth type requested. This can happen if
        # somebody is using BasicAuth and types a bad password.
        try:
            return urllib2.HTTPDigestAuthHandler.http_error_auth_reqed(
                        self, auth_header, host, req, headers)
        except ValueError, inst:
            arg = inst.args[0]
            if arg.startswith("AbstractDigestAuthHandler doesn't know "):
                return
            raise
610 |
|
610 | |||
class httpbasicauthhandler(urllib2.HTTPBasicAuthHandler):
    """Basic auth handler whose retry counter is reset once per request.

    Counterpart of httpdigestauthhandler: works around Python 2.6.5
    invoking reset_retry_count on every 401/407 response, which would
    otherwise retry forever.
    """

    def __init__(self, *args, **kwargs):
        urllib2.HTTPBasicAuthHandler.__init__(self, *args, **kwargs)
        # the request object we last reset the retry counter for
        self.retried_req = None

    def reset_retry_count(self):
        # Deliberately a no-op: Python 2.6.5 calls this on 401 or 407
        # errors and would loop forever.  The counter is reset in
        # http_error_auth_reqed instead, once per distinct request.
        pass

    def http_error_auth_reqed(self, auth_header, host, req, headers):
        if req is not self.retried_req:
            # first auth challenge seen for this request: count anew
            self.retried_req = req
            self.retried = 0
        return urllib2.HTTPBasicAuthHandler.http_error_auth_reqed(
            self, auth_header, host, req, headers)
629 |
|
629 | |||
def getauthinfo(path):
    """Strip embedded credentials from *path*.

    Returns (url, authinfo) where url has no user:passwd component and
    authinfo is either None or a tuple suitable for unpacking into
    passwordmgr.add_password (see opener() below).
    """
    scheme, netloc, urlpath, query, frag = urlparse.urlsplit(path)
    urlpath = urlpath or '/'
    if scheme != 'file':
        # XXX: why are we quoting the path again with some smart
        # heuristic here? Anyway, it cannot be done with file://
        # urls since path encoding is os/fs dependent (see
        # urllib.pathname2url() for details).
        urlpath = quotepath(urlpath)
    host, port, user, passwd = netlocsplit(netloc)

    # urllib cannot handle URLs with embedded user or passwd
    url = urlparse.urlunsplit(
        (scheme, netlocunsplit(host, port), urlpath, query, frag))
    authinfo = None
    if user:
        netloc = port and host + ':' + port or host
        # Python < 2.4.3 uses only the netloc to search for a password
        authinfo = (None, (url, netloc), user, passwd or '')
    return url, authinfo
654 |
|
654 | |||
# Hook point for extensions: factories appended here are called as
# func(ui, passwordmgr) by opener() and added to the handler chain.
handlerfuncs = []
656 |
|
656 | |||
def opener(ui, authinfo=None):
    '''
    construct an opener suitable for urllib2
    authinfo will be added to the password manager
    '''
    handlers = [httphandler()]
    if has_https:
        handlers.append(httpshandler(ui))
    handlers.append(proxyhandler(ui))

    passmgr = passwordmgr(ui)
    if authinfo is not None:
        passmgr.add_password(*authinfo)
        user, passwd = authinfo[2:4]
        # mask the password when logging it
        ui.debug('http auth: user %s, password %s\n' %
                 (user, passwd and '*' * len(passwd) or 'not set'))

    handlers.extend((httpbasicauthhandler(passmgr),
                     httpdigestauthhandler(passmgr)))
    # extension-provided handlers registered in handlerfuncs
    handlers.extend([h(ui, passmgr) for h in handlerfuncs])
    urlopener = urllib2.build_opener(*handlers)

    # 1.0 here is the _protocol_ version
    urlopener.addheaders = [('User-agent', 'mercurial/proto-1.0'),
                            ('Accept', 'application/mercurial-0.1')]
    return urlopener
684 |
|
684 | |||
# Matches a URL scheme prefix.  Scheme chars per RFC 3986 are letters,
# digits, '+', '-' and '.'.  The hyphen is escaped: in the previous
# pattern r'^([a-zA-Z0-9+-.]+)://' the sequence '+-.' formed a character
# RANGE (0x2B-0x2E) that also matched ',', which is not a valid scheme
# character.
scheme_re = re.compile(r'^([a-zA-Z0-9+.\-]+)://')
686 |
|
686 | |||
def open(ui, url, data=None):
    """Open *url* (or a local path) through an opener built for *ui*.

    NOTE: intentionally shadows the builtin open within this module.
    """
    m = scheme_re.search(url)
    scheme = m and m.group(1).lower() or None
    if scheme:
        # remote URL: peel off any embedded credentials first
        url, authinfo = getauthinfo(url)
    else:
        # no scheme: treat as a local filesystem path
        path = util.normpath(os.path.abspath(url))
        url = 'file://' + urllib.pathname2url(path)
        authinfo = None
    return opener(ui, authinfo).open(url, data)
General Comments 0
You need to be logged in to leave comments.
Login now