Show More
@@ -1,294 +1,295 | |||
|
1 | 1 | #!/usr/bin/env python |
|
2 | 2 | # |
|
3 | 3 | # check-code - a style and portability checker for Mercurial |
|
4 | 4 | # |
|
5 | 5 | # Copyright 2010 Matt Mackall <mpm@selenic.com> |
|
6 | 6 | # |
|
7 | 7 | # This software may be used and distributed according to the terms of the |
|
8 | 8 | # GNU General Public License version 2 or any later version. |
|
9 | 9 | |
|
10 | 10 | import re, glob, os, sys |
|
11 | 11 | import optparse |
|
12 | 12 | |
|
def repquote(m):
    """Blank out the contents of a quoted string, preserving its length.

    Word characters become 'x', other non-space characters become 'o',
    whitespace is kept, and the original quote characters are restored.
    """
    quote = m.group('quote')
    masked = re.sub(r"\w", "x", m.group('text'))
    masked = re.sub(r"[^\sx]", "o", masked)
    return quote + masked + quote
|
17 | 17 | |
|
def reppython(m):
    """Blank out Python comments and string literals.

    A matched comment is replaced by '#' characters of the same length;
    anything else is a quoted string and is delegated to repquote().
    """
    comment = m.group('comment')
    return "#" * len(comment) if comment else repquote(m)
|
23 | 23 | |
|
def repcomment(m):
    """Blank out a shell comment, keeping its leading indent and length."""
    indent, comment = m.group(1), m.group(2)
    return indent + "#" * len(comment)
|
26 | 26 | |
|
def repccomment(m):
    """Blank out the body of a C block comment, preserving its layout.

    Non-space characters (and the single space after each newline) are
    replaced by 'x'; the comment delimiters are kept.
    """
    body = re.sub(r"((?<=\n) )|\S", "x", m.group(2))
    return "%s%s*/" % (m.group(1), body)
|
30 | 30 | |
|
def repcallspaces(m):
    """Collapse continuation-line indentation inside a call's parentheses
    so multi-line calls do not trip whitespace checks."""
    collapsed = re.sub(r"\n\s+", "\n", m.group(2))
    return m.group(1) + collapsed
|
34 | 34 | |
|
def repinclude(m):
    """Replace a C #include target with a fixed placeholder."""
    return "%s<foo>" % m.group(1)
|
37 | 37 | |
|
def rephere(m):
    """Blank out the body of a shell here-document, keeping line layout."""
    masked = re.sub(r"\S", "x", m.group(2))
    return m.group(1) + masked
|
41 | 41 | |
|
42 | 42 | |
|
# Portability/style checks for shell-based test scripts.  Each entry is
# a (regex, message) pair: a match against a filtered line reports the
# message.  Messages starting with "warning" are only shown with -w.
testpats = [
    (r'(pushd|popd)', "don't use 'pushd' or 'popd', use 'cd'"),
    (r'\W\$?\(\([^\)]*\)\)', "don't use (()) or $(()), use 'expr'"),
    (r'^function', "don't use 'function', use old style"),
    (r'grep.*-q', "don't use 'grep -q', redirect to /dev/null"),
    (r'echo.*\\n', "don't use 'echo \\n', use printf"),
    (r'echo -n', "don't use 'echo -n', use printf"),
    (r'^diff.*-\w*N', "don't use 'diff -N'"),
    (r'(^| )wc[^|]*$', "filter wc output"),
    (r'head -c', "don't use 'head -c', use 'dd'"),
    (r'ls.*-\w*R', "don't use 'ls -R', use 'find'"),
    (r'printf.*\\\d\d\d', "don't use 'printf \NNN', use Python"),
    (r'printf.*\\x', "don't use printf \\x, use Python"),
    (r'\$\(.*\)', "don't use $(expr), use `expr`"),
    (r'rm -rf \*', "don't use naked rm -rf, target a directory"),
    (r'(^|\|\s*)grep (-\w\s+)*[^|]*[(|]\w',
     "use egrep for extended grep syntax"),
    (r'/bin/', "don't use explicit paths for tools"),
    (r'\$PWD', "don't use $PWD, use `pwd`"),
    (r'[^\n]\Z', "no trailing newline"),
    (r'export.*=', "don't export and assign at once"),
    ('^([^"\']|("[^"]*")|(\'[^\']*\'))*\\^', "^ must be quoted"),
    (r'^source\b', "don't use 'source', use '.'"),
    (r'touch -d', "don't use 'touch -d', use 'touch -t' instead"),
]
|
68 | 68 | |
|
# Filters applied to test scripts before matching testpats: comments and
# here-document bodies are blanked out so patterns do not fire on quoted
# or commented content.
testfilters = [
    (r"( *)(#([^\n]*\S)?)", repcomment),
    (r"<<(\S+)((.|\n)*?\n\1)", rephere),
]
|
73 | 73 | |
|
# Unified tests (.t files) embed shell commands after "  $ " prompts and
# continuation lines after "  > "; checks are anchored accordingly.
uprefix = r"^  \$ "
uprefixc = r"^  > "
utestpats = [
    (uprefix + r'.*\|\s*sed', "use regex test output patterns instead of sed"),
    (uprefix + r'(true|exit 0)', "explicit zero exit unnecessary"),
    (uprefix + r'.*\$\?', "explicit exit code checks unnecessary"),
    (uprefix + r'.*\|\| echo.*(fail|error)',
     "explicit exit code checks unnecessary"),
    (uprefix + r'set -e', "don't use set -e"),
    (uprefixc + r'( *)\t', "don't use tabs to indent"),
]

# Transplant all shell-script checks onto the command lines of unified
# tests, re-anchoring '^'-anchored patterns after the "  $ " prompt.
for p, m in testpats:
    if p.startswith('^'):
        p = uprefix + p[1:]
    else:
        p = uprefix + p
    utestpats.append((p, m))

# Filters applied to unified tests before matching utestpats.
utestfilters = [
    (r"( *)(#([^\n]*\S)?)", repcomment),
]
|
96 | 96 | |
|
# Style and Python-version-compatibility checks for Python sources.
# Each entry is a (regex, message) pair matched against filtered lines;
# commented-out entries are checks that are deliberately disabled.
pypats = [
    (r'^\s*def\s*\w+\s*\(.*,\s*\(',
     "tuple parameter unpacking not available in Python 3+"),
    (r'lambda\s*\(.*,.*\)',
     "tuple parameter unpacking not available in Python 3+"),
    (r'(?<!def)\s+(cmp)\(', "cmp is not available in Python 3+"),
    (r'\breduce\s*\(.*', "reduce is not available in Python 3+"),
    (r'\.has_key\b', "dict.has_key is not available in Python 3+"),
    (r'^\s*\t', "don't use tabs"),
    (r'\S;\s*\n', "semicolon"),
    (r'\w,\w', "missing whitespace after ,"),
    (r'\w[+/*\-<>]\w', "missing whitespace in expression"),
    (r'^\s+\w+=\w+[^,)]$', "missing whitespace in assignment"),
    (r'.{85}', "line too long"),
    (r'.{81}', "warning: line over 80 characters"),
    (r'[^\n]\Z', "no trailing newline"),
    (r'(\S\s+|^\s+)\n', "trailing whitespace"),
#    (r'^\s+[^_ ][^_. ]+_[^_]+\s*=', "don't use underbars in identifiers"),
#    (r'\w*[a-z][A-Z]\w*\s*=', "don't use camelcase in identifiers"),
    (r'^\s*(if|while|def|class|except|try)\s[^[]*:\s*[^\]#\s]+',
     "linebreak after :"),
    (r'class\s[^(]:', "old-style class, use class foo(object)"),
    (r'^\s+del\(', "del isn't a function"),
    (r'^\s+except\(', "except isn't a function"),
    (r',]', "unneeded trailing ',' in list"),
#    (r'class\s[A-Z][^\(]*\((?!Exception)',
#     "don't capitalize non-exception classes"),
#    (r'in range\(', "use xrange"),
#    (r'^\s*print\s+', "avoid using print in core and extensions"),
    (r'[\x80-\xff]', "non-ASCII character literal"),
    (r'("\')\.format\(', "str.format() not available in Python 2.4"),
    (r'^\s*with\s+', "with not available in Python 2.4"),
    (r'(?<!def)\s+(any|all|format)\(',
     "any/all/format not available in Python 2.4"),
    (r'(?<!def)\s+(callable)\(',
     "callable not available in Python 3, use hasattr(f, '__call__')"),
    (r'if\s.*\selse', "if ... else form not available in Python 2.4"),
    (r'([\(\[]\s\S)|(\S\s[\)\]])', "gratuitous whitespace in () or []"),
#    (r'\s\s=', "gratuitous whitespace before ="),
    (r'[^>< ](\+=|-=|!=|<>|<=|>=|<<=|>>=)\S',
     "missing whitespace around operator"),
    (r'[^>< ](\+=|-=|!=|<>|<=|>=|<<=|>>=)\s',
     "missing whitespace around operator"),
    (r'\s(\+=|-=|!=|<>|<=|>=|<<=|>>=)\S',
     "missing whitespace around operator"),
    (r'[^+=*!<>&| -](\s=|=\s)[^= ]',
     "wrong whitespace around ="),
    (r'raise Exception', "don't raise generic exceptions"),
    (r'ui\.(status|progress|write|note|warn)\([\'\"]x',
     "warning: unwrapped ui message"),
]
|
147 | 148 | |
|
# Filter applied to Python sources before matching pypats: comments and
# string literals are blanked out (via reppython) so the checks do not
# fire on quoted or commented content.  The pattern is verbose-mode
# ((?x)), so the literal whitespace inside it is insignificant.
pyfilters = [
    (r"""(?msx)(?P<comment>\#.*?$)|
     ((?P<quote>('''|\"\"\"|(?<!')'(?!')|(?<!")"(?!")))
      (?P<text>(([^\\]|\\.)*?))
      (?P=quote))""", reppython),
]
|
154 | 155 | |
|
# Style checks for C sources; same (regex, message) convention as pypats.
cpats = [
    (r'//', "don't use //-style comments"),
    (r'^  ', "don't use spaces to indent"),
    (r'\S\t', "don't use tabs except for indent"),
    (r'(\S\s+|^\s+)\n', "trailing whitespace"),
    (r'.{85}', "line too long"),
    (r'(while|if|do|for)\(', "use space after while/if/do/for"),
    (r'return\(', "return is not a function"),
    (r' ;', "no space before ;"),
    (r'\w+\* \w+', "use int *foo, not int* foo"),
    (r'\([^\)]+\) \w+', "use (int)foo, not (int) foo"),
    (r'\S+ (\+\+|--)', "use foo++, not foo ++"),
    (r'\w,\w', "missing whitespace after ,"),
    (r'\w[+/*]\w', "missing whitespace in expression"),
    (r'^#\s+\w', "use #foo, not # foo"),
    (r'[^\n]\Z', "no trailing newline"),
]
|
172 | 173 | |
|
# Filters applied to C sources before matching cpats: block comments,
# string literals, #include targets and call-continuation indentation
# are blanked out or normalized.
cfilters = [
    (r'(/\*)(((\*(?!/))|[^*])*)\*/', repccomment),
    (r'''(?P<quote>(?<!")")(?P<text>([^"]|\\")+)"(?!")''', repquote),
    (r'''(#\s*include\s+<)([^>]+)>''', repinclude),
    (r'(\()([^)]+\))', repcallspaces),
]
|
179 | 180 | |
|
# Dispatch table: (label, filename regex, filters, patterns).  The first
# entry whose regex matches the file name determines which filter/check
# set is applied.
checks = [
    ('python', r'.*\.(py|cgi)$', pyfilters, pypats),
    ('test script', r'(.*/)?test-[^.~]*$', testfilters, testpats),
    ('c', r'.*\.c$', cfilters, cpats),
    ('unified test', r'.*\.t$', utestfilters, utestpats),
]
|
186 | 187 | |
|
class norepeatlogger(object):
    """Error reporter that prints a line's header and content only once,
    even when several checks fail on the same line."""
    def __init__(self):
        # (fname, lineno, line) key of the most recently reported error
        self._lastseen = None

    def log(self, fname, lineno, line, msg, blame):
        """print error related a to given line of a given file.

        The faulty line will also be printed but only once in the case
        of multiple errors.

        :fname: filename
        :lineno: line number
        :line: actual content of the line
        :msg: error message
        """
        msgid = fname, lineno, line
        # Only print the location header and quoted line for the first
        # error on this line; subsequent errors print just the message.
        if msgid != self._lastseen:
            if blame:
                print "%s:%d (%s):" % (fname, lineno, blame)
            else:
                print "%s:%d:" % (fname, lineno)
            print " > %s" % line
            self._lastseen = msgid
        print " " + msg

# Shared module-level logger used as the default sink for checkfile().
_defaultlogger = norepeatlogger()
|
213 | 214 | |
|
def getblame(f):
    """Return [(line, user, rev), ...] for each line of file f, as
    reported by 'hg annotate -un'."""
    annotated = []
    for raw in os.popen('hg annotate -un %s' % f):
        meta, content = raw.split(':', 1)
        user, rev = meta.split()
        # Drop the separating space and trailing newline around content.
        annotated.append((content[1:-1], user, rev))
    return annotated
|
221 | 222 | |
|
def checkfile(f, logfunc=_defaultlogger.log, maxerr=None, warnings=False,
              blame=False):
    """checks style and portability of a given file

    :f: filepath
    :logfunc: function used to report error
              logfunc(filename, linenumber, linecontent, errormessage)
    :maxerr: number of error to display before arborting.
             Set to None (default) to report all errors

    return True if no error is found, False otherwise.
    """
    blamecache = None  # lazy result of getblame(f), fetched at most once
    result = True
    for name, match, filters, pats in checks:
        fc = 0  # error count for this file
        if not re.match(match, f):
            continue
        pre = post = open(f).read()
        # Files may opt out entirely; the marker string is concatenated
        # so this checker does not match its own source.
        if "no-" + "check-code" in pre:
            break
        for p, r in filters:
            post = re.sub(p, r, post)
        # print post # uncomment to show filtered version
        # Walk original and filtered text in parallel: patterns match
        # the filtered line, messages quote the original line.
        z = enumerate(zip(pre.splitlines(), post.splitlines(True)))
        for n, l in z:
            if "check-code" + "-ignore" in l[0]:
                continue
            for p, msg in pats:
                if not warnings and msg.startswith("warning"):
                    continue
                if re.search(p, l[1]):
                    bd = ""
                    if blame:
                        bd = 'working directory'
                        if not blamecache:
                            blamecache = getblame(f)
                        if n < len(blamecache):
                            bl, bu, br = blamecache[n]
                            # Only attribute the line if annotate output
                            # still matches the on-disk content.
                            if bl == l[0]:
                                bd = '%s@%s' % (bu, br)
                    logfunc(f, n + 1, l[0], msg, bd)
                    fc += 1
                    result = False
            if maxerr is not None and fc >= maxerr:
                print " (too many errors, giving up)"
                break
        # Only the first matching check type is applied to a file.
        break
    return result
|
271 | 272 | |
|
if __name__ == "__main__":
    parser = optparse.OptionParser("%prog [options] [files]")
    parser.add_option("-w", "--warnings", action="store_true",
                      help="include warning-level checks")
    parser.add_option("-p", "--per-file", type="int",
                      help="max warnings per file")
    parser.add_option("-b", "--blame", action="store_true",
                      help="use annotate to generate blame info")

    parser.set_defaults(per_file=15, warnings=False, blame=False)
    (options, args) = parser.parse_args()

    # No file arguments: check everything in the current directory.
    if len(args) == 0:
        check = glob.glob("*")
    else:
        check = args

    # Initialize the exit status ONCE before the loop.  Resetting it per
    # file (as the previous code did) lets a clean last file mask
    # failures detected in earlier files.
    ret = 0
    for f in check:
        if not checkfile(f, maxerr=options.per_file, warnings=options.warnings,
                         blame=options.blame):
            ret = 1
    sys.exit(ret)
@@ -1,1161 +1,1161 | |||
|
1 | 1 | # Subversion 1.4/1.5 Python API backend |
|
2 | 2 | # |
|
3 | 3 | # Copyright(C) 2007 Daniel Holth et al |
|
4 | 4 | |
|
5 | 5 | import os |
|
6 | 6 | import re |
|
7 | 7 | import sys |
|
8 | 8 | import cPickle as pickle |
|
9 | 9 | import tempfile |
|
10 | 10 | import urllib |
|
11 | 11 | import urllib2 |
|
12 | 12 | |
|
13 | 13 | from mercurial import strutil, util, encoding |
|
14 | 14 | from mercurial.i18n import _ |
|
15 | 15 | |
|
16 | 16 | # Subversion stuff. Works best with very recent Python SVN bindings |
|
17 | 17 | # e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing |
|
18 | 18 | # these bindings. |
|
19 | 19 | |
|
20 | 20 | from cStringIO import StringIO |
|
21 | 21 | |
|
22 | 22 | from common import NoRepo, MissingTool, commit, encodeargs, decodeargs |
|
23 | 23 | from common import commandline, converter_source, converter_sink, mapfile |
|
24 | 24 | |
|
25 | 25 | try: |
|
26 | 26 | from svn.core import SubversionException, Pool |
|
27 | 27 | import svn |
|
28 | 28 | import svn.client |
|
29 | 29 | import svn.core |
|
30 | 30 | import svn.ra |
|
31 | 31 | import svn.delta |
|
32 | 32 | import transport |
|
33 | 33 | import warnings |
|
34 | 34 | warnings.filterwarnings('ignore', |
|
35 | 35 | module='svn.core', |
|
36 | 36 | category=DeprecationWarning) |
|
37 | 37 | |
|
38 | 38 | except ImportError: |
|
39 | 39 | pass |
|
40 | 40 | |
|
class SvnPathNotFound(Exception):
    """Raised when a path cannot be found in the repository history."""
    pass
|
43 | 43 | |
|
def geturl(path):
    """Return a Subversion URL for path.

    Working copies are resolved through the svn bindings; plain local
    directories are turned into file:// URLs; anything else is returned
    unchanged.
    """
    try:
        return svn.client.url_from_path(svn.core.svn_path_canonicalize(path))
    except SubversionException:
        # Not a working copy; fall through to the path-based handling.
        pass
    if os.path.isdir(path):
        path = os.path.normpath(os.path.abspath(path))
        if os.name == 'nt':
            path = '/' + util.normpath(path)
        # Module URL is later compared with the repository URL returned
        # by svn API, which is UTF-8.
        path = encoding.tolocal(path)
        return 'file://%s' % urllib.quote(path)
    return path
|
58 | 58 | |
|
def optrev(number):
    """Wrap a plain revision number into an svn_opt_revision_t object,
    as expected by the svn client APIs."""
    rev = svn.core.svn_opt_revision_t()
    rev.kind = svn.core.svn_opt_revision_number
    rev.value.number = number
    return rev
|
64 | 64 | |
|
class changedpath(object):
    """Plain copy of an svn changed-path entry, detached from the svn
    memory pool so it can be pickled safely."""
    def __init__(self, p):
        for attr in ('copyfrom_path', 'copyfrom_rev', 'action'):
            setattr(self, attr, getattr(p, attr))
|
70 | 70 | |
|
def get_log_child(fp, url, paths, start, end, limit=0, discover_changed_paths=True,
                  strict_node_history=False):
    """Run svn.ra.get_log in this (child) process, pickling each log
    entry to fp for the parent to consume via logstream.

    The stream is terminated by a pickled None on success or by a
    pickled svn error number on failure.
    """
    protocol = -1  # pickle: highest available protocol
    def receiver(orig_paths, revnum, author, date, message, pool):
        # Detach changed-path entries from the svn pool before pickling.
        if orig_paths is not None:
            for k, v in orig_paths.iteritems():
                orig_paths[k] = changedpath(v)
        pickle.dump((orig_paths, revnum, author, date, message),
                    fp, protocol)

    try:
        # Use an ra of our own so that our parent can consume
        # our results without confusing the server.
        t = transport.SvnRaTransport(url=url)
        svn.ra.get_log(t.ra, paths, start, end, limit,
                       discover_changed_paths,
                       strict_node_history,
                       receiver)
    except SubversionException, (inst, num):
        pickle.dump(num, fp, protocol)
    except IOError:
        # Caller may interrupt the iteration
        pickle.dump(None, fp, protocol)
    else:
        pickle.dump(None, fp, protocol)
    fp.close()
    # With large history, cleanup process goes crazy and suddenly
    # consumes *huge* amount of memory. The output file being closed,
    # there is no need for clean termination.
    os._exit(0)
|
101 | 101 | |
|
def debugsvnlog(ui, **opts):
    """Fetch SVN log in a subprocess and channel them back to parent to
    avoid memory collection issues.
    """
    # The parent encodes the get_log_child arguments on our stdin and
    # reads pickled log entries back from our stdout; both streams must
    # be binary-safe.
    util.set_binary(sys.stdin)
    util.set_binary(sys.stdout)
    args = decodeargs(sys.stdin.read())
    get_log_child(sys.stdout, *args)
|
110 | 110 | |
|
class logstream(object):
    """Interruptible revision log iterator."""
    def __init__(self, stdout):
        # Pipe connected to the stdout of the get_log_child subprocess.
        self._stdout = stdout

    def __iter__(self):
        while True:
            try:
                entry = pickle.load(self._stdout)
            except EOFError:
                raise util.Abort(_('Mercurial failed to run itself, check'
                                   ' hg executable is in PATH'))
            try:
                orig_paths, revnum, author, date, message = entry
            except:
                # Deliberately broad: any non-tuple entry terminates the
                # stream.  None means a clean end; anything else is an
                # svn error number forwarded by the child process.
                if entry is None:
                    break
                raise SubversionException("child raised exception", entry)
            yield entry

    def close(self):
        # Idempotent: the pipe is closed once and then cleared.
        if self._stdout:
            self._stdout.close()
            self._stdout = None
|
135 | 135 | |
|
136 | 136 | |
|
137 | 137 | # Check to see if the given path is a local Subversion repo. Verify this by |
|
138 | 138 | # looking for several svn-specific files and directories in the given |
|
139 | 139 | # directory. |
|
def filecheck(ui, path, proto):
    """Return True if path looks like the root of a local Subversion
    repository (it contains the expected svn bookkeeping entries)."""
    required = ('locks', 'hooks', 'format', 'db')
    return all(os.path.exists(os.path.join(path, name))
               for name in required)
|
145 | 145 | |
|
# Check to see if a given path is the root of an svn repo over http. We verify
# this by requesting a version-controlled URL we know can't exist and looking
# for the svn-specific "not found" XML.
def httpcheck(ui, path, proto):
    try:
        opener = urllib2.build_opener()
        rsp = opener.open('%s://%s/!svn/ver/0/.svn' % (proto, path))
        data = rsp.read()
    except urllib2.HTTPError, inst:
        if inst.code != 404:
            # Except for 404 we cannot know for sure this is not an svn repo
            ui.warn(_('svn: cannot probe remote repository, assume it could '
                      'be a subversion repository. Use --source-type if you '
                      'know better.\n'))
            return True
        # A 404 reply may still carry the svn error document; inspect it.
        data = inst.fp.read()
    except:
        # Could be urllib2.URLError if the URL is invalid or anything else.
        return False
    return '<m:human-readable errcode="160013">' in data
|
166 | 166 | |
|
# Probe function per URL scheme, used by issvnurl() to detect svn repos.
protomap = {'http': httpcheck,
            'https': httpcheck,
            'file': filecheck,
            }
|
def issvnurl(ui, url):
    """Return True if url points at (or inside) a Subversion repository.

    The scheme selects a probe function from protomap; the path is
    walked upward one component at a time until a repository root is
    found or the path is exhausted.
    """
    try:
        scheme, rest = url.split('://', 1)
        if scheme == 'file':
            rest = urllib.url2pathname(rest)
    except ValueError:
        # No scheme separator: treat the whole url as a local path.
        scheme = 'file'
        rest = os.path.abspath(url)
    if scheme == 'file':
        rest = rest.replace(os.sep, '/')
    probe = protomap.get(scheme, lambda *args: False)
    while '/' in rest:
        if probe(ui, rest, scheme):
            return True
        rest = rest.rsplit('/', 1)[0]
    return False
|
187 | 187 | |
|
188 | 188 | # SVN conversion code stolen from bzr-svn and tailor |
|
189 | 189 | # |
|
190 | 190 | # Subversion looks like a versioned filesystem, branches structures |
|
191 | 191 | # are defined by conventions and not enforced by the tool. First, |
|
192 | 192 | # we define the potential branches (modules) as "trunk" and "branches" |
|
193 | 193 | # children directories. Revisions are then identified by their |
|
194 | 194 | # module and revision number (and a repository identifier). |
|
195 | 195 | # |
|
196 | 196 | # The revision graph is really a tree (or a forest). By default, a |
|
197 | 197 | # revision parent is the previous revision in the same module. If the |
|
198 | 198 | # module directory is copied/moved from another module then the |
|
199 | 199 | # revision is the module root and its parent the source revision in |
|
200 | 200 | # the parent module. A revision has at most one parent. |
|
201 | 201 | # |
|
202 | 202 | class svn_source(converter_source): |
|
    def __init__(self, ui, url, rev=None):
        """Open a Subversion source at url, optionally pinned at rev.

        Raises NoRepo if url does not look like an svn repository and
        MissingTool if usable svn python bindings are not available.
        """
        super(svn_source, self).__init__(ui, url, rev=rev)

        # Cheap syntactic checks first, then probe the URL for an
        # actual repository.
        if not (url.startswith('svn://') or url.startswith('svn+ssh://') or
                (os.path.exists(url) and
                 os.path.exists(os.path.join(url, '.svn'))) or
                issvnurl(ui, url)):
            raise NoRepo(_("%s does not look like a Subversion repository")
                         % url)

        # The svn import at module level is wrapped in try/except; if it
        # failed, SubversionException is undefined here.
        try:
            SubversionException
        except NameError:
            raise MissingTool(_('Subversion python bindings could not be loaded'))

        try:
            version = svn.core.SVN_VER_MAJOR, svn.core.SVN_VER_MINOR
            if version < (1, 4):
                raise MissingTool(_('Subversion python bindings %d.%d found, '
                                    '1.4 or later required') % version)
        except AttributeError:
            raise MissingTool(_('Subversion python bindings are too old, 1.4 '
                                'or later required'))

        self.lastrevs = {}

        latest = None
        try:
            # Support file://path@rev syntax. Useful e.g. to convert
            # deleted branches.
            at = url.rfind('@')
            if at >= 0:
                latest = int(url[at + 1:])
                url = url[:at]
        except ValueError:
            pass
        self.url = geturl(url)
        self.encoding = 'UTF-8' # Subversion is always nominal UTF-8
        try:
            self.transport = transport.SvnRaTransport(url=self.url)
            self.ra = self.transport.ra
            self.ctx = self.transport.client
            self.baseurl = svn.ra.get_repos_root(self.ra)
            # Module is either empty or a repository path starting with
            # a slash and not ending with a slash.
            self.module = urllib.unquote(self.url[len(self.baseurl):])
            self.prevmodule = None
            self.rootmodule = self.module
            self.commits = {}
            self.paths = {}
            self.uuid = svn.ra.get_uuid(self.ra)
        except SubversionException:
            ui.traceback()
            raise NoRepo(_("%s does not look like a Subversion repository")
                         % self.url)

        # An explicit --rev overrides any @rev suffix parsed above.
        if rev:
            try:
                latest = int(rev)
            except ValueError:
                raise util.Abort(_('svn: revision %s is not an integer') % rev)

        self.startrev = self.ui.config('convert', 'svn.startrev', default=0)
        try:
            self.startrev = int(self.startrev)
            if self.startrev < 0:
                self.startrev = 0
        except ValueError:
            raise util.Abort(_('svn: start revision %s is not an integer')
                             % self.startrev)

        self.head = self.latest(self.module, latest)
        if not self.head:
            raise util.Abort(_('no revision found in module %s')
                             % self.module)
        self.last_changed = self.revnum(self.head)

        self._changescache = None

        # Remember the working copy path, if any, for convert map output.
        if os.path.exists(os.path.join(url, '.svn/entries')):
            self.wc = url
        else:
            self.wc = None
        self.convertfp = None
|
288 | 288 | def setrevmap(self, revmap): |
|
289 | 289 | lastrevs = {} |
|
290 | 290 | for revid in revmap.iterkeys(): |
|
291 | 291 | uuid, module, revnum = self.revsplit(revid) |
|
292 | 292 | lastrevnum = lastrevs.setdefault(module, revnum) |
|
293 | 293 | if revnum > lastrevnum: |
|
294 | 294 | lastrevs[module] = revnum |
|
295 | 295 | self.lastrevs = lastrevs |
|
296 | 296 | |
|
    def exists(self, path, optrev):
        """Return True if path exists in the repository at revision
        optrev (an svn_opt_revision_t object)."""
        try:
            svn.client.ls(self.url.rstrip('/') + '/' + urllib.quote(path),
                          optrev, False, self.ctx)
            return True
        except SubversionException:
            return False
|
304 | 304 | |
|
    def getheads(self):
        """Return the revision ids to convert from: the head of trunk
        (or of the module root) first, then one head per non-empty
        branch.  Also resolves self.tags as a side effect."""

        def isdir(path, revnum):
            # True if path is a directory at revnum.
            kind = self._checkpath(path, revnum)
            return kind == svn.core.svn_node_dir

        def getcfgpath(name, rev):
            # Resolve the convert.svn.<name> layout setting (trunk/tags/
            # branches).  An explicitly empty value disables the entry;
            # a configured-but-missing path is a hard error.
            cfgpath = self.ui.config('convert', 'svn.' + name)
            if cfgpath is not None and cfgpath.strip() == '':
                return None
            path = (cfgpath or name).strip('/')
            if not self.exists(path, rev):
                if cfgpath:
                    raise util.Abort(_('expected %s to be at %r, but not found')
                                 % (name, path))
                return None
            self.ui.note(_('found %s at %r\n') % (name, path))
            return path

        rev = optrev(self.last_changed)
        oldmodule = ''
        trunk = getcfgpath('trunk', rev)
        self.tags = getcfgpath('tags', rev)
        branches = getcfgpath('branches', rev)

        # If the project has a trunk or branches, we will extract heads
        # from them. We keep the project root otherwise.
        if trunk:
            oldmodule = self.module or ''
            self.module += '/' + trunk
            self.head = self.latest(self.module, self.last_changed)
            if not self.head:
                raise util.Abort(_('no revision found in module %s')
                                 % self.module)

        # First head in the list is the module's head
        self.heads = [self.head]
        if self.tags is not None:
            self.tags = '%s/%s' % (oldmodule , (self.tags or 'tags'))

        # Check if branches bring a few more heads to the list
        if branches:
            rpath = self.url.strip('/')
            branchnames = svn.client.ls(rpath + '/' + urllib.quote(branches),
                                        rev, False, self.ctx)
            for branch in branchnames.keys():
                module = '%s/%s/%s' % (oldmodule, branches, branch)
                if not isdir(module, self.last_changed):
                    continue
                brevid = self.latest(module, self.last_changed)
                if not brevid:
                    self.ui.note(_('ignoring empty branch %s\n') % branch)
                    continue
                self.ui.note(_('found branch %s at %d\n') %
                             (branch, self.revnum(brevid)))
                self.heads.append(brevid)

        # svn.startrev only makes sense with a single head to follow.
        if self.startrev and self.heads:
            if len(self.heads) > 1:
                raise util.Abort(_('svn: start revision is not supported '
                                   'with more than one branch'))
            revnum = self.revnum(self.heads[0])
            if revnum < self.startrev:
                raise util.Abort(
                    _('svn: no revision found after start revision %d')
                    % self.startrev)

        return self.heads
|
373 | 373 | |
|
    def getchanges(self, rev):
        """Return ([(file, rev)], {dest: source}) for rev, consuming the
        path information previously cached in self.paths."""
        # getchangedfiles() may have primed a one-entry cache for us.
        if self._changescache and self._changescache[0] == rev:
            return self._changescache[1]
        self._changescache = None
        (paths, parents) = self.paths[rev]
        if parents:
            files, self.removed, copies = self.expandpaths(rev, paths, parents)
        else:
            # Perform a full checkout on roots
            uuid, module, revnum = self.revsplit(rev)
            entries = svn.client.ls(self.baseurl + urllib.quote(module),
                                    optrev(revnum), True, self.ctx)
            files = [n for n, e in entries.iteritems()
                     if e.kind == svn.core.svn_node_file]
            copies = {}
            self.removed = set()

        files.sort()
        files = zip(files, [rev] * len(files))

        # caller caches the result, so free it here to release memory
        del self.paths[rev]
        return (files, copies)
|
397 | 397 | |
|
398 | 398 | def getchangedfiles(self, rev, i): |
|
399 | 399 | changes = self.getchanges(rev) |
|
400 | 400 | self._changescache = (rev, changes) |
|
401 | 401 | return [f[0] for f in changes[0]] |
|
402 | 402 | |
|
    def getcommit(self, rev):
        """Return the commit object for rev, fetching and caching log
        data for surrounding revisions as a side effect."""
        if rev not in self.commits:
            uuid, module, revnum = self.revsplit(rev)
            self.module = module
            self.reparent(module)
            # We assume that:
            # - requests for revisions after "stop" come from the
            # revision graph backward traversal. Cache all of them
            # down to stop, they will be used eventually.
            # - requests for revisions before "stop" come to get
            # isolated branches parents. Just fetch what is needed.
            stop = self.lastrevs.get(module, 0)
            if revnum < stop:
                stop = revnum + 1
            self._fetch_revisions(revnum, stop)
        commit = self.commits[rev]
        # caller caches the result, so free it here to release memory
        del self.commits[rev]
        return commit
|
422 | 422 | |
|
    def gettags(self):
        """Map tag names to revision ids by walking the svn log of the
        tags directory backwards, following tag/tags-directory renames.
        Returns an empty dict when no tags directory is configured.
        """
        tags = {}
        if self.tags is None:
            return tags

        # svn tags are just a convention, project branches left in a
        # 'tags' directory. There is no other relationship than
        # ancestry, which is expensive to discover and makes them hard
        # to update incrementally.  Worse, past revisions may be
        # referenced by tags far away in the future, requiring a deep
        # history traversal on every calculation.  Current code
        # performs a single backward traversal, tracking moves within
        # the tags directory (tag renaming) and recording a new tag
        # everytime a project is copied from outside the tags
        # directory. It also lists deleted tags, this behaviour may
        # change in the future.
        pendings = []
        tagspath = self.tags
        start = svn.ra.get_latest_revnum(self.ra)
        stream = self._getlog([self.tags], start, self.startrev)
        try:
            for entry in stream:
                origpaths, revnum, author, date, message = entry
                # (copyfrom_path, copyfrom_rev, destination) triples for
                # every copied path in this revision
                copies = [(e.copyfrom_path, e.copyfrom_rev, p) for p, e
                          in origpaths.iteritems() if e.copyfrom_path]
                # Apply moves/copies from more specific to general
                copies.sort(reverse=True)

                srctagspath = tagspath
                if copies and copies[-1][2] == tagspath:
                    # Track tags directory moves
                    srctagspath = copies.pop()[0]

                for source, sourcerev, dest in copies:
                    if not dest.startswith(tagspath + '/'):
                        continue
                    for tag in pendings:
                        if tag[0].startswith(dest):
                            tagpath = source + tag[0][len(dest):]
                            tag[:2] = [tagpath, sourcerev]
                            break
                    else:
                        pendings.append([source, sourcerev, dest])

                # Filter out tags with children coming from different
                # parts of the repository like:
                # /tags/tag.1 (from /trunk:10)
                # /tags/tag.1/foo (from /branches/foo:12)
                # Here/tags/tag.1 discarded as well as its children.
                # It happens with tools like cvs2svn. Such tags cannot
                # be represented in mercurial.
                addeds = dict((p, e.copyfrom_path) for p, e
                              in origpaths.iteritems()
                              if e.action == 'A' and e.copyfrom_path)
                badroots = set()
                for destroot in addeds:
                    for source, sourcerev, dest in pendings:
                        if (not dest.startswith(destroot + '/')
                            or source.startswith(addeds[destroot] + '/')):
                            continue
                        badroots.add(destroot)
                        break

                for badroot in badroots:
                    pendings = [p for p in pendings if p[2] != badroot
                                and not p[2].startswith(badroot + '/')]

                # Tell tag renamings from tag creations
                remainings = []
                for source, sourcerev, dest in pendings:
                    tagname = dest.split('/')[-1]
                    if source.startswith(srctagspath):
                        # Renamed within the tags tree: keep tracking it
                        remainings.append([source, sourcerev, tagname])
                        continue
                    if tagname in tags:
                        # Keep the latest tag value
                        continue
                    # From revision may be fake, get one with changes
                    try:
                        tagid = self.latest(source, sourcerev)
                        if tagid and tagname not in tags:
                            tags[tagname] = tagid
                    except SvnPathNotFound:
                        # It happens when we are following directories
                        # we assumed were copied with their parents
                        # but were really created in the tag
                        # directory.
                        pass
                pendings = remainings
                tagspath = srctagspath
        finally:
            stream.close()
        return tags
|
516 | 516 | |
|
    def converted(self, rev, destrev):
        """Record that source revision *rev* was converted to *destrev*
        by appending a line to .svn/hg-shamap in the working copy.
        No-op when there is no working copy.
        """
        if not self.wc:
            return
        if self.convertfp is None:
            # lazily opened in append mode; kept open across calls
            self.convertfp = open(os.path.join(self.wc, '.svn', 'hg-shamap'),
                                  'a')
        self.convertfp.write('%s %d\n' % (destrev, self.revnum(rev)))
        self.convertfp.flush()
|
525 | 525 | |
|
526 | 526 | def revid(self, revnum, module=None): |
|
527 | 527 | return 'svn:%s%s@%s' % (self.uuid, module or self.module, revnum) |
|
528 | 528 | |
|
529 | 529 | def revnum(self, rev): |
|
530 | 530 | return int(rev.split('@')[-1]) |
|
531 | 531 | |
|
532 | 532 | def revsplit(self, rev): |
|
533 | 533 | url, revnum = rev.rsplit('@', 1) |
|
534 | 534 | revnum = int(revnum) |
|
535 | 535 | parts = url.split('/', 1) |
|
536 | 536 | uuid = parts.pop(0)[4:] |
|
537 | 537 | mod = '' |
|
538 | 538 | if parts: |
|
539 | 539 | mod = '/' + parts[0] |
|
540 | 540 | return uuid, mod, revnum |
|
541 | 541 | |
|
    def latest(self, path, stop=0):
        """Find the latest revid affecting path, up to stop. It may return
        a revision in a different module, since a branch may be moved without
        a change being reported. Return None if computed module does not
        belong to rootmodule subtree.

        Raises SvnPathNotFound when the path cannot be found at or
        before *stop*.
        """
        if not path.startswith(self.rootmodule):
            # Requests on foreign branches may be forbidden at server level
            self.ui.debug('ignoring foreign branch %r\n' % path)
            return None

        if not stop:
            # default to the repository head
            stop = svn.ra.get_latest_revnum(self.ra)
        try:
            prevmodule = self.reparent('')
            dirent = svn.ra.stat(self.ra, path.strip('/'), stop)
            self.reparent(prevmodule)
        except SubversionException:
            dirent = None
        if not dirent:
            raise SvnPathNotFound(_('%s not found up to revision %d')
                                  % (path, stop))

        # stat() gives us the previous revision on this line of
        # development, but it might be in *another module*. Fetch the
        # log and detect renames down to the latest revision.
        stream = self._getlog([path], stop, dirent.created_rev)
        try:
            for entry in stream:
                paths, revnum, author, date, message = entry
                if revnum <= dirent.created_rev:
                    break

                for p in paths:
                    if not path.startswith(p) or not paths[p].copyfrom_path:
                        continue
                    # follow the rename back to its source path
                    newpath = paths[p].copyfrom_path + path[len(p):]
                    self.ui.debug("branch renamed from %s to %s at %d\n" %
                                  (path, newpath, revnum))
                    path = newpath
                    break
        finally:
            stream.close()

        if not path.startswith(self.rootmodule):
            self.ui.debug('ignoring foreign branch %r\n' % path)
            return None
        return self.revid(dirent.created_rev, path)
|
590 | 590 | |
|
    def reparent(self, module):
        """Reparent the svn transport and return the previous parent."""
        if self.prevmodule == module:
            # already parented there, nothing to do
            return module
        svnurl = self.baseurl + urllib.quote(module)
        prevmodule = self.prevmodule
        if prevmodule is None:
            # normalize the "never reparented" state to ''
            prevmodule = ''
        self.ui.debug("reparent to %s\n" % svnurl)
        svn.ra.reparent(self.ra, svnurl)
        self.prevmodule = module
        return prevmodule
|
603 | 603 | |
|
    def expandpaths(self, rev, paths, parents):
        """Expand the changed-path entries of revision *rev* into
        (changed files, removed files, copies) by recursing into
        changed directories.

        Returns (list(changed), removed-set, copies-dict); removed
        entries are also merged into the changed list.
        """
        changed, removed = set(), set()
        copies = {}

        new_module, revnum = self.revsplit(rev)[1:]
        if new_module != self.module:
            self.module = new_module
            self.reparent(self.module)

        for i, (path, ent) in enumerate(paths):
            self.ui.progress(_('scanning paths'), i, item=path,
                             total=len(paths))
            entrypath = self.getrelpath(path)

            kind = self._checkpath(entrypath, revnum)
            if kind == svn.core.svn_node_file:
                changed.add(self.recode(entrypath))
                if not ent.copyfrom_path or not parents:
                    continue
                # Copy sources not in parent revisions cannot be
                # represented, ignore their origin for now
                pmodule, prevnum = self.revsplit(parents[0])[1:]
                if ent.copyfrom_rev < prevnum:
                    continue
                copyfrom_path = self.getrelpath(ent.copyfrom_path, pmodule)
                if not copyfrom_path:
                    continue
                self.ui.debug("copied to %s from %s@%s\n" %
                              (entrypath, copyfrom_path, ent.copyfrom_rev))
                copies[self.recode(entrypath)] = self.recode(copyfrom_path)
            elif kind == 0: # gone, but had better be a deleted *file*
                self.ui.debug("gone from %s\n" % ent.copyfrom_rev)
                pmodule, prevnum = self.revsplit(parents[0])[1:]
                parentpath = pmodule + "/" + entrypath
                fromkind = self._checkpath(entrypath, prevnum, pmodule)

                if fromkind == svn.core.svn_node_file:
                    removed.add(self.recode(entrypath))
                elif fromkind == svn.core.svn_node_dir:
                    # a whole directory was deleted: remove every file
                    # it contained in the parent revision
                    oroot = parentpath.strip('/')
                    nroot = path.strip('/')
                    children = self._iterfiles(oroot, prevnum)
                    for childpath in children:
                        childpath = childpath.replace(oroot, nroot)
                        childpath = self.getrelpath("/" + childpath, pmodule)
                        if childpath:
                            removed.add(self.recode(childpath))
                else:
                    self.ui.debug('unknown path in revision %d: %s\n' % \
                                  (revnum, path))
            elif kind == svn.core.svn_node_dir:
                if ent.action == 'M':
                    # If the directory just had a prop change,
                    # then we shouldn't need to look for its children.
                    continue
                elif ent.action == 'R' and parents:
                    # If a directory is replacing a file, mark the previous
                    # file as deleted
                    pmodule, prevnum = self.revsplit(parents[0])[1:]
                    pkind = self._checkpath(entrypath, prevnum, pmodule)
                    if pkind == svn.core.svn_node_file:
                        removed.add(self.recode(entrypath))

                for childpath in self._iterfiles(path, revnum):
                    childpath = self.getrelpath("/" + childpath)
                    if childpath:
                        changed.add(self.recode(childpath))

                # Handle directory copies
                if not ent.copyfrom_path or not parents:
                    continue
                # Copy sources not in parent revisions cannot be
                # represented, ignore their origin for now
                pmodule, prevnum = self.revsplit(parents[0])[1:]
                if ent.copyfrom_rev < prevnum:
                    continue
                copyfrompath = self.getrelpath(ent.copyfrom_path, pmodule)
                if not copyfrompath:
                    continue
                self.ui.debug("mark %s came from %s:%d\n"
                              % (path, copyfrompath, ent.copyfrom_rev))
                children = self._iterfiles(ent.copyfrom_path, ent.copyfrom_rev)
                for childpath in children:
                    childpath = self.getrelpath("/" + childpath, pmodule)
                    if not childpath:
                        continue
                    copytopath = path + childpath[len(copyfrompath):]
                    copytopath = self.getrelpath(copytopath)
                    copies[self.recode(copytopath)] = self.recode(childpath)

        self.ui.progress(_('scanning paths'), None)
        changed.update(removed)
        return (list(changed), removed, copies)
|
697 | 697 | |
|
    def _fetch_revisions(self, from_revnum, to_revnum):
        """Fetch the svn log between the two revision numbers (inclusive,
        order-insensitive) and populate self.commits / self.paths with
        parsed commit objects, linking each changeset to its child.
        """
        if from_revnum < to_revnum:
            from_revnum, to_revnum = to_revnum, from_revnum

        self.child_cset = None

        def parselogentry(orig_paths, revnum, author, date, message):
            """Return the parsed commit object or None, and True if
            the revision is a branch root.
            """
            self.ui.debug("parsing revision %d (%d changes)\n" %
                          (revnum, len(orig_paths)))

            branched = False
            rev = self.revid(revnum)
            # branch log might return entries for a parent we already have

            if rev in self.commits or revnum < to_revnum:
                return None, branched

            parents = []
            # check whether this revision is the start of a branch or part
            # of a branch renaming
            orig_paths = sorted(orig_paths.iteritems())
            root_paths = [(p, e) for p, e in orig_paths
                          if self.module.startswith(p)]
            if root_paths:
                path, ent = root_paths[-1]
                if ent.copyfrom_path:
                    branched = True
                    newpath = ent.copyfrom_path + self.module[len(path):]
                    # ent.copyfrom_rev may not be the actual last revision
                    previd = self.latest(newpath, ent.copyfrom_rev)
                    if previd is not None:
                        prevmodule, prevnum = self.revsplit(previd)[1:]
                        if prevnum >= self.startrev:
                            parents = [previd]
                            self.ui.note(
                                _('found parent of branch %s at %d: %s\n') %
                                (self.module, prevnum, prevmodule))
                else:
                    self.ui.debug("no copyfrom path, don't know what to do.\n")

            paths = []
            # filter out unrelated paths
            for path, ent in orig_paths:
                if self.getrelpath(path) is None:
                    continue
                paths.append((path, ent))

            # Example SVN datetime. Includes microseconds.
            # ISO-8601 conformant
            # '2007-01-04T17:35:00.902377Z'
            date = util.parsedate(date[:19] + " UTC", ["%Y-%m-%dT%H:%M:%S"])

            log = message and self.recode(message) or ''
            author = author and self.recode(author) or ''
            try:
                # last path component names the branch; 'trunk' maps to
                # the default (unnamed) branch
                branch = self.module.split("/")[-1]
                if branch == 'trunk':
                    branch = ''
            except IndexError:
                branch = None

            cset = commit(author=author,
                          date=util.datestr(date),
                          desc=log,
                          parents=parents,
                          branch=branch,
                          rev=rev)

            self.commits[rev] = cset
            # The parents list is *shared* among self.paths and the
            # commit object. Both will be updated below.
            self.paths[rev] = (paths, cset.parents)
            if self.child_cset and not self.child_cset.parents:
                self.child_cset.parents[:] = [rev]
            self.child_cset = cset
            return cset, branched

        self.ui.note(_('fetching revision log for "%s" from %d to %d\n') %
                     (self.module, from_revnum, to_revnum))

        try:
            firstcset = None
            lastonbranch = False
            stream = self._getlog([self.module], from_revnum, to_revnum)
            try:
                for entry in stream:
                    paths, revnum, author, date, message = entry
                    if revnum < self.startrev:
                        lastonbranch = True
                        break
                    if not paths:
                        self.ui.debug('revision %d has no entries\n' % revnum)
                        # If we ever leave the loop on an empty
                        # revision, do not try to get a parent branch
                        lastonbranch = lastonbranch or revnum == 0
                        continue
                    cset, lastonbranch = parselogentry(paths, revnum, author,
                                                       date, message)
                    if cset:
                        firstcset = cset
                    if lastonbranch:
                        break
            finally:
                stream.close()

            if not lastonbranch and firstcset and not firstcset.parents:
                # The first revision of the sequence (the last fetched one)
                # has invalid parents if not a branch root. Find the parent
                # revision now, if any.
                try:
                    firstrevnum = self.revnum(firstcset.rev)
                    if firstrevnum > 1:
                        latest = self.latest(self.module, firstrevnum - 1)
                        if latest:
                            firstcset.parents.append(latest)
                except SvnPathNotFound:
                    pass
        except SubversionException, (inst, num):
            if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION:
                raise util.Abort(_('svn: branch has no revision %s') % to_revnum)
            raise
|
822 | 822 | |
|
    def getfile(self, file, rev):
        """Return (data, mode) for *file* at revision *rev*; mode is
        '', 'x' (executable) or 'l' (symlink, with the 'link ' payload
        prefix stripped). Raises IOError when the file does not exist
        at that revision.
        """
        # TODO: ra.get_file transmits the whole file instead of diffs.
        if file in self.removed:
            raise IOError()
        mode = ''
        try:
            new_module, revnum = self.revsplit(rev)[1:]
            if self.module != new_module:
                self.module = new_module
                self.reparent(self.module)
            io = StringIO()
            info = svn.ra.get_file(self.ra, file, revnum, io)
            data = io.getvalue()
            # ra.get_files() seems to keep a reference on the input buffer
            # preventing collection. Release it explicitely.
            io.close()
            if isinstance(info, list):
                info = info[-1]
            mode = ("svn:executable" in info) and 'x' or ''
            mode = ("svn:special" in info) and 'l' or mode
        except SubversionException, e:
            notfound = (svn.core.SVN_ERR_FS_NOT_FOUND,
                        svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND)
            if e.apr_err in notfound: # File not found
                raise IOError()
            raise
        if mode == 'l':
            link_prefix = "link "
            if data.startswith(link_prefix):
                data = data[len(link_prefix):]
        return data, mode
|
854 | 854 | |
|
    def _iterfiles(self, path, revnum):
        """Enumerate all files in path at revnum, recursively.

        Yields repository-relative paths (no leading slash).
        """
        path = path.strip('/')
        pool = Pool()
        rpath = '/'.join([self.baseurl, urllib.quote(path)]).strip('/')
        entries = svn.client.ls(rpath, optrev(revnum), True, self.ctx, pool)
        # keep only plain files; directories are traversed by ls itself
        return ((path + '/' + p) for p, e in entries.iteritems()
                if e.kind == svn.core.svn_node_file)
|
863 | 863 | |
|
    def getrelpath(self, path, module=None):
        """Return *path* relative to *module* (default: the tracked
        module), or None when the path is outside the module tree.
        """
        if module is None:
            module = self.module
        # Given the repository url of this wc, say
        # "http://server/plone/CMFPlone/branches/Plone-2_0-branch"
        # extract the "entry" portion (a relative path) from what
        # svn log --xml says, ie
        # "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py"
        # that is to say "tests/PloneTestCase.py"
        if path.startswith(module):
            relative = path.rstrip('/')[len(module):]
            if relative.startswith('/'):
                return relative[1:]
            elif relative == '':
                return relative

        # The path is outside our tracked tree...
        self.ui.debug('%r is not under %r, ignoring\n' % (path, module))
        return None
|
883 | 883 | |
|
    def _checkpath(self, path, revnum, module=None):
        """Return the svn node kind of *path* at *revnum*, temporarily
        reparenting to the repository root when *module* is given so the
        module-qualified path can be checked.
        """
        if module is not None:
            prevmodule = self.reparent('')
            path = module + '/' + path
        try:
            # ra.check_path does not like leading slashes very much, it leads
            # to PROPFIND subversion errors
            return svn.ra.check_path(self.ra, path.strip('/'), revnum)
        finally:
            if module is not None:
                self.reparent(prevmodule)
|
895 | ||
|
895 | ||
|
    def _getlog(self, paths, start, end, limit=0, discover_changed_paths=True,
                strict_node_history=False):
        """Stream svn log entries for *paths* between revisions *start*
        and *end* by delegating to a child 'hg debugsvnlog' process,
        which avoids memory issues with large logs. Returns a lazy
        logstream over the child's stdout.
        """
        # Normalize path names, svn >= 1.5 only wants paths relative to
        # supplied URL
        relpaths = []
        for p in paths:
            if not p.startswith('/'):
                p = self.module + '/' + p
            relpaths.append(p.strip('/'))
        args = [self.baseurl, relpaths, start, end, limit, discover_changed_paths,
                strict_node_history]
        arg = encodeargs(args)
        hgexe = util.hgexecutable()
        cmd = '%s debugsvnlog' % util.shellquote(hgexe)
        stdin, stdout = util.popen2(cmd)
        stdin.write(arg)
        try:
            stdin.close()
        except IOError:
            raise util.Abort(_('Mercurial failed to run itself, check'
                               ' hg executable is in PATH'))
        return logstream(stdout)
|
918 | 918 | |
|
# Shell hook installed into svn repositories created by the sink: permit
# only the revision property changes the conversion needs (svn:log edits
# and the hg:convert-* bookkeeping properties) and reject everything else.
pre_revprop_change = '''#!/bin/sh

REPOS="$1"
REV="$2"
USER="$3"
PROPNAME="$4"
ACTION="$5"

if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi
if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-branch" ]; then exit 0; fi
if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-rev" ]; then exit 0; fi

echo "Changing prohibited revision property" >&2
exit 1
'''
|
934 | 934 | |
|
class svn_sink(converter_sink, commandline):
    # Matches the revision number in 'svn commit' output,
    # e.g. "Committed revision 42."
    commit_re = re.compile(r'Committed revision (\d+).', re.M)
|
937 | 937 | |
|
    def prerun(self):
        # run svn commands from inside the working copy
        if self.wc:
            os.chdir(self.wc)
|
941 | 941 | |
|
    def postrun(self):
        # restore the directory we were in before prerun()
        if self.wc:
            os.chdir(self.cwd)
|
945 | 945 | |
|
    def join(self, name):
        """Return the path of *name* inside the working copy's .svn area."""
        return os.path.join(self.wc, '.svn', name)
|
948 | 948 | |
|
    def revmapfile(self):
        """Path of the source-to-destination revision map file."""
        return self.join('hg-shamap')
|
951 | 951 | |
|
    def authorfile(self):
        """Path of the author map file."""
        return self.join('hg-authormap')
|
954 | 954 | |
|
    def __init__(self, ui, path):
        """Open or create the svn sink at *path*.

        If *path* is already a checked-out working copy it is updated
        and used directly; otherwise a repository is created (when
        missing) and a fresh working copy is checked out next to it.
        A pre-revprop-change hook is installed into newly created
        repositories so the conversion may set revision properties.
        """
        converter_sink.__init__(self, ui, path)
        commandline.__init__(self, ui, 'svn')
        # pending per-commit state, flushed by putcommit()
        self.delete = []
        self.setexec = []
        self.delexec = []
        self.copies = []
        self.wc = None
        self.cwd = os.getcwd()

        path = os.path.realpath(path)

        created = False
        if os.path.isfile(os.path.join(path, '.svn', 'entries')):
            # path is already a working copy
            self.wc = path
            self.run0('update')
        else:
            wcpath = os.path.join(os.getcwd(), os.path.basename(path) + '-wc')

            if os.path.isdir(os.path.dirname(path)):
                if not os.path.exists(os.path.join(path, 'db', 'fs-type')):
                    ui.status(_('initializing svn repository %r\n') %
                              os.path.basename(path))
                    commandline(ui, 'svnadmin').run0('create', path)
                    created = path
                # build a file:// URL for the local repository
                path = util.normpath(path)
                if not path.startswith('/'):
                    path = '/' + path
                path = 'file://' + path

            ui.status(_('initializing svn working copy %r\n')
                      % os.path.basename(wcpath))
            self.run0('checkout', path, wcpath)

            self.wc = wcpath
        self.opener = util.opener(self.wc)
        self.wopener = util.opener(self.wc)
        self.childmap = mapfile(ui, self.join('hg-childmap'))
        # None when the filesystem cannot represent the executable bit
        self.is_exec = util.checkexec(self.wc) and util.is_exec or None

        if created:
            hook = os.path.join(created, 'hooks', 'pre-revprop-change')
            fp = open(hook, 'w')
            fp.write(pre_revprop_change)
            fp.close()
            util.set_flags(hook, False, True)

        xport = transport.SvnRaTransport(url=geturl(path))
        self.uuid = svn.ra.get_uuid(xport.ra)
|
1004 | 1004 | |
|
    def wjoin(self, *names):
        """Join *names* under the working copy root."""
        return os.path.join(self.wc, *names)
|
1007 | 1007 | |
|
    def putfile(self, filename, flags, data):
        """Write *data* to *filename* in the working copy and queue any
        svn:executable property change implied by *flags* ('x' for
        executable, 'l' for symlink).
        """
        if 'l' in flags:
            self.wopener.symlink(data, filename)
        else:
            try:
                # replace a previous symlink with a regular file
                if os.path.islink(self.wjoin(filename)):
                    os.unlink(filename)
            except OSError:
                pass
            self.wopener(filename, 'w').write(data)

            if self.is_exec:
                was_exec = self.is_exec(self.wjoin(filename))
            else:
                # On filesystems not supporting execute-bit, there is no way
                # to know if it is set but asking subversion. Setting it
                # systematically is just as expensive and much simpler.
                was_exec = 'x' not in flags

            util.set_flags(self.wjoin(filename), False, 'x' in flags)
            if was_exec:
                if 'x' not in flags:
                    self.delexec.append(filename)
            else:
                if 'x' in flags:
                    self.setexec.append(filename)
|
1034 | 1034 | |
|
    def _copyfile(self, source, dest):
        # SVN's copy command pukes if the destination file exists, but
        # our copyfile method expects to record a copy that has
        # already occurred. Cross the semantic gap.
        wdest = self.wjoin(dest)
        exists = os.path.lexists(wdest)
        if exists:
            # park the existing destination under a temporary name so
            # 'svn copy' succeeds, then restore its content afterwards
            fd, tempname = tempfile.mkstemp(
                prefix='hg-copy-', dir=os.path.dirname(wdest))
            os.close(fd)
            os.unlink(tempname)
            os.rename(wdest, tempname)
        try:
            self.run0('copy', source, dest)
        finally:
            if exists:
                try:
                    os.unlink(wdest)
                except OSError:
                    pass
                os.rename(tempname, wdest)
|
1056 | 1056 | |
|
    def dirs_of(self, files):
        """Return the set of directories covering *files*: every entry
        that is itself a directory plus all ancestor directories of
        each entry.
        """
        dirs = set()
        for f in files:
            if os.path.isdir(self.wjoin(f)):
                dirs.add(f)
            # strutil.rfindall presumably yields the indices of every
            # '/' in f, giving each ancestor prefix -- TODO confirm
            for i in strutil.rfindall(f, '/'):
                dirs.add(f[:i])
        return dirs
|
1065 | 1065 | |
|
    def add_dirs(self, files):
        """Schedule for svn addition every directory of *files* that is
        not yet under version control; return the added directories.
        """
        add_dirs = [d for d in sorted(self.dirs_of(files))
                    if not os.path.exists(self.wjoin(d, '.svn', 'entries'))]
        if add_dirs:
            self.xargs(add_dirs, 'add', non_recursive=True, quiet=True)
        return add_dirs
|
1072 | 1072 | |
|
1073 | 1073 | def add_files(self, files): |
|
1074 | 1074 | if files: |
|
1075 | 1075 | self.xargs(files, 'add', quiet=True) |
|
1076 | 1076 | return files |
|
1077 | 1077 | |
|
1078 | 1078 | def tidy_dirs(self, names): |
|
1079 | 1079 | deleted = [] |
|
1080 | 1080 | for d in sorted(self.dirs_of(names), reverse=True): |
|
1081 | 1081 | wd = self.wjoin(d) |
|
1082 | 1082 | if os.listdir(wd) == '.svn': |
|
1083 | 1083 | self.run0('delete', d) |
|
1084 | 1084 | deleted.append(d) |
|
1085 | 1085 | return deleted |
|
1086 | 1086 | |
|
    def addchild(self, parent, child):
        # remember which destination revision each parent maps to
        self.childmap[parent] = child
|
1089 | 1089 | |
|
    def revid(self, rev):
        """Build the 'svn:<uuid>@<rev>' identifier for a sink revision."""
        return u"svn:%s@%s" % (self.uuid, rev)
|
1092 | 1092 | |
|
    def putcommit(self, files, copies, parents, commit, source, revmap):
        """Apply *files* (with *copies*) from *source* to the working
        copy and commit them to svn, returning the new revision id.

        If a parent has already been converted (found in childmap), its
        recorded child revision is returned instead of committing again.
        """
        # Apply changes to working copy
        for f, v in files:
            try:
                data, mode = source.getfile(f, v)
            except IOError:
                # source raises IOError for removed files
                self.delete.append(f)
            else:
                self.putfile(f, mode, data)
                if f in copies:
                    self.copies.append([copies[f], f])
        files = [f[0] for f in files]

        for parent in parents:
            try:
                return self.revid(self.childmap[parent])
            except KeyError:
                pass
        entries = set(self.delete)
        files = frozenset(files)
        entries.update(self.add_dirs(files.difference(entries)))
        if self.copies:
            for s, d in self.copies:
                self._copyfile(s, d)
            self.copies = []
        if self.delete:
            self.xargs(self.delete, 'delete')
            self.delete = []
        entries.update(self.add_files(files.difference(entries)))
        entries.update(self.tidy_dirs(entries))
        if self.delexec:
            self.xargs(self.delexec, 'propdel', 'svn:executable')
            self.delexec = []
        if self.setexec:
            self.xargs(self.setexec, 'propset', 'svn:executable', '*')
            self.setexec = []

        # svn takes the commit message from a file
        fd, messagefile = tempfile.mkstemp(prefix='hg-convert-')
        fp = os.fdopen(fd, 'w')
        fp.write(commit.desc)
        fp.close()
        try:
            output = self.run0('commit',
                               username=util.shortuser(commit.author),
                               file=messagefile,
                               encoding='utf-8')
            try:
                rev = self.commit_re.search(output).group(1)
            except AttributeError:
                # no revision number in the output: nothing was committed
                if not files:
                    return parents[0]
                self.ui.warn(_('unexpected svn output:\n'))
                self.ui.warn(output)
                raise util.Abort(_('unable to cope with svn output'))
            if commit.rev:
                self.run('propset', 'hg:convert-rev', commit.rev,
                         revprop=True, revision=rev)
            if commit.branch and commit.branch != 'default':
                self.run('propset', 'hg:convert-branch', commit.branch,
                         revprop=True, revision=rev)
            for parent in parents:
                self.addchild(parent, rev)
            return self.revid(rev)
        finally:
            os.unlink(messagefile)
|
1158 | 1158 | |
|
    def puttags(self, tags):
        # tag conversion to svn is not supported; warn and report nothing
        self.ui.warn(_('writing Subversion tags is not yet implemented\n'))
        return None, None
@@ -1,642 +1,642 | |||
|
1 | 1 | # dispatch.py - command dispatching for mercurial |
|
2 | 2 | # |
|
3 | 3 | # Copyright 2005-2007 Matt Mackall <mpm@selenic.com> |
|
4 | 4 | # |
|
5 | 5 | # This software may be used and distributed according to the terms of the |
|
6 | 6 | # GNU General Public License version 2 or any later version. |
|
7 | 7 | |
|
8 | 8 | from i18n import _ |
|
9 | 9 | import os, sys, atexit, signal, pdb, socket, errno, shlex, time, traceback, re |
|
10 | 10 | import util, commands, hg, fancyopts, extensions, hook, error |
|
11 | 11 | import cmdutil, encoding |
|
12 | 12 | import ui as uimod |
|
13 | 13 | |
|
14 | 14 | def run(): |
|
15 | 15 | "run the command in sys.argv" |
|
16 | 16 | sys.exit(dispatch(sys.argv[1:])) |
|
17 | 17 | |
|
18 | 18 | def dispatch(args): |
|
19 | 19 | "run the command specified in args" |
|
20 | 20 | try: |
|
21 | 21 | u = uimod.ui() |
|
22 | 22 | if '--traceback' in args: |
|
23 | 23 | u.setconfig('ui', 'traceback', 'on') |
|
24 | 24 | except util.Abort, inst: |
|
25 | 25 | sys.stderr.write(_("abort: %s\n") % inst) |
|
26 | 26 | if inst.hint: |
|
27 | 27 | sys.stderr.write("(%s)\n" % inst.hint) |
|
28 | 28 | return -1 |
|
29 | 29 | except error.ParseError, inst: |
|
30 | 30 | if len(inst.args) > 1: |
|
31 | 31 | sys.stderr.write(_("hg: parse error at %s: %s\n") % |
|
32 | 32 | (inst.args[1], inst.args[0])) |
|
33 | 33 | else: |
|
34 | 34 | sys.stderr.write(_("hg: parse error: %s\n") % inst.args[0]) |
|
35 | 35 | return -1 |
|
36 | 36 | return _runcatch(u, args) |
|
37 | 37 | |
|
38 | 38 | def _runcatch(ui, args): |
|
39 | 39 | def catchterm(*args): |
|
40 | 40 | raise error.SignalInterrupt |
|
41 | 41 | |
|
42 | 42 | try: |
|
43 | 43 | for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM': |
|
44 | 44 | num = getattr(signal, name, None) |
|
45 | 45 | if num: |
|
46 | 46 | signal.signal(num, catchterm) |
|
47 | 47 | except ValueError: |
|
48 | 48 | pass # happens if called in a thread |
|
49 | 49 | |
|
50 | 50 | try: |
|
51 | 51 | try: |
|
52 | 52 | # enter the debugger before command execution |
|
53 | 53 | if '--debugger' in args: |
|
54 | 54 | ui.warn(_("entering debugger - " |
|
55 | 55 | "type c to continue starting hg or h for help\n")) |
|
56 | 56 | pdb.set_trace() |
|
57 | 57 | try: |
|
58 | 58 | return _dispatch(ui, args) |
|
59 | 59 | finally: |
|
60 | 60 | ui.flush() |
|
61 | 61 | except: |
|
62 | 62 | # enter the debugger when we hit an exception |
|
63 | 63 | if '--debugger' in args: |
|
64 | 64 | traceback.print_exc() |
|
65 | 65 | pdb.post_mortem(sys.exc_info()[2]) |
|
66 | 66 | ui.traceback() |
|
67 | 67 | raise |
|
68 | 68 | |
|
69 | 69 | # Global exception handling, alphabetically |
|
70 | 70 | # Mercurial-specific first, followed by built-in and library exceptions |
|
71 | 71 | except error.AmbiguousCommand, inst: |
|
72 | 72 | ui.warn(_("hg: command '%s' is ambiguous:\n %s\n") % |
|
73 | 73 | (inst.args[0], " ".join(inst.args[1]))) |
|
74 | 74 | except error.ParseError, inst: |
|
75 | 75 | if len(inst.args) > 1: |
|
76 | 76 | ui.warn(_("hg: parse error at %s: %s\n") % |
|
77 | 77 | (inst.args[1], inst.args[0])) |
|
78 | 78 | else: |
|
79 | 79 | ui.warn(_("hg: parse error: %s\n") % inst.args[0]) |
|
80 | 80 | return -1 |
|
81 | 81 | except error.LockHeld, inst: |
|
82 | 82 | if inst.errno == errno.ETIMEDOUT: |
|
83 | 83 | reason = _('timed out waiting for lock held by %s') % inst.locker |
|
84 | 84 | else: |
|
85 | 85 | reason = _('lock held by %s') % inst.locker |
|
86 | 86 | ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason)) |
|
87 | 87 | except error.LockUnavailable, inst: |
|
88 | 88 | ui.warn(_("abort: could not lock %s: %s\n") % |
|
89 | 89 | (inst.desc or inst.filename, inst.strerror)) |
|
90 | 90 | except error.CommandError, inst: |
|
91 | 91 | if inst.args[0]: |
|
92 | 92 | ui.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1])) |
|
93 | 93 | commands.help_(ui, inst.args[0]) |
|
94 | 94 | else: |
|
95 | 95 | ui.warn(_("hg: %s\n") % inst.args[1]) |
|
96 | 96 | commands.help_(ui, 'shortlist') |
|
97 | 97 | except error.RepoError, inst: |
|
98 | 98 | ui.warn(_("abort: %s!\n") % inst) |
|
99 | 99 | except error.ResponseError, inst: |
|
100 | 100 | ui.warn(_("abort: %s") % inst.args[0]) |
|
101 | 101 | if not isinstance(inst.args[1], basestring): |
|
102 | 102 | ui.warn(" %r\n" % (inst.args[1],)) |
|
103 | 103 | elif not inst.args[1]: |
|
104 | 104 | ui.warn(_(" empty string\n")) |
|
105 | 105 | else: |
|
106 | 106 | ui.warn("\n%r\n" % util.ellipsis(inst.args[1])) |
|
107 | 107 | except error.RevlogError, inst: |
|
108 | 108 | ui.warn(_("abort: %s!\n") % inst) |
|
109 | 109 | except error.SignalInterrupt: |
|
110 | 110 | ui.warn(_("killed!\n")) |
|
111 | 111 | except error.UnknownCommand, inst: |
|
112 | 112 | ui.warn(_("hg: unknown command '%s'\n") % inst.args[0]) |
|
113 | 113 | try: |
|
114 | 114 | # check if the command is in a disabled extension |
|
115 | 115 | # (but don't check for extensions themselves) |
|
116 | 116 | commands.help_(ui, inst.args[0], unknowncmd=True) |
|
117 | 117 | except error.UnknownCommand: |
|
118 | 118 | commands.help_(ui, 'shortlist') |
|
119 | 119 | except util.Abort, inst: |
|
120 | 120 | ui.warn(_("abort: %s\n") % inst) |
|
121 | 121 | if inst.hint: |
|
122 | 122 | ui.warn(_("(%s)\n") % inst.hint) |
|
123 | 123 | except ImportError, inst: |
|
124 | 124 | ui.warn(_("abort: %s!\n") % inst) |
|
125 | 125 | m = str(inst).split()[-1] |
|
126 | 126 | if m in "mpatch bdiff".split(): |
|
127 | 127 | ui.warn(_("(did you forget to compile extensions?)\n")) |
|
128 | 128 | elif m in "zlib".split(): |
|
129 | 129 | ui.warn(_("(is your Python install correct?)\n")) |
|
130 | 130 | except IOError, inst: |
|
131 | 131 | if hasattr(inst, "code"): |
|
132 | 132 | ui.warn(_("abort: %s\n") % inst) |
|
133 | 133 | elif hasattr(inst, "reason"): |
|
134 | 134 | try: # usually it is in the form (errno, strerror) |
|
135 | 135 | reason = inst.reason.args[1] |
|
136 | 136 | except: # it might be anything, for example a string |
|
137 | 137 | reason = inst.reason |
|
138 | 138 | ui.warn(_("abort: error: %s\n") % reason) |
|
139 | 139 | elif hasattr(inst, "args") and inst.args[0] == errno.EPIPE: |
|
140 | 140 | if ui.debugflag: |
|
141 | 141 | ui.warn(_("broken pipe\n")) |
|
142 | 142 | elif getattr(inst, "strerror", None): |
|
143 | 143 | if getattr(inst, "filename", None): |
|
144 | 144 | ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename)) |
|
145 | 145 | else: |
|
146 | 146 | ui.warn(_("abort: %s\n") % inst.strerror) |
|
147 | 147 | else: |
|
148 | 148 | raise |
|
149 | 149 | except OSError, inst: |
|
150 | 150 | if getattr(inst, "filename", None): |
|
151 | 151 | ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename)) |
|
152 | 152 | else: |
|
153 | 153 | ui.warn(_("abort: %s\n") % inst.strerror) |
|
154 | 154 | except KeyboardInterrupt: |
|
155 | 155 | try: |
|
156 | 156 | ui.warn(_("interrupted!\n")) |
|
157 | 157 | except IOError, inst: |
|
158 | 158 | if inst.errno == errno.EPIPE: |
|
159 | 159 | if ui.debugflag: |
|
160 | 160 | ui.warn(_("\nbroken pipe\n")) |
|
161 | 161 | else: |
|
162 | 162 | raise |
|
163 | 163 | except MemoryError: |
|
164 | 164 | ui.warn(_("abort: out of memory\n")) |
|
165 | 165 | except SystemExit, inst: |
|
166 | 166 | # Commands shouldn't sys.exit directly, but give a return code. |
|
167 | 167 | # Just in case catch this and and pass exit code to caller. |
|
168 | 168 | return inst.code |
|
169 | 169 | except socket.error, inst: |
|
170 | 170 | ui.warn(_("abort: %s\n") % inst.args[-1]) |
|
171 | 171 | except: |
|
172 | 172 | ui.warn(_("** unknown exception encountered, details follow\n")) |
|
173 | 173 | ui.warn(_("** report bug details to " |
|
174 | 174 | "http://mercurial.selenic.com/bts/\n")) |
|
175 | 175 | ui.warn(_("** or mercurial@selenic.com\n")) |
|
176 | 176 | ui.warn(_("** Python %s\n") % sys.version.replace('\n', '')) |
|
177 | 177 | ui.warn(_("** Mercurial Distributed SCM (version %s)\n") |
|
178 | 178 | % util.version()) |
|
179 | 179 | ui.warn(_("** Extensions loaded: %s\n") |
|
180 | 180 | % ", ".join([x[0] for x in extensions.extensions()])) |
|
181 | 181 | raise |
|
182 | 182 | |
|
183 | 183 | return -1 |
|
184 | 184 | |
|
185 | 185 | def aliasargs(fn): |
|
186 | 186 | if hasattr(fn, 'args'): |
|
187 | 187 | return fn.args |
|
188 | 188 | return [] |
|
189 | 189 | |
|
190 | 190 | class cmdalias(object): |
|
191 | 191 | def __init__(self, name, definition, cmdtable): |
|
192 | 192 | self.name = self.cmd = name |
|
193 | 193 | self.cmdname = '' |
|
194 | 194 | self.definition = definition |
|
195 | 195 | self.args = [] |
|
196 | 196 | self.opts = [] |
|
197 | 197 | self.help = '' |
|
198 | 198 | self.norepo = True |
|
199 | 199 | self.badalias = False |
|
200 | 200 | |
|
201 | 201 | try: |
|
202 | 202 | aliases, entry = cmdutil.findcmd(self.name, cmdtable) |
|
203 | 203 | for alias, e in cmdtable.iteritems(): |
|
204 | 204 | if e is entry: |
|
205 | 205 | self.cmd = alias |
|
206 | 206 | break |
|
207 | 207 | self.shadows = True |
|
208 | 208 | except error.UnknownCommand: |
|
209 | 209 | self.shadows = False |
|
210 | 210 | |
|
211 | 211 | if not self.definition: |
|
212 | 212 | def fn(ui, *args): |
|
213 | 213 | ui.warn(_("no definition for alias '%s'\n") % self.name) |
|
214 | 214 | return 1 |
|
215 | 215 | self.fn = fn |
|
216 | 216 | self.badalias = True |
|
217 | 217 | |
|
218 | 218 | return |
|
219 | 219 | |
|
220 | 220 | if self.definition.startswith('!'): |
|
221 | 221 | self.shell = True |
|
222 | 222 | def fn(ui, *args): |
|
223 | 223 | env = {'HG_ARGS': ' '.join((self.name,) + args)} |
|
224 | 224 | def _checkvar(m): |
|
225 | 225 | if int(m.groups()[0]) <= len(args): |
|
226 | 226 | return m.group() |
|
227 | 227 | else: |
|
228 | 228 | return '' |
|
229 | 229 | cmd = re.sub(r'\$(\d+)', _checkvar, self.definition[1:]) |
|
230 | 230 | replace = dict((str(i + 1), arg) for i, arg in enumerate(args)) |
|
231 | 231 | replace['0'] = self.name |
|
232 | 232 | replace['@'] = ' '.join(args) |
|
233 | 233 | cmd = util.interpolate(r'\$', replace, cmd) |
|
234 | 234 | return util.system(cmd, environ=env) |
|
235 | 235 | self.fn = fn |
|
236 | 236 | return |
|
237 | 237 | |
|
238 | 238 | args = shlex.split(self.definition) |
|
239 | 239 | self.cmdname = cmd = args.pop(0) |
|
240 | 240 | args = map(util.expandpath, args) |
|
241 | 241 | |
|
242 | 242 | for invalidarg in ("--cwd", "-R", "--repository", "--repo"): |
|
243 | 243 | if _earlygetopt([invalidarg], args): |
|
244 | 244 | def fn(ui, *args): |
|
245 | 245 | ui.warn(_("error in definition for alias '%s': %s may only " |
|
246 | 246 | "be given on the command line\n") |
|
247 | 247 | % (self.name, invalidarg)) |
|
248 | 248 | return 1 |
|
249 | 249 | |
|
250 | 250 | self.fn = fn |
|
251 | 251 | self.badalias = True |
|
252 | 252 | return |
|
253 | 253 | |
|
254 | 254 | try: |
|
255 | 255 | tableentry = cmdutil.findcmd(cmd, cmdtable, False)[1] |
|
256 | 256 | if len(tableentry) > 2: |
|
257 | 257 | self.fn, self.opts, self.help = tableentry |
|
258 | 258 | else: |
|
259 | 259 | self.fn, self.opts = tableentry |
|
260 | 260 | |
|
261 | 261 | self.args = aliasargs(self.fn) + args |
|
262 | 262 | if cmd not in commands.norepo.split(' '): |
|
263 | 263 | self.norepo = False |
|
264 | 264 | if self.help.startswith("hg " + cmd): |
|
265 | 265 | # drop prefix in old-style help lines so hg shows the alias |
|
266 | 266 | self.help = self.help[4 + len(cmd):] |
|
267 | 267 | self.__doc__ = self.fn.__doc__ |
|
268 | 268 | |
|
269 | 269 | except error.UnknownCommand: |
|
270 | 270 | def fn(ui, *args): |
|
271 | 271 | ui.warn(_("alias '%s' resolves to unknown command '%s'\n") \ |
|
272 | 272 | % (self.name, cmd)) |
|
273 | 273 | try: |
|
274 | 274 | # check if the command is in a disabled extension |
|
275 | 275 | commands.help_(ui, cmd, unknowncmd=True) |
|
276 | 276 | except error.UnknownCommand: |
|
277 | 277 | pass |
|
278 | 278 | return 1 |
|
279 | 279 | self.fn = fn |
|
280 | 280 | self.badalias = True |
|
281 | 281 | except error.AmbiguousCommand: |
|
282 | 282 | def fn(ui, *args): |
|
283 | 283 | ui.warn(_("alias '%s' resolves to ambiguous command '%s'\n") \ |
|
284 | 284 | % (self.name, cmd)) |
|
285 | 285 | return 1 |
|
286 | 286 | self.fn = fn |
|
287 | 287 | self.badalias = True |
|
288 | 288 | |
|
289 | 289 | def __call__(self, ui, *args, **opts): |
|
290 | 290 | if self.shadows: |
|
291 | 291 | ui.debug("alias '%s' shadows command '%s'\n" % |
|
292 | 292 | (self.name, self.cmdname)) |
|
293 | 293 | |
|
294 | 294 | if self.definition.startswith('!'): |
|
295 | 295 | return self.fn(ui, *args, **opts) |
|
296 | 296 | else: |
|
297 | 297 | try: |
|
298 | 298 | util.checksignature(self.fn)(ui, *args, **opts) |
|
299 | 299 | except error.SignatureError: |
|
300 | 300 | args = ' '.join([self.cmdname] + self.args) |
|
301 | 301 | ui.debug("alias '%s' expands to '%s'\n" % (self.name, args)) |
|
302 | 302 | raise |
|
303 | 303 | |
|
304 | 304 | def addaliases(ui, cmdtable): |
|
305 | 305 | # aliases are processed after extensions have been loaded, so they |
|
306 | 306 | # may use extension commands. Aliases can also use other alias definitions, |
|
307 | 307 | # but only if they have been defined prior to the current definition. |
|
308 | 308 | for alias, definition in ui.configitems('alias'): |
|
309 | 309 | aliasdef = cmdalias(alias, definition, cmdtable) |
|
310 | 310 | cmdtable[aliasdef.cmd] = (aliasdef, aliasdef.opts, aliasdef.help) |
|
311 | 311 | if aliasdef.norepo: |
|
312 | 312 | commands.norepo += ' %s' % alias |
|
313 | 313 | |
|
314 | 314 | def _parse(ui, args): |
|
315 | 315 | options = {} |
|
316 | 316 | cmdoptions = {} |
|
317 | 317 | |
|
318 | 318 | try: |
|
319 | 319 | args = fancyopts.fancyopts(args, commands.globalopts, options) |
|
320 | 320 | except fancyopts.getopt.GetoptError, inst: |
|
321 | 321 | raise error.CommandError(None, inst) |
|
322 | 322 | |
|
323 | 323 | if args: |
|
324 | 324 | cmd, args = args[0], args[1:] |
|
325 | 325 | aliases, entry = cmdutil.findcmd(cmd, commands.table, |
|
326 | 326 | ui.config("ui", "strict")) |
|
327 | 327 | cmd = aliases[0] |
|
328 | 328 | args = aliasargs(entry[0]) + args |
|
329 | 329 | defaults = ui.config("defaults", cmd) |
|
330 | 330 | if defaults: |
|
331 | 331 | args = map(util.expandpath, shlex.split(defaults)) + args |
|
332 | 332 | c = list(entry[1]) |
|
333 | 333 | else: |
|
334 | 334 | cmd = None |
|
335 | 335 | c = [] |
|
336 | 336 | |
|
337 | 337 | # combine global options into local |
|
338 | 338 | for o in commands.globalopts: |
|
339 | 339 | c.append((o[0], o[1], options[o[1]], o[3])) |
|
340 | 340 | |
|
341 | 341 | try: |
|
342 | 342 | args = fancyopts.fancyopts(args, c, cmdoptions, True) |
|
343 | 343 | except fancyopts.getopt.GetoptError, inst: |
|
344 | 344 | raise error.CommandError(cmd, inst) |
|
345 | 345 | |
|
346 | 346 | # separate global options back out |
|
347 | 347 | for o in commands.globalopts: |
|
348 | 348 | n = o[1] |
|
349 | 349 | options[n] = cmdoptions[n] |
|
350 | 350 | del cmdoptions[n] |
|
351 | 351 | |
|
352 | 352 | return (cmd, cmd and entry[0] or None, args, options, cmdoptions) |
|
353 | 353 | |
|
354 | 354 | def _parseconfig(ui, config): |
|
355 | 355 | """parse the --config options from the command line""" |
|
356 | 356 | for cfg in config: |
|
357 | 357 | try: |
|
358 | 358 | name, value = cfg.split('=', 1) |
|
359 | 359 | section, name = name.split('.', 1) |
|
360 | 360 | if not section or not name: |
|
361 | 361 | raise IndexError |
|
362 | 362 | ui.setconfig(section, name, value) |
|
363 | 363 | except (IndexError, ValueError): |
|
364 | 364 | raise util.Abort(_('malformed --config option: %r ' |
|
365 | 365 | '(use --config section.name=value)') % cfg) |
|
366 | 366 | |
|
367 | 367 | def _earlygetopt(aliases, args): |
|
368 | 368 | """Return list of values for an option (or aliases). |
|
369 | 369 | |
|
370 | 370 | The values are listed in the order they appear in args. |
|
371 | 371 | The options and values are removed from args. |
|
372 | 372 | """ |
|
373 | 373 | try: |
|
374 | 374 | argcount = args.index("--") |
|
375 | 375 | except ValueError: |
|
376 | 376 | argcount = len(args) |
|
377 | 377 | shortopts = [opt for opt in aliases if len(opt) == 2] |
|
378 | 378 | values = [] |
|
379 | 379 | pos = 0 |
|
380 | 380 | while pos < argcount: |
|
381 | 381 | if args[pos] in aliases: |
|
382 | 382 | if pos + 1 >= argcount: |
|
383 | 383 | # ignore and let getopt report an error if there is no value |
|
384 | 384 | break |
|
385 | 385 | del args[pos] |
|
386 | 386 | values.append(args.pop(pos)) |
|
387 | 387 | argcount -= 2 |
|
388 | 388 | elif args[pos][:2] in shortopts: |
|
389 | 389 | # short option can have no following space, e.g. hg log -Rfoo |
|
390 | 390 | values.append(args.pop(pos)[2:]) |
|
391 | 391 | argcount -= 1 |
|
392 | 392 | else: |
|
393 | 393 | pos += 1 |
|
394 | 394 | return values |
|
395 | 395 | |
|
396 | 396 | def runcommand(lui, repo, cmd, fullargs, ui, options, d, cmdpats, cmdoptions): |
|
397 | 397 | # run pre-hook, and abort if it fails |
|
398 | 398 | ret = hook.hook(lui, repo, "pre-%s" % cmd, False, args=" ".join(fullargs), |
|
399 | 399 | pats=cmdpats, opts=cmdoptions) |
|
400 | 400 | if ret: |
|
401 | 401 | return ret |
|
402 | 402 | ret = _runcommand(ui, options, cmd, d) |
|
403 | 403 | # run post-hook, passing command result |
|
404 | 404 | hook.hook(lui, repo, "post-%s" % cmd, False, args=" ".join(fullargs), |
|
405 | 405 | result=ret, pats=cmdpats, opts=cmdoptions) |
|
406 | 406 | return ret |
|
407 | 407 | |
|
408 | 408 | def _getlocal(ui, rpath): |
|
409 | 409 | """Return (path, local ui object) for the given target path. |
|
410 | ||
|
410 | ||
|
411 | 411 | Takes paths in [cwd]/.hg/hgrc into account." |
|
412 | 412 | """ |
|
413 | 413 | try: |
|
414 | 414 | wd = os.getcwd() |
|
415 | 415 | except OSError, e: |
|
416 | 416 | raise util.Abort(_("error getting current working directory: %s") % |
|
417 | 417 | e.strerror) |
|
418 | 418 | path = cmdutil.findrepo(wd) or "" |
|
419 | 419 | if not path: |
|
420 | 420 | lui = ui |
|
421 | 421 | else: |
|
422 | 422 | lui = ui.copy() |
|
423 | 423 | lui.readconfig(os.path.join(path, ".hg", "hgrc"), path) |
|
424 | 424 | |
|
425 | 425 | if rpath: |
|
426 | 426 | path = lui.expandpath(rpath[-1]) |
|
427 | 427 | lui = ui.copy() |
|
428 | 428 | lui.readconfig(os.path.join(path, ".hg", "hgrc"), path) |
|
429 | 429 | |
|
430 | 430 | return path, lui |
|
431 | 431 | |
|
432 | 432 | def _checkshellalias(ui, args): |
|
433 | 433 | cwd = os.getcwd() |
|
434 | 434 | norepo = commands.norepo |
|
435 | 435 | options = {} |
|
436 | 436 | |
|
437 | 437 | try: |
|
438 | 438 | args = fancyopts.fancyopts(args, commands.globalopts, options) |
|
439 | 439 | except fancyopts.getopt.GetoptError: |
|
440 | 440 | return |
|
441 | 441 | |
|
442 | 442 | if not args: |
|
443 | 443 | return |
|
444 | 444 | |
|
445 | 445 | _parseconfig(ui, options['config']) |
|
446 | 446 | if options['cwd']: |
|
447 | 447 | os.chdir(options['cwd']) |
|
448 | 448 | |
|
449 | 449 | path, lui = _getlocal(ui, [options['repository']]) |
|
450 | 450 | |
|
451 | 451 | cmdtable = commands.table.copy() |
|
452 | 452 | addaliases(lui, cmdtable) |
|
453 | 453 | |
|
454 | 454 | cmd = args[0] |
|
455 | 455 | try: |
|
456 | 456 | aliases, entry = cmdutil.findcmd(cmd, cmdtable, lui.config("ui", "strict")) |
|
457 | 457 | except error.UnknownCommand: |
|
458 | 458 | commands.norepo = norepo |
|
459 | 459 | os.chdir(cwd) |
|
460 | 460 | return |
|
461 | 461 | |
|
462 | 462 | cmd = aliases[0] |
|
463 | 463 | fn = entry[0] |
|
464 | 464 | |
|
465 | 465 | if cmd and hasattr(fn, 'shell'): |
|
466 | 466 | d = lambda: fn(ui, *args[1:]) |
|
467 | 467 | return lambda: runcommand(lui, None, cmd, args[:1], ui, options, d, [], {}) |
|
468 | 468 | |
|
469 | 469 | commands.norepo = norepo |
|
470 | 470 | os.chdir(cwd) |
|
471 | 471 | |
|
472 | 472 | _loaded = set() |
|
473 | 473 | def _dispatch(ui, args): |
|
474 | 474 | shellaliasfn = _checkshellalias(ui, args) |
|
475 | 475 | if shellaliasfn: |
|
476 | 476 | return shellaliasfn() |
|
477 | 477 | |
|
478 | 478 | # read --config before doing anything else |
|
479 | 479 | # (e.g. to change trust settings for reading .hg/hgrc) |
|
480 | 480 | _parseconfig(ui, _earlygetopt(['--config'], args)) |
|
481 | 481 | |
|
482 | 482 | # check for cwd |
|
483 | 483 | cwd = _earlygetopt(['--cwd'], args) |
|
484 | 484 | if cwd: |
|
485 | 485 | os.chdir(cwd[-1]) |
|
486 | 486 | |
|
487 | 487 | rpath = _earlygetopt(["-R", "--repository", "--repo"], args) |
|
488 | 488 | path, lui = _getlocal(ui, rpath) |
|
489 | 489 | |
|
490 | 490 | # Configure extensions in phases: uisetup, extsetup, cmdtable, and |
|
491 | 491 | # reposetup. Programs like TortoiseHg will call _dispatch several |
|
492 | 492 | # times so we keep track of configured extensions in _loaded. |
|
493 | 493 | extensions.loadall(lui) |
|
494 | 494 | exts = [ext for ext in extensions.extensions() if ext[0] not in _loaded] |
|
495 | 495 | # Propagate any changes to lui.__class__ by extensions |
|
496 | 496 | ui.__class__ = lui.__class__ |
|
497 | 497 | |
|
498 | 498 | # (uisetup and extsetup are handled in extensions.loadall) |
|
499 | 499 | |
|
500 | 500 | for name, module in exts: |
|
501 | 501 | cmdtable = getattr(module, 'cmdtable', {}) |
|
502 | 502 | overrides = [cmd for cmd in cmdtable if cmd in commands.table] |
|
503 | 503 | if overrides: |
|
504 | 504 | ui.warn(_("extension '%s' overrides commands: %s\n") |
|
505 | 505 | % (name, " ".join(overrides))) |
|
506 | 506 | commands.table.update(cmdtable) |
|
507 | 507 | _loaded.add(name) |
|
508 | 508 | |
|
509 | 509 | # (reposetup is handled in hg.repository) |
|
510 | 510 | |
|
511 | 511 | addaliases(lui, commands.table) |
|
512 | 512 | |
|
513 | 513 | # check for fallback encoding |
|
514 | 514 | fallback = lui.config('ui', 'fallbackencoding') |
|
515 | 515 | if fallback: |
|
516 | 516 | encoding.fallbackencoding = fallback |
|
517 | 517 | |
|
518 | 518 | fullargs = args |
|
519 | 519 | cmd, func, args, options, cmdoptions = _parse(lui, args) |
|
520 | 520 | |
|
521 | 521 | if options["config"]: |
|
522 | 522 | raise util.Abort(_("option --config may not be abbreviated!")) |
|
523 | 523 | if options["cwd"]: |
|
524 | 524 | raise util.Abort(_("option --cwd may not be abbreviated!")) |
|
525 | 525 | if options["repository"]: |
|
526 | 526 | raise util.Abort(_( |
|
527 | 527 | "Option -R has to be separated from other options (e.g. not -qR) " |
|
528 | 528 | "and --repository may only be abbreviated as --repo!")) |
|
529 | 529 | |
|
530 | 530 | if options["encoding"]: |
|
531 | 531 | encoding.encoding = options["encoding"] |
|
532 | 532 | if options["encodingmode"]: |
|
533 | 533 | encoding.encodingmode = options["encodingmode"] |
|
534 | 534 | if options["time"]: |
|
535 | 535 | def get_times(): |
|
536 | 536 | t = os.times() |
|
537 | 537 | if t[4] == 0.0: # Windows leaves this as zero, so use time.clock() |
|
538 | 538 | t = (t[0], t[1], t[2], t[3], time.clock()) |
|
539 | 539 | return t |
|
540 | 540 | s = get_times() |
|
541 | 541 | def print_time(): |
|
542 | 542 | t = get_times() |
|
543 | 543 | ui.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") % |
|
544 | 544 | (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3])) |
|
545 | 545 | atexit.register(print_time) |
|
546 | 546 | |
|
547 | 547 | if options['verbose'] or options['debug'] or options['quiet']: |
|
548 | 548 | ui.setconfig('ui', 'verbose', str(bool(options['verbose']))) |
|
549 | 549 | ui.setconfig('ui', 'debug', str(bool(options['debug']))) |
|
550 | 550 | ui.setconfig('ui', 'quiet', str(bool(options['quiet']))) |
|
551 | 551 | if options['traceback']: |
|
552 | 552 | ui.setconfig('ui', 'traceback', 'on') |
|
553 | 553 | if options['noninteractive']: |
|
554 | 554 | ui.setconfig('ui', 'interactive', 'off') |
|
555 | 555 | |
|
556 | 556 | if options['help']: |
|
557 | 557 | return commands.help_(ui, cmd, options['version']) |
|
558 | 558 | elif options['version']: |
|
559 | 559 | return commands.version_(ui) |
|
560 | 560 | elif not cmd: |
|
561 | 561 | return commands.help_(ui, 'shortlist') |
|
562 | 562 | |
|
563 | 563 | repo = None |
|
564 | 564 | cmdpats = args[:] |
|
565 | 565 | if cmd not in commands.norepo.split(): |
|
566 | 566 | try: |
|
567 | 567 | repo = hg.repository(ui, path=path) |
|
568 | 568 | ui = repo.ui |
|
569 | 569 | if not repo.local(): |
|
570 | 570 | raise util.Abort(_("repository '%s' is not local") % path) |
|
571 | 571 | ui.setconfig("bundle", "mainreporoot", repo.root) |
|
572 | 572 | except error.RepoError: |
|
573 | 573 | if cmd not in commands.optionalrepo.split(): |
|
574 | 574 | if args and not path: # try to infer -R from command args |
|
575 | 575 | repos = map(cmdutil.findrepo, args) |
|
576 | 576 | guess = repos[0] |
|
577 | 577 | if guess and repos.count(guess) == len(repos): |
|
578 | 578 | return _dispatch(ui, ['--repository', guess] + fullargs) |
|
579 | 579 | if not path: |
|
580 | 580 | raise error.RepoError(_("There is no Mercurial repository" |
|
581 | 581 | " here (.hg not found)")) |
|
582 | 582 | raise |
|
583 | 583 | args.insert(0, repo) |
|
584 | 584 | elif rpath: |
|
585 | 585 | ui.warn(_("warning: --repository ignored\n")) |
|
586 | 586 | |
|
587 | 587 | msg = ' '.join(' ' in a and repr(a) or a for a in fullargs) |
|
588 | 588 | ui.log("command", msg + "\n") |
|
589 | 589 | d = lambda: util.checksignature(func)(ui, *args, **cmdoptions) |
|
590 | 590 | return runcommand(lui, repo, cmd, fullargs, ui, options, d, |
|
591 | 591 | cmdpats, cmdoptions) |
|
592 | 592 | |
|
593 | 593 | def _runcommand(ui, options, cmd, cmdfunc): |
|
594 | 594 | def checkargs(): |
|
595 | 595 | try: |
|
596 | 596 | return cmdfunc() |
|
597 | 597 | except error.SignatureError: |
|
598 | 598 | raise error.CommandError(cmd, _("invalid arguments")) |
|
599 | 599 | |
|
600 | 600 | if options['profile']: |
|
601 | 601 | format = ui.config('profiling', 'format', default='text') |
|
602 | 602 | |
|
603 | 603 | if not format in ['text', 'kcachegrind']: |
|
604 | 604 | ui.warn(_("unrecognized profiling format '%s'" |
|
605 | 605 | " - Ignored\n") % format) |
|
606 | 606 | format = 'text' |
|
607 | 607 | |
|
608 | 608 | output = ui.config('profiling', 'output') |
|
609 | 609 | |
|
610 | 610 | if output: |
|
611 | 611 | path = ui.expandpath(output) |
|
612 | 612 | ostream = open(path, 'wb') |
|
613 | 613 | else: |
|
614 | 614 | ostream = sys.stderr |
|
615 | 615 | |
|
616 | 616 | try: |
|
617 | 617 | from mercurial import lsprof |
|
618 | 618 | except ImportError: |
|
619 | 619 | raise util.Abort(_( |
|
620 | 620 | 'lsprof not available - install from ' |
|
621 | 621 | 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/')) |
|
622 | 622 | p = lsprof.Profiler() |
|
623 | 623 | p.enable(subcalls=True) |
|
624 | 624 | try: |
|
625 | 625 | return checkargs() |
|
626 | 626 | finally: |
|
627 | 627 | p.disable() |
|
628 | 628 | |
|
629 | 629 | if format == 'kcachegrind': |
|
630 | 630 | import lsprofcalltree |
|
631 | 631 | calltree = lsprofcalltree.KCacheGrind(p) |
|
632 | 632 | calltree.output(ostream) |
|
633 | 633 | else: |
|
634 | 634 | # format == 'text' |
|
635 | 635 | stats = lsprof.Stats(p.getstats()) |
|
636 | 636 | stats.sort() |
|
637 | 637 | stats.pprint(top=10, file=ostream, climit=5) |
|
638 | 638 | |
|
639 | 639 | if output: |
|
640 | 640 | ostream.close() |
|
641 | 641 | else: |
|
642 | 642 | return checkargs() |
@@ -1,97 +1,97 | |||
|
1 | 1 | # encoding.py - character transcoding support for Mercurial |
|
2 | 2 | # |
|
3 | 3 | # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others |
|
4 | 4 | # |
|
5 | 5 | # This software may be used and distributed according to the terms of the |
|
6 | 6 | # GNU General Public License version 2 or any later version. |
|
7 | 7 | |
|
8 | 8 | import error |
|
9 | 9 | import unicodedata, locale, os |
|
10 | 10 | |
|
11 | 11 | def _getpreferredencoding(): |
|
12 | 12 | ''' |
|
13 | 13 | On darwin, getpreferredencoding ignores the locale environment and |
|
14 | 14 | always returns mac-roman. http://bugs.python.org/issue6202 fixes this |
|
15 | 15 | for Python 2.7 and up. This is the same corrected code for earlier |
|
16 | 16 | Python versions. |
|
17 | 17 | |
|
18 |
However, we can't use a version check for this method, as some distributions |
|
|
18 | However, we can't use a version check for this method, as some distributions | |
|
19 | 19 | patch Python to fix this. Instead, we use it as a 'fixer' for the mac-roman |
|
20 | 20 | encoding, as it is unlikely that this encoding is the actually expected. |
|
21 | 21 | ''' |
|
22 | 22 | try: |
|
23 | 23 | locale.CODESET |
|
24 | 24 | except AttributeError: |
|
25 | 25 | # Fall back to parsing environment variables :-( |
|
26 | 26 | return locale.getdefaultlocale()[1] |
|
27 | 27 | |
|
28 | 28 | oldloc = locale.setlocale(locale.LC_CTYPE) |
|
29 | 29 | locale.setlocale(locale.LC_CTYPE, "") |
|
30 | 30 | result = locale.nl_langinfo(locale.CODESET) |
|
31 | 31 | locale.setlocale(locale.LC_CTYPE, oldloc) |
|
32 | 32 | |
|
33 | 33 | return result |
|
34 | 34 | |
|
35 | 35 | _encodingfixers = { |
|
36 | 36 | '646': lambda: 'ascii', |
|
37 | 37 | 'ANSI_X3.4-1968': lambda: 'ascii', |
|
38 | 38 | 'mac-roman': _getpreferredencoding |
|
39 | 39 | } |
|
40 | 40 | |
|
41 | 41 | try: |
|
42 | 42 | encoding = os.environ.get("HGENCODING") |
|
43 | 43 | if not encoding: |
|
44 | 44 | encoding = locale.getpreferredencoding() or 'ascii' |
|
45 | 45 | encoding = _encodingfixers.get(encoding, lambda: encoding)() |
|
46 | 46 | except locale.Error: |
|
47 | 47 | encoding = 'ascii' |
|
48 | 48 | encodingmode = os.environ.get("HGENCODINGMODE", "strict") |
|
49 | 49 | fallbackencoding = 'ISO-8859-1' |
|
50 | 50 | |
|
51 | 51 | def tolocal(s): |
|
52 | 52 | """ |
|
53 | 53 | Convert a string from internal UTF-8 to local encoding |
|
54 | 54 | |
|
55 | 55 | All internal strings should be UTF-8 but some repos before the |
|
56 | 56 | implementation of locale support may contain latin1 or possibly |
|
57 | 57 | other character sets. We attempt to decode everything strictly |
|
58 | 58 | using UTF-8, then Latin-1, and failing that, we use UTF-8 and |
|
59 | 59 | replace unknown characters. |
|
60 | 60 | """ |
|
61 | 61 | for e in ('UTF-8', fallbackencoding): |
|
62 | 62 | try: |
|
63 | 63 | u = s.decode(e) # attempt strict decoding |
|
64 | 64 | return u.encode(encoding, "replace") |
|
65 | 65 | except LookupError, k: |
|
66 | 66 | raise error.Abort("%s, please check your locale settings" % k) |
|
67 | 67 | except UnicodeDecodeError: |
|
68 | 68 | pass |
|
69 | 69 | u = s.decode("utf-8", "replace") # last ditch |
|
70 | 70 | return u.encode(encoding, "replace") |
|
71 | 71 | |
|
72 | 72 | def fromlocal(s): |
|
73 | 73 | """ |
|
74 | 74 | Convert a string from the local character encoding to UTF-8 |
|
75 | 75 | |
|
76 | 76 | We attempt to decode strings using the encoding mode set by |
|
77 | 77 | HGENCODINGMODE, which defaults to 'strict'. In this mode, unknown |
|
78 | 78 | characters will cause an error message. Other modes include |
|
79 | 79 | 'replace', which replaces unknown characters with a special |
|
80 | 80 | Unicode character, and 'ignore', which drops the character. |
|
81 | 81 | """ |
|
82 | 82 | try: |
|
83 | 83 | return s.decode(encoding, encodingmode).encode("utf-8") |
|
84 | 84 | except UnicodeDecodeError, inst: |
|
85 | 85 | sub = s[max(0, inst.start - 10):inst.start + 10] |
|
86 | 86 | raise error.Abort("decoding near '%s': %s!" % (sub, inst)) |
|
87 | 87 | except LookupError, k: |
|
88 | 88 | raise error.Abort("%s, please check your locale settings" % k) |
|
89 | 89 | |
|
90 | 90 | def colwidth(s): |
|
91 | 91 | "Find the column width of a UTF-8 string for display" |
|
92 | 92 | d = s.decode(encoding, 'replace') |
|
93 | 93 | if hasattr(unicodedata, 'east_asian_width'): |
|
94 | 94 | w = unicodedata.east_asian_width |
|
95 | 95 | return sum([w(c) in 'WFA' and 2 or 1 for c in d]) |
|
96 | 96 | return len(d) |
|
97 | 97 |
@@ -1,580 +1,580 | |||
|
1 | 1 | # subrepo.py - sub-repository handling for Mercurial |
|
2 | 2 | # |
|
3 | 3 | # Copyright 2009-2010 Matt Mackall <mpm@selenic.com> |
|
4 | 4 | # |
|
5 | 5 | # This software may be used and distributed according to the terms of the |
|
6 | 6 | # GNU General Public License version 2 or any later version. |
|
7 | 7 | |
|
8 | 8 | import errno, os, re, xml.dom.minidom, shutil, urlparse, posixpath |
|
9 | 9 | from i18n import _ |
|
10 | 10 | import config, util, node, error, cmdutil |
|
11 | 11 | hg = None |
|
12 | 12 | |
|
13 | 13 | nullstate = ('', '', 'empty') |
|
14 | 14 | |
|
def state(ctx, ui):
    """return a state dict, mapping subrepo paths configured in .hgsub
    to tuple: (source from .hgsub, revision from .hgsubstate, kind
    (key in types dict))
    """
    p = config.config()
    # NOTE(review): 'read' is handed back to config.parse, presumably so
    # %include directives are resolved from the changectx rather than the
    # filesystem -- confirm against config.py
    def read(f, sections=None, remap=None):
        if f in ctx:
            p.parse(f, ctx[f].data(), sections, remap, read)
        else:
            raise util.Abort(_("subrepo spec file %s not found") % f)

    if '.hgsub' in ctx:
        read('.hgsub')

    # user-configured [subpaths] rewrites are applied to each source below
    for path, src in ui.configitems('subpaths'):
        p.set('subpaths', path, src, ui.configsource('subpaths', path))

    # map subrepo path -> revision pinned in .hgsubstate
    rev = {}
    if '.hgsubstate' in ctx:
        try:
            for l in ctx['.hgsubstate'].data().splitlines():
                revision, path = l.split(" ", 1)
                rev[path] = revision
        except IOError, err:
            if err.errno != errno.ENOENT:
                raise

    state = {}
    for path, src in p[''].items():
        kind = 'hg'
        # a source like '[svn]http://...' selects a non-default subrepo kind
        if src.startswith('['):
            if ']' not in src:
                raise util.Abort(_('missing ] in subrepo source'))
            kind, src = src.split(']', 1)
            kind = kind[1:]

        for pattern, repl in p.items('subpaths'):
            # Turn r'C:\foo\bar' into r'C:\\foo\\bar' since re.sub
            # does a string decode.
            repl = repl.encode('string-escape')
            # However, we still want to allow back references to go
            # through unharmed, so we turn r'\\1' into r'\1'. Again,
            # extra escapes are needed because re.sub string decodes.
            repl = re.sub(r'\\\\([0-9]+)', r'\\\1', repl)
            try:
                src = re.sub(pattern, repl, src, 1)
            except re.error, e:
                raise util.Abort(_("bad subrepository pattern in %s: %s")
                                 % (p.source('subpaths', pattern), e))

        state[path] = (src.strip(), rev.get(path, ''), kind)

    return state
|
69 | 69 | |
|
def writestate(repo, state):
    """rewrite .hgsubstate in (outer) repo with these subrepo states"""
    # one '<revision> <path>' line per subrepo, sorted by path
    lines = ['%s %s\n' % (state[s][1], s) for s in sorted(state)]
    repo.wwrite('.hgsubstate', ''.join(lines), '')
|
75 | 75 | |
|
def submerge(repo, wctx, mctx, actx):
    """delegated from merge.applyupdates: merging of .hgsubstate file
    in working context, merging context and ancestor context"""
    if mctx == actx: # backwards?
        actx = wctx.p1()
    s1 = wctx.substate
    s2 = mctx.substate
    sa = actx.substate
    sm = {}  # merged substate, written back at the end

    repo.ui.debug("subrepo merge %s %s %s\n" % (wctx, mctx, actx))

    def debug(s, msg, r=""):
        if r:
            # r is a (source, revision, kind) state tuple
            r = "%s:%s:%s" % r
        repo.ui.debug(" subrepo %s: %s %s\n" % (s, msg, r))

    for s, l in s1.items():
        a = sa.get(s, nullstate)
        ld = l # local state with possible dirty flag for compares
        if wctx.sub(s).dirty():
            # a trailing '+' makes a dirty local state compare unequal
            # to any committed state
            ld = (l[0], l[1] + "+")
        if wctx == actx: # overwrite
            a = ld

        if s in s2:
            r = s2[s]
            if ld == r or r == a: # no change or local is newer
                sm[s] = l
                continue
            elif ld == a: # other side changed
                debug(s, "other changed, get", r)
                wctx.sub(s).get(r)
                sm[s] = r
            elif ld[0] != r[0]: # sources differ
                if repo.ui.promptchoice(
                    _(' subrepository sources for %s differ\n'
                      'use (l)ocal source (%s) or (r)emote source (%s)?')
                      % (s, l[0], r[0]),
                    (_('&Local'), _('&Remote')), 0):
                    debug(s, "prompt changed, get", r)
                    wctx.sub(s).get(r)
                    sm[s] = r
            elif ld[1] == a[1]: # local side is unchanged
                debug(s, "other side changed, get", r)
                wctx.sub(s).get(r)
                sm[s] = r
            else:
                # both sides changed: real merge inside the subrepo
                debug(s, "both sides changed, merge with", r)
                wctx.sub(s).merge(r)
                sm[s] = l
        elif ld == a: # remote removed, local unchanged
            debug(s, "remote removed, remove")
            wctx.sub(s).remove()
        else:
            # remote removed but we changed it: ask the user
            if repo.ui.promptchoice(
                _(' local changed subrepository %s which remote removed\n'
                  'use (c)hanged version or (d)elete?') % s,
                (_('&Changed'), _('&Delete')), 0):
                debug(s, "prompt remove")
                wctx.sub(s).remove()

    # now handle subrepos that only exist on the remote side
    for s, r in s2.items():
        if s in s1:
            continue
        elif s not in sa:
            debug(s, "remote added, get", r)
            mctx.sub(s).get(r)
            sm[s] = r
        elif r != sa[s]:
            # remote changed it, we removed it: ask the user
            if repo.ui.promptchoice(
                _(' remote changed subrepository %s which local removed\n'
                  'use (c)hanged version or (d)elete?') % s,
                (_('&Changed'), _('&Delete')), 0) == 0:
                debug(s, "prompt recreate", r)
                wctx.sub(s).get(r)
                sm[s] = r

    # record merged .hgsubstate
    writestate(repo, sm)
|
156 | 156 | |
|
def reporelpath(repo):
    """return path to this (sub)repo as seen from outermost repo"""
    # walk the _subparent chain up to the outermost repository
    outer = repo
    while hasattr(outer, '_subparent'):
        outer = outer._subparent
    # strip the outer root plus its trailing separator
    return repo.root[len(outer.root) + 1:]
|
163 | 163 | |
|
def subrelpath(sub):
    """return path to this subrepo as seen from outermost repo"""
    # hg subrepos carry a _repo we can locate precisely; others
    # (e.g. svn) only know their configured path
    if hasattr(sub, '_repo'):
        return reporelpath(sub._repo)
    return sub._path
|
169 | 169 | |
|
def _abssource(repo, push=False, abort=True):
    """return pull/push path of repo - either based on parent repo .hgsub info
    or on the top repo config. Abort or return None if no source found."""
    if hasattr(repo, '_subparent'):
        # we are a subrepo: resolve relative to the parent's source
        source = repo._subsource
        if source.startswith('/') or '://' in source:
            # already absolute (path or URL)
            return source
        parent = _abssource(repo._subparent, push, abort=False)
        if parent:
            if '://' in parent:
                if parent[-1] == '/':
                    parent = parent[:-1]
                # normalize the path component only, keeping scheme,
                # netloc, params, query and fragment intact
                r = urlparse.urlparse(parent + '/' + source)
                r = urlparse.urlunparse((r[0], r[1],
                                         posixpath.normpath(r[2]),
                                         r[3], r[4], r[5]))
                return r
            else: # plain file system path
                return posixpath.normpath(os.path.join(parent, repo._subsource))
    else: # recursion reached top repo
        if push and repo.ui.config('paths', 'default-push'):
            return repo.ui.config('paths', 'default-push')
        if repo.ui.config('paths', 'default'):
            return repo.ui.config('paths', 'default')
    if abort:
        raise util.Abort(_("default path for subrepository %s not found") %
                         reporelpath(repo))
|
197 | 197 | |
|
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Map each subpath to the context we should read it from, preferring
    # ctx1. The ctx2 entries matter when the .hgsub file has been
    # modified (in ctx2) but not yet committed (in ctx1).
    owner = {}
    for path in ctx2.substate:
        owner[path] = ctx2
    for path in ctx1.substate:
        owner[path] = ctx1
    for path, ctx in sorted(owner.items()):
        yield path, ctx.sub(path)
|
207 | 207 | |
|
def subrepo(ctx, path):
    """return instance of the right subrepo class for subrepo in path"""
    # subrepo inherently violates our import layering rules
    # because it wants to make repo objects from deep inside the stack
    # so we manually delay the circular imports to not break
    # scripts that don't use our demand-loading
    global hg
    import hg as h
    hg = h

    # NOTE(review): path_auditor presumably rejects paths that escape the
    # repository (.., symlinks, nested .hg) -- confirm against util.py
    util.path_auditor(ctx._repo.root)(path)
    state = ctx.substate.get(path, nullstate)
    if state[2] not in types:
        raise util.Abort(_('unknown subrepo type %s') % state[2])
    # instantiate with (source, revision); kind selected the class
    return types[state[2]](ctx, path, state[:2])
|
223 | 223 | |
|
224 | 224 | # subrepo classes need to implement the following abstract class: |
|
225 | 225 | |
|
class abstractsubrepo(object):
    # Interface that concrete subrepo types (see the 'types' registry at
    # the bottom of this module) must implement.

    def dirty(self):
        """returns true if the dirstate of the subrepo does not match
        current stored state
        """
        raise NotImplementedError

    def checknested(self, path):
        """check if path is a subrepository within this repository"""
        return False

    def commit(self, text, user, date):
        """commit the current changes to the subrepo with the given
        log message. Use given user and date if possible. Return the
        new state of the subrepo.
        """
        raise NotImplementedError

    def remove(self):
        """remove the subrepo

        (should verify the dirstate is not dirty first)
        """
        raise NotImplementedError

    def get(self, state):
        """run whatever commands are needed to put the subrepo into
        this state
        """
        raise NotImplementedError

    def merge(self, state):
        """merge currently-saved state with the new state."""
        raise NotImplementedError

    def push(self, force):
        """perform whatever action is analogous to 'hg push'

        This may be a no-op on some systems.
        """
        raise NotImplementedError

    def add(self, ui, match, dryrun, prefix):
        # default: nothing can be added
        return []

    def status(self, rev2, **opts):
        # seven empty lists, matching the shape of repo.status() results
        # (see hgsubrepo.status)
        return [], [], [], [], [], [], []

    def diff(self, diffopts, node2, match, prefix, **opts):
        # default: no diff output
        pass

    def outgoing(self, ui, dest, opts):
        # NOTE(review): 1 appears to follow the command return-code
        # convention for "nothing found" -- confirm against callers
        return 1

    def incoming(self, ui, source, opts):
        # see note on outgoing()
        return 1

    def files(self):
        """return filename iterator"""
        raise NotImplementedError

    def filedata(self, name):
        """return file data"""
        raise NotImplementedError

    def fileflags(self, name):
        """return file flags"""
        return ''

    def archive(self, archiver, prefix):
        # generic archive implementation built on files()/filedata();
        # flags 'x' (executable) and 'l' (symlink) map to archive metadata
        for name in self.files():
            flags = self.fileflags(name)
            mode = 'x' in flags and 0755 or 0644
            symlink = 'l' in flags
            archiver.addfile(os.path.join(prefix, self._path, name),
                             mode, symlink, self.filedata(name))
|
303 | 303 | |
|
304 | 304 | |
|
class hgsubrepo(abstractsubrepo):
    # Subrepo backed by a nested Mercurial repository.

    def __init__(self, ctx, path, state):
        self._path = path
        self._state = state
        r = ctx._repo
        root = r.wjoin(path)
        create = False
        if not os.path.exists(os.path.join(root, '.hg')):
            # no repository here yet: create it and seed its hgrc below
            create = True
            util.makedirs(root)
        self._repo = hg.repository(r.ui, root, create=create)
        # remember parent and configured source for _abssource()
        self._repo._subparent = r
        self._repo._subsource = state[0]

        if create:
            # seed .hg/hgrc with default/default-push paths derived from
            # the parent repository's configuration
            fp = self._repo.opener("hgrc", "w", text=True)
            fp.write('[paths]\n')

            def addpathconfig(key, value):
                if value:
                    fp.write('%s = %s\n' % (key, value))
                    self._repo.ui.setconfig('paths', key, value)

            defpath = _abssource(self._repo, abort=False)
            defpushpath = _abssource(self._repo, True, abort=False)
            addpathconfig('default', defpath)
            if defpath != defpushpath:
                addpathconfig('default-push', defpushpath)
            fp.close()

    def add(self, ui, match, dryrun, prefix):
        return cmdutil.add(ui, self._repo, match, dryrun, True,
                           os.path.join(prefix, self._path))

    def status(self, rev2, **opts):
        try:
            rev1 = self._state[1]
            ctx1 = self._repo[rev1]
            ctx2 = self._repo[rev2]
            return self._repo.status(ctx1, ctx2, **opts)
        except error.RepoLookupError, inst:
            # a revision may be unavailable (e.g. not pulled yet):
            # warn and report "no changes" instead of aborting
            self._repo.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
                               % (inst, subrelpath(self)))
            return [], [], [], [], [], [], []

    def diff(self, diffopts, node2, match, prefix, **opts):
        try:
            node1 = node.bin(self._state[1])
            # We currently expect node2 to come from substate and be
            # in hex format
            if node2 is not None:
                node2 = node.bin(node2)
            cmdutil.diffordiffstat(self._repo.ui, self._repo, diffopts,
                                   node1, node2, match,
                                   prefix=os.path.join(prefix, self._path),
                                   listsubrepos=True, **opts)
        except error.RepoLookupError, inst:
            self._repo.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
                               % (inst, subrelpath(self)))

    def archive(self, archiver, prefix):
        # archive our own files, then recurse into nested subrepos
        abstractsubrepo.archive(self, archiver, prefix)

        rev = self._state[1]
        ctx = self._repo[rev]
        for subpath in ctx.substate:
            s = subrepo(ctx, subpath)
            s.archive(archiver, os.path.join(prefix, self._path))

    def dirty(self):
        r = self._state[1]
        if r == '':
            # no revision recorded yet: always considered dirty
            return True
        w = self._repo[None]
        if w.p1() != self._repo[r]: # version checked out change
            return True
        return w.dirty() # working directory changed

    def checknested(self, path):
        return self._repo._checknested(self._repo.wjoin(path))

    def commit(self, text, user, date):
        self._repo.ui.debug("committing subrepo %s\n" % subrelpath(self))
        n = self._repo.commit(text, user, date)
        if not n:
            return self._repo['.'].hex() # different version checked out
        return node.hex(n)

    def remove(self):
        # we can't fully delete the repository as it may contain
        # local-only history
        self._repo.ui.note(_('removing subrepo %s\n') % subrelpath(self))
        hg.clean(self._repo, node.nullid, False)

    def _get(self, state):
        # ensure the target revision is present locally, pulling from
        # the configured source if necessary
        source, revision, kind = state
        try:
            self._repo.lookup(revision)
        except error.RepoError:
            self._repo._subsource = source
            srcurl = _abssource(self._repo)
            self._repo.ui.status(_('pulling subrepo %s from %s\n')
                                 % (subrelpath(self), srcurl))
            other = hg.repository(self._repo.ui, srcurl)
            self._repo.pull(other)

    def get(self, state):
        self._get(state)
        source, revision, kind = state
        self._repo.ui.debug("getting subrepo %s\n" % self._path)
        hg.clean(self._repo, revision, False)

    def merge(self, state):
        self._get(state)
        cur = self._repo['.']
        dst = self._repo[state[1]]
        anc = dst.ancestor(cur)
        if anc == cur:
            # fast-forward: destination descends from what we have
            self._repo.ui.debug("updating subrepo %s\n" % subrelpath(self))
            hg.update(self._repo, state[1])
        elif anc == dst:
            # we already contain the destination's changes
            self._repo.ui.debug("skipping subrepo %s\n" % subrelpath(self))
        else:
            self._repo.ui.debug("merging subrepo %s\n" % subrelpath(self))
            hg.merge(self._repo, state[1], remind=False)

    def push(self, force):
        # push subrepos depth-first for coherent ordering
        c = self._repo['']
        subs = c.substate # only repos that are committed
        for s in sorted(subs):
            if not c.sub(s).push(force):
                return False

        dsturl = _abssource(self._repo, True)
        self._repo.ui.status(_('pushing subrepo %s to %s\n') %
                             (subrelpath(self), dsturl))
        other = hg.repository(self._repo.ui, dsturl)
        return self._repo.push(other, force)

    def outgoing(self, ui, dest, opts):
        return hg.outgoing(ui, self._repo, _abssource(self._repo, True), opts)

    def incoming(self, ui, source, opts):
        return hg.incoming(ui, self._repo, _abssource(self._repo, False), opts)

    def files(self):
        rev = self._state[1]
        ctx = self._repo[rev]
        return ctx.manifest()

    def filedata(self, name):
        rev = self._state[1]
        return self._repo[rev][name].data()

    def fileflags(self, name):
        rev = self._state[1]
        ctx = self._repo[rev]
        return ctx.flags(name)
|
464 | 464 | |
|
465 | 465 | |
|
class svnsubrepo(abstractsubrepo):
    # Subrepo backed by a Subversion working copy, driven through the
    # command-line 'svn' client.

    def __init__(self, ctx, path, state):
        self._path = path
        self._state = state
        self._ctx = ctx
        self._ui = ctx._repo.ui

    def _svncommand(self, commands, filename=''):
        """Run 'svn <commands> <wc-path/filename>' and return its stdout.

        Aborts if svn printed anything on stderr.
        """
        path = os.path.join(self._ctx._repo.origroot, self._path, filename)
        cmd = ['svn'] + commands + [path]
        cmd = [util.shellquote(arg) for arg in cmd]
        cmd = util.quotecommand(' '.join(cmd))
        env = dict(os.environ)
        # Avoid localized output, preserve current locale for everything else.
        env['LC_MESSAGES'] = 'C'
        write, read, err = util.popen3(cmd, env=env, newlines=True)
        retdata = read.read()
        err = err.read().strip()
        if err:
            raise util.Abort(err)
        return retdata

    def _wcrev(self):
        """Return the working copy revision as a string ('0' if unknown)."""
        output = self._svncommand(['info', '--xml'])
        doc = xml.dom.minidom.parseString(output)
        entries = doc.getElementsByTagName('entry')
        if not entries:
            return 0
        return int(entries[0].getAttribute('revision') or 0)

    def _wcchanged(self):
        """Return (changes, extchanges) where changes is True
        if the working directory was changed, and extchanges is
        True if any of these changes concern an external entry.
        """
        output = self._svncommand(['status', '--xml'])
        externals, changes = [], []
        doc = xml.dom.minidom.parseString(output)
        for e in doc.getElementsByTagName('entry'):
            s = e.getElementsByTagName('wc-status')
            if not s:
                continue
            item = s[0].getAttribute('item')
            props = s[0].getAttribute('props')
            path = e.getAttribute('path')
            if item == 'external':
                externals.append(path)
            if (item not in ('', 'normal', 'unversioned', 'external')
                or props not in ('', 'none')):
                changes.append(path)
        # any change under an external counts as an external change
        for path in changes:
            for ext in externals:
                if path == ext or path.startswith(ext + os.sep):
                    return True, True
        return bool(changes), False

    def dirty(self):
        # bugfix: _wcrev() is an int while the recorded state revision is
        # a string from .hgsubstate; comparing them directly was never
        # equal, so the subrepo always looked dirty
        if str(self._wcrev()) == self._state[1] and not self._wcchanged()[0]:
            return False
        return True

    def commit(self, text, user, date):
        # user and date are out of our hands since svn is centralized
        changed, extchanged = self._wcchanged()
        if not changed:
            return self._wcrev()
        if extchanged:
            # Do not try to commit externals
            raise util.Abort(_('cannot commit svn externals'))
        commitinfo = self._svncommand(['commit', '-m', text])
        self._ui.status(commitinfo)
        newrev = re.search('Committed revision ([0-9]+).', commitinfo)
        if not newrev:
            raise util.Abort(commitinfo.splitlines()[-1])
        newrev = newrev.groups()[0]
        # sync the working copy metadata to the revision we just created
        self._ui.status(self._svncommand(['update', '-r', newrev]))
        return newrev

    def remove(self):
        if self.dirty():
            # bugfix: apply '%' outside _() so the literal msgid is looked
            # up for translation (interpolating first defeated gettext)
            self._ui.warn(_('not removing repo %s because '
                            'it has changes.\n') % self._path)
            return
        self._ui.note(_('removing subrepo %s\n') % self._path)
        # bugfix: the changectx stores its repository in '_repo' (as used
        # in _svncommand); 'self._ctx.repo' raised AttributeError here
        shutil.rmtree(self._ctx._repo.join(self._path))

    def get(self, state):
        status = self._svncommand(['checkout', state[0], '--revision', state[1]])
        if not re.search('Checked out revision [0-9]+.', status):
            raise util.Abort(status.splitlines()[-1])
        self._ui.status(status)

    def merge(self, state):
        old = int(self._state[1])
        new = int(state[1])
        if new > old:
            # svn history is linear: "merging" is just updating forward
            self.get(state)

    def push(self, force):
        # push is a no-op for SVN
        return True

    def files(self):
        output = self._svncommand(['list'])
        # This works because svn forbids \n in filenames.
        return output.splitlines()

    def filedata(self, name):
        return self._svncommand(['cat'], name)
|
575 | 575 | |
|
576 | 576 | |
|
# registry mapping a subrepo kind name (the implicit 'hg', or the token
# inside '[...]' in a .hgsub source) to its implementation class
types = {
    'hg': hgsubrepo,
    'svn': svnsubrepo,
    }
@@ -1,698 +1,698 | |||
|
1 | 1 | # url.py - HTTP handling for mercurial |
|
2 | 2 | # |
|
3 | 3 | # Copyright 2005, 2006, 2007, 2008 Matt Mackall <mpm@selenic.com> |
|
4 | 4 | # Copyright 2006, 2007 Alexis S. L. Carvalho <alexis@cecm.usp.br> |
|
5 | 5 | # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com> |
|
6 | 6 | # |
|
7 | 7 | # This software may be used and distributed according to the terms of the |
|
8 | 8 | # GNU General Public License version 2 or any later version. |
|
9 | 9 | |
|
10 | 10 | import urllib, urllib2, urlparse, httplib, os, re, socket, cStringIO |
|
11 | 11 | import __builtin__ |
|
12 | 12 | from i18n import _ |
|
13 | 13 | import keepalive, util |
|
14 | 14 | |
|
def _urlunparse(scheme, netloc, path, params, query, fragment, url):
    '''Handle cases where urlunparse(urlparse(x://)) doesn't preserve the "//"'''
    result = urlparse.urlunparse((scheme, netloc, path, params, query, fragment))
    if scheme:
        plain = scheme + ':'
        slashed = scheme + '://'
        # restore the '//' when the original url had it but the
        # round-trip through urlunparse dropped it
        if (result.startswith(plain) and
            not result.startswith(slashed) and
            url.startswith(slashed)):
            result = slashed + result[len(plain):]
    return result
|
25 | 25 | |
|
def hidepassword(url):
    '''hide user credential in a url string'''
    parts = urlparse.urlparse(url)
    scheme, netloc, path, params, query, fragment = parts
    # replace 'user:password@host' with 'user:***@host'
    netloc = re.sub('([^:]*):([^@]*)@(.*)', r'\1:***@\3', netloc)
    return _urlunparse(scheme, netloc, path, params, query, fragment, url)
|
31 | 31 | |
|
def removeauth(url):
    '''remove all authentication information from a url string'''
    scheme, netloc, path, params, query, fragment = urlparse.urlparse(url)
    # drop everything up to and including the first '@', if any
    at = netloc.find('@')
    if at != -1:
        netloc = netloc[at + 1:]
    return _urlunparse(scheme, netloc, path, params, query, fragment, url)
|
37 | 37 | |
|
def netlocsplit(netloc):
    '''split [user[:passwd]@]host[:port] into 4-tuple.'''
    user = passwd = None
    if '@' in netloc:
        # credentials appear before the first '@' and are URL-quoted
        userpass, netloc = netloc.split('@', 1)
        if ':' in userpass:
            user, passwd = [urllib.unquote(p)
                            for p in userpass.split(':', 1)]
        else:
            user = urllib.unquote(userpass)
    if ':' in netloc:
        host, port = netloc.split(':', 1)
    else:
        host, port = netloc, None
    return host, port, user, passwd
|
58 | 58 | |
|
def netlocunsplit(host, port, user=None, passwd=None):
    '''turn host, port, user, passwd into [user[:passwd]@]host[:port].'''
    hostport = host
    if port:
        hostport += ':' + port
    if not user:
        return hostport
    # credentials must be URL-quoted with no safe characters
    quote = lambda s: urllib.quote(s, safe='')
    userpass = quote(user)
    if passwd:
        userpass += ':' + quote(passwd)
    return userpass + '@' + hostport
|
73 | 73 | |
|
# characters that never need percent-encoding in the path part of a URL
_safe = ('abcdefghijklmnopqrstuvwxyz'
         'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
         '0123456789' '_.-/')
# caches built lazily on first quotepath() call
_safeset = None
_hex = None
def quotepath(path):
    '''quote the path part of a URL

    This is similar to urllib.quote, but it also tries to avoid
    quoting things twice (inspired by wget):

    >>> quotepath('abc def')
    'abc%20def'
    >>> quotepath('abc%20def')
    'abc%20def'
    >>> quotepath('abc%20 def')
    'abc%20%20def'
    >>> quotepath('abc def%20')
    'abc%20def%20'
    >>> quotepath('abc def%2')
    'abc%20def%252'
    >>> quotepath('abc def%')
    'abc%20def%25'
    '''
    global _safeset, _hex
    if _safeset is None:
        # build the lookup sets once, on first use
        _safeset = set(_safe)
        _hex = set('abcdefABCDEF0123456789')
    l = list(path)
    for i in xrange(len(l)):
        c = l[i]
        if (c == '%' and i + 2 < len(l) and
            l[i + 1] in _hex and l[i + 2] in _hex):
            # looks like an existing %XX escape: leave it alone
            pass
        elif c not in _safeset:
            l[i] = '%%%02X' % ord(c)
    return ''.join(l)
|
111 | 111 | |
|
class passwordmgr(urllib2.HTTPPasswordMgrWithDefaultRealm):
    # Password manager that falls back to the [auth] config section and
    # then to interactive prompting when urllib2's stored credentials
    # are incomplete.

    def __init__(self, ui):
        urllib2.HTTPPasswordMgrWithDefaultRealm.__init__(self)
        self.ui = ui

    def find_user_password(self, realm, authuri):
        # 1. credentials already registered with urllib2
        authinfo = urllib2.HTTPPasswordMgrWithDefaultRealm.find_user_password(
            self, realm, authuri)
        user, passwd = authinfo
        if user and passwd:
            self._writedebug(user, passwd)
            return (user, passwd)

        # 2. the [auth] section of the user's configuration
        if not user:
            auth = self.readauthtoken(authuri)
            if auth:
                user, passwd = auth.get('username'), auth.get('password')
        # 3. prompt interactively, if we are allowed to
        if not user or not passwd:
            if not self.ui.interactive():
                raise util.Abort(_('http authorization required'))

            self.ui.write(_("http authorization required\n"))
            self.ui.status(_("realm: %s\n") % realm)
            if user:
                self.ui.status(_("user: %s\n") % user)
            else:
                user = self.ui.prompt(_("user:"), default=None)

            if not passwd:
                passwd = self.ui.getpass()

        # remember the result for subsequent requests
        self.add_password(realm, authuri, user, passwd)
        self._writedebug(user, passwd)
        return (user, passwd)

    def _writedebug(self, user, passwd):
        # never echo the password itself, only a masked placeholder
        msg = _('http auth: user %s, password %s\n')
        self.ui.debug(msg % (user, passwd and '*' * len(passwd) or 'not set'))

    def readauthtoken(self, uri):
        """Return the best-matching [auth] group dict for uri, or None."""
        # Read configuration
        config = dict()
        for key, val in self.ui.configitems('auth'):
            if '.' not in key:
                self.ui.warn(_("ignoring invalid [auth] key '%s'\n") % key)
                continue
            group, setting = key.split('.', 1)
            gdict = config.setdefault(group, dict())
            if setting in ('username', 'cert', 'key'):
                # these may contain ~ or environment variables
                val = util.expandpath(val)
            gdict[setting] = val

        # Find the best match
        scheme, hostpath = uri.split('://', 1)
        bestlen = 0
        bestauth = None
        for auth in config.itervalues():
            prefix = auth.get('prefix')
            if not prefix:
                continue
            p = prefix.split('://', 1)
            if len(p) > 1:
                # the prefix carries its own scheme; it overrides 'schemes'
                schemes, prefix = [p[0]], p[1]
            else:
                schemes = (auth.get('schemes') or 'https').split()
            # longest matching prefix with a compatible scheme wins
            if (prefix == '*' or hostpath.startswith(prefix)) and \
               len(prefix) > bestlen and scheme in schemes:
                bestlen = len(prefix)
                bestauth = auth
        return bestauth
|
182 | 182 | |
|
class proxyhandler(urllib2.ProxyHandler):
    # urllib2 ProxyHandler that takes the proxy location, credentials and
    # bypass list from Mercurial's [http_proxy] config section, falling
    # back to the conventional http_proxy/no_proxy environment variables.

    def __init__(self, ui):
        # config takes precedence over the environment
        proxyurl = ui.config("http_proxy", "host") or os.getenv('http_proxy')
        # XXX proxyauthinfo = None

        if proxyurl:
            # proxy can be proper url or host[:port]
            if not (proxyurl.startswith('http:') or
                    proxyurl.startswith('https:')):
                proxyurl = 'http://' + proxyurl + '/'
            snpqf = urlparse.urlsplit(proxyurl)
            proxyscheme, proxynetloc, proxypath, proxyquery, proxyfrag = snpqf
            hpup = netlocsplit(proxynetloc)

            proxyhost, proxyport, proxyuser, proxypasswd = hpup
            # credentials embedded in the URL win over configured ones
            if not proxyuser:
                proxyuser = ui.config("http_proxy", "user")
                proxypasswd = ui.config("http_proxy", "passwd")

            # see if we should use a proxy for this url
            no_list = ["localhost", "127.0.0.1"]
            no_list.extend([p.lower() for
                            p in ui.configlist("http_proxy", "no")])
            no_list.extend([p.strip().lower() for
                            p in os.getenv("no_proxy", '').split(',')
                            if p.strip()])
            # "http_proxy.always" config is for running tests on localhost
            if ui.configbool("http_proxy", "always"):
                self.no_list = []
            else:
                self.no_list = no_list

            # rebuild the proxy URL with any user/password re-attached
            proxyurl = urlparse.urlunsplit((
                proxyscheme, netlocunsplit(proxyhost, proxyport,
                                           proxyuser, proxypasswd or ''),
                proxypath, proxyquery, proxyfrag))
            proxies = {'http': proxyurl, 'https': proxyurl}
            ui.debug('proxying through http://%s:%s\n' %
                     (proxyhost, proxyport))
        else:
            # NOTE(review): self.no_list is only assigned in the proxyurl
            # branch; proxy_open reads it, but urllib2 only calls
            # proxy_open for schemes present in `proxies` -- confirm
            proxies = {}

        # urllib2 takes proxy values from the environment and those
        # will take precedence if found, so drop them
        for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
            try:
                if env in os.environ:
                    del os.environ[env]
            except OSError:
                pass

        urllib2.ProxyHandler.__init__(self, proxies)
        self.ui = ui

    def proxy_open(self, req, proxy, type_):
        # skip the proxy for hosts on the bypass list
        host = req.get_host().split(':')[0]
        if host in self.no_list:
            return None

        # work around a bug in Python < 2.4.2
        # (it leaves a "\n" at the end of Proxy-authorization headers)
        baseclass = req.__class__
        class _request(baseclass):
            def add_header(self, key, val):
                if key.lower() == 'proxy-authorization':
                    val = val.strip()
                return baseclass.add_header(self, key, val)
        req.__class__ = _request

        return urllib2.ProxyHandler.proxy_open(self, req, proxy, type_)
|
253 | 253 | |
|
class httpsendfile(object):
    """This is a wrapper around the objects returned by python's "open".

    Its purpose is to send file-like objects via HTTP and, to do so, it
    defines a __len__ attribute to feed the Content-Length header.
    """

    def __init__(self, *args, **kwargs):
        # "open" is shadowed by this module's own open() function, so we
        # must reach for the builtin explicitly.
        self._data = __builtin__.open(*args, **kwargs)
        # expose the underlying file's methods directly
        for attr in ('read', 'seek', 'close', 'write'):
            setattr(self, attr, getattr(self._data, attr))

    def __len__(self):
        # file size via the descriptor, independent of the read position
        return os.fstat(self._data.fileno()).st_size
|
273 | 273 | |
|
def _gen_sendfile(connection):
    """Return a send() override for *connection* that can stream
    httpsendfile objects chunk by chunk instead of in one call."""
    def _sendfile(self, data):
        if not isinstance(data, httpsendfile):
            connection.send(self, data)
            return
        # if auth required, some data sent twice, so rewind here
        data.seek(0)
        for chunk in util.filechunkiter(data):
            connection.send(self, chunk)
    return _sendfile
|
285 | 285 | |
|
# whether this Python build has HTTPS support at all
has_https = hasattr(urllib2, 'HTTPSHandler')
if has_https:
    try:
        # avoid using deprecated/broken FakeSocket in python 2.6
        import ssl
        _ssl_wrap_socket = ssl.wrap_socket
        CERT_REQUIRED = ssl.CERT_REQUIRED
    except ImportError:
        # pre-2.6 fallback: mimic ssl.CERT_REQUIRED's value and provide a
        # wrapper with the same signature that cannot actually verify certs
        CERT_REQUIRED = 2

        def _ssl_wrap_socket(sock, key_file, cert_file,
                             cert_reqs=CERT_REQUIRED, ca_certs=None):
            # certificate verification is only possible with the ssl module
            if ca_certs:
                raise util.Abort(_(
                    'certificate checking requires Python 2.6'))

            ssl = socket.ssl(sock, key_file, cert_file)
            return httplib.FakeSocket(sock, ssl)
|
304 | 304 | |
|
# socket.create_connection appeared in Python 2.6; provide a backport
# (Python 2 syntax below: "except e, v" / "raise e, v")
try:
    _create_connection = socket.create_connection
except AttributeError:
    # sentinel meaning "use the socket module's default timeout"
    _GLOBAL_DEFAULT_TIMEOUT = object()

    def _create_connection(address, timeout=_GLOBAL_DEFAULT_TIMEOUT,
                           source_address=None):
        # lifted from Python 2.6

        msg = "getaddrinfo returns an empty list"
        host, port = address
        # try every resolved address family/type until one connects
        for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM):
            af, socktype, proto, canonname, sa = res
            sock = None
            try:
                sock = socket.socket(af, socktype, proto)
                if timeout is not _GLOBAL_DEFAULT_TIMEOUT:
                    sock.settimeout(timeout)
                if source_address:
                    sock.bind(source_address)
                sock.connect(sa)
                return sock

            except socket.error, msg:
                # remember the failure and fall through to the next address
                if sock is not None:
                    sock.close()

        # all candidate addresses failed; re-raise the last error
        raise socket.error, msg
|
333 | 333 | |
|
class httpconnection(keepalive.HTTPConnection):
    """Keepalive HTTP connection that can stream request bodies and
    tunnel through a CONNECT proxy when required."""

    # must be able to send big bundle as stream.
    send = _gen_sendfile(keepalive.HTTPConnection)

    def connect(self):
        if not (has_https and self.realhostport):
            keepalive.HTTPConnection.connect(self)
            return
        # use CONNECT proxy
        self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.sock.connect((self.host, self.port))
        if _generic_proxytunnel(self):
            # we do not support client x509 certificates
            self.sock = _ssl_wrap_socket(self.sock, None, None)

    def getresponse(self):
        # a failed CONNECT leaves the proxy's response stashed on self;
        # hand that back instead of reading from the (dead) tunnel
        stashed = getattr(self, 'proxyres', None)
        if not stashed:
            return keepalive.HTTPConnection.getresponse(self)
        if stashed.will_close:
            self.close()
        self.proxyres = None
        return stashed
|
356 | 356 | |
|
# general transaction handler to support different ways to handle
# HTTPS proxying before and after Python 2.6.3.
def _generic_start_transaction(handler, h, req):
    # Sets h.realhostport/h.headers when the request should be tunnelled
    # through a CONNECT proxy; clears them otherwise.
    if hasattr(req, '_tunnel_host') and req._tunnel_host:
        # Python >= 2.6.3 records the CONNECT target on the request
        tunnel_host = req._tunnel_host
        # the 7-char slice deliberately matches 'http://' and 'https:/'
        # (the first seven characters of 'https://')
        if tunnel_host[:7] not in ['http://', 'https:/']:
            tunnel_host = 'https://' + tunnel_host
        new_tunnel = True
    else:
        # older Pythons: when proxied, the selector holds the full URL
        tunnel_host = req.get_selector()
        new_tunnel = False

    if new_tunnel or tunnel_host == req.get_full_url(): # has proxy
        urlparts = urlparse.urlparse(tunnel_host)
        if new_tunnel or urlparts[0] == 'https': # only use CONNECT for HTTPS
            realhostport = urlparts[1]
            # append the default port unless one is already present
            # (']' ends a bracketed IPv6 literal that carries no port)
            if realhostport[-1] == ']' or ':' not in realhostport:
                realhostport += ':443'

            h.realhostport = realhostport
            # carry the request headers (plus opener defaults) for CONNECT
            h.headers = req.headers.copy()
            h.headers.update(handler.parent.addheaders)
            return

    # not tunnelling: clear any state left from a previous request
    h.realhostport = None
    h.headers = None
|
383 | 383 | |
|
def _generic_proxytunnel(self):
    """Issue a CONNECT request over self.sock and consume the reply.

    Returns True when the proxy answered 200 (tunnel established; the
    caller may then wrap the socket in SSL).  On failure returns False
    and stashes the fully parsed proxy response in self.proxyres so
    getresponse() can hand the error back to the caller.
    """
    # forward only the Proxy-* headers to the proxy itself
    proxyheaders = dict(
            [(x, self.headers[x]) for x in self.headers
             if x.lower().startswith('proxy-')])
    self._set_hostport(self.host, self.port)
    self.send('CONNECT %s HTTP/1.0\r\n' % self.realhostport)
    for header in proxyheaders.iteritems():
        self.send('%s: %s\r\n' % header)
    self.send('\r\n')

    # majority of the following code is duplicated from
    # httplib.HTTPConnection as there are no adequate places to
    # override functions to provide the needed functionality
    res = self.response_class(self.sock,
                              strict=self.strict,
                              method=self._method)

    while True:
        version, status, reason = res._read_status()
        if status != httplib.CONTINUE:
            break
        # 100 Continue: skip its (empty) header block and read again
        while True:
            skip = res.fp.readline().strip()
            if not skip:
                break
    res.status = status
    res.reason = reason.strip()

    if res.status == 200:
        # tunnel established: drain the header section, leaving the raw
        # socket positioned at the start of the tunnelled stream
        while True:
            line = res.fp.readline()
            if line == '\r\n':
                break
        return True

    if version == 'HTTP/1.0':
        res.version = 10
    elif version.startswith('HTTP/1.'):
        res.version = 11
    elif version == 'HTTP/0.9':
        res.version = 9
    else:
        raise httplib.UnknownProtocol(version)

    if res.version == 9:
        # HTTP/0.9 carries no headers at all
        res.length = None
        res.chunked = 0
        res.will_close = 1
        res.msg = httplib.HTTPMessage(cStringIO.StringIO())
        return False

    res.msg = httplib.HTTPMessage(res.fp)
    res.msg.fp = None

    # are we using the chunked-style of transfer encoding?
    trenc = res.msg.getheader('transfer-encoding')
    if trenc and trenc.lower() == "chunked":
        res.chunked = 1
        res.chunk_left = None
    else:
        res.chunked = 0

    # will the connection close at the end of the response?
    res.will_close = res._check_close()

    # do we have a Content-Length?
    # NOTE: RFC 2616, S4.4, #3 says we ignore this if tr_enc is "chunked"
    length = res.msg.getheader('content-length')
    if length and not res.chunked:
        try:
            res.length = int(length)
        except ValueError:
            res.length = None
        else:
            if res.length < 0: # ignore nonsensical negative lengths
                res.length = None
    else:
        res.length = None

    # does the body have a fixed length? (of zero)
    if (status == httplib.NO_CONTENT or status == httplib.NOT_MODIFIED or
        100 <= status < 200 or # 1xx codes
        res._method == 'HEAD'):
        res.length = 0

    # if the connection remains open, and we aren't using chunked, and
    # a content-length was not provided, then assume that the connection
    # WILL close.
    if (not res.will_close and
        not res.chunked and
        res.length is None):
        res.will_close = 1

    # keep the error response for getresponse() to return
    self.proxyres = res

    return False
|
480 | 480 | |
|
class httphandler(keepalive.HTTPHandler):
    # keepalive HTTP handler wired to our streaming-capable connection class

    def http_open(self, req):
        return self.do_open(httpconnection, req)

    def _start_transaction(self, h, req):
        # record CONNECT-proxy tunnelling state on the connection first
        _generic_start_transaction(self, h, req)
        return keepalive.HTTPHandler._start_transaction(self, h, req)
|
488 | 488 | |
|
def _verifycert(cert, hostname):
    '''Verify that cert (in socket.getpeercert() format) matches hostname.

    dNSName entries from subjectAltName are checked before the subject
    commonName (per RFC 2818, SAN entries take precedence when present);
    previously SAN was ignored entirely, which made verification fail for
    certificates that carry the hostname only in subjectAltName.
    A name matches exactly or through a single leftmost '*.' wildcard
    covering the host's parent domain.  CRLs are not handled.

    Returns error message if any problems are found and None on success.
    '''
    if not cert:
        return _('no certificate received')
    dnsname = hostname.lower()

    def matches(certname):
        # exact match, or one-level wildcard for the host's parent domain
        return (certname == dnsname or
                '.' in dnsname and certname == '*.' + dnsname.split('.', 1)[1])

    san = cert.get('subjectAltName', [])
    if san:
        certnames = [value.lower() for key, value in san if key == 'DNS']
        for certname in certnames:
            if matches(certname):
                return None
        if certnames:
            # SAN dNSName entries present but none matched: report them
            # instead of falling back to the commonName
            return _('certificate is for %s') % ', '.join(certnames)
    for s in cert.get('subject', []):
        # each subject entry is a sequence of (key, value) RDN pairs;
        # as before, only the first pair of each entry is examined
        key, value = s[0]
        if key == 'commonName':
            certname = value.lower()
            if matches(certname):
                return None
            return _('certificate is for %s') % certname
    return _('no commonName found in certificate')
|
507 | 507 | |
|
if has_https:
    class BetterHTTPS(httplib.HTTPSConnection):
        """HTTPSConnection that verifies the server certificate when the
        attached ui configures web.cacerts; otherwise it falls back to the
        stock (unverified) HTTPSConnection behavior."""
        send = keepalive.safesend

        def connect(self):
            # self.ui is attached externally (httpshandler._makeconnection);
            # tolerate its absence
            if hasattr(self, 'ui'):
                cacerts = self.ui.config('web', 'cacerts')
            else:
                cacerts = None

            if cacerts:
                sock = _create_connection((self.host, self.port))
                self.sock = _ssl_wrap_socket(sock, self.key_file,
                        self.cert_file, cert_reqs=CERT_REQUIRED,
                        ca_certs=cacerts)
                msg = _verifycert(self.sock.getpeercert(), self.host)
                if msg:
                    raise util.Abort(_('%s certificate error: %s') %
                                     (self.host, msg))
                self.ui.debug('%s certificate successfully verified\n' %
                              self.host)
            else:
                httplib.HTTPSConnection.connect(self)

    class httpsconnection(BetterHTTPS):
        response_class = keepalive.HTTPResponse
        # must be able to send big bundle as stream.
        send = _gen_sendfile(BetterHTTPS)
        getresponse = keepalive.wrapgetresponse(httplib.HTTPSConnection)

        def connect(self):
            if self.realhostport: # use CONNECT proxy
                self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                self.sock.connect((self.host, self.port))
                if _generic_proxytunnel(self):
                    # BUGFIX: _ssl_wrap_socket takes (sock, key_file,
                    # cert_file); the arguments were previously passed as
                    # (sock, cert_file, key_file), swapping the client key
                    # and certificate when tunnelling through a proxy
                    self.sock = _ssl_wrap_socket(self.sock, self.key_file,
                                                 self.cert_file)
            else:
                BetterHTTPS.connect(self)

    class httpshandler(keepalive.KeepAliveHandler, urllib2.HTTPSHandler):
        """Keepalive HTTPS handler that supplies per-host [auth] key/cert
        files to the connections it creates."""

        def __init__(self, ui):
            keepalive.KeepAliveHandler.__init__(self)
            urllib2.HTTPSHandler.__init__(self)
            self.ui = ui
            self.pwmgr = passwordmgr(self.ui)

        def _start_transaction(self, h, req):
            # record CONNECT-proxy tunnelling state on the connection first
            _generic_start_transaction(self, h, req)
            return keepalive.KeepAliveHandler._start_transaction(self, h, req)

        def https_open(self, req):
            # remember the matching [auth] group so _makeconnection can
            # pick up any configured key/cert files
            self.auth = self.pwmgr.readauthtoken(req.get_full_url())
            return self.do_open(self._makeconnection, req)

        def _makeconnection(self, host, port=None, *args, **kwargs):
            keyfile = None
            certfile = None

            if len(args) >= 1: # key_file
                keyfile = args[0]
            if len(args) >= 2: # cert_file
                certfile = args[1]
            args = args[2:]

            # if the user has specified different key/cert files in
            # hgrc, we prefer these
            if self.auth and 'key' in self.auth and 'cert' in self.auth:
                keyfile = self.auth['key']
                certfile = self.auth['cert']

            conn = httpsconnection(host, port, keyfile, certfile, *args, **kwargs)
            conn.ui = self.ui
            return conn
|
582 | 582 | |
|
class httpdigestauthhandler(urllib2.HTTPDigestAuthHandler):
    # Digest auth handler that resets the retry counter once per request
    # instead of relying on urllib2's own resetting (broken in 2.6.5).

    def __init__(self, *args, **kwargs):
        urllib2.HTTPDigestAuthHandler.__init__(self, *args, **kwargs)
        # last request we reset the retry counter for
        self.retried_req = None

    def reset_retry_count(self):
        # Python 2.6.5 will call this on 401 or 407 errors and thus loop
        # forever. We disable reset_retry_count completely and reset in
        # http_error_auth_reqed instead.
        pass

    def http_error_auth_reqed(self, auth_header, host, req, headers):
        # Reset the retry counter once for each request.
        if req is not self.retried_req:
            self.retried_req = req
            self.retried = 0
        # In python < 2.5 AbstractDigestAuthHandler raises a ValueError if
        # it doesn't know about the auth type requested. This can happen if
        # somebody is using BasicAuth and types a bad password.
        try:
            return urllib2.HTTPDigestAuthHandler.http_error_auth_reqed(
                        self, auth_header, host, req, headers)
        except ValueError, inst:
            arg = inst.args[0]
            if arg.startswith("AbstractDigestAuthHandler doesn't know "):
                return
            raise
|
610 | 610 | |
|
class httpbasicauthhandler(urllib2.HTTPBasicAuthHandler):
    """Basic auth handler that retries at most once per request.

    urllib2's own retry-count reset loops forever on Python 2.6.5, so it
    is disabled here and the reset happens in http_error_auth_reqed.
    """

    def __init__(self, *args, **kwargs):
        urllib2.HTTPBasicAuthHandler.__init__(self, *args, **kwargs)
        # the last request whose retry counter was reset
        self.retried_req = None

    def reset_retry_count(self):
        # Python 2.6.5 will call this on 401 or 407 errors and thus loop
        # forever. We disable reset_retry_count completely and reset in
        # http_error_auth_reqed instead.
        pass

    def http_error_auth_reqed(self, auth_header, host, req, headers):
        # Reset the retry counter once for each request.
        if self.retried_req is not req:
            self.retried_req = req
            self.retried = 0
        return urllib2.HTTPBasicAuthHandler.http_error_auth_reqed(
            self, auth_header, host, req, headers)
|
629 | 629 | |
|
def getauthinfo(path):
    """Split embedded credentials out of *path*.

    Returns (url, authinfo) where url has any user:password removed and
    authinfo is None or an add_password()-style tuple
    (realm, uris, user, password).
    """
    scheme, netloc, urlpath, query, frag = urlparse.urlsplit(path)
    urlpath = urlpath or '/'
    if scheme != 'file':
        # XXX: why are we quoting the path again with some smart
        # heuristic here? Anyway, it cannot be done with file://
        # urls since path encoding is os/fs dependent (see
        # urllib.pathname2url() for details).
        urlpath = quotepath(urlpath)
    host, port, user, passwd = netlocsplit(netloc)

    # urllib cannot handle URLs with embedded user or passwd
    url = urlparse.urlunsplit((scheme, netlocunsplit(host, port),
                               urlpath, query, frag))
    if not user:
        return url, None

    hostport = host
    if port:
        hostport = hostport + ':' + port
    # Python < 2.4.3 uses only the netloc to search for a password
    return url, (None, (url, hostport), user, passwd or '')
|
654 | 654 | |
|
# hook list: factories called as f(ui, passwordmgr); opener() appends an
# instance from each to its urllib2 handler chain
handlerfuncs = []
|
656 | 656 | |
|
def opener(ui, authinfo=None):
    """Construct an opener suitable for urllib2.

    authinfo, when given, is added to the password manager up front.
    """
    handlers = [httphandler()]
    if has_https:
        handlers.append(httpshandler(ui))
    handlers.append(proxyhandler(ui))

    passmgr = passwordmgr(ui)
    if authinfo is not None:
        passmgr.add_password(*authinfo)
        user, passwd = authinfo[2:4]
        if passwd:
            masked = '*' * len(passwd)
        else:
            masked = 'not set'
        ui.debug('http auth: user %s, password %s\n' % (user, masked))

    handlers.append(httpbasicauthhandler(passmgr))
    handlers.append(httpdigestauthhandler(passmgr))
    # extensions may have registered extra handler factories
    handlers.extend([h(ui, passmgr) for h in handlerfuncs])

    urlopener = urllib2.build_opener(*handlers)
    # 1.0 here is the _protocol_ version
    urlopener.addheaders = [('User-agent', 'mercurial/proto-1.0'),
                            ('Accept', 'application/mercurial-0.1')]
    return urlopener
|
684 | 684 | |
|
# URL scheme per RFC 3986: letters, digits, '+', '-' and '.'.
# The hyphen must be escaped (or placed last) inside the character class;
# the previous bare '+-.' formed a range that also accepted ','.
scheme_re = re.compile(r'^([a-zA-Z0-9+\-.]+)://')
|
686 | 686 | |
|
def open(ui, url, data=None):
    """Open *url* (any supported scheme, or a local path) and return the
    file-like response object.

    NOTE: deliberately shadows the builtin open() within this module.
    """
    scheme = None
    m = scheme_re.search(url)
    if m:
        scheme = m.group(1).lower()
    if scheme:
        url, authinfo = getauthinfo(url)
    else:
        # no scheme: treat as a local path and build a file:// URL
        path = util.normpath(os.path.abspath(url))
        url = 'file://' + urllib.pathname2url(path)
        authinfo = None
    return opener(ui, authinfo).open(url, data)
General Comments 0
You need to be logged in to leave comments.
Login now