Show More
@@ -1,714 +1,715 | |||
|
1 | 1 | # filemerge.py - file-level merge handling for Mercurial |
|
2 | 2 | # |
|
3 | 3 | # Copyright 2006, 2007, 2008 Matt Mackall <mpm@selenic.com> |
|
4 | 4 | # |
|
5 | 5 | # This software may be used and distributed according to the terms of the |
|
6 | 6 | # GNU General Public License version 2 or any later version. |
|
7 | 7 | |
|
8 | 8 | from __future__ import absolute_import |
|
9 | 9 | |
|
10 | 10 | import filecmp |
|
11 | 11 | import os |
|
12 | 12 | import re |
|
13 | 13 | import tempfile |
|
14 | 14 | |
|
15 | 15 | from .i18n import _ |
|
16 | 16 | from .node import nullid, short |
|
17 | 17 | |
|
18 | 18 | from . import ( |
|
19 | encoding, | |
|
19 | 20 | error, |
|
20 | 21 | formatter, |
|
21 | 22 | match, |
|
22 | 23 | pycompat, |
|
23 | 24 | scmutil, |
|
24 | 25 | simplemerge, |
|
25 | 26 | tagmerge, |
|
26 | 27 | templatekw, |
|
27 | 28 | templater, |
|
28 | 29 | util, |
|
29 | 30 | ) |
|
30 | 31 | |
|
31 | 32 | def _toolstr(ui, tool, part, default=""): |
|
32 | 33 | return ui.config("merge-tools", tool + "." + part, default) |
|
33 | 34 | |
|
34 | 35 | def _toolbool(ui, tool, part, default=False): |
|
35 | 36 | return ui.configbool("merge-tools", tool + "." + part, default) |
|
36 | 37 | |
|
37 | 38 | def _toollist(ui, tool, part, default=[]): |
|
38 | 39 | return ui.configlist("merge-tools", tool + "." + part, default) |
|
39 | 40 | |
|
# registry of internal merge tools, populated by the @internaltool
# decorator; each tool is keyed both as ':name' and 'internal:name'
internals = {}
# Merge tools to document.
internalsdoc = {}

# internal tool merge types
nomerge = None
mergeonly = 'mergeonly' # just the full merge, no premerge
fullmerge = 'fullmerge' # both premerge and merge
|
48 | 49 | |
|
class absentfilectx(object):
    """Represents a file that's ostensibly in a context but is actually not
    present in it.

    This is here because it's very specific to the filemerge code for now --
    other code is likely going to break with the values this returns."""
    def __init__(self, ctx, f):
        # the changectx the file is (not) part of, and its repo-relative path
        self._ctx = ctx
        self._f = f

    def path(self):
        return self._f

    def size(self):
        # absent files have no size
        return None

    def data(self):
        # absent files have no content
        return None

    def filenode(self):
        # the null node signals "no file revision"
        return nullid

    # flag consumed by comparison code: this class supplies its own cmp()
    _customcmp = True
    def cmp(self, fctx):
        """compare with other file context

        returns True if different from fctx.
        """
        # two contexts compare equal only if both are absent versions of the
        # same path in the same changectx
        # NOTE(review): self.ctx() is not defined on this class (only
        # changectx() is); confirm whether cmp() between two absent file
        # contexts is ever actually reached
        return not (fctx.isabsent() and
                    fctx.ctx() == self.ctx() and
                    fctx.path() == self.path())

    def flags(self):
        return ''

    def changectx(self):
        return self._ctx

    def isbinary(self):
        return False

    def isabsent(self):
        return True
|
92 | 93 | |
|
def internaltool(name, mergetype, onfailure=None, precheck=None):
    '''return a decorator for populating internal merge tool table'''
    fullname = ':' + name

    def decorator(func):
        # prefix the tool's docstring with its canonical ``:name`` marker
        header = pycompat.sysstr("``%s``\n" % fullname)
        func.__doc__ = header + func.__doc__.strip()
        # register under both the new-style and the legacy spelling
        internals[fullname] = func
        internals['internal:' + name] = func
        internalsdoc[fullname] = func
        # stash the tool's metadata on the function object itself
        func.mergetype = mergetype
        func.onfailure = onfailure
        func.precheck = precheck
        return func

    return decorator
|
107 | 108 | |
|
def _findtool(ui, tool):
    """Resolve *tool*: internal tools are addressed by name, external tools
    by executable path (None when the executable cannot be found)."""
    return tool if tool in internals else findexternaltool(ui, tool)
|
112 | 113 | |
|
def findexternaltool(ui, tool):
    """Locate the executable of an external merge tool.

    Windows registry locations configured via ``regkey``/``regkeyalt`` are
    consulted first; otherwise the ``executable`` setting (defaulting to the
    tool name itself) is expanded and searched for on the path.
    """
    for keyname in ("regkey", "regkeyalt"):
        regkey = _toolstr(ui, tool, keyname)
        if not regkey:
            continue
        value = util.lookupreg(regkey, _toolstr(ui, tool, "regname"))
        if not value:
            continue
        exe = util.findexe(value + _toolstr(ui, tool, "regappend"))
        if exe:
            return exe
    exe = _toolstr(ui, tool, "executable", tool)
    return util.findexe(util.expandpath(exe))
|
125 | 126 | |
|
def _picktool(repo, ui, path, binary, symlink, changedelete):
    """Pick a merge tool for *path* and return ``(tool, quoted toolpath)``.

    ``binary``/``symlink`` describe the file being merged; ``changedelete``
    is True for change/delete conflicts.  Candidates are consulted in
    priority order: ``ui.forcemerge``, the HGMERGE environment variable,
    [merge-patterns], [merge-tools] sorted by priority, ``ui.merge``, the
    legacy ``hgmerge`` script, and finally the internal ``:merge`` or
    ``:prompt`` tools.
    """
    def supportscd(tool):
        # only the internal nomerge tools can resolve change/delete conflicts
        return tool in internals and internals[tool].mergetype == nomerge

    def check(tool, pat, symlink, binary, changedelete):
        # return True iff *tool* exists and can handle this merge; report
        # the reason when it cannot
        tmsg = tool
        if pat:
            tmsg += " specified for " + pat
        if not _findtool(ui, tool):
            if pat: # explicitly requested tool deserves a warning
                ui.warn(_("couldn't find merge tool %s\n") % tmsg)
            else: # configured but non-existing tools are more silent
                ui.note(_("couldn't find merge tool %s\n") % tmsg)
        elif symlink and not _toolbool(ui, tool, "symlink"):
            ui.warn(_("tool %s can't handle symlinks\n") % tmsg)
        elif binary and not _toolbool(ui, tool, "binary"):
            ui.warn(_("tool %s can't handle binary\n") % tmsg)
        elif changedelete and not supportscd(tool):
            # the nomerge tools are the only tools that support change/delete
            # conflicts
            pass
        elif not util.gui() and _toolbool(ui, tool, "gui"):
            ui.warn(_("tool %s requires a GUI\n") % tmsg)
        else:
            return True
        return False

    # internal config: ui.forcemerge
    # forcemerge comes from command line arguments, highest priority
    force = ui.config('ui', 'forcemerge')
    if force:
        toolpath = _findtool(ui, force)
        if changedelete and not supportscd(toolpath):
            return ":prompt", None
        else:
            if toolpath:
                return (force, util.shellquote(toolpath))
            else:
                # mimic HGMERGE if given tool not found
                return (force, force)

    # HGMERGE takes next precedence
    hgmerge = encoding.environ.get("HGMERGE")
    if hgmerge:
        if changedelete and not supportscd(hgmerge):
            return ":prompt", None
        else:
            # HGMERGE is used verbatim, without path lookup or quoting
            return (hgmerge, hgmerge)

    # then patterns
    for pat, tool in ui.configitems("merge-patterns"):
        mf = match.match(repo.root, '', [pat])
        if mf(path) and check(tool, pat, symlink, False, changedelete):
            toolpath = _findtool(ui, tool)
            return (tool, util.shellquote(toolpath))

    # then merge tools
    tools = {}
    disabled = set()
    for k, v in ui.configitems("merge-tools"):
        # "tool.option" config keys: the part before the dot names the tool
        t = k.split('.')[0]
        if t not in tools:
            tools[t] = int(_toolstr(ui, t, "priority", "0"))
        if _toolbool(ui, t, "disabled", False):
            disabled.add(t)
    names = tools.keys()
    # sort enabled tools by descending priority (hence the negation)
    tools = sorted([(-p, tool) for tool, p in tools.items()
                    if tool not in disabled])
    uimerge = ui.config("ui", "merge")
    if uimerge:
        # external tools defined in uimerge won't be able to handle
        # change/delete conflicts
        if uimerge not in names and not changedelete:
            return (uimerge, uimerge)
        tools.insert(0, (None, uimerge)) # highest priority
    tools.append((None, "hgmerge")) # the old default, if found
    for p, t in tools:
        if check(t, None, symlink, binary, changedelete):
            toolpath = _findtool(ui, t)
            return (t, util.shellquote(toolpath))

    # internal merge or prompt as last resort
    if symlink or binary or changedelete:
        return ":prompt", None
    return ":merge", None
|
211 | 212 | |
|
212 | 213 | def _eoltype(data): |
|
213 | 214 | "Guess the EOL type of a file" |
|
214 | 215 | if '\0' in data: # binary |
|
215 | 216 | return None |
|
216 | 217 | if '\r\n' in data: # Windows |
|
217 | 218 | return '\r\n' |
|
218 | 219 | if '\r' in data: # Old Mac |
|
219 | 220 | return '\r' |
|
220 | 221 | if '\n' in data: # UNIX |
|
221 | 222 | return '\n' |
|
222 | 223 | return None # unknown |
|
223 | 224 | |
|
def _matcheol(file, origfile):
    "Convert EOL markers in a file to match origfile"
    tostyle = _eoltype(util.readfile(origfile))
    if not tostyle:
        # origfile is binary or has no recognizable EOLs: leave file alone
        return
    data = util.readfile(file)
    fromstyle = _eoltype(data)
    if not fromstyle:
        return
    newdata = data.replace(fromstyle, tostyle)
    if newdata != data:
        util.writefile(file, newdata)
|
234 | 235 | |
|
@internaltool('prompt', nomerge)
def _iprompt(repo, mynode, orig, fcd, fco, fca, toolconf, labels=None):
    """Asks the user which of the local `p1()` or the other `p2()` version to
    keep as the merged version."""
    ui = repo.ui
    fd = fcd.path()

    # "(l)ocal%(l)s"-style substitutions for the prompt strings
    prompts = partextras(labels)
    prompts['fd'] = fd
    try:
        if fco.isabsent():
            # changed locally, deleted on the other side
            index = ui.promptchoice(
                _("local%(l)s changed %(fd)s which other%(o)s deleted\n"
                  "use (c)hanged version, (d)elete, or leave (u)nresolved?"
                  "$$ &Changed $$ &Delete $$ &Unresolved") % prompts, 2)
            choice = ['local', 'other', 'unresolved'][index]
        elif fcd.isabsent():
            # deleted locally, changed on the other side
            index = ui.promptchoice(
                _("other%(o)s changed %(fd)s which local%(l)s deleted\n"
                  "use (c)hanged version, leave (d)eleted, or "
                  "leave (u)nresolved?"
                  "$$ &Changed $$ &Deleted $$ &Unresolved") % prompts, 2)
            choice = ['other', 'local', 'unresolved'][index]
        else:
            # ordinary two-sided conflict with no usable tool
            index = ui.promptchoice(
                _("no tool found to merge %(fd)s\n"
                  "keep (l)ocal%(l)s, take (o)ther%(o)s, or leave (u)nresolved?"
                  "$$ &Local $$ &Other $$ &Unresolved") % prompts, 2)
            choice = ['local', 'other', 'unresolved'][index]

        # dispatch to the corresponding nomerge tool
        if choice == 'other':
            return _iother(repo, mynode, orig, fcd, fco, fca, toolconf,
                           labels)
        elif choice == 'local':
            return _ilocal(repo, mynode, orig, fcd, fco, fca, toolconf,
                           labels)
        elif choice == 'unresolved':
            return _ifail(repo, mynode, orig, fcd, fco, fca, toolconf,
                          labels)
    except error.ResponseExpected:
        # no interactive input available: leave the file unresolved
        ui.write("\n")
        return _ifail(repo, mynode, orig, fcd, fco, fca, toolconf,
                      labels)
|
278 | 279 | |
|
@internaltool('local', nomerge)
def _ilocal(repo, mynode, orig, fcd, fco, fca, toolconf, labels=None):
    """Uses the local `p1()` version of files as the merged version."""
    # the working copy already holds the local version, so nothing is
    # written; the file counts as deleted only if the local side is absent
    deleted = fcd.isabsent()
    return 0, deleted
|
283 | 284 | |
|
@internaltool('other', nomerge)
def _iother(repo, mynode, orig, fcd, fco, fca, toolconf, labels=None):
    """Uses the other `p2()` version of files as the merged version."""
    deleted = fco.isabsent()
    if deleted:
        # local changed, remote deleted -- 'deleted' picked
        repo.wvfs.unlinkpath(fcd.path())
    else:
        # materialize the other side's content in the working copy
        repo.wwrite(fcd.path(), fco.data(), fco.flags())
    return 0, deleted
|
295 | 296 | |
|
@internaltool('fail', nomerge)
def _ifail(repo, mynode, orig, fcd, fco, fca, toolconf, labels=None):
    """
    Rather than attempting to merge files that were modified on both
    branches, it marks them as unresolved. The resolve command must be
    used to resolve these conflicts."""
    # for change/delete conflicts write out the changed version, then fail
    if fcd.isabsent():
        repo.wwrite(fcd.path(), fco.data(), fco.flags())
    # 1 = unresolved, False = the file was not deleted from disk
    return 1, False
|
306 | 307 | |
|
def _premerge(repo, fcd, fco, fca, toolconf, files, labels=None):
    """Try a quiet simplemerge before running the configured tool.

    Returns 0 if the premerge fully resolved the file, 1 if the caller
    should continue with the real merge tool.
    """
    tool, toolpath, binary, symlink = toolconf
    # premerge makes no sense for symlinks or change/delete conflicts
    if symlink or fcd.isabsent() or fco.isabsent():
        return 1
    a, b, c, back = files

    ui = repo.ui

    # string settings that keep the premerge result (with markers) on failure
    validkeep = ['keep', 'keep-merge3']

    # do we attempt to simplemerge first?
    try:
        premerge = _toolbool(ui, tool, "premerge", not binary)
    except error.ConfigError:
        # not a boolean: the 'keep'/'keep-merge3' string values are accepted
        premerge = _toolstr(ui, tool, "premerge").lower()
        if premerge not in validkeep:
            _valid = ', '.join(["'" + v + "'" for v in validkeep])
            raise error.ConfigError(_("%s.premerge not valid "
                                      "('%s' is neither boolean nor %s)") %
                                    (tool, premerge, _valid))

    if premerge:
        if premerge == 'keep-merge3':
            if not labels:
                labels = _defaultconflictlabels
            if len(labels) < 3:
                # NOTE(review): this appends in place -- when 'labels'
                # aliases _defaultconflictlabels (or the caller's list) the
                # default grows permanently; verify whether a copy is intended
                labels.append('base')
        r = simplemerge.simplemerge(ui, a, b, c, quiet=True, label=labels)
        if not r:
            ui.debug(" premerge successful\n")
            return 0
        if premerge not in validkeep:
            util.copyfile(back, a) # restore from backup and try again
    return 1 # continue merging
|
341 | 342 | |
|
342 | 343 | def _mergecheck(repo, mynode, orig, fcd, fco, fca, toolconf): |
|
343 | 344 | tool, toolpath, binary, symlink = toolconf |
|
344 | 345 | if symlink: |
|
345 | 346 | repo.ui.warn(_('warning: internal %s cannot merge symlinks ' |
|
346 | 347 | 'for %s\n') % (tool, fcd.path())) |
|
347 | 348 | return False |
|
348 | 349 | if fcd.isabsent() or fco.isabsent(): |
|
349 | 350 | repo.ui.warn(_('warning: internal %s cannot merge change/delete ' |
|
350 | 351 | 'conflict for %s\n') % (tool, fcd.path())) |
|
351 | 352 | return False |
|
352 | 353 | return True |
|
353 | 354 | |
|
def _merge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels, mode):
    """Shared driver for the simplemerge-based internal tools.

    Runs the non-interactive three-way merge over *files* = (local, base,
    other, backup) and returns ``(needcheck, returncode, deleted)``.
    ``mode`` selects conflict handling: 'merge' leaves two-sided markers,
    'union' keeps both sides without markers.
    """
    local, base, other, _backup = files
    rc = simplemerge.simplemerge(repo.ui, local, base, other,
                                 label=labels, mode=mode)
    return True, rc, False
|
366 | 367 | |
|
@internaltool('union', fullmerge,
              _("warning: conflicts while merging %s! "
                "(edit, then use 'hg resolve --mark')\n"),
              precheck=_mergecheck)
def _iunion(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
    """
    Uses the internal non-interactive simple merge algorithm for merging
    files. It will use both left and right sides for conflict regions.
    No markers are inserted."""
    # delegate to the shared simplemerge driver in 'union' mode
    return _merge(repo, mynode, orig, fcd, fco, fca, toolconf,
                  files, labels, 'union')
|
378 | 379 | |
|
@internaltool('merge', fullmerge,
              _("warning: conflicts while merging %s! "
                "(edit, then use 'hg resolve --mark')\n"),
              precheck=_mergecheck)
def _imerge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
    """
    Uses the internal non-interactive simple merge algorithm for merging
    files. It will fail if there are any conflicts and leave markers in
    the partially merged file. Markers will have two sections, one for each side
    of merge."""
    # delegate to the shared simplemerge driver in 'merge' mode
    return _merge(repo, mynode, orig, fcd, fco, fca, toolconf,
                  files, labels, 'merge')
|
391 | 392 | |
|
@internaltool('merge3', fullmerge,
              _("warning: conflicts while merging %s! "
                "(edit, then use 'hg resolve --mark')\n"),
              precheck=_mergecheck)
def _imerge3(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
    """
    Uses the internal non-interactive simple merge algorithm for merging
    files. It will fail if there are any conflicts and leave markers in
    the partially merged file. Marker will have three sections, one from each
    side of the merge and one for the base content."""
    if not labels:
        labels = _defaultconflictlabels
    if len(labels) < 3:
        # copy instead of appending in place: 'labels' may alias the
        # module-level _defaultconflictlabels list (or the caller's list),
        # and an in-place append would permanently grow it for later merges
        labels = labels + ['base']
    return _imerge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels)
|
407 | 408 | |
|
def _imergeauto(repo, mynode, orig, fcd, fco, fca, toolconf, files,
                labels=None, localorother=None):
    """
    Generic driver for _imergelocal and _imergeother
    """
    assert localorother is not None
    tool, toolpath, binary, symlink = toolconf
    local, base, other, _backup = files
    # resolve every conflict region automatically in favor of one side
    rc = simplemerge.simplemerge(repo.ui, local, base, other, label=labels,
                                 localorother=localorother)
    return True, rc
|
419 | 420 | |
|
@internaltool('merge-local', mergeonly, precheck=_mergecheck)
def _imergelocal(*args, **kwargs):
    """
    Like :merge, but resolve all conflicts non-interactively in favor
    of the local `p1()` changes."""
    success, status = _imergeauto(localorother='local', *args, **kwargs)
    # third element of the result triple: the file was not deleted
    return success, status, False
|
427 | 428 | |
|
@internaltool('merge-other', mergeonly, precheck=_mergecheck)
def _imergeother(*args, **kwargs):
    """
    Like :merge, but resolve all conflicts non-interactively in favor
    of the other `p2()` changes."""
    success, status = _imergeauto(localorother='other', *args, **kwargs)
    # third element of the result triple: the file was not deleted
    return success, status, False
|
435 | 436 | |
|
@internaltool('tagmerge', mergeonly,
              _("automatic tag merging of %s failed! "
                "(use 'hg resolve --tool :merge' or another merge "
                "tool of your choice)\n"))
def _itagmerge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
    """
    Uses the internal tag merge algorithm (experimental).
    """
    success, status = tagmerge.merge(repo, fcd, fco, fca)
    # third element of the result triple: the file was not deleted
    return success, status, False
|
446 | 447 | |
|
@internaltool('dump', fullmerge)
def _idump(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
    """
    Creates three versions of the files to merge, containing the
    contents of local, other and base. These files can then be used to
    perform a merge manually. If the file to be merged is named
    ``a.txt``, these files will accordingly be named ``a.txt.local``,
    ``a.txt.other`` and ``a.txt.base`` and they will be placed in the
    same directory as ``a.txt``."""
    localpath, _base, _other, _backup = files
    fd = fcd.path()

    # the working copy already holds the local version: copy it aside
    util.copyfile(localpath, localpath + ".local")
    # write other/base through the repo so encode filters apply
    repo.wwrite(fd + ".other", fco.data(), fco.flags())
    repo.wwrite(fd + ".base", fca.data(), fca.flags())
    # no post-merge check, always reported unresolved, file not deleted
    return False, 1, False
|
464 | 465 | |
|
def _xmerge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
    """Run an external merge tool as a shell command.

    Returns ``(needcheck, returncode, deleted)``.  External tools cannot
    handle change/delete conflicts; those return (False, 1, None) without
    invoking the tool.
    """
    tool, toolpath, binary, symlink = toolconf
    if fcd.isabsent() or fco.isabsent():
        repo.ui.warn(_('warning: %s cannot merge change/delete conflict '
                       'for %s\n') % (tool, fcd.path()))
        return False, 1, None
    a, b, c, back = files
    out = ""
    # environment exported to the external tool
    env = {'HG_FILE': fcd.path(),
           'HG_MY_NODE': short(mynode),
           'HG_OTHER_NODE': str(fco.changectx()),
           'HG_BASE_NODE': str(fca.changectx()),
           'HG_MY_ISLINK': 'l' in fcd.flags(),
           'HG_OTHER_ISLINK': 'l' in fco.flags(),
           'HG_BASE_ISLINK': 'l' in fca.flags(),
           }

    ui = repo.ui

    # substitute $local/$base/$other/$output in the configured argument string
    args = _toolstr(ui, tool, "args", '$local $base $other')
    if "$output" in args:
        out, a = a, back # read input from backup, write to original
    replace = {'local': a, 'base': b, 'other': c, 'output': out}
    args = util.interpolate(r'\$', replace, args,
                            lambda s: util.shellquote(util.localpath(s)))
    cmd = toolpath + ' ' + args
    repo.ui.debug('launching merge tool: %s\n' % cmd)
    r = ui.system(cmd, cwd=repo.root, environ=env)
    repo.ui.debug('merge tool returned: %s\n' % r)
    return True, r, False
|
495 | 496 | |
|
def _formatconflictmarker(repo, ctx, template, label, pad):
    """Applies the given template to the ctx, prefixed by the label.

    Pad is the minimum width of the label prefix, so that multiple markers
    can have aligned templated parts.
    """
    # a working-directory ctx has no node; describe its first parent instead
    if ctx.node() is None:
        ctx = ctx.p1()

    props = templatekw.keywords.copy()
    props.update({'templ': template, 'ctx': ctx, 'repo': repo})
    rendered = templater.stringify(template('conflictmarker', **props))

    prefix = ('%s:' % label).ljust(pad + 1)
    mark = '%s %s' % (prefix, rendered)
    if mark:
        mark = mark.splitlines()[0] # split for safety

    # 8 for the prefix of conflict marker lines (e.g. '<<<<<<< ')
    return util.ellipsis(mark, 80 - 8)
|
519 | 520 | |
|
# default template describing each side of a conflict marker: short node,
# non-tip tags, bookmarks, non-default branch, then author and summary
_defaultconflictmarker = ('{node|short} '
                          '{ifeq(tags, "tip", "", '
                          'ifeq(tags, "", "", "{tags} "))}'
                          '{if(bookmarks, "{bookmarks} ")}'
                          '{ifeq(branch, "default", "", "{branch} ")}'
                          '- {author|user}: {desc|firstline}')

# conflict marker labels used when the caller supplies none
_defaultconflictlabels = ['local', 'other']
|
528 | 529 | |
|
def _formatlabels(repo, fcd, fco, fca, labels):
    """Formats the given labels using the conflict marker template.

    Returns a list of formatted labels.
    """
    ui = repo.ui
    template = ui.config('ui', 'mergemarkertemplate', _defaultconflictmarker)
    tmpl = formatter.maketemplater(ui, 'conflictmarker', template)

    # align the templated parts of all markers on the longest label
    pad = max(len(l) for l in labels)

    ctxs = [fcd.changectx(), fco.changectx(), fca.changectx()]
    # only the first len(labels) sides are described (base is optional)
    return [_formatconflictmarker(repo, ctx, tmpl, label, pad)
            for ctx, label in zip(ctxs, labels)]
|
549 | 550 | |
|
def partextras(labels):
    """Return a dictionary of extra labels for use in prompts to the user

    Intended use is in strings of the form "(l)ocal%(l)s".
    """
    if labels is None:
        return {"l": "", "o": ""}
    local, other = labels[0], labels[1]
    return {"l": " [%s]" % local, "o": " [%s]" % other}
|
565 | 566 | |
|
def _filemerge(premerge, repo, mynode, orig, fcd, fco, fca, labels=None):
    """perform a 3-way merge in the working directory

    premerge = whether this is a premerge
    mynode = parent node before merge
    orig = original local filename before merge
    fco = other file context
    fca = ancestor file context
    fcd = local file context for current/destination file

    Returns whether the merge is complete, the return value of the merge, and
    a boolean indicating whether the file was deleted from disk."""

    def temp(prefix, ctx):
        # write ctx's (filter-encoded) data to a uniquely named temporary
        # file and return its path; the caller must delete it
        fullbase, ext = os.path.splitext(ctx.path())
        pre = "%s~%s." % (os.path.basename(fullbase), prefix)
        (fd, name) = tempfile.mkstemp(prefix=pre, suffix=ext)
        data = repo.wwritedata(ctx.path(), ctx.data())
        f = os.fdopen(fd, "wb")
        f.write(data)
        f.close()
        return name

    if not fco.cmp(fcd): # files identical?
        return True, None, False

    ui = repo.ui
    fd = fcd.path()
    binary = fcd.isbinary() or fco.isbinary() or fca.isbinary()
    symlink = 'l' in fcd.flags() + fco.flags()
    changedelete = fcd.isabsent() or fco.isabsent()
    tool, toolpath = _picktool(repo, ui, fd, binary, symlink, changedelete)
    if tool in internals and tool.startswith('internal:'):
        # normalize to new-style names (':merge' etc)
        tool = tool[len('internal'):]
    ui.debug("picked tool '%s' for %s (binary %s symlink %s changedelete %s)\n"
             % (tool, fd, binary, symlink, changedelete))

    if tool in internals:
        # internal tools carry their metadata on the function object
        func = internals[tool]
        mergetype = func.mergetype
        onfailure = func.onfailure
        precheck = func.precheck
    else:
        func = _xmerge
        mergetype = fullmerge
        onfailure = _("merging %s failed!\n")
        precheck = None

    toolconf = tool, toolpath, binary, symlink

    if mergetype == nomerge:
        # nomerge tools (:local, :other, :prompt, :fail) resolve immediately
        # and need no temporary files
        r, deleted = func(repo, mynode, orig, fcd, fco, fca, toolconf, labels)
        return True, r, deleted

    if premerge:
        if orig != fco.path():
            ui.status(_("merging %s and %s to %s\n") % (orig, fco.path(), fd))
        else:
            ui.status(_("merging %s\n") % fd)

    ui.debug("my %s other %s ancestor %s\n" % (fcd, fco, fca))

    if precheck and not precheck(repo, mynode, orig, fcd, fco, fca,
                                 toolconf):
        if onfailure:
            ui.warn(onfailure % fd)
        return True, 1, False

    # a = working-copy file; b/c = temporary copies of base and other
    a = repo.wjoin(fd)
    b = temp("base", fca)
    c = temp("other", fco)
    if not fcd.isabsent():
        back = scmutil.origpath(ui, repo, a)
        if premerge:
            # preserve the pristine local version for retries and .orig
            util.copyfile(a, back)
    else:
        back = None
    files = (a, b, c, back)

    r = 1
    try:
        markerstyle = ui.config('ui', 'mergemarkers', 'basic')
        if not labels:
            labels = _defaultconflictlabels
        if markerstyle != 'basic':
            labels = _formatlabels(repo, fcd, fco, fca, labels)

        if premerge and mergetype == fullmerge:
            r = _premerge(repo, fcd, fco, fca, toolconf, files, labels=labels)
            # complete if premerge successful (r is 0)
            return not r, r, False

        needcheck, r, deleted = func(repo, mynode, orig, fcd, fco, fca,
                                     toolconf, files, labels=labels)

        if needcheck:
            r = _check(r, ui, tool, fcd, files)

        if r:
            if onfailure:
                ui.warn(onfailure % fd)

        return True, r, deleted
    finally:
        # on success remove the backup; on failure keep it for the user
        if not r and back is not None:
            util.unlink(back)
        util.unlink(b)
        util.unlink(c)
|
675 | 676 | |
|
def _check(r, ui, tool, fcd, files):
    """Run the post-merge sanity checks configured via <tool>.check*.

    Returns the (possibly escalated to failure) merge return code *r*.
    """
    fd = fcd.path()
    a, b, c, back = files

    # 'conflicts' check: fail if conflict markers survive in the result
    if not r and (_toolbool(ui, tool, "checkconflicts") or
                  'conflicts' in _toollist(ui, tool, "check")):
        if re.search("^(<<<<<<< .*|=======|>>>>>>> .*)$", fcd.data(),
                     re.MULTILINE):
            r = 1

    # 'prompt' check: ask the user whether the merge succeeded
    checked = False
    if 'prompt' in _toollist(ui, tool, "check"):
        checked = True
        if ui.promptchoice(_("was merge of '%s' successful (yn)?"
                             "$$ &Yes $$ &No") % fd, 1):
            r = 1

    # 'changed' check: a result identical to the backup is suspicious,
    # so confirm with the user (skipped if 'prompt' already asked)
    if not r and not checked and (_toolbool(ui, tool, "checkchanged") or
                                  'changed' in
                                  _toollist(ui, tool, "check")):
        if back is not None and filecmp.cmp(a, back):
            if ui.promptchoice(_(" output file %s appears unchanged\n"
                                 "was merge successful (yn)?"
                                 "$$ &Yes $$ &No") % fd, 1):
                r = 1

    # 'fixeol': normalize the result's EOL style back to the original's
    if back is not None and _toolbool(ui, tool, "fixeol"):
        _matcheol(a, back)

    return r
|
706 | 707 | |
|
def premerge(repo, mynode, orig, fcd, fco, fca, labels=None):
    """Run only the premerge phase of the 3-way merge (see _filemerge)."""
    return _filemerge(True, repo, mynode, orig, fcd, fco, fca, labels=labels)
|
709 | 710 | |
|
def filemerge(repo, mynode, orig, fcd, fco, fca, labels=None):
    """Run the full (non-premerge) 3-way merge (see _filemerge)."""
    return _filemerge(False, repo, mynode, orig, fcd, fco, fca, labels=labels)
|
712 | 713 | |
|
# tell hggettext to extract docstrings from these functions
# (the internal merge tools registered via @internaltool):
i18nfunctions = internals.values()
@@ -1,200 +1,201 | |||
|
1 | 1 | # hgweb/common.py - Utility functions needed by hgweb_mod and hgwebdir_mod |
|
2 | 2 | # |
|
3 | 3 | # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net> |
|
4 | 4 | # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> |
|
5 | 5 | # |
|
6 | 6 | # This software may be used and distributed according to the terms of the |
|
7 | 7 | # GNU General Public License version 2 or any later version. |
|
8 | 8 | |
|
9 | 9 | from __future__ import absolute_import |
|
10 | 10 | |
|
11 | 11 | import errno |
|
12 | 12 | import mimetypes |
|
13 | 13 | import os |
|
14 | 14 | |
|
15 | 15 | from .. import ( |
|
16 | encoding, | |
|
16 | 17 | pycompat, |
|
17 | 18 | util, |
|
18 | 19 | ) |
|
19 | 20 | |
|
20 | 21 | httpserver = util.httpserver |
|
21 | 22 | |
|
22 | 23 | HTTP_OK = 200 |
|
23 | 24 | HTTP_NOT_MODIFIED = 304 |
|
24 | 25 | HTTP_BAD_REQUEST = 400 |
|
25 | 26 | HTTP_UNAUTHORIZED = 401 |
|
26 | 27 | HTTP_FORBIDDEN = 403 |
|
27 | 28 | HTTP_NOT_FOUND = 404 |
|
28 | 29 | HTTP_METHOD_NOT_ALLOWED = 405 |
|
29 | 30 | HTTP_SERVER_ERROR = 500 |
|
30 | 31 | |
|
31 | 32 | |
|
def ismember(ui, username, userlist):
    """Check if username is a member of userlist.

    If userlist has a single '*' member, all users are considered members.
    Can be overridden by extensions to provide more complex authorization
    schemes.
    """
    if userlist == ['*']:
        return True
    return username in userlist
|
40 | 41 | |
|
def checkauthz(hgweb, req, op):
    '''Check permission for operation based on request data (including
    authentication info). Return if op allowed, else raise an ErrorResponse
    exception.'''

    ui = hgweb.repo.ui
    username = req.env.get('REMOTE_USER')

    # read access: deny list wins, then an allow list (if any) must match
    denyread = hgweb.configlist('web', 'deny_read')
    if denyread and (not username or ismember(ui, username, denyread)):
        raise ErrorResponse(HTTP_UNAUTHORIZED, 'read not authorized')

    allowread = hgweb.configlist('web', 'allow_read')
    if allowread and (not ismember(ui, username, allowread)):
        raise ErrorResponse(HTTP_UNAUTHORIZED, 'read not authorized')

    if op == 'pull' and not hgweb.allowpull:
        raise ErrorResponse(HTTP_UNAUTHORIZED, 'pull not authorized')
    elif op == 'pull' or op is None: # op is None for interface requests
        return

    # enforce that you can only push using POST requests
    if req.env['REQUEST_METHOD'] != 'POST':
        raise ErrorResponse(HTTP_METHOD_NOT_ALLOWED,
                            'push requires POST request')

    # require ssl by default for pushing, auth info cannot be sniffed
    # and replayed
    scheme = req.env.get('wsgi.url_scheme')
    if hgweb.configbool('web', 'push_ssl', True) and scheme != 'https':
        raise ErrorResponse(HTTP_FORBIDDEN, 'ssl required')

    # push access: same deny-then-allow pattern as reads
    denypush = hgweb.configlist('web', 'deny_push')
    if denypush and (not username or ismember(ui, username, denypush)):
        raise ErrorResponse(HTTP_UNAUTHORIZED, 'push not authorized')

    allowpush = hgweb.configlist('web', 'allow_push')
    if not (allowpush and ismember(ui, username, allowpush)):
        raise ErrorResponse(HTTP_UNAUTHORIZED, 'push not authorized')
|
79 | 80 | |
|
80 | 81 | # Hooks for hgweb permission checks; extensions can add hooks here. |
|
81 | 82 | # Each hook is invoked like this: hook(hgweb, request, operation), |
|
82 | 83 | # where operation is either read, pull or push. Hooks should either |
|
83 | 84 | # raise an ErrorResponse exception, or just return. |
|
84 | 85 | # |
|
85 | 86 | # It is possible to do both authentication and authorization through |
|
86 | 87 | # this. |
|
87 | 88 | permhooks = [checkauthz] |
|
88 | 89 | |
|
89 | 90 | |
|
class ErrorResponse(Exception):
    """HTTP error to be converted into a response with the given status.

    code is the numeric HTTP status; message defaults to the standard
    reason phrase for that code; headers is a list of extra
    (name, value) pairs to emit with the response.
    """
    def __init__(self, code, message=None, headers=None):
        if message is None:
            message = _statusmessage(code)
        Exception.__init__(self, message)
        self.code = code
        # 'headers=[]' as the default would be shared between all
        # instances (mutable default argument); give each instance its
        # own fresh list instead
        if headers is None:
            headers = []
        self.headers = headers
|
97 | 98 | |
|
class continuereader(object):
    """File-like wrapper that sends 'HTTP/1.1 100 Continue' once, just
    before the first read, for clients using Expect: 100-continue."""

    def __init__(self, f, write):
        self.f = f
        self._write = write
        self.continued = False

    def read(self, amt=-1):
        if self.continued:
            return self.f.read(amt)
        # first read: tell the client to start sending the body
        self.continued = True
        self._write('HTTP/1.1 100 Continue\r\n\r\n')
        return self.f.read(amt)

    def __getattr__(self, attr):
        # delegate a small whitelist of file methods to the wrapped file
        if attr not in ('close', 'readline', 'readlines', '__iter__'):
            raise AttributeError
        return getattr(self.f, attr)
|
114 | 115 | |
|
def _statusmessage(code):
    """Map an HTTP status code to its short reason phrase."""
    table = httpserver.basehttprequesthandler.responses
    return table.get(code, ('Error', 'Unknown error'))[0]
|
118 | 119 | |
|
def statusmessage(code, message=None):
    """Return an HTTP status line, e.g. '404 Not Found'."""
    reason = message or _statusmessage(code)
    return '%d %s' % (code, reason)
|
121 | 122 | |
|
def get_stat(spath, fn):
    """stat fn inside spath if it exists, otherwise stat spath itself"""
    target = os.path.join(spath, fn)
    if not os.path.exists(target):
        target = spath
    return os.stat(target)
|
129 | 130 | |
|
def get_mtime(spath):
    """Return the mtime of the changelog in spath (or of spath itself)."""
    st = get_stat(spath, "00changelog.i")
    return st.st_mtime
|
132 | 133 | |
|
def staticfile(directory, fname, req):
    """return a file inside directory with guessed Content-Type header

    fname always uses '/' as directory separator and isn't allowed to
    contain unusual path components.
    Content-Type is guessed using the mimetypes module.
    Return an empty string if fname is illegal or file not found.

    """
    parts = fname.split('/')
    for part in parts:
        # reject empty components, '.', '..' and anything containing an
        # OS path separator, to prevent escaping the static directory
        if (part in ('', os.curdir, os.pardir) or
            pycompat.ossep in part or
            pycompat.osaltsep is not None and pycompat.osaltsep in part):
            return
    fpath = os.path.join(*parts)
    if isinstance(directory, str):
        directory = [directory]
    for d in directory:
        path = os.path.join(d, fpath)
        if os.path.exists(path):
            break
    try:
        os.stat(path)
        ct = mimetypes.guess_type(path)[0] or "text/plain"
        # use a context manager so the file handle is released even if
        # the read raises (the original leaked it in that case)
        with open(path, 'rb') as fp:
            data = fp.read()
        req.respond(HTTP_OK, ct, body=data)
    except TypeError:
        raise ErrorResponse(HTTP_SERVER_ERROR, 'illegal filename')
    except OSError as err:
        if err.errno == errno.ENOENT:
            raise ErrorResponse(HTTP_NOT_FOUND)
        else:
            raise ErrorResponse(HTTP_SERVER_ERROR, err.strerror)
|
169 | 170 | |
|
def paritygen(stripecount, offset=0):
    """count parity of horizontal stripes for easier reading

    Yields 0/1, flipping every `stripecount` items; `offset` shifts the
    starting position (e.g. when the list was built in reverse).  With
    stripecount == 0 the parity never changes.
    """
    if stripecount and offset:
        # account for offset, e.g. due to building the list in reverse
        count = (stripecount + offset) % stripecount
        # use floor division: plain '/' yields a float under Python 3,
        # which breaks the bitwise '&'; '//' is identical under Python 2
        parity = (stripecount + offset) // stripecount & 1
    else:
        count = 0
        parity = 0
    while True:
        yield parity
        count += 1
        if stripecount and count >= stripecount:
            parity = 1 - parity
            count = 0
|
def get_contact(config):
    """Return repo contact information or empty string.

    web.contact is the primary source, but if that is not set, try
    ui.username or $EMAIL as a fallback to display something useful.
    """
    for section, name in [("web", "contact"), ("ui", "username")]:
        value = config(section, name)
        if value:
            return value
    return encoding.environ.get("EMAIL") or ""
|
195 | 196 | |
|
def caching(web, req):
    """Handle client-side caching via the ETag header.

    Raises HTTP_NOT_MODIFIED when the client's If-None-Match value
    matches the current repository mtime tag; otherwise attaches the
    ETag header to the response.
    """
    tag = 'W/"%s"' % web.mtime
    clienttag = req.env.get('HTTP_IF_NONE_MATCH')
    if clienttag == tag:
        raise ErrorResponse(HTTP_NOT_MODIFIED)
    req.headers.append(('ETag', tag))
@@ -1,469 +1,470 | |||
|
1 | 1 | # hgweb/hgweb_mod.py - Web interface for a repository. |
|
2 | 2 | # |
|
3 | 3 | # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net> |
|
4 | 4 | # Copyright 2005-2007 Matt Mackall <mpm@selenic.com> |
|
5 | 5 | # |
|
6 | 6 | # This software may be used and distributed according to the terms of the |
|
7 | 7 | # GNU General Public License version 2 or any later version. |
|
8 | 8 | |
|
9 | 9 | from __future__ import absolute_import |
|
10 | 10 | |
|
11 | 11 | import contextlib |
|
12 | 12 | import os |
|
13 | 13 | |
|
14 | 14 | from .common import ( |
|
15 | 15 | ErrorResponse, |
|
16 | 16 | HTTP_BAD_REQUEST, |
|
17 | 17 | HTTP_NOT_FOUND, |
|
18 | 18 | HTTP_NOT_MODIFIED, |
|
19 | 19 | HTTP_OK, |
|
20 | 20 | HTTP_SERVER_ERROR, |
|
21 | 21 | caching, |
|
22 | 22 | permhooks, |
|
23 | 23 | ) |
|
24 | 24 | from .request import wsgirequest |
|
25 | 25 | |
|
26 | 26 | from .. import ( |
|
27 | 27 | encoding, |
|
28 | 28 | error, |
|
29 | 29 | hg, |
|
30 | 30 | hook, |
|
31 | 31 | profiling, |
|
32 | 32 | repoview, |
|
33 | 33 | templatefilters, |
|
34 | 34 | templater, |
|
35 | 35 | ui as uimod, |
|
36 | 36 | util, |
|
37 | 37 | ) |
|
38 | 38 | |
|
39 | 39 | from . import ( |
|
40 | 40 | protocol, |
|
41 | 41 | webcommands, |
|
42 | 42 | webutil, |
|
43 | 43 | wsgicgi, |
|
44 | 44 | ) |
|
45 | 45 | |
|
46 | 46 | perms = { |
|
47 | 47 | 'changegroup': 'pull', |
|
48 | 48 | 'changegroupsubset': 'pull', |
|
49 | 49 | 'getbundle': 'pull', |
|
50 | 50 | 'stream_out': 'pull', |
|
51 | 51 | 'listkeys': 'pull', |
|
52 | 52 | 'unbundle': 'push', |
|
53 | 53 | 'pushkey': 'push', |
|
54 | 54 | } |
|
55 | 55 | |
|
def makebreadcrumb(url, prefix=''):
    '''Return a 'URL breadcrumb' list

    A 'URL breadcrumb' is a list of URL-name pairs,
    corresponding to each of the path items on a URL.
    This can be used to create path navigation entries.
    '''
    if url.endswith('/'):
        url = url[:-1]
    if prefix:
        url = '/' + prefix + url
    relpath = url[1:] if url.startswith('/') else url

    # walk from the deepest path component upward, shortening the URL
    # with dirname at each step; the leading '' sentinel stops the walk
    crumbs = []
    href = url
    for name in reversed([''] + relpath.split('/')):
        if not name or not href:
            break
        crumbs.append({'url': href, 'name': name})
        href = os.path.dirname(href)
    return reversed(crumbs)
|
80 | 80 | |
|
class requestcontext(object):
    """Holds state/context for an individual request.

    Servers can be multi-threaded. Holding state on the WSGI application
    is prone to race conditions. Instances of this class exist to hold
    mutable and race-free state for requests.
    """
    def __init__(self, app, repo):
        self.repo = repo
        self.reponame = app.reponame

        self.archives = ('zip', 'gz', 'bz2')

        self.maxchanges = self.configint('web', 'maxchanges', 10)
        self.stripecount = self.configint('web', 'stripes', 1)
        self.maxshortchanges = self.configint('web', 'maxshortchanges', 60)
        self.maxfiles = self.configint('web', 'maxfiles', 10)
        self.allowpull = self.configbool('web', 'allowpull', True)

        # we use untrusted=False to prevent a repo owner from using
        # web.templates in .hg/hgrc to get access to any file readable
        # by the user running the CGI script
        self.templatepath = self.config('web', 'templates', untrusted=False)

        # This object is more expensive to build than simple config values.
        # It is shared across requests. The app will replace the object
        # if it is updated. Since this is a reference and nothing should
        # modify the underlying object, it should be constant for the lifetime
        # of the request.
        self.websubtable = app.websubtable

    # Trust the settings from the .hg/hgrc files by default.
    def config(self, section, name, default=None, untrusted=True):
        """Read a string config value from the repo's ui."""
        return self.repo.ui.config(section, name, default,
                                   untrusted=untrusted)

    def configbool(self, section, name, default=False, untrusted=True):
        """Read a boolean config value from the repo's ui."""
        return self.repo.ui.configbool(section, name, default,
                                       untrusted=untrusted)

    def configint(self, section, name, default=None, untrusted=True):
        """Read an integer config value from the repo's ui."""
        return self.repo.ui.configint(section, name, default,
                                      untrusted=untrusted)

    def configlist(self, section, name, default=None, untrusted=True):
        """Read a list config value from the repo's ui."""
        return self.repo.ui.configlist(section, name, default,
                                       untrusted=untrusted)

    # archive type -> (content-type, command, extension, extra opts)
    archivespecs = {
        'bz2': ('application/x-bzip2', 'tbz2', '.tar.bz2', None),
        'gz': ('application/x-gzip', 'tgz', '.tar.gz', None),
        'zip': ('application/zip', 'zip', '.zip', None),
    }

    def archivelist(self, nodeid):
        """Yield the archive types enabled in the config for nodeid."""
        allowed = self.configlist('web', 'allow_archive')
        for typ, spec in self.archivespecs.iteritems():
            if typ in allowed or self.configbool('web', 'allow%s' % typ):
                yield {'type': typ, 'extension': spec[2], 'node': nodeid}

    def templater(self, req):
        """Build the templater used to render responses for req.

        Side effects: may set self.reponame the first time through, and
        feeds session variables derived from req.form into the templates.
        """
        # determine scheme, port and server name
        # this is needed to create absolute urls

        proto = req.env.get('wsgi.url_scheme')
        if proto == 'https':
            proto = 'https'
            default_port = '443'
        else:
            proto = 'http'
            default_port = '80'

        # omit the port from urlbase when it is the scheme's default
        port = req.env['SERVER_PORT']
        port = port != default_port and (':' + port) or ''
        urlbase = '%s://%s%s' % (proto, req.env['SERVER_NAME'], port)
        logourl = self.config('web', 'logourl', 'https://mercurial-scm.org/')
        logoimg = self.config('web', 'logoimg', 'hglogo.png')
        staticurl = self.config('web', 'staticurl') or req.url + 'static/'
        if not staticurl.endswith('/'):
            staticurl += '/'

        # some functions for the templater

        def motd(**map):
            yield self.config('web', 'motd', '')

        # figure out which style to use

        vars = {}
        # precedence: URL parameter, then web.style config, then 'paper'
        styles = (
            req.form.get('style', [None])[0],
            self.config('web', 'style'),
            'paper',
        )
        style, mapfile = templater.stylemap(styles, self.templatepath)
        if style == styles[0]:
            # the style came from the URL; propagate it in session vars
            vars['style'] = style

        start = req.url[-1] == '?' and '&' or '?'
        sessionvars = webutil.sessionvars(vars, start)

        if not self.reponame:
            self.reponame = (self.config('web', 'name')
                             or req.env.get('REPO_NAME')
                             or req.url.strip('/') or self.repo.root)

        def websubfilter(text):
            return templatefilters.websub(text, self.websubtable)

        # create the templater

        defaults = {
            'url': req.url,
            'logourl': logourl,
            'logoimg': logoimg,
            'staticurl': staticurl,
            'urlbase': urlbase,
            'repo': self.reponame,
            'encoding': encoding.encoding,
            'motd': motd,
            'sessionvars': sessionvars,
            'pathdef': makebreadcrumb(req.url),
            'style': style,
        }
        tmpl = templater.templater.frommapfile(mapfile,
                                               filters={'websub': websubfilter},
                                               defaults=defaults)
        return tmpl
|
209 | 209 | |
|
210 | 210 | |
|
class hgweb(object):
    """HTTP server for individual repositories.

    Instances of this class serve HTTP responses for a particular
    repository.

    Instances are typically used as WSGI applications.

    Some servers are multi-threaded. On these servers, there may
    be multiple active threads inside __call__.
    """
    def __init__(self, repo, name=None, baseui=None):
        # 'repo' may be a path (open it ourselves) or an already-open repo
        if isinstance(repo, str):
            if baseui:
                u = baseui.copy()
            else:
                u = uimod.ui.load()
            r = hg.repository(u, repo)
        else:
            # we trust caller to give us a private copy
            r = repo

        r.ui.setconfig('ui', 'report_untrusted', 'off', 'hgweb')
        r.baseui.setconfig('ui', 'report_untrusted', 'off', 'hgweb')
        r.ui.setconfig('ui', 'nontty', 'true', 'hgweb')
        r.baseui.setconfig('ui', 'nontty', 'true', 'hgweb')
        # resolve file patterns relative to repo root
        r.ui.setconfig('ui', 'forcecwd', r.root, 'hgweb')
        r.baseui.setconfig('ui', 'forcecwd', r.root, 'hgweb')
        # displaying bundling progress bar while serving feel wrong and may
        # break some wsgi implementation.
        r.ui.setconfig('progress', 'disable', 'true', 'hgweb')
        r.baseui.setconfig('progress', 'disable', 'true', 'hgweb')
        self._repos = [hg.cachedlocalrepo(self._webifyrepo(r))]
        self._lastrepo = self._repos[0]
        hook.redirect(True)
        self.reponame = name

    def _webifyrepo(self, repo):
        # apply the web.view filter and capture the websub table
        repo = getwebview(repo)
        self.websubtable = webutil.getwebsubs(repo)
        return repo

    @contextlib.contextmanager
    def _obtainrepo(self):
        """Obtain a repo unique to the caller.

        Internally we maintain a stack of cachedlocalrepo instances
        to be handed out. If one is available, we pop it and return it,
        ensuring it is up to date in the process. If one is not available,
        we clone the most recently used repo instance and return it.

        It is currently possible for the stack to grow without bounds
        if the server allows infinite threads. However, servers should
        have a thread limit, thus establishing our limit.
        """
        if self._repos:
            cached = self._repos.pop()
            r, created = cached.fetch()
        else:
            cached = self._lastrepo.copy()
            r, created = cached.fetch()
        if created:
            r = self._webifyrepo(r)

        self._lastrepo = cached
        self.mtime = cached.mtime
        try:
            yield r
        finally:
            # return the instance to the pool for reuse by other requests
            self._repos.append(cached)

    def run(self):
        """Start a server from CGI environment.

        Modern servers should be using WSGI and should avoid this
        method, if possible.
        """
        if not encoding.environ.get('GATEWAY_INTERFACE',
                                    '').startswith("CGI/1."):
            raise RuntimeError("This function is only intended to be "
                               "called while running as a CGI script.")
        wsgicgi.launch(self)

    def __call__(self, env, respond):
        """Run the WSGI application.

        This may be called by multiple threads.
        """
        req = wsgirequest(env, respond)
        return self.run_wsgi(req)

    def run_wsgi(self, req):
        """Internal method to run the WSGI application.

        This is typically only called by Mercurial. External consumers
        should be using instances of this class as the WSGI application.
        """
        with self._obtainrepo() as repo:
            with profiling.maybeprofile(repo.ui):
                for r in self._runwsgi(req, repo):
                    yield r

    def _runwsgi(self, req, repo):
        """Dispatch a single request: protocol command or web UI page."""
        rctx = requestcontext(self, repo)

        # This state is global across all threads.
        encoding.encoding = rctx.config('web', 'encoding', encoding.encoding)
        rctx.repo.ui.environ = req.env

        # work with CGI variables to create coherent structure
        # use SCRIPT_NAME, PATH_INFO and QUERY_STRING as well as our REPO_NAME

        req.url = req.env['SCRIPT_NAME']
        if not req.url.endswith('/'):
            req.url += '/'
        if 'REPO_NAME' in req.env:
            req.url += req.env['REPO_NAME'] + '/'

        if 'PATH_INFO' in req.env:
            parts = req.env['PATH_INFO'].strip('/').split('/')
            repo_parts = req.env.get('REPO_NAME', '').split('/')
            if parts[:len(repo_parts)] == repo_parts:
                parts = parts[len(repo_parts):]
            query = '/'.join(parts)
        else:
            query = req.env['QUERY_STRING'].partition('&')[0]
            query = query.partition(';')[0]

        # process this if it's a protocol request
        # protocol bits don't need to create any URLs
        # and the clients always use the old URL structure

        cmd = req.form.get('cmd', [''])[0]
        if protocol.iscmd(cmd):
            try:
                if query:
                    raise ErrorResponse(HTTP_NOT_FOUND)
                if cmd in perms:
                    self.check_perm(rctx, req, perms[cmd])
                return protocol.call(rctx.repo, req, cmd)
            except ErrorResponse as inst:
                # A client that sends unbundle without 100-continue will
                # break if we respond early.
                if (cmd == 'unbundle' and
                    (req.env.get('HTTP_EXPECT',
                                 '').lower() != '100-continue') or
                    req.env.get('X-HgHttp2', '')):
                    req.drain()
                else:
                    req.headers.append(('Connection', 'Close'))
                req.respond(inst, protocol.HGTYPE,
                            body='0\n%s\n' % inst)
                return ''

        # translate user-visible url structure to internal structure

        args = query.split('/', 2)
        if 'cmd' not in req.form and args and args[0]:

            # a leading 'style-' prefix on the command selects the style
            cmd = args.pop(0)
            style = cmd.rfind('-')
            if style != -1:
                req.form['style'] = [cmd[:style]]
                cmd = cmd[style + 1:]

            # avoid accepting e.g. style parameter as command
            if util.safehasattr(webcommands, cmd):
                req.form['cmd'] = [cmd]

            if cmd == 'static':
                req.form['file'] = ['/'.join(args)]
            else:
                if args and args[0]:
                    node = args.pop(0).replace('%2F', '/')
                    req.form['node'] = [node]
                if args:
                    req.form['file'] = args

            ua = req.env.get('HTTP_USER_AGENT', '')
            if cmd == 'rev' and 'mercurial' in ua:
                req.form['style'] = ['raw']

            if cmd == 'archive':
                # split the archive extension off the node name
                fn = req.form['node'][0]
                for type_, spec in rctx.archivespecs.iteritems():
                    ext = spec[2]
                    if fn.endswith(ext):
                        req.form['node'] = [fn[:-len(ext)]]
                        req.form['type'] = [type_]

        # process the web interface request

        try:
            tmpl = rctx.templater(req)
            ctype = tmpl('mimetype', encoding=encoding.encoding)
            ctype = templater.stringify(ctype)

            # check read permissions non-static content
            if cmd != 'static':
                self.check_perm(rctx, req, None)

            if cmd == '':
                req.form['cmd'] = [tmpl.cache['default']]
                cmd = req.form['cmd'][0]

            if rctx.configbool('web', 'cache', True):
                caching(self, req) # sets ETag header or raises NOT_MODIFIED
            if cmd not in webcommands.__all__:
                msg = 'no such method: %s' % cmd
                raise ErrorResponse(HTTP_BAD_REQUEST, msg)
            elif cmd == 'file' and 'raw' in req.form.get('style', []):
                rctx.ctype = ctype
                content = webcommands.rawfile(rctx, req, tmpl)
            else:
                content = getattr(webcommands, cmd)(rctx, req, tmpl)
                req.respond(HTTP_OK, ctype)

            return content

        except (error.LookupError, error.RepoLookupError) as err:
            req.respond(HTTP_NOT_FOUND, ctype)
            msg = str(err)
            if (util.safehasattr(err, 'name') and
                not isinstance(err, error.ManifestLookupError)):
                msg = 'revision not found: %s' % err.name
            return tmpl('error', error=msg)
        except (error.RepoError, error.RevlogError) as inst:
            req.respond(HTTP_SERVER_ERROR, ctype)
            return tmpl('error', error=str(inst))
        except ErrorResponse as inst:
            req.respond(inst, ctype)
            if inst.code == HTTP_NOT_MODIFIED:
                # Not allowed to return a body on a 304
                return ['']
            return tmpl('error', error=str(inst))

    def check_perm(self, rctx, req, op):
        """Run all registered permission hooks; they raise on denial."""
        for permhook in permhooks:
            permhook(rctx, req, op)
|
450 | 451 | |
|
def getwebview(repo):
    """The 'web.view' config controls changeset filter to hgweb. Possible
    values are ``served``, ``visible`` and ``all``. Default is ``served``.
    The ``served`` filter only shows changesets that can be pulled from the
    hgweb instance. The ``visible`` filter includes secret changesets but
    still excludes "hidden" ones.

    See the repoview module for details.

    The option has been around undocumented since Mercurial 2.5, but no
    user ever asked about it. So we better keep it undocumented for now."""
    viewname = repo.ui.config('web', 'view', 'served', untrusted=True)
    if viewname == 'all':
        return repo.unfiltered()
    if viewname in repoview.filtertable:
        return repo.filtered(viewname)
    return repo.filtered('served')
@@ -1,521 +1,522 | |||
|
1 | 1 | # hgweb/hgwebdir_mod.py - Web interface for a directory of repositories. |
|
2 | 2 | # |
|
3 | 3 | # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net> |
|
4 | 4 | # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> |
|
5 | 5 | # |
|
6 | 6 | # This software may be used and distributed according to the terms of the |
|
7 | 7 | # GNU General Public License version 2 or any later version. |
|
8 | 8 | |
|
9 | 9 | from __future__ import absolute_import |
|
10 | 10 | |
|
11 | 11 | import os |
|
12 | 12 | import re |
|
13 | 13 | import time |
|
14 | 14 | |
|
15 | 15 | from ..i18n import _ |
|
16 | 16 | |
|
17 | 17 | from .common import ( |
|
18 | 18 | ErrorResponse, |
|
19 | 19 | HTTP_NOT_FOUND, |
|
20 | 20 | HTTP_OK, |
|
21 | 21 | HTTP_SERVER_ERROR, |
|
22 | 22 | get_contact, |
|
23 | 23 | get_mtime, |
|
24 | 24 | ismember, |
|
25 | 25 | paritygen, |
|
26 | 26 | staticfile, |
|
27 | 27 | ) |
|
28 | 28 | from .request import wsgirequest |
|
29 | 29 | |
|
30 | 30 | from .. import ( |
|
31 | 31 | encoding, |
|
32 | 32 | error, |
|
33 | 33 | hg, |
|
34 | 34 | profiling, |
|
35 | 35 | scmutil, |
|
36 | 36 | templater, |
|
37 | 37 | ui as uimod, |
|
38 | 38 | util, |
|
39 | 39 | ) |
|
40 | 40 | |
|
41 | 41 | from . import ( |
|
42 | 42 | hgweb_mod, |
|
43 | 43 | webutil, |
|
44 | 44 | wsgicgi, |
|
45 | 45 | ) |
|
46 | 46 | |
|
47 | 47 | def cleannames(items): |
|
48 | 48 | return [(util.pconvert(name).strip('/'), path) for name, path in items] |
|
49 | 49 | |
|
50 | 50 | def findrepos(paths): |
|
51 | 51 | repos = [] |
|
52 | 52 | for prefix, root in cleannames(paths): |
|
53 | 53 | roothead, roottail = os.path.split(root) |
|
54 | 54 | # "foo = /bar/*" or "foo = /bar/**" lets every repo /bar/N in or below |
|
55 | 55 | # /bar/ be served as as foo/N . |
|
56 | 56 | # '*' will not search inside dirs with .hg (except .hg/patches), |
|
57 | 57 | # '**' will search inside dirs with .hg (and thus also find subrepos). |
|
58 | 58 | try: |
|
59 | 59 | recurse = {'*': False, '**': True}[roottail] |
|
60 | 60 | except KeyError: |
|
61 | 61 | repos.append((prefix, root)) |
|
62 | 62 | continue |
|
63 | 63 | roothead = os.path.normpath(os.path.abspath(roothead)) |
|
64 | 64 | paths = scmutil.walkrepos(roothead, followsym=True, recurse=recurse) |
|
65 | 65 | repos.extend(urlrepos(prefix, roothead, paths)) |
|
66 | 66 | return repos |
|
67 | 67 | |
|
68 | 68 | def urlrepos(prefix, roothead, paths): |
|
69 | 69 | """yield url paths and filesystem paths from a list of repo paths |
|
70 | 70 | |
|
71 | 71 | >>> conv = lambda seq: [(v, util.pconvert(p)) for v,p in seq] |
|
72 | 72 | >>> conv(urlrepos('hg', '/opt', ['/opt/r', '/opt/r/r', '/opt'])) |
|
73 | 73 | [('hg/r', '/opt/r'), ('hg/r/r', '/opt/r/r'), ('hg', '/opt')] |
|
74 | 74 | >>> conv(urlrepos('', '/opt', ['/opt/r', '/opt/r/r', '/opt'])) |
|
75 | 75 | [('r', '/opt/r'), ('r/r', '/opt/r/r'), ('', '/opt')] |
|
76 | 76 | """ |
|
77 | 77 | for path in paths: |
|
78 | 78 | path = os.path.normpath(path) |
|
79 | 79 | yield (prefix + '/' + |
|
80 | 80 | util.pconvert(path[len(roothead):]).lstrip('/')).strip('/'), path |
|
81 | 81 | |
|
82 | 82 | def geturlcgivars(baseurl, port): |
|
83 | 83 | """ |
|
84 | 84 | Extract CGI variables from baseurl |
|
85 | 85 | |
|
86 | 86 | >>> geturlcgivars("http://host.org/base", "80") |
|
87 | 87 | ('host.org', '80', '/base') |
|
88 | 88 | >>> geturlcgivars("http://host.org:8000/base", "80") |
|
89 | 89 | ('host.org', '8000', '/base') |
|
90 | 90 | >>> geturlcgivars('/base', 8000) |
|
91 | 91 | ('', '8000', '/base') |
|
92 | 92 | >>> geturlcgivars("base", '8000') |
|
93 | 93 | ('', '8000', '/base') |
|
94 | 94 | >>> geturlcgivars("http://host", '8000') |
|
95 | 95 | ('host', '8000', '/') |
|
96 | 96 | >>> geturlcgivars("http://host/", '8000') |
|
97 | 97 | ('host', '8000', '/') |
|
98 | 98 | """ |
|
99 | 99 | u = util.url(baseurl) |
|
100 | 100 | name = u.host or '' |
|
101 | 101 | if u.port: |
|
102 | 102 | port = u.port |
|
103 | 103 | path = u.path or "" |
|
104 | 104 | if not path.startswith('/'): |
|
105 | 105 | path = '/' + path |
|
106 | 106 | |
|
107 | 107 | return name, str(port), path |
|
108 | 108 | |
|
109 | 109 | class hgwebdir(object): |
|
110 | 110 | """HTTP server for multiple repositories. |
|
111 | 111 | |
|
112 | 112 | Given a configuration, different repositories will be served depending |
|
113 | 113 | on the request path. |
|
114 | 114 | |
|
115 | 115 | Instances are typically used as WSGI applications. |
|
116 | 116 | """ |
|
117 | 117 | def __init__(self, conf, baseui=None): |
|
118 | 118 | self.conf = conf |
|
119 | 119 | self.baseui = baseui |
|
120 | 120 | self.ui = None |
|
121 | 121 | self.lastrefresh = 0 |
|
122 | 122 | self.motd = None |
|
123 | 123 | self.refresh() |
|
124 | 124 | |
|
125 | 125 | def refresh(self): |
|
126 | 126 | refreshinterval = 20 |
|
127 | 127 | if self.ui: |
|
128 | 128 | refreshinterval = self.ui.configint('web', 'refreshinterval', |
|
129 | 129 | refreshinterval) |
|
130 | 130 | |
|
131 | 131 | # refreshinterval <= 0 means to always refresh. |
|
132 | 132 | if (refreshinterval > 0 and |
|
133 | 133 | self.lastrefresh + refreshinterval > time.time()): |
|
134 | 134 | return |
|
135 | 135 | |
|
136 | 136 | if self.baseui: |
|
137 | 137 | u = self.baseui.copy() |
|
138 | 138 | else: |
|
139 | 139 | u = uimod.ui.load() |
|
140 | 140 | u.setconfig('ui', 'report_untrusted', 'off', 'hgwebdir') |
|
141 | 141 | u.setconfig('ui', 'nontty', 'true', 'hgwebdir') |
|
142 | 142 | # displaying bundling progress bar while serving feels wrong and may |
|
143 | 143 | # break some wsgi implementations. |
|
144 | 144 | u.setconfig('progress', 'disable', 'true', 'hgweb') |
|
145 | 145 | |
|
146 | 146 | if not isinstance(self.conf, (dict, list, tuple)): |
|
147 | 147 | map = {'paths': 'hgweb-paths'} |
|
148 | 148 | if not os.path.exists(self.conf): |
|
149 | 149 | raise error.Abort(_('config file %s not found!') % self.conf) |
|
150 | 150 | u.readconfig(self.conf, remap=map, trust=True) |
|
151 | 151 | paths = [] |
|
152 | 152 | for name, ignored in u.configitems('hgweb-paths'): |
|
153 | 153 | for path in u.configlist('hgweb-paths', name): |
|
154 | 154 | paths.append((name, path)) |
|
155 | 155 | elif isinstance(self.conf, (list, tuple)): |
|
156 | 156 | paths = self.conf |
|
157 | 157 | elif isinstance(self.conf, dict): |
|
158 | 158 | paths = self.conf.items() |
|
159 | 159 | |
|
160 | 160 | repos = findrepos(paths) |
|
161 | 161 | for prefix, root in u.configitems('collections'): |
|
162 | 162 | prefix = util.pconvert(prefix) |
|
163 | 163 | for path in scmutil.walkrepos(root, followsym=True): |
|
164 | 164 | repo = os.path.normpath(path) |
|
165 | 165 | name = util.pconvert(repo) |
|
166 | 166 | if name.startswith(prefix): |
|
167 | 167 | name = name[len(prefix):] |
|
168 | 168 | repos.append((name.lstrip('/'), repo)) |
|
169 | 169 | |
|
170 | 170 | self.repos = repos |
|
171 | 171 | self.ui = u |
|
172 | 172 | encoding.encoding = self.ui.config('web', 'encoding', |
|
173 | 173 | encoding.encoding) |
|
174 | 174 | self.style = self.ui.config('web', 'style', 'paper') |
|
175 | 175 | self.templatepath = self.ui.config('web', 'templates', None) |
|
176 | 176 | self.stripecount = self.ui.config('web', 'stripes', 1) |
|
177 | 177 | if self.stripecount: |
|
178 | 178 | self.stripecount = int(self.stripecount) |
|
179 | 179 | self._baseurl = self.ui.config('web', 'baseurl') |
|
180 | 180 | prefix = self.ui.config('web', 'prefix', '') |
|
181 | 181 | if prefix.startswith('/'): |
|
182 | 182 | prefix = prefix[1:] |
|
183 | 183 | if prefix.endswith('/'): |
|
184 | 184 | prefix = prefix[:-1] |
|
185 | 185 | self.prefix = prefix |
|
186 | 186 | self.lastrefresh = time.time() |
|
187 | 187 | |
|
188 | 188 | def run(self): |
|
189 |
if not |
|
|
189 | if not encoding.environ.get('GATEWAY_INTERFACE', | |
|
190 | '').startswith("CGI/1."): | |
|
190 | 191 | raise RuntimeError("This function is only intended to be " |
|
191 | 192 | "called while running as a CGI script.") |
|
192 | 193 | wsgicgi.launch(self) |
|
193 | 194 | |
|
194 | 195 | def __call__(self, env, respond): |
|
195 | 196 | req = wsgirequest(env, respond) |
|
196 | 197 | return self.run_wsgi(req) |
|
197 | 198 | |
|
198 | 199 | def read_allowed(self, ui, req): |
|
199 | 200 | """Check allow_read and deny_read config options of a repo's ui object |
|
200 | 201 | to determine user permissions. By default, with neither option set (or |
|
201 | 202 | both empty), allow all users to read the repo. There are two ways a |
|
202 | 203 | user can be denied read access: (1) deny_read is not empty, and the |
|
203 | 204 | user is unauthenticated or deny_read contains user (or *), and (2) |
|
204 | 205 | allow_read is not empty and the user is not in allow_read. Return True |
|
205 | 206 | if user is allowed to read the repo, else return False.""" |
|
206 | 207 | |
|
207 | 208 | user = req.env.get('REMOTE_USER') |
|
208 | 209 | |
|
209 | 210 | deny_read = ui.configlist('web', 'deny_read', untrusted=True) |
|
210 | 211 | if deny_read and (not user or ismember(ui, user, deny_read)): |
|
211 | 212 | return False |
|
212 | 213 | |
|
213 | 214 | allow_read = ui.configlist('web', 'allow_read', untrusted=True) |
|
214 | 215 | # by default, allow reading if no allow_read option has been set |
|
215 | 216 | if (not allow_read) or ismember(ui, user, allow_read): |
|
216 | 217 | return True |
|
217 | 218 | |
|
218 | 219 | return False |
|
219 | 220 | |
|
220 | 221 | def run_wsgi(self, req): |
|
221 | 222 | with profiling.maybeprofile(self.ui): |
|
222 | 223 | for r in self._runwsgi(req): |
|
223 | 224 | yield r |
|
224 | 225 | |
|
225 | 226 | def _runwsgi(self, req): |
|
226 | 227 | try: |
|
227 | 228 | self.refresh() |
|
228 | 229 | |
|
229 | 230 | virtual = req.env.get("PATH_INFO", "").strip('/') |
|
230 | 231 | tmpl = self.templater(req) |
|
231 | 232 | ctype = tmpl('mimetype', encoding=encoding.encoding) |
|
232 | 233 | ctype = templater.stringify(ctype) |
|
233 | 234 | |
|
234 | 235 | # a static file |
|
235 | 236 | if virtual.startswith('static/') or 'static' in req.form: |
|
236 | 237 | if virtual.startswith('static/'): |
|
237 | 238 | fname = virtual[7:] |
|
238 | 239 | else: |
|
239 | 240 | fname = req.form['static'][0] |
|
240 | 241 | static = self.ui.config("web", "static", None, |
|
241 | 242 | untrusted=False) |
|
242 | 243 | if not static: |
|
243 | 244 | tp = self.templatepath or templater.templatepaths() |
|
244 | 245 | if isinstance(tp, str): |
|
245 | 246 | tp = [tp] |
|
246 | 247 | static = [os.path.join(p, 'static') for p in tp] |
|
247 | 248 | staticfile(static, fname, req) |
|
248 | 249 | return [] |
|
249 | 250 | |
|
250 | 251 | # top-level index |
|
251 | 252 | elif not virtual: |
|
252 | 253 | req.respond(HTTP_OK, ctype) |
|
253 | 254 | return self.makeindex(req, tmpl) |
|
254 | 255 | |
|
255 | 256 | # nested indexes and hgwebs |
|
256 | 257 | |
|
257 | 258 | repos = dict(self.repos) |
|
258 | 259 | virtualrepo = virtual |
|
259 | 260 | while virtualrepo: |
|
260 | 261 | real = repos.get(virtualrepo) |
|
261 | 262 | if real: |
|
262 | 263 | req.env['REPO_NAME'] = virtualrepo |
|
263 | 264 | try: |
|
264 | 265 | # ensure caller gets private copy of ui |
|
265 | 266 | repo = hg.repository(self.ui.copy(), real) |
|
266 | 267 | return hgweb_mod.hgweb(repo).run_wsgi(req) |
|
267 | 268 | except IOError as inst: |
|
268 | 269 | msg = inst.strerror |
|
269 | 270 | raise ErrorResponse(HTTP_SERVER_ERROR, msg) |
|
270 | 271 | except error.RepoError as inst: |
|
271 | 272 | raise ErrorResponse(HTTP_SERVER_ERROR, str(inst)) |
|
272 | 273 | |
|
273 | 274 | up = virtualrepo.rfind('/') |
|
274 | 275 | if up < 0: |
|
275 | 276 | break |
|
276 | 277 | virtualrepo = virtualrepo[:up] |
|
277 | 278 | |
|
278 | 279 | # browse subdirectories |
|
279 | 280 | subdir = virtual + '/' |
|
280 | 281 | if [r for r in repos if r.startswith(subdir)]: |
|
281 | 282 | req.respond(HTTP_OK, ctype) |
|
282 | 283 | return self.makeindex(req, tmpl, subdir) |
|
283 | 284 | |
|
284 | 285 | # prefixes not found |
|
285 | 286 | req.respond(HTTP_NOT_FOUND, ctype) |
|
286 | 287 | return tmpl("notfound", repo=virtual) |
|
287 | 288 | |
|
288 | 289 | except ErrorResponse as err: |
|
289 | 290 | req.respond(err, ctype) |
|
290 | 291 | return tmpl('error', error=err.message or '') |
|
291 | 292 | finally: |
|
292 | 293 | tmpl = None |
|
293 | 294 | |
|
294 | 295 | def makeindex(self, req, tmpl, subdir=""): |
|
295 | 296 | |
|
296 | 297 | def archivelist(ui, nodeid, url): |
|
297 | 298 | allowed = ui.configlist("web", "allow_archive", untrusted=True) |
|
298 | 299 | archives = [] |
|
299 | 300 | for i in [('zip', '.zip'), ('gz', '.tar.gz'), ('bz2', '.tar.bz2')]: |
|
300 | 301 | if i[0] in allowed or ui.configbool("web", "allow" + i[0], |
|
301 | 302 | untrusted=True): |
|
302 | 303 | archives.append({"type" : i[0], "extension": i[1], |
|
303 | 304 | "node": nodeid, "url": url}) |
|
304 | 305 | return archives |
|
305 | 306 | |
|
306 | 307 | def rawentries(subdir="", **map): |
|
307 | 308 | |
|
308 | 309 | descend = self.ui.configbool('web', 'descend', True) |
|
309 | 310 | collapse = self.ui.configbool('web', 'collapse', False) |
|
310 | 311 | seenrepos = set() |
|
311 | 312 | seendirs = set() |
|
312 | 313 | for name, path in self.repos: |
|
313 | 314 | |
|
314 | 315 | if not name.startswith(subdir): |
|
315 | 316 | continue |
|
316 | 317 | name = name[len(subdir):] |
|
317 | 318 | directory = False |
|
318 | 319 | |
|
319 | 320 | if '/' in name: |
|
320 | 321 | if not descend: |
|
321 | 322 | continue |
|
322 | 323 | |
|
323 | 324 | nameparts = name.split('/') |
|
324 | 325 | rootname = nameparts[0] |
|
325 | 326 | |
|
326 | 327 | if not collapse: |
|
327 | 328 | pass |
|
328 | 329 | elif rootname in seendirs: |
|
329 | 330 | continue |
|
330 | 331 | elif rootname in seenrepos: |
|
331 | 332 | pass |
|
332 | 333 | else: |
|
333 | 334 | directory = True |
|
334 | 335 | name = rootname |
|
335 | 336 | |
|
336 | 337 | # redefine the path to refer to the directory |
|
337 | 338 | discarded = '/'.join(nameparts[1:]) |
|
338 | 339 | |
|
339 | 340 | # remove name parts plus accompanying slash |
|
340 | 341 | path = path[:-len(discarded) - 1] |
|
341 | 342 | |
|
342 | 343 | try: |
|
343 | 344 | r = hg.repository(self.ui, path) |
|
344 | 345 | directory = False |
|
345 | 346 | except (IOError, error.RepoError): |
|
346 | 347 | pass |
|
347 | 348 | |
|
348 | 349 | parts = [name] |
|
349 | 350 | if 'PATH_INFO' in req.env: |
|
350 | 351 | parts.insert(0, req.env['PATH_INFO'].rstrip('/')) |
|
351 | 352 | if req.env['SCRIPT_NAME']: |
|
352 | 353 | parts.insert(0, req.env['SCRIPT_NAME']) |
|
353 | 354 | url = re.sub(r'/+', '/', '/'.join(parts) + '/') |
|
354 | 355 | |
|
355 | 356 | # show either a directory entry or a repository |
|
356 | 357 | if directory: |
|
357 | 358 | # get the directory's time information |
|
358 | 359 | try: |
|
359 | 360 | d = (get_mtime(path), util.makedate()[1]) |
|
360 | 361 | except OSError: |
|
361 | 362 | continue |
|
362 | 363 | |
|
363 | 364 | # add '/' to the name to make it obvious that |
|
364 | 365 | # the entry is a directory, not a regular repository |
|
365 | 366 | row = {'contact': "", |
|
366 | 367 | 'contact_sort': "", |
|
367 | 368 | 'name': name + '/', |
|
368 | 369 | 'name_sort': name, |
|
369 | 370 | 'url': url, |
|
370 | 371 | 'description': "", |
|
371 | 372 | 'description_sort': "", |
|
372 | 373 | 'lastchange': d, |
|
373 | 374 | 'lastchange_sort': d[1]-d[0], |
|
374 | 375 | 'archives': [], |
|
375 | 376 | 'isdirectory': True, |
|
376 | 377 | 'labels': [], |
|
377 | 378 | } |
|
378 | 379 | |
|
379 | 380 | seendirs.add(name) |
|
380 | 381 | yield row |
|
381 | 382 | continue |
|
382 | 383 | |
|
383 | 384 | u = self.ui.copy() |
|
384 | 385 | try: |
|
385 | 386 | u.readconfig(os.path.join(path, '.hg', 'hgrc')) |
|
386 | 387 | except Exception as e: |
|
387 | 388 | u.warn(_('error reading %s/.hg/hgrc: %s\n') % (path, e)) |
|
388 | 389 | continue |
|
389 | 390 | def get(section, name, default=None): |
|
390 | 391 | return u.config(section, name, default, untrusted=True) |
|
391 | 392 | |
|
392 | 393 | if u.configbool("web", "hidden", untrusted=True): |
|
393 | 394 | continue |
|
394 | 395 | |
|
395 | 396 | if not self.read_allowed(u, req): |
|
396 | 397 | continue |
|
397 | 398 | |
|
398 | 399 | # update time with local timezone |
|
399 | 400 | try: |
|
400 | 401 | r = hg.repository(self.ui, path) |
|
401 | 402 | except IOError: |
|
402 | 403 | u.warn(_('error accessing repository at %s\n') % path) |
|
403 | 404 | continue |
|
404 | 405 | except error.RepoError: |
|
405 | 406 | u.warn(_('error accessing repository at %s\n') % path) |
|
406 | 407 | continue |
|
407 | 408 | try: |
|
408 | 409 | d = (get_mtime(r.spath), util.makedate()[1]) |
|
409 | 410 | except OSError: |
|
410 | 411 | continue |
|
411 | 412 | |
|
412 | 413 | contact = get_contact(get) |
|
413 | 414 | description = get("web", "description", "") |
|
414 | 415 | seenrepos.add(name) |
|
415 | 416 | name = get("web", "name", name) |
|
416 | 417 | row = {'contact': contact or "unknown", |
|
417 | 418 | 'contact_sort': contact.upper() or "unknown", |
|
418 | 419 | 'name': name, |
|
419 | 420 | 'name_sort': name, |
|
420 | 421 | 'url': url, |
|
421 | 422 | 'description': description or "unknown", |
|
422 | 423 | 'description_sort': description.upper() or "unknown", |
|
423 | 424 | 'lastchange': d, |
|
424 | 425 | 'lastchange_sort': d[1]-d[0], |
|
425 | 426 | 'archives': archivelist(u, "tip", url), |
|
426 | 427 | 'isdirectory': None, |
|
427 | 428 | 'labels': u.configlist('web', 'labels', untrusted=True), |
|
428 | 429 | } |
|
429 | 430 | |
|
430 | 431 | yield row |
|
431 | 432 | |
|
432 | 433 | sortdefault = None, False |
|
433 | 434 | def entries(sortcolumn="", descending=False, subdir="", **map): |
|
434 | 435 | rows = rawentries(subdir=subdir, **map) |
|
435 | 436 | |
|
436 | 437 | if sortcolumn and sortdefault != (sortcolumn, descending): |
|
437 | 438 | sortkey = '%s_sort' % sortcolumn |
|
438 | 439 | rows = sorted(rows, key=lambda x: x[sortkey], |
|
439 | 440 | reverse=descending) |
|
440 | 441 | for row, parity in zip(rows, paritygen(self.stripecount)): |
|
441 | 442 | row['parity'] = parity |
|
442 | 443 | yield row |
|
443 | 444 | |
|
444 | 445 | self.refresh() |
|
445 | 446 | sortable = ["name", "description", "contact", "lastchange"] |
|
446 | 447 | sortcolumn, descending = sortdefault |
|
447 | 448 | if 'sort' in req.form: |
|
448 | 449 | sortcolumn = req.form['sort'][0] |
|
449 | 450 | descending = sortcolumn.startswith('-') |
|
450 | 451 | if descending: |
|
451 | 452 | sortcolumn = sortcolumn[1:] |
|
452 | 453 | if sortcolumn not in sortable: |
|
453 | 454 | sortcolumn = "" |
|
454 | 455 | |
|
455 | 456 | sort = [("sort_%s" % column, |
|
456 | 457 | "%s%s" % ((not descending and column == sortcolumn) |
|
457 | 458 | and "-" or "", column)) |
|
458 | 459 | for column in sortable] |
|
459 | 460 | |
|
460 | 461 | self.refresh() |
|
461 | 462 | self.updatereqenv(req.env) |
|
462 | 463 | |
|
463 | 464 | return tmpl("index", entries=entries, subdir=subdir, |
|
464 | 465 | pathdef=hgweb_mod.makebreadcrumb('/' + subdir, self.prefix), |
|
465 | 466 | sortcolumn=sortcolumn, descending=descending, |
|
466 | 467 | **dict(sort)) |
|
467 | 468 | |
|
468 | 469 | def templater(self, req): |
|
469 | 470 | |
|
470 | 471 | def motd(**map): |
|
471 | 472 | if self.motd is not None: |
|
472 | 473 | yield self.motd |
|
473 | 474 | else: |
|
474 | 475 | yield config('web', 'motd', '') |
|
475 | 476 | |
|
476 | 477 | def config(section, name, default=None, untrusted=True): |
|
477 | 478 | return self.ui.config(section, name, default, untrusted) |
|
478 | 479 | |
|
479 | 480 | self.updatereqenv(req.env) |
|
480 | 481 | |
|
481 | 482 | url = req.env.get('SCRIPT_NAME', '') |
|
482 | 483 | if not url.endswith('/'): |
|
483 | 484 | url += '/' |
|
484 | 485 | |
|
485 | 486 | vars = {} |
|
486 | 487 | styles = ( |
|
487 | 488 | req.form.get('style', [None])[0], |
|
488 | 489 | config('web', 'style'), |
|
489 | 490 | 'paper' |
|
490 | 491 | ) |
|
491 | 492 | style, mapfile = templater.stylemap(styles, self.templatepath) |
|
492 | 493 | if style == styles[0]: |
|
493 | 494 | vars['style'] = style |
|
494 | 495 | |
|
495 | 496 | start = url[-1] == '?' and '&' or '?' |
|
496 | 497 | sessionvars = webutil.sessionvars(vars, start) |
|
497 | 498 | logourl = config('web', 'logourl', 'https://mercurial-scm.org/') |
|
498 | 499 | logoimg = config('web', 'logoimg', 'hglogo.png') |
|
499 | 500 | staticurl = config('web', 'staticurl') or url + 'static/' |
|
500 | 501 | if not staticurl.endswith('/'): |
|
501 | 502 | staticurl += '/' |
|
502 | 503 | |
|
503 | 504 | defaults = { |
|
504 | 505 | "encoding": encoding.encoding, |
|
505 | 506 | "motd": motd, |
|
506 | 507 | "url": url, |
|
507 | 508 | "logourl": logourl, |
|
508 | 509 | "logoimg": logoimg, |
|
509 | 510 | "staticurl": staticurl, |
|
510 | 511 | "sessionvars": sessionvars, |
|
511 | 512 | "style": style, |
|
512 | 513 | } |
|
513 | 514 | tmpl = templater.templater.frommapfile(mapfile, defaults=defaults) |
|
514 | 515 | return tmpl |
|
515 | 516 | |
|
516 | 517 | def updatereqenv(self, env): |
|
517 | 518 | if self._baseurl is not None: |
|
518 | 519 | name, port, path = geturlcgivars(self._baseurl, env['SERVER_PORT']) |
|
519 | 520 | env['SERVER_NAME'] = name |
|
520 | 521 | env['SERVER_PORT'] = port |
|
521 | 522 | env['SCRIPT_NAME'] = path |
@@ -1,91 +1,90 | |||
|
1 | 1 | # hgweb/wsgicgi.py - CGI->WSGI translator |
|
2 | 2 | # |
|
3 | 3 | # Copyright 2006 Eric Hopper <hopper@omnifarious.org> |
|
4 | 4 | # |
|
5 | 5 | # This software may be used and distributed according to the terms of the |
|
6 | 6 | # GNU General Public License version 2 or any later version. |
|
7 | 7 | # |
|
8 | 8 | # This was originally copied from the public domain code at |
|
9 | 9 | # http://www.python.org/dev/peps/pep-0333/#the-server-gateway-side |
|
10 | 10 | |
|
11 | 11 | from __future__ import absolute_import |
|
12 | 12 | |
|
13 | import os | |
|
14 | ||
|
15 | 13 | from .. import ( |
|
14 | encoding, | |
|
16 | 15 | util, |
|
17 | 16 | ) |
|
18 | 17 | |
|
19 | 18 | from . import ( |
|
20 | 19 | common, |
|
21 | 20 | ) |
|
22 | 21 | |
|
23 | 22 | def launch(application): |
|
24 | 23 | util.setbinary(util.stdin) |
|
25 | 24 | util.setbinary(util.stdout) |
|
26 | 25 | |
|
27 |
environ = dict( |
|
|
26 | environ = dict(encoding.environ.iteritems()) | |
|
28 | 27 | environ.setdefault('PATH_INFO', '') |
|
29 | 28 | if environ.get('SERVER_SOFTWARE', '').startswith('Microsoft-IIS'): |
|
30 | 29 | # IIS includes script_name in PATH_INFO |
|
31 | 30 | scriptname = environ['SCRIPT_NAME'] |
|
32 | 31 | if environ['PATH_INFO'].startswith(scriptname): |
|
33 | 32 | environ['PATH_INFO'] = environ['PATH_INFO'][len(scriptname):] |
|
34 | 33 | |
|
35 | 34 | stdin = util.stdin |
|
36 | 35 | if environ.get('HTTP_EXPECT', '').lower() == '100-continue': |
|
37 | 36 | stdin = common.continuereader(stdin, util.stdout.write) |
|
38 | 37 | |
|
39 | 38 | environ['wsgi.input'] = stdin |
|
40 | 39 | environ['wsgi.errors'] = util.stderr |
|
41 | 40 | environ['wsgi.version'] = (1, 0) |
|
42 | 41 | environ['wsgi.multithread'] = False |
|
43 | 42 | environ['wsgi.multiprocess'] = True |
|
44 | 43 | environ['wsgi.run_once'] = True |
|
45 | 44 | |
|
46 | 45 | if environ.get('HTTPS', 'off').lower() in ('on', '1', 'yes'): |
|
47 | 46 | environ['wsgi.url_scheme'] = 'https' |
|
48 | 47 | else: |
|
49 | 48 | environ['wsgi.url_scheme'] = 'http' |
|
50 | 49 | |
|
51 | 50 | headers_set = [] |
|
52 | 51 | headers_sent = [] |
|
53 | 52 | out = util.stdout |
|
54 | 53 | |
|
55 | 54 | def write(data): |
|
56 | 55 | if not headers_set: |
|
57 | 56 | raise AssertionError("write() before start_response()") |
|
58 | 57 | |
|
59 | 58 | elif not headers_sent: |
|
60 | 59 | # Before the first output, send the stored headers |
|
61 | 60 | status, response_headers = headers_sent[:] = headers_set |
|
62 | 61 | out.write('Status: %s\r\n' % status) |
|
63 | 62 | for header in response_headers: |
|
64 | 63 | out.write('%s: %s\r\n' % header) |
|
65 | 64 | out.write('\r\n') |
|
66 | 65 | |
|
67 | 66 | out.write(data) |
|
68 | 67 | out.flush() |
|
69 | 68 | |
|
70 | 69 | def start_response(status, response_headers, exc_info=None): |
|
71 | 70 | if exc_info: |
|
72 | 71 | try: |
|
73 | 72 | if headers_sent: |
|
74 | 73 | # Re-raise original exception if headers sent |
|
75 | 74 | raise exc_info[0](exc_info[1], exc_info[2]) |
|
76 | 75 | finally: |
|
77 | 76 | exc_info = None # avoid dangling circular ref |
|
78 | 77 | elif headers_set: |
|
79 | 78 | raise AssertionError("Headers already set!") |
|
80 | 79 | |
|
81 | 80 | headers_set[:] = [status, response_headers] |
|
82 | 81 | return write |
|
83 | 82 | |
|
84 | 83 | content = application(environ, start_response) |
|
85 | 84 | try: |
|
86 | 85 | for chunk in content: |
|
87 | 86 | write(chunk) |
|
88 | 87 | if not headers_sent: |
|
89 | 88 | write('') # send headers now if body was empty |
|
90 | 89 | finally: |
|
91 | 90 | getattr(content, 'close', lambda : None)() |
@@ -1,490 +1,491 | |||
|
1 | 1 | # url.py - HTTP handling for mercurial |
|
2 | 2 | # |
|
3 | 3 | # Copyright 2005, 2006, 2007, 2008 Matt Mackall <mpm@selenic.com> |
|
4 | 4 | # Copyright 2006, 2007 Alexis S. L. Carvalho <alexis@cecm.usp.br> |
|
5 | 5 | # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com> |
|
6 | 6 | # |
|
7 | 7 | # This software may be used and distributed according to the terms of the |
|
8 | 8 | # GNU General Public License version 2 or any later version. |
|
9 | 9 | |
|
10 | 10 | from __future__ import absolute_import |
|
11 | 11 | |
|
12 | 12 | import base64 |
|
13 | 13 | import os |
|
14 | 14 | import socket |
|
15 | 15 | |
|
16 | 16 | from .i18n import _ |
|
17 | 17 | from . import ( |
|
18 | encoding, | |
|
18 | 19 | error, |
|
19 | 20 | httpconnection as httpconnectionmod, |
|
20 | 21 | keepalive, |
|
21 | 22 | sslutil, |
|
22 | 23 | util, |
|
23 | 24 | ) |
|
24 | 25 | |
|
25 | 26 | httplib = util.httplib |
|
26 | 27 | stringio = util.stringio |
|
27 | 28 | urlerr = util.urlerr |
|
28 | 29 | urlreq = util.urlreq |
|
29 | 30 | |
|
30 | 31 | class passwordmgr(object): |
|
31 | 32 | def __init__(self, ui, passwddb): |
|
32 | 33 | self.ui = ui |
|
33 | 34 | self.passwddb = passwddb |
|
34 | 35 | |
|
35 | 36 | def add_password(self, realm, uri, user, passwd): |
|
36 | 37 | return self.passwddb.add_password(realm, uri, user, passwd) |
|
37 | 38 | |
|
38 | 39 | def find_user_password(self, realm, authuri): |
|
39 | 40 | authinfo = self.passwddb.find_user_password(realm, authuri) |
|
40 | 41 | user, passwd = authinfo |
|
41 | 42 | if user and passwd: |
|
42 | 43 | self._writedebug(user, passwd) |
|
43 | 44 | return (user, passwd) |
|
44 | 45 | |
|
45 | 46 | if not user or not passwd: |
|
46 | 47 | res = httpconnectionmod.readauthforuri(self.ui, authuri, user) |
|
47 | 48 | if res: |
|
48 | 49 | group, auth = res |
|
49 | 50 | user, passwd = auth.get('username'), auth.get('password') |
|
50 | 51 | self.ui.debug("using auth.%s.* for authentication\n" % group) |
|
51 | 52 | if not user or not passwd: |
|
52 | 53 | u = util.url(authuri) |
|
53 | 54 | u.query = None |
|
54 | 55 | if not self.ui.interactive(): |
|
55 | 56 | raise error.Abort(_('http authorization required for %s') % |
|
56 | 57 | util.hidepassword(str(u))) |
|
57 | 58 | |
|
58 | 59 | self.ui.write(_("http authorization required for %s\n") % |
|
59 | 60 | util.hidepassword(str(u))) |
|
60 | 61 | self.ui.write(_("realm: %s\n") % realm) |
|
61 | 62 | if user: |
|
62 | 63 | self.ui.write(_("user: %s\n") % user) |
|
63 | 64 | else: |
|
64 | 65 | user = self.ui.prompt(_("user:"), default=None) |
|
65 | 66 | |
|
66 | 67 | if not passwd: |
|
67 | 68 | passwd = self.ui.getpass() |
|
68 | 69 | |
|
69 | 70 | self.passwddb.add_password(realm, authuri, user, passwd) |
|
70 | 71 | self._writedebug(user, passwd) |
|
71 | 72 | return (user, passwd) |
|
72 | 73 | |
|
73 | 74 | def _writedebug(self, user, passwd): |
|
74 | 75 | msg = _('http auth: user %s, password %s\n') |
|
75 | 76 | self.ui.debug(msg % (user, passwd and '*' * len(passwd) or 'not set')) |
|
76 | 77 | |
|
77 | 78 | def find_stored_password(self, authuri): |
|
78 | 79 | return self.passwddb.find_user_password(None, authuri) |
|
79 | 80 | |
|
80 | 81 | class proxyhandler(urlreq.proxyhandler): |
|
81 | 82 | def __init__(self, ui): |
|
82 | 83 | proxyurl = ui.config("http_proxy", "host") or os.getenv('http_proxy') |
|
83 | 84 | # XXX proxyauthinfo = None |
|
84 | 85 | |
|
85 | 86 | if proxyurl: |
|
86 | 87 | # proxy can be proper url or host[:port] |
|
87 | 88 | if not (proxyurl.startswith('http:') or |
|
88 | 89 | proxyurl.startswith('https:')): |
|
89 | 90 | proxyurl = 'http://' + proxyurl + '/' |
|
90 | 91 | proxy = util.url(proxyurl) |
|
91 | 92 | if not proxy.user: |
|
92 | 93 | proxy.user = ui.config("http_proxy", "user") |
|
93 | 94 | proxy.passwd = ui.config("http_proxy", "passwd") |
|
94 | 95 | |
|
95 | 96 | # see if we should use a proxy for this url |
|
96 | 97 | no_list = ["localhost", "127.0.0.1"] |
|
97 | 98 | no_list.extend([p.lower() for |
|
98 | 99 | p in ui.configlist("http_proxy", "no")]) |
|
99 | 100 | no_list.extend([p.strip().lower() for |
|
100 | 101 | p in os.getenv("no_proxy", '').split(',') |
|
101 | 102 | if p.strip()]) |
|
102 | 103 | # "http_proxy.always" config is for running tests on localhost |
|
103 | 104 | if ui.configbool("http_proxy", "always"): |
|
104 | 105 | self.no_list = [] |
|
105 | 106 | else: |
|
106 | 107 | self.no_list = no_list |
|
107 | 108 | |
|
108 | 109 | proxyurl = str(proxy) |
|
109 | 110 | proxies = {'http': proxyurl, 'https': proxyurl} |
|
110 | 111 | ui.debug('proxying through http://%s:%s\n' % |
|
111 | 112 | (proxy.host, proxy.port)) |
|
112 | 113 | else: |
|
113 | 114 | proxies = {} |
|
114 | 115 | |
|
115 | 116 | # urllib2 takes proxy values from the environment and those |
|
116 | 117 | # will take precedence if found. So, if there's a config entry |
|
117 | 118 | # defining a proxy, drop the environment ones |
|
118 | 119 | if ui.config("http_proxy", "host"): |
|
119 | 120 | for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]: |
|
120 | 121 | try: |
|
121 |
if env in |
|
|
122 |
del |
|
|
122 | if env in encoding.environ: | |
|
123 | del encoding.environ[env] | |
|
123 | 124 | except OSError: |
|
124 | 125 | pass |
|
125 | 126 | |
|
126 | 127 | urlreq.proxyhandler.__init__(self, proxies) |
|
127 | 128 | self.ui = ui |
|
128 | 129 | |
|
129 | 130 | def proxy_open(self, req, proxy, type_): |
|
130 | 131 | host = req.get_host().split(':')[0] |
|
131 | 132 | for e in self.no_list: |
|
132 | 133 | if host == e: |
|
133 | 134 | return None |
|
134 | 135 | if e.startswith('*.') and host.endswith(e[2:]): |
|
135 | 136 | return None |
|
136 | 137 | if e.startswith('.') and host.endswith(e[1:]): |
|
137 | 138 | return None |
|
138 | 139 | |
|
139 | 140 | return urlreq.proxyhandler.proxy_open(self, req, proxy, type_) |
|
140 | 141 | |
|
141 | 142 | def _gen_sendfile(orgsend): |
|
142 | 143 | def _sendfile(self, data): |
|
143 | 144 | # send a file |
|
144 | 145 | if isinstance(data, httpconnectionmod.httpsendfile): |
|
145 | 146 | # if auth required, some data sent twice, so rewind here |
|
146 | 147 | data.seek(0) |
|
147 | 148 | for chunk in util.filechunkiter(data): |
|
148 | 149 | orgsend(self, chunk) |
|
149 | 150 | else: |
|
150 | 151 | orgsend(self, data) |
|
151 | 152 | return _sendfile |
|
152 | 153 | |
|
153 | 154 | has_https = util.safehasattr(urlreq, 'httpshandler') |
|
154 | 155 | |
|
class httpconnection(keepalive.HTTPConnection):
    # must be able to send big bundle as stream.
    send = _gen_sendfile(keepalive.HTTPConnection.send)

    def getresponse(self):
        # If a CONNECT tunnel handshake already produced a response
        # (stashed in self.proxyres by _generic_proxytunnel), return
        # that instead of reading a fresh response from the socket.
        proxyres = getattr(self, 'proxyres', None)
        if proxyres:
            if proxyres.will_close:
                # the proxy refused keep-alive; drop the connection
                self.close()
            self.proxyres = None
            return proxyres
        return keepalive.HTTPConnection.getresponse(self)
|
167 | 168 | |
|
# general transaction handler to support different ways to handle
# HTTPS proxying before and after Python 2.6.3.
def _generic_start_transaction(handler, h, req):
    # Python >= 2.6.3 records the CONNECT target on the request object
    # as _tunnel_host; older versions leave the full URL in the selector.
    tunnel_host = getattr(req, '_tunnel_host', None)
    if tunnel_host:
        if tunnel_host[:7] not in ['http://', 'https:/']:
            # no scheme given -- a CONNECT tunnel implies HTTPS
            tunnel_host = 'https://' + tunnel_host
        new_tunnel = True
    else:
        tunnel_host = req.get_selector()
        new_tunnel = False

    if new_tunnel or tunnel_host == req.get_full_url(): # has proxy
        u = util.url(tunnel_host)
        if new_tunnel or u.scheme == 'https': # only use CONNECT for HTTPS
            # remember where the tunnel must terminate and which headers
            # to replay during the CONNECT handshake (see
            # _generic_proxytunnel)
            h.realhostport = ':'.join([u.host, (u.port or '443')])
            h.headers = req.headers.copy()
            h.headers.update(handler.parent.addheaders)
            return

    # plain (non-tunneled) request: clear any stale tunnel state
    h.realhostport = None
    h.headers = None
|
190 | 191 | |
|
def _generic_proxytunnel(self):
    """Issue a CONNECT to the proxy and parse its reply.

    Expects self.realhostport and self.headers to have been set up by
    _generic_start_transaction. Returns True when the tunnel was
    established (HTTP 200); otherwise parses the full error response,
    stores it on self.proxyres for getresponse() to hand back, and
    returns False.
    """
    # forward only the Proxy-* headers to the proxy itself
    proxyheaders = dict(
        [(x, self.headers[x]) for x in self.headers
         if x.lower().startswith('proxy-')])
    self.send('CONNECT %s HTTP/1.0\r\n' % self.realhostport)
    for header in proxyheaders.iteritems():
        self.send('%s: %s\r\n' % header)
    self.send('\r\n')

    # majority of the following code is duplicated from
    # httplib.HTTPConnection as there are no adequate places to
    # override functions to provide the needed functionality
    res = self.response_class(self.sock,
                              strict=self.strict,
                              method=self._method)

    while True:
        version, status, reason = res._read_status()
        if status != httplib.CONTINUE:
            break
        # skip lines that are all whitespace
        list(iter(lambda: res.fp.readline().strip(), ''))
    res.status = status
    res.reason = reason.strip()

    if res.status == 200:
        # tunnel established; skip lines until we find a blank line
        list(iter(res.fp.readline, '\r\n'))
        return True

    if version == 'HTTP/1.0':
        res.version = 10
    elif version.startswith('HTTP/1.'):
        res.version = 11
    elif version == 'HTTP/0.9':
        res.version = 9
    else:
        raise httplib.UnknownProtocol(version)

    if res.version == 9:
        # HTTP/0.9 carries no headers at all
        res.length = None
        res.chunked = 0
        res.will_close = 1
        res.msg = httplib.HTTPMessage(stringio())
        return False

    res.msg = httplib.HTTPMessage(res.fp)
    res.msg.fp = None

    # are we using the chunked-style of transfer encoding?
    trenc = res.msg.getheader('transfer-encoding')
    if trenc and trenc.lower() == "chunked":
        res.chunked = 1
        res.chunk_left = None
    else:
        res.chunked = 0

    # will the connection close at the end of the response?
    res.will_close = res._check_close()

    # do we have a Content-Length?
    # NOTE: RFC 2616, section 4.4, #3 says we ignore this if
    # transfer-encoding is "chunked"
    length = res.msg.getheader('content-length')
    if length and not res.chunked:
        try:
            res.length = int(length)
        except ValueError:
            res.length = None
        else:
            if res.length < 0: # ignore nonsensical negative lengths
                res.length = None
    else:
        res.length = None

    # does the body have a fixed length? (of zero)
    if (status == httplib.NO_CONTENT or status == httplib.NOT_MODIFIED or
        100 <= status < 200 or # 1xx codes
        res._method == 'HEAD'):
        res.length = 0

    # if the connection remains open, and we aren't using chunked, and
    # a content-length was not provided, then assume that the connection
    # WILL close.
    if (not res.will_close and
       not res.chunked and
       res.length is None):
        res.will_close = 1

    # stash the (error) response for getresponse() to return later
    self.proxyres = res

    return False
|
283 | 284 | |
|
class httphandler(keepalive.HTTPHandler):
    """Keep-alive HTTP handler backed by our streaming connection class."""

    def http_open(self, req):
        # route all plain-HTTP requests through the sendfile-capable
        # connection
        return self.do_open(httpconnection, req)

    def _start_transaction(self, h, req):
        # record proxy-tunnel state before the regular transaction starts
        _generic_start_transaction(self, h, req)
        parent = keepalive.HTTPHandler
        return parent._start_transaction(self, h, req)
|
291 | 292 | |
|
if has_https:
    class httpsconnection(httplib.HTTPConnection):
        # Reuse keepalive's response/send machinery on top of a plain
        # HTTPConnection; the TLS layer is added in connect() below.
        response_class = keepalive.HTTPResponse
        default_port = httplib.HTTPS_PORT
        # must be able to send big bundle as stream.
        send = _gen_sendfile(keepalive.safesend)
        getresponse = keepalive.wrapgetresponse(httplib.HTTPConnection)

        def __init__(self, host, port=None, key_file=None, cert_file=None,
                     *args, **kwargs):
            httplib.HTTPConnection.__init__(self, host, port, *args, **kwargs)
            self.key_file = key_file
            self.cert_file = cert_file

        def connect(self):
            self.sock = socket.create_connection((self.host, self.port))

            host = self.host
            if self.realhostport: # use CONNECT proxy
                # establish the tunnel first, then wrap the socket for
                # the real destination (certificate must match it, not
                # the proxy)
                _generic_proxytunnel(self)
                host = self.realhostport.rsplit(':', 1)[0]
            self.sock = sslutil.wrapsocket(
                self.sock, self.key_file, self.cert_file, ui=self.ui,
                serverhostname=host)
            sslutil.validatesocket(self.sock)

    class httpshandler(keepalive.KeepAliveHandler, urlreq.httpshandler):
        def __init__(self, ui):
            keepalive.KeepAliveHandler.__init__(self)
            urlreq.httpshandler.__init__(self)
            self.ui = ui
            self.pwmgr = passwordmgr(self.ui,
                                     self.ui.httppasswordmgrdb)

        def _start_transaction(self, h, req):
            # record proxy-tunnel state before the transaction starts
            _generic_start_transaction(self, h, req)
            return keepalive.KeepAliveHandler._start_transaction(self, h, req)

        def https_open(self, req):
            # req.get_full_url() does not contain credentials and we may
            # need them to match the certificates.
            url = req.get_full_url()
            user, password = self.pwmgr.find_stored_password(url)
            res = httpconnectionmod.readauthforuri(self.ui, url, user)
            if res:
                group, auth = res
                self.auth = auth
                self.ui.debug("using auth.%s.* for authentication\n" % group)
            else:
                self.auth = None
            return self.do_open(self._makeconnection, req)

        def _makeconnection(self, host, port=None, *args, **kwargs):
            keyfile = None
            certfile = None

            # positional args mirror httplib's (key_file, cert_file, ...)
            if len(args) >= 1: # key_file
                keyfile = args[0]
            if len(args) >= 2: # cert_file
                certfile = args[1]
            args = args[2:]

            # if the user has specified different key/cert files in
            # hgrc, we prefer these
            if self.auth and 'key' in self.auth and 'cert' in self.auth:
                keyfile = self.auth['key']
                certfile = self.auth['cert']

            conn = httpsconnection(host, port, keyfile, certfile, *args,
                                   **kwargs)
            conn.ui = self.ui
            return conn
|
364 | 365 | |
|
class httpdigestauthhandler(urlreq.httpdigestauthhandler):
    """Digest auth handler that resets its retry count once per request."""

    def __init__(self, *args, **kwargs):
        urlreq.httpdigestauthhandler.__init__(self, *args, **kwargs)
        self.retried_req = None

    def reset_retry_count(self):
        # Deliberately a no-op: Python 2.6.5 invokes this on every
        # 401/407 and would loop forever; the counter is reset in
        # http_error_auth_reqed instead.
        pass

    def http_error_auth_reqed(self, auth_header, host, req, headers):
        # first time we see this particular request: start counting anew
        alreadyseen = req is self.retried_req
        if not alreadyseen:
            self.retried_req = req
            self.retried = 0
        return urlreq.httpdigestauthhandler.http_error_auth_reqed(
            self, auth_header, host, req, headers)
|
383 | 384 | |
|
class httpbasicauthhandler(urlreq.httpbasicauthhandler):
    """Basic auth handler that caches credentials and bounds retries."""

    def __init__(self, *args, **kwargs):
        self.auth = None
        urlreq.httpbasicauthhandler.__init__(self, *args, **kwargs)
        self.retried_req = None

    def _injectauth(self, request):
        # replay cached credentials so we can skip a 401 round trip
        if self.auth:
            request.add_unredirected_header(self.auth_header, self.auth)
        return request

    def http_request(self, request):
        return self._injectauth(request)

    def https_request(self, request):
        return self._injectauth(request)

    def reset_retry_count(self):
        # Deliberately a no-op: Python 2.6.5 invokes this on every
        # 401/407 and would loop forever; the counter is reset in
        # http_error_auth_reqed instead.
        pass

    def http_error_auth_reqed(self, auth_header, host, req, headers):
        # first time we see this particular request: start counting anew
        if req is not self.retried_req:
            self.retried_req = req
            self.retried = 0
        return urlreq.httpbasicauthhandler.http_error_auth_reqed(
            self, auth_header, host, req, headers)

    def retry_http_basic_auth(self, host, req, realm):
        user, pw = self.passwd.find_user_password(realm, req.get_full_url())
        if pw is None:
            return None
        raw = "%s:%s" % (user, pw)
        auth = 'Basic %s' % base64.b64encode(raw).strip()
        if req.get_header(self.auth_header, None) == auth:
            # these exact credentials were already rejected: give up
            return None
        self.auth = auth
        req.add_unredirected_header(self.auth_header, auth)
        return self.parent.open(req)
|
428 | 429 | |
|
# extensions may append handler factories here; each is called as
# factory(ui, passwordmgr) by opener() below
handlerfuncs = []
|
430 | 431 | |
|
def opener(ui, authinfo=None):
    '''
    construct an opener suitable for urllib2
    authinfo will be added to the password manager

    The handler stack consists of (http2 or http/https) transport
    handlers, the proxy handler, basic+digest auth handlers, and any
    extension-registered handlers from handlerfuncs.
    '''
    # experimental config: ui.usehttp2
    if ui.configbool('ui', 'usehttp2', False):
        handlers = [
            httpconnectionmod.http2handler(
                ui,
                passwordmgr(ui, ui.httppasswordmgrdb))
        ]
    else:
        handlers = [httphandler()]
        if has_https:
            handlers.append(httpshandler(ui))

    handlers.append(proxyhandler(ui))

    passmgr = passwordmgr(ui, ui.httppasswordmgrdb)
    if authinfo is not None:
        realm, uris, user, passwd = authinfo
        saveduser, savedpass = passmgr.find_stored_password(uris[0])
        # only (re)register credentials that differ from what is stored
        if user != saveduser or passwd:
            passmgr.add_password(realm, uris, user, passwd)
        ui.debug('http auth: user %s, password %s\n' %
                 (user, passwd and '*' * len(passwd) or 'not set'))

    handlers.extend((httpbasicauthhandler(passmgr),
                     httpdigestauthhandler(passmgr)))
    # extension-provided handler factories (see handlerfuncs above)
    handlers.extend([h(ui, passmgr) for h in handlerfuncs])
    opener = urlreq.buildopener(*handlers)

    # The user agent should *NOT* be used by servers for e.g.
    # protocol detection or feature negotiation: there are other
    # facilities for that.
    #
    # "mercurial/proto-1.0" was the original user agent string and
    # exists for backwards compatibility reasons.
    #
    # The "(Mercurial %s)" string contains the distribution
    # name and version. Other client implementations should choose their
    # own distribution name. Since servers should not be using the user
    # agent string for anything, clients should be able to define whatever
    # user agent they deem appropriate.
    agent = 'mercurial/proto-1.0 (Mercurial %s)' % util.version()
    opener.addheaders = [('User-agent', agent)]
    opener.addheaders.append(('Accept', 'application/mercurial-0.1'))
    return opener
|
480 | 481 | |
|
def open(ui, url_, data=None):
    """Open url_ (or a local path) and return the response object.

    URLs with a scheme go through the configured handler stack, with
    embedded credentials extracted into authinfo first; bare filesystem
    paths are converted to file:// URLs.
    """
    parsed = util.url(url_)
    if not parsed.scheme:
        # plain path: normalize it and express it as a file URL
        abspath = util.normpath(os.path.abspath(url_))
        target = 'file://' + urlreq.pathname2url(abspath)
        authinfo = None
    else:
        parsed.scheme = parsed.scheme.lower()
        target, authinfo = parsed.authinfo()
    return opener(ui, authinfo).open(target, data)
@@ -1,479 +1,479 | |||
|
1 | 1 | # windows.py - Windows utility function implementations for Mercurial |
|
2 | 2 | # |
|
3 | 3 | # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others |
|
4 | 4 | # |
|
5 | 5 | # This software may be used and distributed according to the terms of the |
|
6 | 6 | # GNU General Public License version 2 or any later version. |
|
7 | 7 | |
|
8 | 8 | from __future__ import absolute_import |
|
9 | 9 | |
|
10 | 10 | import errno |
|
11 | 11 | import msvcrt |
|
12 | 12 | import os |
|
13 | 13 | import re |
|
14 | 14 | import stat |
|
15 | 15 | import sys |
|
16 | 16 | |
|
17 | 17 | from .i18n import _ |
|
18 | 18 | from . import ( |
|
19 | 19 | encoding, |
|
20 | 20 | osutil, |
|
21 | 21 | pycompat, |
|
22 | 22 | win32, |
|
23 | 23 | ) |
|
24 | 24 | |
|
25 | 25 | try: |
|
26 | 26 | import _winreg as winreg |
|
27 | 27 | winreg.CloseKey |
|
28 | 28 | except ImportError: |
|
29 | 29 | import winreg |
|
30 | 30 | |
|
# Thin re-exports: the real implementations live in the win32 module
# (plus os.path.split); this module presents the platform API surface
# that the rest of Mercurial imports.
executablepath = win32.executablepath
getuser = win32.getuser
hidewindow = win32.hidewindow
makedir = win32.makedir
nlinks = win32.nlinks
oslink = win32.oslink
samedevice = win32.samedevice
samefile = win32.samefile
setsignalhandler = win32.setsignalhandler
spawndetached = win32.spawndetached
split = os.path.split
testpid = win32.testpid
unlink = win32.unlink

# default creation mask; presumably mirrors the POSIX platform module's
# value for API parity -- TODO confirm against posix.py
umask = 0o022
46 | 46 | |
|
47 | 47 | class mixedfilemodewrapper(object): |
|
48 | 48 | """Wraps a file handle when it is opened in read/write mode. |
|
49 | 49 | |
|
50 | 50 | fopen() and fdopen() on Windows have a specific-to-Windows requirement |
|
51 | 51 | that files opened with mode r+, w+, or a+ make a call to a file positioning |
|
52 | 52 | function when switching between reads and writes. Without this extra call, |
|
53 | 53 | Python will raise a not very intuitive "IOError: [Errno 0] Error." |
|
54 | 54 | |
|
55 | 55 | This class wraps posixfile instances when the file is opened in read/write |
|
56 | 56 | mode and automatically adds checks or inserts appropriate file positioning |
|
57 | 57 | calls when necessary. |
|
58 | 58 | """ |
|
59 | 59 | OPNONE = 0 |
|
60 | 60 | OPREAD = 1 |
|
61 | 61 | OPWRITE = 2 |
|
62 | 62 | |
|
63 | 63 | def __init__(self, fp): |
|
64 | 64 | object.__setattr__(self, '_fp', fp) |
|
65 | 65 | object.__setattr__(self, '_lastop', 0) |
|
66 | 66 | |
|
67 | 67 | def __getattr__(self, name): |
|
68 | 68 | return getattr(self._fp, name) |
|
69 | 69 | |
|
70 | 70 | def __setattr__(self, name, value): |
|
71 | 71 | return self._fp.__setattr__(name, value) |
|
72 | 72 | |
|
73 | 73 | def _noopseek(self): |
|
74 | 74 | self._fp.seek(0, os.SEEK_CUR) |
|
75 | 75 | |
|
76 | 76 | def seek(self, *args, **kwargs): |
|
77 | 77 | object.__setattr__(self, '_lastop', self.OPNONE) |
|
78 | 78 | return self._fp.seek(*args, **kwargs) |
|
79 | 79 | |
|
80 | 80 | def write(self, d): |
|
81 | 81 | if self._lastop == self.OPREAD: |
|
82 | 82 | self._noopseek() |
|
83 | 83 | |
|
84 | 84 | object.__setattr__(self, '_lastop', self.OPWRITE) |
|
85 | 85 | return self._fp.write(d) |
|
86 | 86 | |
|
87 | 87 | def writelines(self, *args, **kwargs): |
|
88 | 88 | if self._lastop == self.OPREAD: |
|
89 | 89 | self._noopeseek() |
|
90 | 90 | |
|
91 | 91 | object.__setattr__(self, '_lastop', self.OPWRITE) |
|
92 | 92 | return self._fp.writelines(*args, **kwargs) |
|
93 | 93 | |
|
94 | 94 | def read(self, *args, **kwargs): |
|
95 | 95 | if self._lastop == self.OPWRITE: |
|
96 | 96 | self._noopseek() |
|
97 | 97 | |
|
98 | 98 | object.__setattr__(self, '_lastop', self.OPREAD) |
|
99 | 99 | return self._fp.read(*args, **kwargs) |
|
100 | 100 | |
|
101 | 101 | def readline(self, *args, **kwargs): |
|
102 | 102 | if self._lastop == self.OPWRITE: |
|
103 | 103 | self._noopseek() |
|
104 | 104 | |
|
105 | 105 | object.__setattr__(self, '_lastop', self.OPREAD) |
|
106 | 106 | return self._fp.readline(*args, **kwargs) |
|
107 | 107 | |
|
108 | 108 | def readlines(self, *args, **kwargs): |
|
109 | 109 | if self._lastop == self.OPWRITE: |
|
110 | 110 | self._noopseek() |
|
111 | 111 | |
|
112 | 112 | object.__setattr__(self, '_lastop', self.OPREAD) |
|
113 | 113 | return self._fp.readlines(*args, **kwargs) |
|
114 | 114 | |
|
def posixfile(name, mode='r', buffering=-1):
    '''Open a file with even more POSIX-like semantics'''
    try:
        fp = osutil.posixfile(name, mode, buffering) # may raise WindowsError

        # The position when opening in append mode is implementation defined, so
        # make it consistent with other platforms, which position at EOF.
        if 'a' in mode:
            fp.seek(0, os.SEEK_END)

        if '+' in mode:
            # read/write handles need the CRT read-vs-write positioning
            # workaround; see mixedfilemodewrapper
            return mixedfilemodewrapper(fp)

        return fp
    except WindowsError as err:
        # convert to a friendlier exception
        raise IOError(err.errno, '%s: %s' % (name, err.strerror))
132 | 132 | |
|
class winstdout(object):
    '''stdout on windows misbehaves if sent through a pipe'''

    def __init__(self, fp):
        self.fp = fp

    def __getattr__(self, key):
        return getattr(self.fp, key)

    def close(self):
        try:
            self.fp.close()
        except IOError:
            pass

    def write(self, s):
        try:
            # Work around "Not enough space" errors when writing a large
            # buffer to the console: emit bounded slices instead.
            limit = 16000
            total = len(s)
            pos = 0
            self.softspace = 0
            while pos < total:
                self.fp.write(s[pos:pos + limit])
                pos += limit
        except IOError as inst:
            if inst.errno != 0:
                raise
            self.close()
            raise IOError(errno.EPIPE, 'Broken pipe')

    def flush(self):
        try:
            return self.fp.flush()
        except IOError as inst:
            if inst.errno != errno.EINVAL:
                raise
            self.close()
            raise IOError(errno.EPIPE, 'Broken pipe')
174 | 174 | |
|
def _is_win_9x():
    '''return true if run on windows 95, 98 or me.'''
    try:
        # platform id 1 marks the Windows 9x product line
        platformid = sys.getwindowsversion()[3]
    except AttributeError:
        # getwindowsversion unavailable: sniff the shell name instead
        return 'command' in encoding.environ.get('comspec', '')
    return platformid == 1
181 | 181 | |
|
def openhardlinks():
    """Return True when it is safe to keep hardlinked files open."""
    win9x = _is_win_9x()
    return not win9x
184 | 184 | |
|
def parsepatchoutput(output_line):
    """parses the output produced by patch and returns the filename"""
    # strip the fixed "patching file " prefix (14 characters)
    filename = output_line[14:]
    if filename[0] == '`':
        # GNU patch wraps unusual names in `...' quotes; drop them
        filename = filename[1:-1]
    return filename
191 | 191 | |
|
def sshargs(sshcmd, host, user, port):
    '''Build argument list for ssh or Plink'''
    # Plink (PuTTY) spells the port flag -P; OpenSSH uses -p.
    if 'plink' in sshcmd.lower():
        pflag = '-P'
    else:
        pflag = '-p'
    args = "%s@%s" % (user, host) if user else host
    if port:
        return "%s %s %s" % (args, pflag, port)
    return args
197 | 197 | |
|
def setflags(f, l, x):
    # no-op: this platform layer does not track symlink or exec flags
    pass

def copymode(src, dst, mode=None):
    # no-op: there are no POSIX permission bits to copy here
    pass

def checkexec(path):
    # executable bits are never reported by this platform layer
    return False

def checklink(path):
    # symlink support is never reported by this platform layer
    return False
209 | 209 | |
|
def setbinary(fd):
    """Put the CRT stream behind fd into binary (no CRLF translation) mode."""
    # When run without console, pipes may expose invalid
    # fileno(), usually set to -1.
    fno = getattr(fd, 'fileno', None)
    if fno is not None and fno() >= 0:
        msvcrt.setmode(fno(), os.O_BINARY)
216 | 216 | |
|
def pconvert(path):
    """Convert a local (OS-separator) path to canonical forward slashes."""
    return '/'.join(path.split(pycompat.ossep))
219 | 219 | |
|
def localpath(path):
    """Convert forward slashes back to the Windows path separator."""
    return '\\'.join(path.split('/'))
222 | 222 | |
|
def normpath(path):
    """os.path.normpath followed by conversion to forward slashes."""
    collapsed = os.path.normpath(path)
    return pconvert(collapsed)
225 | 225 | |
|
def normcase(path):
    # fold case upward for comparisons on case-insensitive filesystems
    return encoding.upper(path) # NTFS compares via upper()
228 | 228 | |
|
229 | 229 | # see posix.py for definitions |
|
230 | 230 | normcasespec = encoding.normcasespecs.upper |
|
231 | 231 | normcasefallback = encoding.upperfallback |
|
232 | 232 | |
|
def samestat(s1, s2):
    """Conservatively report two stat results as distinct (always False)."""
    return False
235 | 235 | |
|
# A sequence of backslashes is special iff it precedes a double quote:
# - if there's an even number of backslashes, the double quote is not
#   quoted (i.e. it ends the quoted region)
# - if there's an odd number of backslashes, the double quote is quoted
# - in both cases, every pair of backslashes is unquoted into a single
#   backslash
# (See http://msdn2.microsoft.com/en-us/library/a1y7w461.aspx )
# So, to quote a string, we must surround it in double quotes, double
# the number of backslashes that precede double quotes and add another
# backslash before every double quote (being careful with the double
# quote we've appended to the end)
_quotere = None
_needsshellquote = None
def shellquote(s):
    r"""
    >>> shellquote(r'C:\Users\xyz')
    '"C:\\Users\\xyz"'
    >>> shellquote(r'C:\Users\xyz/mixed')
    '"C:\\Users\\xyz/mixed"'
    >>> # Would be safe not to quote too, since it is all double backslashes
    >>> shellquote(r'C:\\Users\\xyz')
    '"C:\\\\Users\\\\xyz"'
    >>> # But this must be quoted
    >>> shellquote(r'C:\\Users\\xyz/abc')
    '"C:\\\\Users\\\\xyz/abc"'
    """
    global _quotere, _needsshellquote
    if _quotere is None:
        _quotere = re.compile(r'(\\*)("|\\$)')
    if _needsshellquote is None:
        # ":" is also treated as "safe character", because it is used as a part
        # of path name on Windows. "\" is also part of a path name, but isn't
        # safe because shlex.split() (kind of) treats it as an escape char and
        # drops it. It will leave the next character, even if it is another
        # "\".
        _needsshellquote = re.compile(r'[^a-zA-Z0-9._:/-]').search
    if s and not _needsshellquote(s) and not _quotere.search(s):
        # "s" shouldn't have to be quoted
        return s
    return '"%s"' % _quotere.sub(r'\1\1\\\2', s)
277 | 277 | |
|
def quotecommand(cmd):
    """Build a command string suitable for os.popen* calls."""
    # Python versions since 2.7.1 do this extra quoting themselves
    needswrap = sys.version_info < (2, 7, 1)
    return '"%s"' % cmd if needswrap else cmd
284 | 284 | |
|
def popen(command, mode='r'):
    # Work around "popen spawned process may not write to stdout
    # under windows" (http://bugs.python.org/issue1366) by routing
    # stderr to the bit bucket.
    redirected = "%s 2> %s" % (command, os.devnull)
    return os.popen(quotecommand(redirected), mode)
291 | 291 | |
|
def explainexit(code):
    """Return a (message, exitcode) pair describing a process exit status."""
    msg = _("exited with status %d") % code
    return msg, code
294 | 294 | |
|
295 | 295 | # if you change this stub into a real check, please try to implement the |
|
296 | 296 | # username and groupname functions above, too. |
|
297 | 297 | def isowner(st): |
|
298 | 298 | return True |
|
299 | 299 | |
|
def findexe(command):
    '''Find executable for command searching like cmd.exe does.
    If command is a basename then PATH is searched for command.
    PATH isn't searched if command is an absolute or relative path.
    An extension from PATHEXT is found and added if not present.
    If command isn't found None is returned.'''
    pathext = encoding.environ.get('PATHEXT', '.COM;.EXE;.BAT;.CMD')
    pathexts = [ext for ext in pathext.lower().split(pycompat.ospathsep)]
    if os.path.splitext(command)[1].lower() in pathexts:
        # command already carries a recognized extension; try it verbatim
        pathexts = ['']

    def findexisting(pathcommand):
        'Will append extension (if needed) and return existing file'
        for ext in pathexts:
            executable = pathcommand + ext
            if os.path.exists(executable):
                return executable
        return None

    if pycompat.ossep in command:
        # an explicit path was given: do not consult PATH
        return findexisting(command)

    for path in encoding.environ.get('PATH', '').split(pycompat.ospathsep):
        executable = findexisting(os.path.join(path, command))
        if executable is not None:
            return executable
    # last resort: expand ~ and environment variables inside the name
    return findexisting(os.path.expanduser(os.path.expandvars(command)))
327 | 327 | |
|
328 | 328 | _wantedkinds = set([stat.S_IFREG, stat.S_IFLNK]) |
|
329 | 329 | |
|
def statfiles(files):
    '''Stat each file in files. Yield each stat, or None if a file
    does not exist or has a type we don't care about.

    Cluster and cache stat per directory to minimize number of OS stat calls.'''
    dircache = {} # dirname -> filename -> status | None if file does not exist
    getkind = stat.S_IFMT
    for nf in files:
        nf = normcase(nf)
        dir, base = os.path.split(nf)
        if not dir:
            dir = '.'
        cache = dircache.get(dir, None)
        if cache is None:
            try:
                # one listdir per directory, keyed case-insensitively,
                # keeping only regular files and symlinks (_wantedkinds)
                dmap = dict([(normcase(n), s)
                             for n, k, s in osutil.listdir(dir, True)
                             if getkind(s.st_mode) in _wantedkinds])
            except OSError as err:
                # Python >= 2.5 returns ENOENT and adds winerror field
                # EINVAL is raised if dir is not a directory.
                if err.errno not in (errno.ENOENT, errno.EINVAL,
                                     errno.ENOTDIR):
                    raise
                # missing/invalid directory: every file in it is "absent"
                dmap = {}
            cache = dircache.setdefault(dir, dmap)
        yield cache.get(base, None)
357 | 357 | |
|
def username(uid=None):
    """Return the name of the user with the given uid.

    If uid is None, return the name of the current user."""
    # user name resolution is not implemented on this platform layer
    return None

def groupname(gid=None):
    """Return the name of the group with the given gid.

    If gid is None, return the name of the current group."""
    # group name resolution is not implemented on this platform layer
    return None
369 | 369 | |
|
def removedirs(name):
    """special version of os.removedirs that does not remove symlinked
    directories or junction points if they actually contain files"""
    if osutil.listdir(name):
        # non-empty (possibly a junction with content): leave it alone
        return
    os.rmdir(name)
    head, tail = os.path.split(name)
    if not tail:
        # path ended with a separator; split once more to get a component
        head, tail = os.path.split(head)
    while head and tail:
        try:
            if osutil.listdir(head):
                # stop as soon as an ancestor is non-empty
                return
            os.rmdir(head)
        except (ValueError, OSError):
            break
        head, tail = os.path.split(head)
387 | 387 | |
|
def unlinkpath(f, ignoremissing=False):
    """unlink and remove the directory if it is empty"""
    try:
        unlink(f)
    except OSError as e:
        # a missing file is only tolerated when explicitly requested
        if not (ignoremissing and e.errno == errno.ENOENT):
            raise
    # try removing directories that might now be empty
    try:
        removedirs(os.path.dirname(f))
    except OSError:
        pass
400 | 400 | |
|
def rename(src, dst):
    '''atomically rename file src to dst, replacing dst if it exists'''
    try:
        os.rename(src, dst)
        return
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise
    # destination already exists (Windows refuses to clobber): remove
    # it and retry
    unlink(dst)
    os.rename(src, dst)
410 | 410 | |
|
def gethgcmd():
    """Return the argv prefix used to re-invoke this hg process."""
    cmd = [sys.executable]
    cmd.extend(sys.argv[:1])
    return cmd
|
413 | 413 | |
|
def groupmembers(name):
    """Raise KeyError: group membership is not supported on Windows."""
    # Behave as if every group name were unknown.
    raise KeyError
|
417 | 417 | |
|
def isexec(f):
    """Report whether f is executable; always False (no exec bit here)."""
    return False
|
420 | 420 | |
|
class cachestat(object):
    """Stat-based cache-validation token; a no-op on this platform."""

    def __init__(self, path):
        # Nothing worth recording, since cacheable() never says yes.
        pass

    def cacheable(self):
        """Whether stat data can validate a cache entry; always False."""
        return False
|
427 | 427 | |
|
def lookupreg(key, valname=None, scope=None):
    ''' Look up a key/value name in the Windows registry.

    valname: value name. If unspecified, the default value for the key
    is used.
    scope: optionally specify scope for registry lookup, this can be
    a sequence of scopes to look up in order. Default (CURRENT_USER,
    LOCAL_MACHINE).
    '''
    # Normalize 'scope' to a sequence of registry hives to try in order.
    if scope is None:
        scopes = (winreg.HKEY_CURRENT_USER, winreg.HKEY_LOCAL_MACHINE)
    elif isinstance(scope, (list, tuple)):
        scopes = scope
    else:
        scopes = (scope,)
    for hive in scopes:
        try:
            val = winreg.QueryValueEx(winreg.OpenKey(hive, key), valname)[0]
            # never let a Unicode string escape into the wild
            return encoding.tolocal(val.encode('UTF-8'))
        except EnvironmentError:
            continue
|
448 | 448 | |
|
# Windows shells do not expand glob patterns before invoking a command,
# so hg itself must expand them.
expandglobs = True
|
450 | 450 | |
|
def statislink(st):
    """Report whether stat result st describes a symlink; always False
    on this platform."""
    return False
|
454 | 454 | |
|
def statisexec(st):
    """Report whether stat result st describes an executable file; always
    False on this platform."""
    return False
|
458 | 458 | |
|
def poll(fds):
    """Not implemented here; see posix.py for the description."""
    raise NotImplementedError()
|
462 | 462 | |
|
def readpipe(pipe):
    """Read all available data from a pipe."""
    pieces = []
    # Keep draining until peeking shows nothing pending, or a read
    # unexpectedly returns no data.
    while True:
        avail = win32.peekpipe(pipe)
        if not avail:
            break
        data = pipe.read(avail)
        if not data:
            break
        pieces.append(data)
    return ''.join(pieces)
|
477 | 477 | |
|
def bindunixsocket(sock, path):
    """Bind sock to a unix-domain path; unavailable on this platform."""
    raise NotImplementedError('unsupported platform')
General Comments 0
You need to be logged in to leave comments.
Login now