Show More
@@ -1,714 +1,715 | |||||
1 | # filemerge.py - file-level merge handling for Mercurial |
|
1 | # filemerge.py - file-level merge handling for Mercurial | |
2 | # |
|
2 | # | |
3 | # Copyright 2006, 2007, 2008 Matt Mackall <mpm@selenic.com> |
|
3 | # Copyright 2006, 2007, 2008 Matt Mackall <mpm@selenic.com> | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms of the |
|
5 | # This software may be used and distributed according to the terms of the | |
6 | # GNU General Public License version 2 or any later version. |
|
6 | # GNU General Public License version 2 or any later version. | |
7 |
|
7 | |||
8 | from __future__ import absolute_import |
|
8 | from __future__ import absolute_import | |
9 |
|
9 | |||
10 | import filecmp |
|
10 | import filecmp | |
11 | import os |
|
11 | import os | |
12 | import re |
|
12 | import re | |
13 | import tempfile |
|
13 | import tempfile | |
14 |
|
14 | |||
15 | from .i18n import _ |
|
15 | from .i18n import _ | |
16 | from .node import nullid, short |
|
16 | from .node import nullid, short | |
17 |
|
17 | |||
18 | from . import ( |
|
18 | from . import ( | |
|
19 | encoding, | |||
19 | error, |
|
20 | error, | |
20 | formatter, |
|
21 | formatter, | |
21 | match, |
|
22 | match, | |
22 | pycompat, |
|
23 | pycompat, | |
23 | scmutil, |
|
24 | scmutil, | |
24 | simplemerge, |
|
25 | simplemerge, | |
25 | tagmerge, |
|
26 | tagmerge, | |
26 | templatekw, |
|
27 | templatekw, | |
27 | templater, |
|
28 | templater, | |
28 | util, |
|
29 | util, | |
29 | ) |
|
30 | ) | |
30 |
|
31 | |||
31 | def _toolstr(ui, tool, part, default=""): |
|
32 | def _toolstr(ui, tool, part, default=""): | |
32 | return ui.config("merge-tools", tool + "." + part, default) |
|
33 | return ui.config("merge-tools", tool + "." + part, default) | |
33 |
|
34 | |||
34 | def _toolbool(ui, tool, part, default=False): |
|
35 | def _toolbool(ui, tool, part, default=False): | |
35 | return ui.configbool("merge-tools", tool + "." + part, default) |
|
36 | return ui.configbool("merge-tools", tool + "." + part, default) | |
36 |
|
37 | |||
37 | def _toollist(ui, tool, part, default=[]): |
|
38 | def _toollist(ui, tool, part, default=[]): | |
38 | return ui.configlist("merge-tools", tool + "." + part, default) |
|
39 | return ui.configlist("merge-tools", tool + "." + part, default) | |
39 |
|
40 | |||
# Registry of internal merge tools; populated by the internaltool()
# decorator below, keyed both as ':name' and 'internal:name'.
internals = {}
# Merge tools to document.
internalsdoc = {}

# internal tool merge types
nomerge = None
mergeonly = 'mergeonly' # just the full merge, no premerge
fullmerge = 'fullmerge' # both premerge and merge
48 |
|
49 | |||
class absentfilectx(object):
    """Represents a file that's ostensibly in a context but is actually not
    present in it.

    This is here because it's very specific to the filemerge code for now --
    other code is likely going to break with the values this returns."""

    def __init__(self, ctx, f):
        # ctx: the changectx this (missing) file nominally belongs to
        # f: the repository path of the file
        self._ctx = ctx
        self._f = f

    def path(self):
        """Return the repository path of the absent file."""
        return self._f

    def changectx(self):
        """Return the changectx this file was looked up in."""
        return self._ctx

    # An absent file has no size, no content, and the null filenode.
    def size(self):
        return None

    def data(self):
        return None

    def filenode(self):
        return nullid

    def flags(self):
        return ''

    def isbinary(self):
        return False

    def isabsent(self):
        return True

    # Signal to callers that cmp() below replaces the usual comparison.
    _customcmp = True

    def cmp(self, fctx):
        """compare with other file context

        returns True if different from fctx.
        """
        # Only another absent file for the same path in the same context
        # compares equal.
        # NOTE(review): self.ctx() is not defined on this class (only
        # changectx() is) -- verify whether this branch is ever reached.
        return not (fctx.isabsent() and
                    fctx.ctx() == self.ctx() and
                    fctx.path() == self.path())
92 |
|
93 | |||
def internaltool(name, mergetype, onfailure=None, precheck=None):
    '''return a decorator for populating internal merge tool table'''
    def register(func):
        # Canonical key is ':name'; 'internal:name' is kept as an alias.
        fullname = ':' + name
        # Prefix the docstring with the tool's name for generated help.
        doc = pycompat.sysstr("``%s``\n" % fullname) + func.__doc__.strip()
        func.__doc__ = doc
        internals[fullname] = func
        internals['internal:' + name] = func
        internalsdoc[fullname] = func
        # Stash tool metadata on the function object for later dispatch.
        func.mergetype = mergetype
        func.onfailure = onfailure
        func.precheck = precheck
        return func
    return register
107 |
|
108 | |||
def _findtool(ui, tool):
    """Resolve *tool* to something invocable.

    Internal tools resolve to their own name; anything else is looked up
    as an external executable (or None if not found).
    """
    return tool if tool in internals else findexternaltool(ui, tool)
112 |
|
113 | |||
def findexternaltool(ui, tool):
    """Locate the executable for an external merge *tool*.

    First honors any Windows-registry configuration for the tool
    (``regkey``/``regkeyalt`` + ``regname``/``regappend``), then falls
    back to the tool's configured ``executable`` (defaulting to the tool
    name itself) searched on the path. Returns None when not found.
    """
    for keyname in ("regkey", "regkeyalt"):
        regkey = _toolstr(ui, tool, keyname)
        if not regkey:
            continue
        candidate = util.lookupreg(regkey, _toolstr(ui, tool, "regname"))
        if candidate:
            candidate = util.findexe(candidate +
                                     _toolstr(ui, tool, "regappend"))
            if candidate:
                return candidate
    exe = _toolstr(ui, tool, "executable", tool)
    return util.findexe(util.expandpath(exe))
125 |
|
126 | |||
def _picktool(repo, ui, path, binary, symlink, changedelete):
    """Pick a merge tool for *path*.

    Returns a (tool, toolpath) pair. Selection priority, highest first:
    ``ui.forcemerge`` (command line), the ``HGMERGE`` environment
    variable, ``merge-patterns`` matches, configured ``merge-tools`` by
    priority, and finally the internal ``:merge``/``:prompt`` tools.
    *binary*, *symlink* and *changedelete* describe the file/conflict and
    disqualify tools that cannot handle them.
    """
    def supportscd(tool):
        # Only nomerge-type internal tools can handle change/delete
        # conflicts.
        return tool in internals and internals[tool].mergetype == nomerge

    def check(tool, pat, symlink, binary, changedelete):
        # Return True iff *tool* exists and can handle this conflict.
        tmsg = tool
        if pat:
            tmsg += " specified for " + pat
        if not _findtool(ui, tool):
            if pat: # explicitly requested tool deserves a warning
                ui.warn(_("couldn't find merge tool %s\n") % tmsg)
            else: # configured but non-existing tools are more silent
                ui.note(_("couldn't find merge tool %s\n") % tmsg)
        elif symlink and not _toolbool(ui, tool, "symlink"):
            ui.warn(_("tool %s can't handle symlinks\n") % tmsg)
        elif binary and not _toolbool(ui, tool, "binary"):
            ui.warn(_("tool %s can't handle binary\n") % tmsg)
        elif changedelete and not supportscd(tool):
            # the nomerge tools are the only tools that support change/delete
            # conflicts
            pass
        elif not util.gui() and _toolbool(ui, tool, "gui"):
            ui.warn(_("tool %s requires a GUI\n") % tmsg)
        else:
            return True
        return False

    # internal config: ui.forcemerge
    # forcemerge comes from command line arguments, highest priority
    force = ui.config('ui', 'forcemerge')
    if force:
        toolpath = _findtool(ui, force)
        if changedelete and not supportscd(toolpath):
            return ":prompt", None
        else:
            if toolpath:
                return (force, util.shellquote(toolpath))
            else:
                # mimic HGMERGE if given tool not found
                return (force, force)

    # HGMERGE takes next precedence
    hgmerge = encoding.environ.get("HGMERGE")
    if hgmerge:
        if changedelete and not supportscd(hgmerge):
            return ":prompt", None
        else:
            # HGMERGE is used verbatim as both name and command.
            return (hgmerge, hgmerge)

    # then patterns
    for pat, tool in ui.configitems("merge-patterns"):
        mf = match.match(repo.root, '', [pat])
        if mf(path) and check(tool, pat, symlink, False, changedelete):
            toolpath = _findtool(ui, tool)
            return (tool, util.shellquote(toolpath))

    # then merge tools
    tools = {}
    disabled = set()
    for k, v in ui.configitems("merge-tools"):
        t = k.split('.')[0]
        if t not in tools:
            tools[t] = int(_toolstr(ui, t, "priority", "0"))
        if _toolbool(ui, t, "disabled", False):
            disabled.add(t)
    names = tools.keys()
    # Sort by descending priority (negated for ascending sort).
    tools = sorted([(-p, tool) for tool, p in tools.items()
                    if tool not in disabled])
    uimerge = ui.config("ui", "merge")
    if uimerge:
        # external tools defined in uimerge won't be able to handle
        # change/delete conflicts
        if uimerge not in names and not changedelete:
            return (uimerge, uimerge)
        tools.insert(0, (None, uimerge)) # highest priority
    tools.append((None, "hgmerge")) # the old default, if found
    for p, t in tools:
        if check(t, None, symlink, binary, changedelete):
            toolpath = _findtool(ui, t)
            return (t, util.shellquote(toolpath))

    # internal merge or prompt as last resort
    if symlink or binary or changedelete:
        return ":prompt", None
    return ":merge", None
211 |
|
212 | |||
212 | def _eoltype(data): |
|
213 | def _eoltype(data): | |
213 | "Guess the EOL type of a file" |
|
214 | "Guess the EOL type of a file" | |
214 | if '\0' in data: # binary |
|
215 | if '\0' in data: # binary | |
215 | return None |
|
216 | return None | |
216 | if '\r\n' in data: # Windows |
|
217 | if '\r\n' in data: # Windows | |
217 | return '\r\n' |
|
218 | return '\r\n' | |
218 | if '\r' in data: # Old Mac |
|
219 | if '\r' in data: # Old Mac | |
219 | return '\r' |
|
220 | return '\r' | |
220 | if '\n' in data: # UNIX |
|
221 | if '\n' in data: # UNIX | |
221 | return '\n' |
|
222 | return '\n' | |
222 | return None # unknown |
|
223 | return None # unknown | |
223 |
|
224 | |||
def _matcheol(file, origfile):
    "Convert EOL markers in a file to match origfile"
    tostyle = _eoltype(util.readfile(origfile))
    if not tostyle:
        # Original is binary or has no line endings; nothing to match.
        return
    data = util.readfile(file)
    fromstyle = _eoltype(data)
    if not fromstyle:
        return
    newdata = data.replace(fromstyle, tostyle)
    # Only rewrite when the conversion actually changed something.
    if newdata != data:
        util.writefile(file, newdata)
234 |
|
235 | |||
@internaltool('prompt', nomerge)
def _iprompt(repo, mynode, orig, fcd, fco, fca, toolconf, labels=None):
    """Asks the user which of the local `p1()` or the other `p2()` version to
    keep as the merged version."""
    ui = repo.ui
    fd = fcd.path()

    # Label decorations ('l'/'o' keys) plus the file name for the prompts.
    prompts = partextras(labels)
    prompts['fd'] = fd
    try:
        if fco.isabsent():
            # change/delete: local changed, other deleted.
            index = ui.promptchoice(
                _("local%(l)s changed %(fd)s which other%(o)s deleted\n"
                  "use (c)hanged version, (d)elete, or leave (u)nresolved?"
                  "$$ &Changed $$ &Delete $$ &Unresolved") % prompts, 2)
            choice = ['local', 'other', 'unresolved'][index]
        elif fcd.isabsent():
            # change/delete: other changed, local deleted. Note the
            # inverted mapping: "changed" here means taking the OTHER side.
            index = ui.promptchoice(
                _("other%(o)s changed %(fd)s which local%(l)s deleted\n"
                  "use (c)hanged version, leave (d)eleted, or "
                  "leave (u)nresolved?"
                  "$$ &Changed $$ &Deleted $$ &Unresolved") % prompts, 2)
            choice = ['other', 'local', 'unresolved'][index]
        else:
            # Plain both-sides-modified conflict with no usable tool.
            index = ui.promptchoice(
                _("no tool found to merge %(fd)s\n"
                  "keep (l)ocal%(l)s, take (o)ther%(o)s, or leave (u)nresolved?"
                  "$$ &Local $$ &Other $$ &Unresolved") % prompts, 2)
            choice = ['local', 'other', 'unresolved'][index]

        # Delegate to the corresponding nomerge tool.
        if choice == 'other':
            return _iother(repo, mynode, orig, fcd, fco, fca, toolconf,
                           labels)
        elif choice == 'local':
            return _ilocal(repo, mynode, orig, fcd, fco, fca, toolconf,
                           labels)
        elif choice == 'unresolved':
            return _ifail(repo, mynode, orig, fcd, fco, fca, toolconf,
                          labels)
    except error.ResponseExpected:
        # Non-interactive session: terminate the prompt line and leave the
        # file unresolved.
        ui.write("\n")
        return _ifail(repo, mynode, orig, fcd, fco, fca, toolconf,
                      labels)
278 |
|
279 | |||
@internaltool('local', nomerge)
def _ilocal(repo, mynode, orig, fcd, fco, fca, toolconf, labels=None):
    """Uses the local `p1()` version of files as the merged version."""
    # The working copy already holds the local version, so there is
    # nothing to write; just report success and whether the local side
    # was the deleted one (change/delete conflict resolved as 'deleted').
    deleted = fcd.isabsent()
    return 0, deleted
283 |
|
284 | |||
@internaltool('other', nomerge)
def _iother(repo, mynode, orig, fcd, fco, fca, toolconf, labels=None):
    """Uses the other `p2()` version of files as the merged version."""
    if fco.isabsent():
        # local changed, remote deleted -- 'deleted' picked
        repo.wvfs.unlinkpath(fcd.path())
        return 0, True
    # Overwrite the working copy with the other side's content and flags.
    repo.wwrite(fcd.path(), fco.data(), fco.flags())
    return 0, False
295 |
|
296 | |||
@internaltool('fail', nomerge)
def _ifail(repo, mynode, orig, fcd, fco, fca, toolconf, labels=None):
    """
    Rather than attempting to merge files that were modified on both
    branches, it marks them as unresolved. The resolve command must be
    used to resolve these conflicts."""
    # for change/delete conflicts write out the changed version, then fail
    if fcd.isabsent():
        repo.wwrite(fcd.path(), fco.data(), fco.flags())
    # Always return failure (1) so the file stays unresolved; the file was
    # never deleted by this tool (False).
    return 1, False
306 |
|
307 | |||
def _premerge(repo, fcd, fco, fca, toolconf, files, labels=None):
    """Attempt a quiet simplemerge before invoking the real merge tool.

    Returns 0 when the premerge fully resolved the file, 1 when the
    caller should continue with the configured merge tool. Honors the
    per-tool ``premerge`` setting, which may be a boolean or one of
    'keep'/'keep-merge3' (keep the premerge markers in the file instead
    of restoring the backup on failure).
    """
    tool, toolpath, binary, symlink = toolconf
    # Premerge is pointless for symlinks and impossible for
    # change/delete conflicts.
    if symlink or fcd.isabsent() or fco.isabsent():
        return 1
    a, b, c, back = files

    ui = repo.ui

    validkeep = ['keep', 'keep-merge3']

    # do we attempt to simplemerge first?
    try:
        premerge = _toolbool(ui, tool, "premerge", not binary)
    except error.ConfigError:
        # Not a boolean: accept only the 'keep' variants as strings.
        premerge = _toolstr(ui, tool, "premerge").lower()
        if premerge not in validkeep:
            _valid = ', '.join(["'" + v + "'" for v in validkeep])
            raise error.ConfigError(_("%s.premerge not valid "
                                      "('%s' is neither boolean nor %s)") %
                                    (tool, premerge, _valid))

    if premerge:
        if premerge == 'keep-merge3':
            if not labels:
                labels = _defaultconflictlabels
            if len(labels) < 3:
                # NOTE(review): this appends to the caller's list (or to
                # the shared module-level _defaultconflictlabels), so the
                # mutation is visible outside this function -- verify
                # whether the subsequent full-merge step relies on it.
                labels.append('base')
        r = simplemerge.simplemerge(ui, a, b, c, quiet=True, label=labels)
        if not r:
            ui.debug(" premerge successful\n")
            return 0
        if premerge not in validkeep:
            util.copyfile(back, a) # restore from backup and try again
    return 1 # continue merging
341 |
|
342 | |||
def _mergecheck(repo, mynode, orig, fcd, fco, fca, toolconf):
    """Precheck for internal merge tools.

    Internal simple-merge tools can handle neither symlinks nor
    change/delete conflicts; warn and return False in those cases.
    """
    tool, toolpath, binary, symlink = toolconf
    ui = repo.ui
    if symlink:
        ui.warn(_('warning: internal %s cannot merge symlinks '
                  'for %s\n') % (tool, fcd.path()))
        return False
    if fcd.isabsent() or fco.isabsent():
        ui.warn(_('warning: internal %s cannot merge change/delete '
                  'conflict for %s\n') % (tool, fcd.path()))
        return False
    return True
353 |
|
354 | |||
def _merge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels, mode):
    """
    Uses the internal non-interactive simple merge algorithm for merging
    files. It will fail if there are any conflicts and leave markers in
    the partially merged file. Markers will have two sections, one for each side
    of merge, unless mode equals 'union' which suppresses the markers."""
    # files is (local, other, base, backup); the backup is unused here.
    local, other, base, back = files

    conflicts = simplemerge.simplemerge(repo.ui, local, other, base,
                                        label=labels, mode=mode)
    # (merge attempted, exit status, file not deleted)
    return True, conflicts, False
366 |
|
367 | |||
@internaltool('union', fullmerge,
              _("warning: conflicts while merging %s! "
                "(edit, then use 'hg resolve --mark')\n"),
              precheck=_mergecheck)
def _iunion(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
    """
    Uses the internal non-interactive simple merge algorithm for merging
    files. It will use both left and right sides for conflict regions.
    No markers are inserted."""
    # Delegate to the generic simplemerge driver in 'union' mode.
    result = _merge(repo, mynode, orig, fcd, fco, fca, toolconf,
                    files, labels, 'union')
    return result
378 |
|
379 | |||
@internaltool('merge', fullmerge,
              _("warning: conflicts while merging %s! "
                "(edit, then use 'hg resolve --mark')\n"),
              precheck=_mergecheck)
def _imerge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
    """
    Uses the internal non-interactive simple merge algorithm for merging
    files. It will fail if there are any conflicts and leave markers in
    the partially merged file. Markers will have two sections, one for each side
    of merge."""
    # Delegate to the generic simplemerge driver in plain 'merge' mode.
    result = _merge(repo, mynode, orig, fcd, fco, fca, toolconf,
                    files, labels, 'merge')
    return result
391 |
|
392 | |||
@internaltool('merge3', fullmerge,
              _("warning: conflicts while merging %s! "
                "(edit, then use 'hg resolve --mark')\n"),
              precheck=_mergecheck)
def _imerge3(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
    """
    Uses the internal non-interactive simple merge algorithm for merging
    files. It will fail if there are any conflicts and leave markers in
    the partially merged file. Marker will have three sections, one from each
    side of the merge and one for the base content."""
    if not labels:
        labels = _defaultconflictlabels
    if len(labels) < 3:
        # Fix: build a new list instead of labels.append('base'), which
        # permanently mutated the shared module-level
        # _defaultconflictlabels (or the caller's list) across calls.
        labels = labels + ['base']
    return _imerge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels)
407 |
|
408 | |||
def _imergeauto(repo, mynode, orig, fcd, fco, fca, toolconf, files,
                labels=None, localorother=None):
    """
    Generic driver for _imergelocal and _imergeother
    """
    assert localorother is not None
    tool, toolpath, binary, symlink = toolconf
    # files is (local, other, base, backup); the backup is unused here.
    local, other, base, back = files
    status = simplemerge.simplemerge(repo.ui, local, other, base,
                                     label=labels,
                                     localorother=localorother)
    return True, status
419 |
|
420 | |||
@internaltool('merge-local', mergeonly, precheck=_mergecheck)
def _imergelocal(*args, **kwargs):
    """
    Like :merge, but resolve all conflicts non-interactively in favor
    of the local `p1()` changes."""
    # Delegate to the generic driver, forcing the local side to win.
    ok, status = _imergeauto(localorother='local', *args, **kwargs)
    return ok, status, False
427 |
|
428 | |||
@internaltool('merge-other', mergeonly, precheck=_mergecheck)
def _imergeother(*args, **kwargs):
    """
    Like :merge, but resolve all conflicts non-interactively in favor
    of the other `p2()` changes."""
    # Delegate to the generic driver, forcing the other side to win.
    ok, status = _imergeauto(localorother='other', *args, **kwargs)
    return ok, status, False
435 |
|
436 | |||
@internaltool('tagmerge', mergeonly,
              _("automatic tag merging of %s failed! "
                "(use 'hg resolve --tool :merge' or another merge "
                "tool of your choice)\n"))
def _itagmerge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
    """
    Uses the internal tag merge algorithm (experimental).
    """
    # tagmerge knows the .hgtags file format; everything else is handed off.
    ok, status = tagmerge.merge(repo, fcd, fco, fca)
    return ok, status, False
446 |
|
447 | |||
@internaltool('dump', fullmerge)
def _idump(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
    """
    Creates three versions of the files to merge, containing the
    contents of local, other and base. These files can then be used to
    perform a merge manually. If the file to be merged is named
    ``a.txt``, these files will accordingly be named ``a.txt.local``,
    ``a.txt.other`` and ``a.txt.base`` and they will be placed in the
    same directory as ``a.txt``."""
    localpath, basepath, otherpath, back = files

    path = fcd.path()

    # local side: plain filesystem copy of the working-directory file
    util.copyfile(localpath, localpath + ".local")
    # other/base: written via repo.wwrite with each context's data and flags
    repo.wwrite(path + ".other", fco.data(), fco.flags())
    repo.wwrite(path + ".base", fca.data(), fca.flags())
    return False, 1, False
464 |
|
465 | |||
def _xmerge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
    """Run an external merge tool configured under [merge-tools].

    Returns (needcheck, returncode, deleted) like the internal tools.
    """
    tool, toolpath, binary, symlink = toolconf
    if fcd.isabsent() or fco.isabsent():
        # an external tool cannot be handed a change/delete conflict:
        # one side has no file to pass on the command line
        repo.ui.warn(_('warning: %s cannot merge change/delete conflict '
                       'for %s\n') % (tool, fcd.path()))
        return False, 1, None
    a, b, c, back = files
    out = ""
    # environment exported to the external tool process
    env = {'HG_FILE': fcd.path(),
           'HG_MY_NODE': short(mynode),
           'HG_OTHER_NODE': str(fco.changectx()),
           'HG_BASE_NODE': str(fca.changectx()),
           'HG_MY_ISLINK': 'l' in fcd.flags(),
           'HG_OTHER_ISLINK': 'l' in fco.flags(),
           'HG_BASE_ISLINK': 'l' in fca.flags(),
           }

    ui = repo.ui

    args = _toolstr(ui, tool, "args", '$local $base $other')
    if "$output" in args:
        out, a = a, back # read input from backup, write to original
    replace = {'local': a, 'base': b, 'other': c, 'output': out}
    # substitute $local/$base/$other/$output, shell-quoting each path
    args = util.interpolate(r'\$', replace, args,
                            lambda s: util.shellquote(util.localpath(s)))
    cmd = toolpath + ' ' + args
    repo.ui.debug('launching merge tool: %s\n' % cmd)
    r = ui.system(cmd, cwd=repo.root, environ=env)
    repo.ui.debug('merge tool returned: %s\n' % r)
    # always request post-run checks; we cannot trust an external tool's
    # exit status alone
    return True, r, False
495 |
|
496 | |||
def _formatconflictmarker(repo, ctx, template, label, pad):
    """Render one conflict-marker description: ``label:`` plus the
    templated changeset summary.

    ``pad`` is the minimum width reserved for the label prefix so that
    several markers line up.
    """
    if ctx.node() is None:
        # a working-directory ctx has no node; describe its first parent
        ctx = ctx.p1()

    props = templatekw.keywords.copy()
    props.update({'templ': template, 'ctx': ctx, 'repo': repo})
    rendered = template('conflictmarker', **props)

    prefix = ('%s:' % label).ljust(pad + 1)
    mark = '%s %s' % (prefix, templater.stringify(rendered))

    if mark:
        mark = mark.splitlines()[0] # split for safety

    # 8 for the prefix of conflict marker lines (e.g. '<<<<<<< ')
    return util.ellipsis(mark, 80 - 8)
519 |
|
520 | |||
# Default template describing each side of a conflict marker when
# ui.mergemarkers is not 'basic': short node, then tags/bookmarks/branch
# (each omitted when empty or at its default value), then author and the
# first line of the description.
_defaultconflictmarker = ('{node|short} '
                          '{ifeq(tags, "tip", "", '
                          'ifeq(tags, "", "", "{tags} "))}'
                          '{if(bookmarks, "{bookmarks} ")}'
                          '{ifeq(branch, "default", "", "{branch} ")}'
                          '- {author|user}: {desc|firstline}')

# Labels for the two sides of a merge when the caller supplies none.
_defaultconflictlabels = ['local', 'other']
528 |
|
529 | |||
def _formatlabels(repo, fcd, fco, fca, labels):
    """Formats the given labels using the conflict marker template.

    Returns a list of formatted labels (local, other, and optionally base).
    """
    ui = repo.ui
    template = ui.config('ui', 'mergemarkertemplate', _defaultconflictmarker)
    tmpl = formatter.maketemplater(ui, 'conflictmarker', template)

    # pad labels so the templated parts of all markers line up
    pad = max(len(label) for label in labels)

    ctxs = [fcd.changectx(), fco.changectx(), fca.changectx()]
    # zip stops at len(labels): the base ctx is only used when a third
    # label was supplied
    return [_formatconflictmarker(repo, ctx, tmpl, label, pad)
            for ctx, label in zip(ctxs, labels)]
549 |
|
550 | |||
def partextras(labels):
    """Return a dictionary of extra labels for use in prompts to the user

    Intended use is in strings of the form "(l)ocal%(l)s".
    """
    if labels is None:
        local = other = ""
    else:
        local = " [%s]" % labels[0]
        other = " [%s]" % labels[1]
    return {"l": local, "o": other}
565 |
|
566 | |||
def _filemerge(premerge, repo, mynode, orig, fcd, fco, fca, labels=None):
    """perform a 3-way merge in the working directory

    premerge = whether this is a premerge
    mynode = parent node before merge
    orig = original local filename before merge
    fco = other file context
    fca = ancestor file context
    fcd = local file context for current/destination file

    Returns whether the merge is complete, the return value of the merge, and
    a boolean indicating whether the file was deleted from disk."""

    def temp(prefix, ctx):
        # write ctx's data to a uniquely-named temp file ("name~prefix.ext")
        # and return its path; the caller is responsible for unlinking it
        fullbase, ext = os.path.splitext(ctx.path())
        pre = "%s~%s." % (os.path.basename(fullbase), prefix)
        (fd, name) = tempfile.mkstemp(prefix=pre, suffix=ext)
        data = repo.wwritedata(ctx.path(), ctx.data())
        f = os.fdopen(fd, "wb")
        f.write(data)
        f.close()
        return name

    if not fco.cmp(fcd): # files identical?
        return True, None, False

    ui = repo.ui
    fd = fcd.path()
    binary = fcd.isbinary() or fco.isbinary() or fca.isbinary()
    symlink = 'l' in fcd.flags() + fco.flags()
    changedelete = fcd.isabsent() or fco.isabsent()
    tool, toolpath = _picktool(repo, ui, fd, binary, symlink, changedelete)
    if tool in internals and tool.startswith('internal:'):
        # normalize to new-style names (':merge' etc)
        tool = tool[len('internal'):]
    ui.debug("picked tool '%s' for %s (binary %s symlink %s changedelete %s)\n"
             % (tool, fd, binary, symlink, changedelete))

    if tool in internals:
        func = internals[tool]
        mergetype = func.mergetype
        onfailure = func.onfailure
        precheck = func.precheck
    else:
        # external tool: drive it through the generic command runner
        func = _xmerge
        mergetype = fullmerge
        onfailure = _("merging %s failed!\n")
        precheck = None

    toolconf = tool, toolpath, binary, symlink

    if mergetype == nomerge:
        # tool resolves without consulting file contents (e.g. :local);
        # no temp files or backup needed
        r, deleted = func(repo, mynode, orig, fcd, fco, fca, toolconf, labels)
        return True, r, deleted

    if premerge:
        if orig != fco.path():
            ui.status(_("merging %s and %s to %s\n") % (orig, fco.path(), fd))
        else:
            ui.status(_("merging %s\n") % fd)

    ui.debug("my %s other %s ancestor %s\n" % (fcd, fco, fca))

    if precheck and not precheck(repo, mynode, orig, fcd, fco, fca,
                                 toolconf):
        if onfailure:
            ui.warn(onfailure % fd)
        return True, 1, False

    a = repo.wjoin(fd)    # local file, merged in place by the tool
    b = temp("base", fca) # ancestor contents
    c = temp("other", fco) # other side's contents
    if not fcd.isabsent():
        # keep a pristine backup of the local file so _check can compare
        # the tool's output against it; only written on the premerge pass
        back = scmutil.origpath(ui, repo, a)
        if premerge:
            util.copyfile(a, back)
    else:
        back = None
    files = (a, b, c, back)

    r = 1
    try:
        markerstyle = ui.config('ui', 'mergemarkers', 'basic')
        if not labels:
            labels = _defaultconflictlabels
        if markerstyle != 'basic':
            labels = _formatlabels(repo, fcd, fco, fca, labels)

        if premerge and mergetype == fullmerge:
            r = _premerge(repo, fcd, fco, fca, toolconf, files, labels=labels)
            # complete if premerge successful (r is 0)
            return not r, r, False

        needcheck, r, deleted = func(repo, mynode, orig, fcd, fco, fca,
                                     toolconf, files, labels=labels)

        if needcheck:
            r = _check(r, ui, tool, fcd, files)

        if r:
            if onfailure:
                ui.warn(onfailure % fd)

        return True, r, deleted
    finally:
        if not r and back is not None:
            # merge succeeded: the pristine backup is no longer needed
            util.unlink(back)
        util.unlink(b)
        util.unlink(c)
675 |
|
676 | |||
def _check(r, ui, tool, fcd, files):
    """Post-run verification of a merge tool's result, driven by the
    tool's checkconflicts/check/checkchanged/fixeol configuration.

    Returns the (possibly updated) return code ``r``; nonzero marks the
    file as unresolved.
    """
    fd = fcd.path()
    a, b, c, back = files

    if not r and (_toolbool(ui, tool, "checkconflicts") or
                  'conflicts' in _toollist(ui, tool, "check")):
        # look for leftover conflict markers in the merged result
        if re.search("^(<<<<<<< .*|=======|>>>>>>> .*)$", fcd.data(),
                     re.MULTILINE):
            r = 1

    checked = False
    if 'prompt' in _toollist(ui, tool, "check"):
        checked = True
        # default answer (on EOF/abort) is 1 = "No" = unresolved
        if ui.promptchoice(_("was merge of '%s' successful (yn)?"
                             "$$ &Yes $$ &No") % fd, 1):
            r = 1

    if not r and not checked and (_toolbool(ui, tool, "checkchanged") or
                                  'changed' in
                                  _toollist(ui, tool, "check")):
        # output identical to the pre-merge backup usually means the tool
        # did nothing; confirm with the user before accepting it
        if back is not None and filecmp.cmp(a, back):
            if ui.promptchoice(_(" output file %s appears unchanged\n"
                                 "was merge successful (yn)?"
                                 "$$ &Yes $$ &No") % fd, 1):
                r = 1

    if back is not None and _toolbool(ui, tool, "fixeol"):
        # make the merged file's line endings match the backup's
        _matcheol(a, back)

    return r
706 | return r | |
706 |
|
707 | |||
def premerge(repo, mynode, orig, fcd, fco, fca, labels=None):
    """Run the premerge pass of a file merge (see _filemerge)."""
    return _filemerge(True, repo, mynode, orig, fcd, fco, fca, labels=labels)
709 |
|
710 | |||
def filemerge(repo, mynode, orig, fcd, fco, fca, labels=None):
    """Run the main (non-premerge) pass of a file merge (see _filemerge)."""
    return _filemerge(False, repo, mynode, orig, fcd, fco, fca, labels=labels)
712 |
|
713 | |||
# tell hggettext to extract docstrings from these functions:
# (the internal merge tools' docstrings double as user-visible help text)
i18nfunctions = internals.values()
@@ -1,200 +1,201 | |||||
1 | # hgweb/common.py - Utility functions needed by hgweb_mod and hgwebdir_mod |
|
1 | # hgweb/common.py - Utility functions needed by hgweb_mod and hgwebdir_mod | |
2 | # |
|
2 | # | |
3 | # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net> |
|
3 | # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net> | |
4 | # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> |
|
4 | # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> | |
5 | # |
|
5 | # | |
6 | # This software may be used and distributed according to the terms of the |
|
6 | # This software may be used and distributed according to the terms of the | |
7 | # GNU General Public License version 2 or any later version. |
|
7 | # GNU General Public License version 2 or any later version. | |
8 |
|
8 | |||
9 | from __future__ import absolute_import |
|
9 | from __future__ import absolute_import | |
10 |
|
10 | |||
11 | import errno |
|
11 | import errno | |
12 | import mimetypes |
|
12 | import mimetypes | |
13 | import os |
|
13 | import os | |
14 |
|
14 | |||
15 | from .. import ( |
|
15 | from .. import ( | |
|
16 | encoding, | |||
16 | pycompat, |
|
17 | pycompat, | |
17 | util, |
|
18 | util, | |
18 | ) |
|
19 | ) | |
19 |
|
20 | |||
# re-export util's http server abstraction for hgweb modules
httpserver = util.httpserver

# HTTP status codes used by hgweb responses
HTTP_OK = 200
HTTP_NOT_MODIFIED = 304
HTTP_BAD_REQUEST = 400
HTTP_UNAUTHORIZED = 401
HTTP_FORBIDDEN = 403
HTTP_NOT_FOUND = 404
HTTP_METHOD_NOT_ALLOWED = 405
HTTP_SERVER_ERROR = 500
30 |
|
31 | |||
31 |
|
32 | |||
32 | def ismember(ui, username, userlist): |
|
33 | def ismember(ui, username, userlist): | |
33 | """Check if username is a member of userlist. |
|
34 | """Check if username is a member of userlist. | |
34 |
|
35 | |||
35 | If userlist has a single '*' member, all users are considered members. |
|
36 | If userlist has a single '*' member, all users are considered members. | |
36 | Can be overridden by extensions to provide more complex authorization |
|
37 | Can be overridden by extensions to provide more complex authorization | |
37 | schemes. |
|
38 | schemes. | |
38 | """ |
|
39 | """ | |
39 | return userlist == ['*'] or username in userlist |
|
40 | return userlist == ['*'] or username in userlist | |
40 |
|
41 | |||
def checkauthz(hgweb, req, op):
    '''Check permission for operation based on request data (including
    authentication info). Return if op allowed, else raise an ErrorResponse
    exception.'''

    user = req.env.get('REMOTE_USER')

    # read access: deny_read wins over allow_read; an empty allow_read
    # list means everyone may read
    deny_read = hgweb.configlist('web', 'deny_read')
    if deny_read and (not user or ismember(hgweb.repo.ui, user, deny_read)):
        raise ErrorResponse(HTTP_UNAUTHORIZED, 'read not authorized')

    allow_read = hgweb.configlist('web', 'allow_read')
    if allow_read and (not ismember(hgweb.repo.ui, user, allow_read)):
        raise ErrorResponse(HTTP_UNAUTHORIZED, 'read not authorized')

    if op == 'pull' and not hgweb.allowpull:
        raise ErrorResponse(HTTP_UNAUTHORIZED, 'pull not authorized')
    elif op == 'pull' or op is None: # op is None for interface requests
        return

    # everything below applies only to write (push) operations

    # enforce that you can only push using POST requests
    if req.env['REQUEST_METHOD'] != 'POST':
        msg = 'push requires POST request'
        raise ErrorResponse(HTTP_METHOD_NOT_ALLOWED, msg)

    # require ssl by default for pushing, auth info cannot be sniffed
    # and replayed
    scheme = req.env.get('wsgi.url_scheme')
    if hgweb.configbool('web', 'push_ssl', True) and scheme != 'https':
        raise ErrorResponse(HTTP_FORBIDDEN, 'ssl required')

    deny = hgweb.configlist('web', 'deny_push')
    if deny and (not user or ismember(hgweb.repo.ui, user, deny)):
        raise ErrorResponse(HTTP_UNAUTHORIZED, 'push not authorized')

    # unlike reads, pushing requires an explicit allow_push entry (or '*')
    allow = hgweb.configlist('web', 'allow_push')
    if not (allow and ismember(hgweb.repo.ui, user, allow)):
        raise ErrorResponse(HTTP_UNAUTHORIZED, 'push not authorized')
79 |
|
80 | |||
# Hooks for hgweb permission checks; extensions can add hooks here.
# Each hook is invoked like this: hook(hgweb, request, operation),
# where operation is either read, pull or push. Hooks should either
# raise an ErrorResponse exception, or just return.
#
# It is possible to do both authentication and authorization through
# this.
#
# checkauthz (above) is installed as the default hook.
permhooks = [checkauthz]
88 |
|
89 | |||
89 |
|
90 | |||
class ErrorResponse(Exception):
    """HTTP error carrying a status code, message and extra headers.

    ``code`` is the numeric HTTP status code; ``message`` defaults to the
    standard reason phrase for that code; ``headers`` is a list of extra
    headers to emit with the response (presumably (name, value) pairs —
    not enforced here).
    """
    def __init__(self, code, message=None, headers=None):
        if message is None:
            message = _statusmessage(code)
        Exception.__init__(self, message)
        self.code = code
        # the original default of headers=[] was a shared mutable list:
        # mutating one instance's headers leaked into every later
        # instance. Create a fresh list per instance instead.
        self.headers = [] if headers is None else headers
97 |
|
98 | |||
class continuereader(object):
    """File-like wrapper that sends "HTTP/1.1 100 Continue" exactly once,
    lazily, just before the first read, then delegates to the wrapped
    file object."""
    def __init__(self, f, write):
        self.f = f
        self._write = write
        self.continued = False

    def read(self, amt=-1):
        # emit the 100-continue response only on the first read
        if not self.continued:
            self.continued = True
            self._write('HTTP/1.1 100 Continue\r\n\r\n')
        return self.f.read(amt)

    def __getattr__(self, attr):
        # delegate a fixed whitelist of file methods; everything else
        # is reported as missing
        if attr not in ('close', 'readline', 'readlines', '__iter__'):
            raise AttributeError
        return getattr(self.f, attr)
114 |
|
115 | |||
115 | def _statusmessage(code): |
|
116 | def _statusmessage(code): | |
116 | responses = httpserver.basehttprequesthandler.responses |
|
117 | responses = httpserver.basehttprequesthandler.responses | |
117 | return responses.get(code, ('Error', 'Unknown error'))[0] |
|
118 | return responses.get(code, ('Error', 'Unknown error'))[0] | |
118 |
|
119 | |||
119 | def statusmessage(code, message=None): |
|
120 | def statusmessage(code, message=None): | |
120 | return '%d %s' % (code, message or _statusmessage(code)) |
|
121 | return '%d %s' % (code, message or _statusmessage(code)) | |
121 |
|
122 | |||
122 | def get_stat(spath, fn): |
|
123 | def get_stat(spath, fn): | |
123 | """stat fn if it exists, spath otherwise""" |
|
124 | """stat fn if it exists, spath otherwise""" | |
124 | cl_path = os.path.join(spath, fn) |
|
125 | cl_path = os.path.join(spath, fn) | |
125 | if os.path.exists(cl_path): |
|
126 | if os.path.exists(cl_path): | |
126 | return os.stat(cl_path) |
|
127 | return os.stat(cl_path) | |
127 | else: |
|
128 | else: | |
128 | return os.stat(spath) |
|
129 | return os.stat(spath) | |
129 |
|
130 | |||
130 | def get_mtime(spath): |
|
131 | def get_mtime(spath): | |
131 | return get_stat(spath, "00changelog.i").st_mtime |
|
132 | return get_stat(spath, "00changelog.i").st_mtime | |
132 |
|
133 | |||
133 | def staticfile(directory, fname, req): |
|
134 | def staticfile(directory, fname, req): | |
134 | """return a file inside directory with guessed Content-Type header |
|
135 | """return a file inside directory with guessed Content-Type header | |
135 |
|
136 | |||
136 | fname always uses '/' as directory separator and isn't allowed to |
|
137 | fname always uses '/' as directory separator and isn't allowed to | |
137 | contain unusual path components. |
|
138 | contain unusual path components. | |
138 | Content-Type is guessed using the mimetypes module. |
|
139 | Content-Type is guessed using the mimetypes module. | |
139 | Return an empty string if fname is illegal or file not found. |
|
140 | Return an empty string if fname is illegal or file not found. | |
140 |
|
141 | |||
141 | """ |
|
142 | """ | |
142 | parts = fname.split('/') |
|
143 | parts = fname.split('/') | |
143 | for part in parts: |
|
144 | for part in parts: | |
144 | if (part in ('', os.curdir, os.pardir) or |
|
145 | if (part in ('', os.curdir, os.pardir) or | |
145 | pycompat.ossep in part or |
|
146 | pycompat.ossep in part or | |
146 | pycompat.osaltsep is not None and pycompat.osaltsep in part): |
|
147 | pycompat.osaltsep is not None and pycompat.osaltsep in part): | |
147 | return |
|
148 | return | |
148 | fpath = os.path.join(*parts) |
|
149 | fpath = os.path.join(*parts) | |
149 | if isinstance(directory, str): |
|
150 | if isinstance(directory, str): | |
150 | directory = [directory] |
|
151 | directory = [directory] | |
151 | for d in directory: |
|
152 | for d in directory: | |
152 | path = os.path.join(d, fpath) |
|
153 | path = os.path.join(d, fpath) | |
153 | if os.path.exists(path): |
|
154 | if os.path.exists(path): | |
154 | break |
|
155 | break | |
155 | try: |
|
156 | try: | |
156 | os.stat(path) |
|
157 | os.stat(path) | |
157 | ct = mimetypes.guess_type(path)[0] or "text/plain" |
|
158 | ct = mimetypes.guess_type(path)[0] or "text/plain" | |
158 | fp = open(path, 'rb') |
|
159 | fp = open(path, 'rb') | |
159 | data = fp.read() |
|
160 | data = fp.read() | |
160 | fp.close() |
|
161 | fp.close() | |
161 | req.respond(HTTP_OK, ct, body=data) |
|
162 | req.respond(HTTP_OK, ct, body=data) | |
162 | except TypeError: |
|
163 | except TypeError: | |
163 | raise ErrorResponse(HTTP_SERVER_ERROR, 'illegal filename') |
|
164 | raise ErrorResponse(HTTP_SERVER_ERROR, 'illegal filename') | |
164 | except OSError as err: |
|
165 | except OSError as err: | |
165 | if err.errno == errno.ENOENT: |
|
166 | if err.errno == errno.ENOENT: | |
166 | raise ErrorResponse(HTTP_NOT_FOUND) |
|
167 | raise ErrorResponse(HTTP_NOT_FOUND) | |
167 | else: |
|
168 | else: | |
168 | raise ErrorResponse(HTTP_SERVER_ERROR, err.strerror) |
|
169 | raise ErrorResponse(HTTP_SERVER_ERROR, err.strerror) | |
169 |
|
170 | |||
170 | def paritygen(stripecount, offset=0): |
|
171 | def paritygen(stripecount, offset=0): | |
171 | """count parity of horizontal stripes for easier reading""" |
|
172 | """count parity of horizontal stripes for easier reading""" | |
172 | if stripecount and offset: |
|
173 | if stripecount and offset: | |
173 | # account for offset, e.g. due to building the list in reverse |
|
174 | # account for offset, e.g. due to building the list in reverse | |
174 | count = (stripecount + offset) % stripecount |
|
175 | count = (stripecount + offset) % stripecount | |
175 | parity = (stripecount + offset) / stripecount & 1 |
|
176 | parity = (stripecount + offset) / stripecount & 1 | |
176 | else: |
|
177 | else: | |
177 | count = 0 |
|
178 | count = 0 | |
178 | parity = 0 |
|
179 | parity = 0 | |
179 | while True: |
|
180 | while True: | |
180 | yield parity |
|
181 | yield parity | |
181 | count += 1 |
|
182 | count += 1 | |
182 | if stripecount and count >= stripecount: |
|
183 | if stripecount and count >= stripecount: | |
183 | parity = 1 - parity |
|
184 | parity = 1 - parity | |
184 | count = 0 |
|
185 | count = 0 | |
185 |
|
186 | |||
186 | def get_contact(config): |
|
187 | def get_contact(config): | |
187 | """Return repo contact information or empty string. |
|
188 | """Return repo contact information or empty string. | |
188 |
|
189 | |||
189 | web.contact is the primary source, but if that is not set, try |
|
190 | web.contact is the primary source, but if that is not set, try | |
190 | ui.username or $EMAIL as a fallback to display something useful. |
|
191 | ui.username or $EMAIL as a fallback to display something useful. | |
191 | """ |
|
192 | """ | |
192 | return (config("web", "contact") or |
|
193 | return (config("web", "contact") or | |
193 | config("ui", "username") or |
|
194 | config("ui", "username") or | |
194 |
|
|
195 | encoding.environ.get("EMAIL") or "") | |
195 |
|
196 | |||
196 | def caching(web, req): |
|
197 | def caching(web, req): | |
197 | tag = 'W/"%s"' % web.mtime |
|
198 | tag = 'W/"%s"' % web.mtime | |
198 | if req.env.get('HTTP_IF_NONE_MATCH') == tag: |
|
199 | if req.env.get('HTTP_IF_NONE_MATCH') == tag: | |
199 | raise ErrorResponse(HTTP_NOT_MODIFIED) |
|
200 | raise ErrorResponse(HTTP_NOT_MODIFIED) | |
200 | req.headers.append(('ETag', tag)) |
|
201 | req.headers.append(('ETag', tag)) |
@@ -1,469 +1,470 | |||||
1 | # hgweb/hgweb_mod.py - Web interface for a repository. |
|
1 | # hgweb/hgweb_mod.py - Web interface for a repository. | |
2 | # |
|
2 | # | |
3 | # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net> |
|
3 | # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net> | |
4 | # Copyright 2005-2007 Matt Mackall <mpm@selenic.com> |
|
4 | # Copyright 2005-2007 Matt Mackall <mpm@selenic.com> | |
5 | # |
|
5 | # | |
6 | # This software may be used and distributed according to the terms of the |
|
6 | # This software may be used and distributed according to the terms of the | |
7 | # GNU General Public License version 2 or any later version. |
|
7 | # GNU General Public License version 2 or any later version. | |
8 |
|
8 | |||
9 | from __future__ import absolute_import |
|
9 | from __future__ import absolute_import | |
10 |
|
10 | |||
11 | import contextlib |
|
11 | import contextlib | |
12 | import os |
|
12 | import os | |
13 |
|
13 | |||
14 | from .common import ( |
|
14 | from .common import ( | |
15 | ErrorResponse, |
|
15 | ErrorResponse, | |
16 | HTTP_BAD_REQUEST, |
|
16 | HTTP_BAD_REQUEST, | |
17 | HTTP_NOT_FOUND, |
|
17 | HTTP_NOT_FOUND, | |
18 | HTTP_NOT_MODIFIED, |
|
18 | HTTP_NOT_MODIFIED, | |
19 | HTTP_OK, |
|
19 | HTTP_OK, | |
20 | HTTP_SERVER_ERROR, |
|
20 | HTTP_SERVER_ERROR, | |
21 | caching, |
|
21 | caching, | |
22 | permhooks, |
|
22 | permhooks, | |
23 | ) |
|
23 | ) | |
24 | from .request import wsgirequest |
|
24 | from .request import wsgirequest | |
25 |
|
25 | |||
26 | from .. import ( |
|
26 | from .. import ( | |
27 | encoding, |
|
27 | encoding, | |
28 | error, |
|
28 | error, | |
29 | hg, |
|
29 | hg, | |
30 | hook, |
|
30 | hook, | |
31 | profiling, |
|
31 | profiling, | |
32 | repoview, |
|
32 | repoview, | |
33 | templatefilters, |
|
33 | templatefilters, | |
34 | templater, |
|
34 | templater, | |
35 | ui as uimod, |
|
35 | ui as uimod, | |
36 | util, |
|
36 | util, | |
37 | ) |
|
37 | ) | |
38 |
|
38 | |||
39 | from . import ( |
|
39 | from . import ( | |
40 | protocol, |
|
40 | protocol, | |
41 | webcommands, |
|
41 | webcommands, | |
42 | webutil, |
|
42 | webutil, | |
43 | wsgicgi, |
|
43 | wsgicgi, | |
44 | ) |
|
44 | ) | |
45 |
|
45 | |||
46 | perms = { |
|
46 | perms = { | |
47 | 'changegroup': 'pull', |
|
47 | 'changegroup': 'pull', | |
48 | 'changegroupsubset': 'pull', |
|
48 | 'changegroupsubset': 'pull', | |
49 | 'getbundle': 'pull', |
|
49 | 'getbundle': 'pull', | |
50 | 'stream_out': 'pull', |
|
50 | 'stream_out': 'pull', | |
51 | 'listkeys': 'pull', |
|
51 | 'listkeys': 'pull', | |
52 | 'unbundle': 'push', |
|
52 | 'unbundle': 'push', | |
53 | 'pushkey': 'push', |
|
53 | 'pushkey': 'push', | |
54 | } |
|
54 | } | |
55 |
|
55 | |||
56 | def makebreadcrumb(url, prefix=''): |
|
56 | def makebreadcrumb(url, prefix=''): | |
57 | '''Return a 'URL breadcrumb' list |
|
57 | '''Return a 'URL breadcrumb' list | |
58 |
|
58 | |||
59 | A 'URL breadcrumb' is a list of URL-name pairs, |
|
59 | A 'URL breadcrumb' is a list of URL-name pairs, | |
60 | corresponding to each of the path items on a URL. |
|
60 | corresponding to each of the path items on a URL. | |
61 | This can be used to create path navigation entries. |
|
61 | This can be used to create path navigation entries. | |
62 | ''' |
|
62 | ''' | |
63 | if url.endswith('/'): |
|
63 | if url.endswith('/'): | |
64 | url = url[:-1] |
|
64 | url = url[:-1] | |
65 | if prefix: |
|
65 | if prefix: | |
66 | url = '/' + prefix + url |
|
66 | url = '/' + prefix + url | |
67 | relpath = url |
|
67 | relpath = url | |
68 | if relpath.startswith('/'): |
|
68 | if relpath.startswith('/'): | |
69 | relpath = relpath[1:] |
|
69 | relpath = relpath[1:] | |
70 |
|
70 | |||
71 | breadcrumb = [] |
|
71 | breadcrumb = [] | |
72 | urlel = url |
|
72 | urlel = url | |
73 | pathitems = [''] + relpath.split('/') |
|
73 | pathitems = [''] + relpath.split('/') | |
74 | for pathel in reversed(pathitems): |
|
74 | for pathel in reversed(pathitems): | |
75 | if not pathel or not urlel: |
|
75 | if not pathel or not urlel: | |
76 | break |
|
76 | break | |
77 | breadcrumb.append({'url': urlel, 'name': pathel}) |
|
77 | breadcrumb.append({'url': urlel, 'name': pathel}) | |
78 | urlel = os.path.dirname(urlel) |
|
78 | urlel = os.path.dirname(urlel) | |
79 | return reversed(breadcrumb) |
|
79 | return reversed(breadcrumb) | |
80 |
|
80 | |||
81 | class requestcontext(object): |
|
81 | class requestcontext(object): | |
82 | """Holds state/context for an individual request. |
|
82 | """Holds state/context for an individual request. | |
83 |
|
83 | |||
84 | Servers can be multi-threaded. Holding state on the WSGI application |
|
84 | Servers can be multi-threaded. Holding state on the WSGI application | |
85 | is prone to race conditions. Instances of this class exist to hold |
|
85 | is prone to race conditions. Instances of this class exist to hold | |
86 | mutable and race-free state for requests. |
|
86 | mutable and race-free state for requests. | |
87 | """ |
|
87 | """ | |
88 | def __init__(self, app, repo): |
|
88 | def __init__(self, app, repo): | |
89 | self.repo = repo |
|
89 | self.repo = repo | |
90 | self.reponame = app.reponame |
|
90 | self.reponame = app.reponame | |
91 |
|
91 | |||
92 | self.archives = ('zip', 'gz', 'bz2') |
|
92 | self.archives = ('zip', 'gz', 'bz2') | |
93 |
|
93 | |||
94 | self.maxchanges = self.configint('web', 'maxchanges', 10) |
|
94 | self.maxchanges = self.configint('web', 'maxchanges', 10) | |
95 | self.stripecount = self.configint('web', 'stripes', 1) |
|
95 | self.stripecount = self.configint('web', 'stripes', 1) | |
96 | self.maxshortchanges = self.configint('web', 'maxshortchanges', 60) |
|
96 | self.maxshortchanges = self.configint('web', 'maxshortchanges', 60) | |
97 | self.maxfiles = self.configint('web', 'maxfiles', 10) |
|
97 | self.maxfiles = self.configint('web', 'maxfiles', 10) | |
98 | self.allowpull = self.configbool('web', 'allowpull', True) |
|
98 | self.allowpull = self.configbool('web', 'allowpull', True) | |
99 |
|
99 | |||
100 | # we use untrusted=False to prevent a repo owner from using |
|
100 | # we use untrusted=False to prevent a repo owner from using | |
101 | # web.templates in .hg/hgrc to get access to any file readable |
|
101 | # web.templates in .hg/hgrc to get access to any file readable | |
102 | # by the user running the CGI script |
|
102 | # by the user running the CGI script | |
103 | self.templatepath = self.config('web', 'templates', untrusted=False) |
|
103 | self.templatepath = self.config('web', 'templates', untrusted=False) | |
104 |
|
104 | |||
105 | # This object is more expensive to build than simple config values. |
|
105 | # This object is more expensive to build than simple config values. | |
106 | # It is shared across requests. The app will replace the object |
|
106 | # It is shared across requests. The app will replace the object | |
107 | # if it is updated. Since this is a reference and nothing should |
|
107 | # if it is updated. Since this is a reference and nothing should | |
108 | # modify the underlying object, it should be constant for the lifetime |
|
108 | # modify the underlying object, it should be constant for the lifetime | |
109 | # of the request. |
|
109 | # of the request. | |
110 | self.websubtable = app.websubtable |
|
110 | self.websubtable = app.websubtable | |
111 |
|
111 | |||
112 | # Trust the settings from the .hg/hgrc files by default. |
|
112 | # Trust the settings from the .hg/hgrc files by default. | |
113 | def config(self, section, name, default=None, untrusted=True): |
|
113 | def config(self, section, name, default=None, untrusted=True): | |
114 | return self.repo.ui.config(section, name, default, |
|
114 | return self.repo.ui.config(section, name, default, | |
115 | untrusted=untrusted) |
|
115 | untrusted=untrusted) | |
116 |
|
116 | |||
117 | def configbool(self, section, name, default=False, untrusted=True): |
|
117 | def configbool(self, section, name, default=False, untrusted=True): | |
118 | return self.repo.ui.configbool(section, name, default, |
|
118 | return self.repo.ui.configbool(section, name, default, | |
119 | untrusted=untrusted) |
|
119 | untrusted=untrusted) | |
120 |
|
120 | |||
121 | def configint(self, section, name, default=None, untrusted=True): |
|
121 | def configint(self, section, name, default=None, untrusted=True): | |
122 | return self.repo.ui.configint(section, name, default, |
|
122 | return self.repo.ui.configint(section, name, default, | |
123 | untrusted=untrusted) |
|
123 | untrusted=untrusted) | |
124 |
|
124 | |||
125 | def configlist(self, section, name, default=None, untrusted=True): |
|
125 | def configlist(self, section, name, default=None, untrusted=True): | |
126 | return self.repo.ui.configlist(section, name, default, |
|
126 | return self.repo.ui.configlist(section, name, default, | |
127 | untrusted=untrusted) |
|
127 | untrusted=untrusted) | |
128 |
|
128 | |||
129 | archivespecs = { |
|
129 | archivespecs = { | |
130 | 'bz2': ('application/x-bzip2', 'tbz2', '.tar.bz2', None), |
|
130 | 'bz2': ('application/x-bzip2', 'tbz2', '.tar.bz2', None), | |
131 | 'gz': ('application/x-gzip', 'tgz', '.tar.gz', None), |
|
131 | 'gz': ('application/x-gzip', 'tgz', '.tar.gz', None), | |
132 | 'zip': ('application/zip', 'zip', '.zip', None), |
|
132 | 'zip': ('application/zip', 'zip', '.zip', None), | |
133 | } |
|
133 | } | |
134 |
|
134 | |||
135 | def archivelist(self, nodeid): |
|
135 | def archivelist(self, nodeid): | |
136 | allowed = self.configlist('web', 'allow_archive') |
|
136 | allowed = self.configlist('web', 'allow_archive') | |
137 | for typ, spec in self.archivespecs.iteritems(): |
|
137 | for typ, spec in self.archivespecs.iteritems(): | |
138 | if typ in allowed or self.configbool('web', 'allow%s' % typ): |
|
138 | if typ in allowed or self.configbool('web', 'allow%s' % typ): | |
139 | yield {'type': typ, 'extension': spec[2], 'node': nodeid} |
|
139 | yield {'type': typ, 'extension': spec[2], 'node': nodeid} | |
140 |
|
140 | |||
141 | def templater(self, req): |
|
141 | def templater(self, req): | |
142 | # determine scheme, port and server name |
|
142 | # determine scheme, port and server name | |
143 | # this is needed to create absolute urls |
|
143 | # this is needed to create absolute urls | |
144 |
|
144 | |||
145 | proto = req.env.get('wsgi.url_scheme') |
|
145 | proto = req.env.get('wsgi.url_scheme') | |
146 | if proto == 'https': |
|
146 | if proto == 'https': | |
147 | proto = 'https' |
|
147 | proto = 'https' | |
148 | default_port = '443' |
|
148 | default_port = '443' | |
149 | else: |
|
149 | else: | |
150 | proto = 'http' |
|
150 | proto = 'http' | |
151 | default_port = '80' |
|
151 | default_port = '80' | |
152 |
|
152 | |||
153 | port = req.env['SERVER_PORT'] |
|
153 | port = req.env['SERVER_PORT'] | |
154 | port = port != default_port and (':' + port) or '' |
|
154 | port = port != default_port and (':' + port) or '' | |
155 | urlbase = '%s://%s%s' % (proto, req.env['SERVER_NAME'], port) |
|
155 | urlbase = '%s://%s%s' % (proto, req.env['SERVER_NAME'], port) | |
156 | logourl = self.config('web', 'logourl', 'https://mercurial-scm.org/') |
|
156 | logourl = self.config('web', 'logourl', 'https://mercurial-scm.org/') | |
157 | logoimg = self.config('web', 'logoimg', 'hglogo.png') |
|
157 | logoimg = self.config('web', 'logoimg', 'hglogo.png') | |
158 | staticurl = self.config('web', 'staticurl') or req.url + 'static/' |
|
158 | staticurl = self.config('web', 'staticurl') or req.url + 'static/' | |
159 | if not staticurl.endswith('/'): |
|
159 | if not staticurl.endswith('/'): | |
160 | staticurl += '/' |
|
160 | staticurl += '/' | |
161 |
|
161 | |||
162 | # some functions for the templater |
|
162 | # some functions for the templater | |
163 |
|
163 | |||
164 | def motd(**map): |
|
164 | def motd(**map): | |
165 | yield self.config('web', 'motd', '') |
|
165 | yield self.config('web', 'motd', '') | |
166 |
|
166 | |||
167 | # figure out which style to use |
|
167 | # figure out which style to use | |
168 |
|
168 | |||
169 | vars = {} |
|
169 | vars = {} | |
170 | styles = ( |
|
170 | styles = ( | |
171 | req.form.get('style', [None])[0], |
|
171 | req.form.get('style', [None])[0], | |
172 | self.config('web', 'style'), |
|
172 | self.config('web', 'style'), | |
173 | 'paper', |
|
173 | 'paper', | |
174 | ) |
|
174 | ) | |
175 | style, mapfile = templater.stylemap(styles, self.templatepath) |
|
175 | style, mapfile = templater.stylemap(styles, self.templatepath) | |
176 | if style == styles[0]: |
|
176 | if style == styles[0]: | |
177 | vars['style'] = style |
|
177 | vars['style'] = style | |
178 |
|
178 | |||
179 | start = req.url[-1] == '?' and '&' or '?' |
|
179 | start = req.url[-1] == '?' and '&' or '?' | |
180 | sessionvars = webutil.sessionvars(vars, start) |
|
180 | sessionvars = webutil.sessionvars(vars, start) | |
181 |
|
181 | |||
182 | if not self.reponame: |
|
182 | if not self.reponame: | |
183 | self.reponame = (self.config('web', 'name') |
|
183 | self.reponame = (self.config('web', 'name') | |
184 | or req.env.get('REPO_NAME') |
|
184 | or req.env.get('REPO_NAME') | |
185 | or req.url.strip('/') or self.repo.root) |
|
185 | or req.url.strip('/') or self.repo.root) | |
186 |
|
186 | |||
187 | def websubfilter(text): |
|
187 | def websubfilter(text): | |
188 | return templatefilters.websub(text, self.websubtable) |
|
188 | return templatefilters.websub(text, self.websubtable) | |
189 |
|
189 | |||
190 | # create the templater |
|
190 | # create the templater | |
191 |
|
191 | |||
192 | defaults = { |
|
192 | defaults = { | |
193 | 'url': req.url, |
|
193 | 'url': req.url, | |
194 | 'logourl': logourl, |
|
194 | 'logourl': logourl, | |
195 | 'logoimg': logoimg, |
|
195 | 'logoimg': logoimg, | |
196 | 'staticurl': staticurl, |
|
196 | 'staticurl': staticurl, | |
197 | 'urlbase': urlbase, |
|
197 | 'urlbase': urlbase, | |
198 | 'repo': self.reponame, |
|
198 | 'repo': self.reponame, | |
199 | 'encoding': encoding.encoding, |
|
199 | 'encoding': encoding.encoding, | |
200 | 'motd': motd, |
|
200 | 'motd': motd, | |
201 | 'sessionvars': sessionvars, |
|
201 | 'sessionvars': sessionvars, | |
202 | 'pathdef': makebreadcrumb(req.url), |
|
202 | 'pathdef': makebreadcrumb(req.url), | |
203 | 'style': style, |
|
203 | 'style': style, | |
204 | } |
|
204 | } | |
205 | tmpl = templater.templater.frommapfile(mapfile, |
|
205 | tmpl = templater.templater.frommapfile(mapfile, | |
206 | filters={'websub': websubfilter}, |
|
206 | filters={'websub': websubfilter}, | |
207 | defaults=defaults) |
|
207 | defaults=defaults) | |
208 | return tmpl |
|
208 | return tmpl | |
209 |
|
209 | |||
210 |
|
210 | |||
211 | class hgweb(object): |
|
211 | class hgweb(object): | |
212 | """HTTP server for individual repositories. |
|
212 | """HTTP server for individual repositories. | |
213 |
|
213 | |||
214 | Instances of this class serve HTTP responses for a particular |
|
214 | Instances of this class serve HTTP responses for a particular | |
215 | repository. |
|
215 | repository. | |
216 |
|
216 | |||
217 | Instances are typically used as WSGI applications. |
|
217 | Instances are typically used as WSGI applications. | |
218 |
|
218 | |||
219 | Some servers are multi-threaded. On these servers, there may |
|
219 | Some servers are multi-threaded. On these servers, there may | |
220 | be multiple active threads inside __call__. |
|
220 | be multiple active threads inside __call__. | |
221 | """ |
|
221 | """ | |
222 | def __init__(self, repo, name=None, baseui=None): |
|
222 | def __init__(self, repo, name=None, baseui=None): | |
223 | if isinstance(repo, str): |
|
223 | if isinstance(repo, str): | |
224 | if baseui: |
|
224 | if baseui: | |
225 | u = baseui.copy() |
|
225 | u = baseui.copy() | |
226 | else: |
|
226 | else: | |
227 | u = uimod.ui.load() |
|
227 | u = uimod.ui.load() | |
228 | r = hg.repository(u, repo) |
|
228 | r = hg.repository(u, repo) | |
229 | else: |
|
229 | else: | |
230 | # we trust caller to give us a private copy |
|
230 | # we trust caller to give us a private copy | |
231 | r = repo |
|
231 | r = repo | |
232 |
|
232 | |||
233 | r.ui.setconfig('ui', 'report_untrusted', 'off', 'hgweb') |
|
233 | r.ui.setconfig('ui', 'report_untrusted', 'off', 'hgweb') | |
234 | r.baseui.setconfig('ui', 'report_untrusted', 'off', 'hgweb') |
|
234 | r.baseui.setconfig('ui', 'report_untrusted', 'off', 'hgweb') | |
235 | r.ui.setconfig('ui', 'nontty', 'true', 'hgweb') |
|
235 | r.ui.setconfig('ui', 'nontty', 'true', 'hgweb') | |
236 | r.baseui.setconfig('ui', 'nontty', 'true', 'hgweb') |
|
236 | r.baseui.setconfig('ui', 'nontty', 'true', 'hgweb') | |
237 | # resolve file patterns relative to repo root |
|
237 | # resolve file patterns relative to repo root | |
238 | r.ui.setconfig('ui', 'forcecwd', r.root, 'hgweb') |
|
238 | r.ui.setconfig('ui', 'forcecwd', r.root, 'hgweb') | |
239 | r.baseui.setconfig('ui', 'forcecwd', r.root, 'hgweb') |
|
239 | r.baseui.setconfig('ui', 'forcecwd', r.root, 'hgweb') | |
240 | # displaying bundling progress bar while serving feel wrong and may |
|
240 | # displaying bundling progress bar while serving feel wrong and may | |
241 | # break some wsgi implementation. |
|
241 | # break some wsgi implementation. | |
242 | r.ui.setconfig('progress', 'disable', 'true', 'hgweb') |
|
242 | r.ui.setconfig('progress', 'disable', 'true', 'hgweb') | |
243 | r.baseui.setconfig('progress', 'disable', 'true', 'hgweb') |
|
243 | r.baseui.setconfig('progress', 'disable', 'true', 'hgweb') | |
244 | self._repos = [hg.cachedlocalrepo(self._webifyrepo(r))] |
|
244 | self._repos = [hg.cachedlocalrepo(self._webifyrepo(r))] | |
245 | self._lastrepo = self._repos[0] |
|
245 | self._lastrepo = self._repos[0] | |
246 | hook.redirect(True) |
|
246 | hook.redirect(True) | |
247 | self.reponame = name |
|
247 | self.reponame = name | |
248 |
|
248 | |||
249 | def _webifyrepo(self, repo): |
|
249 | def _webifyrepo(self, repo): | |
250 | repo = getwebview(repo) |
|
250 | repo = getwebview(repo) | |
251 | self.websubtable = webutil.getwebsubs(repo) |
|
251 | self.websubtable = webutil.getwebsubs(repo) | |
252 | return repo |
|
252 | return repo | |
253 |
|
253 | |||
254 | @contextlib.contextmanager |
|
254 | @contextlib.contextmanager | |
255 | def _obtainrepo(self): |
|
255 | def _obtainrepo(self): | |
256 | """Obtain a repo unique to the caller. |
|
256 | """Obtain a repo unique to the caller. | |
257 |
|
257 | |||
258 | Internally we maintain a stack of cachedlocalrepo instances |
|
258 | Internally we maintain a stack of cachedlocalrepo instances | |
259 | to be handed out. If one is available, we pop it and return it, |
|
259 | to be handed out. If one is available, we pop it and return it, | |
260 | ensuring it is up to date in the process. If one is not available, |
|
260 | ensuring it is up to date in the process. If one is not available, | |
261 | we clone the most recently used repo instance and return it. |
|
261 | we clone the most recently used repo instance and return it. | |
262 |
|
262 | |||
263 | It is currently possible for the stack to grow without bounds |
|
263 | It is currently possible for the stack to grow without bounds | |
264 | if the server allows infinite threads. However, servers should |
|
264 | if the server allows infinite threads. However, servers should | |
265 | have a thread limit, thus establishing our limit. |
|
265 | have a thread limit, thus establishing our limit. | |
266 | """ |
|
266 | """ | |
267 | if self._repos: |
|
267 | if self._repos: | |
268 | cached = self._repos.pop() |
|
268 | cached = self._repos.pop() | |
269 | r, created = cached.fetch() |
|
269 | r, created = cached.fetch() | |
270 | else: |
|
270 | else: | |
271 | cached = self._lastrepo.copy() |
|
271 | cached = self._lastrepo.copy() | |
272 | r, created = cached.fetch() |
|
272 | r, created = cached.fetch() | |
273 | if created: |
|
273 | if created: | |
274 | r = self._webifyrepo(r) |
|
274 | r = self._webifyrepo(r) | |
275 |
|
275 | |||
276 | self._lastrepo = cached |
|
276 | self._lastrepo = cached | |
277 | self.mtime = cached.mtime |
|
277 | self.mtime = cached.mtime | |
278 | try: |
|
278 | try: | |
279 | yield r |
|
279 | yield r | |
280 | finally: |
|
280 | finally: | |
281 | self._repos.append(cached) |
|
281 | self._repos.append(cached) | |
282 |
|
282 | |||
283 | def run(self): |
|
283 | def run(self): | |
284 | """Start a server from CGI environment. |
|
284 | """Start a server from CGI environment. | |
285 |
|
285 | |||
286 | Modern servers should be using WSGI and should avoid this |
|
286 | Modern servers should be using WSGI and should avoid this | |
287 | method, if possible. |
|
287 | method, if possible. | |
288 | """ |
|
288 | """ | |
289 |
if not |
|
289 | if not encoding.environ.get('GATEWAY_INTERFACE', | |
|
290 | '').startswith("CGI/1."): | |||
290 | raise RuntimeError("This function is only intended to be " |
|
291 | raise RuntimeError("This function is only intended to be " | |
291 | "called while running as a CGI script.") |
|
292 | "called while running as a CGI script.") | |
292 | wsgicgi.launch(self) |
|
293 | wsgicgi.launch(self) | |
293 |
|
294 | |||
294 | def __call__(self, env, respond): |
|
295 | def __call__(self, env, respond): | |
295 | """Run the WSGI application. |
|
296 | """Run the WSGI application. | |
296 |
|
297 | |||
297 | This may be called by multiple threads. |
|
298 | This may be called by multiple threads. | |
298 | """ |
|
299 | """ | |
299 | req = wsgirequest(env, respond) |
|
300 | req = wsgirequest(env, respond) | |
300 | return self.run_wsgi(req) |
|
301 | return self.run_wsgi(req) | |
301 |
|
302 | |||
302 | def run_wsgi(self, req): |
|
303 | def run_wsgi(self, req): | |
303 | """Internal method to run the WSGI application. |
|
304 | """Internal method to run the WSGI application. | |
304 |
|
305 | |||
305 | This is typically only called by Mercurial. External consumers |
|
306 | This is typically only called by Mercurial. External consumers | |
306 | should be using instances of this class as the WSGI application. |
|
307 | should be using instances of this class as the WSGI application. | |
307 | """ |
|
308 | """ | |
308 | with self._obtainrepo() as repo: |
|
309 | with self._obtainrepo() as repo: | |
309 | with profiling.maybeprofile(repo.ui): |
|
310 | with profiling.maybeprofile(repo.ui): | |
310 | for r in self._runwsgi(req, repo): |
|
311 | for r in self._runwsgi(req, repo): | |
311 | yield r |
|
312 | yield r | |
312 |
|
313 | |||
313 | def _runwsgi(self, req, repo): |
|
314 | def _runwsgi(self, req, repo): | |
314 | rctx = requestcontext(self, repo) |
|
315 | rctx = requestcontext(self, repo) | |
315 |
|
316 | |||
316 | # This state is global across all threads. |
|
317 | # This state is global across all threads. | |
317 | encoding.encoding = rctx.config('web', 'encoding', encoding.encoding) |
|
318 | encoding.encoding = rctx.config('web', 'encoding', encoding.encoding) | |
318 | rctx.repo.ui.environ = req.env |
|
319 | rctx.repo.ui.environ = req.env | |
319 |
|
320 | |||
320 | # work with CGI variables to create coherent structure |
|
321 | # work with CGI variables to create coherent structure | |
321 | # use SCRIPT_NAME, PATH_INFO and QUERY_STRING as well as our REPO_NAME |
|
322 | # use SCRIPT_NAME, PATH_INFO and QUERY_STRING as well as our REPO_NAME | |
322 |
|
323 | |||
323 | req.url = req.env['SCRIPT_NAME'] |
|
324 | req.url = req.env['SCRIPT_NAME'] | |
324 | if not req.url.endswith('/'): |
|
325 | if not req.url.endswith('/'): | |
325 | req.url += '/' |
|
326 | req.url += '/' | |
326 | if 'REPO_NAME' in req.env: |
|
327 | if 'REPO_NAME' in req.env: | |
327 | req.url += req.env['REPO_NAME'] + '/' |
|
328 | req.url += req.env['REPO_NAME'] + '/' | |
328 |
|
329 | |||
329 | if 'PATH_INFO' in req.env: |
|
330 | if 'PATH_INFO' in req.env: | |
330 | parts = req.env['PATH_INFO'].strip('/').split('/') |
|
331 | parts = req.env['PATH_INFO'].strip('/').split('/') | |
331 | repo_parts = req.env.get('REPO_NAME', '').split('/') |
|
332 | repo_parts = req.env.get('REPO_NAME', '').split('/') | |
332 | if parts[:len(repo_parts)] == repo_parts: |
|
333 | if parts[:len(repo_parts)] == repo_parts: | |
333 | parts = parts[len(repo_parts):] |
|
334 | parts = parts[len(repo_parts):] | |
334 | query = '/'.join(parts) |
|
335 | query = '/'.join(parts) | |
335 | else: |
|
336 | else: | |
336 | query = req.env['QUERY_STRING'].partition('&')[0] |
|
337 | query = req.env['QUERY_STRING'].partition('&')[0] | |
337 | query = query.partition(';')[0] |
|
338 | query = query.partition(';')[0] | |
338 |
|
339 | |||
339 | # process this if it's a protocol request |
|
340 | # process this if it's a protocol request | |
340 | # protocol bits don't need to create any URLs |
|
341 | # protocol bits don't need to create any URLs | |
341 | # and the clients always use the old URL structure |
|
342 | # and the clients always use the old URL structure | |
342 |
|
343 | |||
343 | cmd = req.form.get('cmd', [''])[0] |
|
344 | cmd = req.form.get('cmd', [''])[0] | |
344 | if protocol.iscmd(cmd): |
|
345 | if protocol.iscmd(cmd): | |
345 | try: |
|
346 | try: | |
346 | if query: |
|
347 | if query: | |
347 | raise ErrorResponse(HTTP_NOT_FOUND) |
|
348 | raise ErrorResponse(HTTP_NOT_FOUND) | |
348 | if cmd in perms: |
|
349 | if cmd in perms: | |
349 | self.check_perm(rctx, req, perms[cmd]) |
|
350 | self.check_perm(rctx, req, perms[cmd]) | |
350 | return protocol.call(rctx.repo, req, cmd) |
|
351 | return protocol.call(rctx.repo, req, cmd) | |
351 | except ErrorResponse as inst: |
|
352 | except ErrorResponse as inst: | |
352 | # A client that sends unbundle without 100-continue will |
|
353 | # A client that sends unbundle without 100-continue will | |
353 | # break if we respond early. |
|
354 | # break if we respond early. | |
354 | if (cmd == 'unbundle' and |
|
355 | if (cmd == 'unbundle' and | |
355 | (req.env.get('HTTP_EXPECT', |
|
356 | (req.env.get('HTTP_EXPECT', | |
356 | '').lower() != '100-continue') or |
|
357 | '').lower() != '100-continue') or | |
357 | req.env.get('X-HgHttp2', '')): |
|
358 | req.env.get('X-HgHttp2', '')): | |
358 | req.drain() |
|
359 | req.drain() | |
359 | else: |
|
360 | else: | |
360 | req.headers.append(('Connection', 'Close')) |
|
361 | req.headers.append(('Connection', 'Close')) | |
361 | req.respond(inst, protocol.HGTYPE, |
|
362 | req.respond(inst, protocol.HGTYPE, | |
362 | body='0\n%s\n' % inst) |
|
363 | body='0\n%s\n' % inst) | |
363 | return '' |
|
364 | return '' | |
364 |
|
365 | |||
365 | # translate user-visible url structure to internal structure |
|
366 | # translate user-visible url structure to internal structure | |
366 |
|
367 | |||
367 | args = query.split('/', 2) |
|
368 | args = query.split('/', 2) | |
368 | if 'cmd' not in req.form and args and args[0]: |
|
369 | if 'cmd' not in req.form and args and args[0]: | |
369 |
|
370 | |||
370 | cmd = args.pop(0) |
|
371 | cmd = args.pop(0) | |
371 | style = cmd.rfind('-') |
|
372 | style = cmd.rfind('-') | |
372 | if style != -1: |
|
373 | if style != -1: | |
373 | req.form['style'] = [cmd[:style]] |
|
374 | req.form['style'] = [cmd[:style]] | |
374 | cmd = cmd[style + 1:] |
|
375 | cmd = cmd[style + 1:] | |
375 |
|
376 | |||
376 | # avoid accepting e.g. style parameter as command |
|
377 | # avoid accepting e.g. style parameter as command | |
377 | if util.safehasattr(webcommands, cmd): |
|
378 | if util.safehasattr(webcommands, cmd): | |
378 | req.form['cmd'] = [cmd] |
|
379 | req.form['cmd'] = [cmd] | |
379 |
|
380 | |||
380 | if cmd == 'static': |
|
381 | if cmd == 'static': | |
381 | req.form['file'] = ['/'.join(args)] |
|
382 | req.form['file'] = ['/'.join(args)] | |
382 | else: |
|
383 | else: | |
383 | if args and args[0]: |
|
384 | if args and args[0]: | |
384 | node = args.pop(0).replace('%2F', '/') |
|
385 | node = args.pop(0).replace('%2F', '/') | |
385 | req.form['node'] = [node] |
|
386 | req.form['node'] = [node] | |
386 | if args: |
|
387 | if args: | |
387 | req.form['file'] = args |
|
388 | req.form['file'] = args | |
388 |
|
389 | |||
389 | ua = req.env.get('HTTP_USER_AGENT', '') |
|
390 | ua = req.env.get('HTTP_USER_AGENT', '') | |
390 | if cmd == 'rev' and 'mercurial' in ua: |
|
391 | if cmd == 'rev' and 'mercurial' in ua: | |
391 | req.form['style'] = ['raw'] |
|
392 | req.form['style'] = ['raw'] | |
392 |
|
393 | |||
393 | if cmd == 'archive': |
|
394 | if cmd == 'archive': | |
394 | fn = req.form['node'][0] |
|
395 | fn = req.form['node'][0] | |
395 | for type_, spec in rctx.archivespecs.iteritems(): |
|
396 | for type_, spec in rctx.archivespecs.iteritems(): | |
396 | ext = spec[2] |
|
397 | ext = spec[2] | |
397 | if fn.endswith(ext): |
|
398 | if fn.endswith(ext): | |
398 | req.form['node'] = [fn[:-len(ext)]] |
|
399 | req.form['node'] = [fn[:-len(ext)]] | |
399 | req.form['type'] = [type_] |
|
400 | req.form['type'] = [type_] | |
400 |
|
401 | |||
401 | # process the web interface request |
|
402 | # process the web interface request | |
402 |
|
403 | |||
403 | try: |
|
404 | try: | |
404 | tmpl = rctx.templater(req) |
|
405 | tmpl = rctx.templater(req) | |
405 | ctype = tmpl('mimetype', encoding=encoding.encoding) |
|
406 | ctype = tmpl('mimetype', encoding=encoding.encoding) | |
406 | ctype = templater.stringify(ctype) |
|
407 | ctype = templater.stringify(ctype) | |
407 |
|
408 | |||
408 | # check read permissions non-static content |
|
409 | # check read permissions non-static content | |
409 | if cmd != 'static': |
|
410 | if cmd != 'static': | |
410 | self.check_perm(rctx, req, None) |
|
411 | self.check_perm(rctx, req, None) | |
411 |
|
412 | |||
412 | if cmd == '': |
|
413 | if cmd == '': | |
413 | req.form['cmd'] = [tmpl.cache['default']] |
|
414 | req.form['cmd'] = [tmpl.cache['default']] | |
414 | cmd = req.form['cmd'][0] |
|
415 | cmd = req.form['cmd'][0] | |
415 |
|
416 | |||
416 | if rctx.configbool('web', 'cache', True): |
|
417 | if rctx.configbool('web', 'cache', True): | |
417 | caching(self, req) # sets ETag header or raises NOT_MODIFIED |
|
418 | caching(self, req) # sets ETag header or raises NOT_MODIFIED | |
418 | if cmd not in webcommands.__all__: |
|
419 | if cmd not in webcommands.__all__: | |
419 | msg = 'no such method: %s' % cmd |
|
420 | msg = 'no such method: %s' % cmd | |
420 | raise ErrorResponse(HTTP_BAD_REQUEST, msg) |
|
421 | raise ErrorResponse(HTTP_BAD_REQUEST, msg) | |
421 | elif cmd == 'file' and 'raw' in req.form.get('style', []): |
|
422 | elif cmd == 'file' and 'raw' in req.form.get('style', []): | |
422 | rctx.ctype = ctype |
|
423 | rctx.ctype = ctype | |
423 | content = webcommands.rawfile(rctx, req, tmpl) |
|
424 | content = webcommands.rawfile(rctx, req, tmpl) | |
424 | else: |
|
425 | else: | |
425 | content = getattr(webcommands, cmd)(rctx, req, tmpl) |
|
426 | content = getattr(webcommands, cmd)(rctx, req, tmpl) | |
426 | req.respond(HTTP_OK, ctype) |
|
427 | req.respond(HTTP_OK, ctype) | |
427 |
|
428 | |||
428 | return content |
|
429 | return content | |
429 |
|
430 | |||
430 | except (error.LookupError, error.RepoLookupError) as err: |
|
431 | except (error.LookupError, error.RepoLookupError) as err: | |
431 | req.respond(HTTP_NOT_FOUND, ctype) |
|
432 | req.respond(HTTP_NOT_FOUND, ctype) | |
432 | msg = str(err) |
|
433 | msg = str(err) | |
433 | if (util.safehasattr(err, 'name') and |
|
434 | if (util.safehasattr(err, 'name') and | |
434 | not isinstance(err, error.ManifestLookupError)): |
|
435 | not isinstance(err, error.ManifestLookupError)): | |
435 | msg = 'revision not found: %s' % err.name |
|
436 | msg = 'revision not found: %s' % err.name | |
436 | return tmpl('error', error=msg) |
|
437 | return tmpl('error', error=msg) | |
437 | except (error.RepoError, error.RevlogError) as inst: |
|
438 | except (error.RepoError, error.RevlogError) as inst: | |
438 | req.respond(HTTP_SERVER_ERROR, ctype) |
|
439 | req.respond(HTTP_SERVER_ERROR, ctype) | |
439 | return tmpl('error', error=str(inst)) |
|
440 | return tmpl('error', error=str(inst)) | |
440 | except ErrorResponse as inst: |
|
441 | except ErrorResponse as inst: | |
441 | req.respond(inst, ctype) |
|
442 | req.respond(inst, ctype) | |
442 | if inst.code == HTTP_NOT_MODIFIED: |
|
443 | if inst.code == HTTP_NOT_MODIFIED: | |
443 | # Not allowed to return a body on a 304 |
|
444 | # Not allowed to return a body on a 304 | |
444 | return [''] |
|
445 | return [''] | |
445 | return tmpl('error', error=str(inst)) |
|
446 | return tmpl('error', error=str(inst)) | |
446 |
|
447 | |||
447 | def check_perm(self, rctx, req, op): |
|
448 | def check_perm(self, rctx, req, op): | |
448 | for permhook in permhooks: |
|
449 | for permhook in permhooks: | |
449 | permhook(rctx, req, op) |
|
450 | permhook(rctx, req, op) | |
450 |
|
451 | |||
def getwebview(repo):
    """Return *repo* filtered according to the ``web.view`` config option.

    Possible values are ``served``, ``visible`` and ``all``; the default
    (and the fallback for unknown values) is ``served``.  ``served`` only
    shows changesets that can be pulled from this hgweb instance, while
    ``visible`` additionally includes secret changesets (but still hides
    "hidden" ones).  See the repoview module for details.

    The option has been around undocumented since Mercurial 2.5, but no
    user ever asked about it. So we better keep it undocumented for now.
    """
    name = repo.ui.config('web', 'view', 'served', untrusted=True)
    if name == 'all':
        return repo.unfiltered()
    if name in repoview.filtertable:
        return repo.filtered(name)
    # unknown or default value: serve the conservative view
    return repo.filtered('served')
@@ -1,521 +1,522 | |||||
1 | # hgweb/hgwebdir_mod.py - Web interface for a directory of repositories. |
|
1 | # hgweb/hgwebdir_mod.py - Web interface for a directory of repositories. | |
2 | # |
|
2 | # | |
3 | # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net> |
|
3 | # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net> | |
4 | # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> |
|
4 | # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> | |
5 | # |
|
5 | # | |
6 | # This software may be used and distributed according to the terms of the |
|
6 | # This software may be used and distributed according to the terms of the | |
7 | # GNU General Public License version 2 or any later version. |
|
7 | # GNU General Public License version 2 or any later version. | |
8 |
|
8 | |||
9 | from __future__ import absolute_import |
|
9 | from __future__ import absolute_import | |
10 |
|
10 | |||
11 | import os |
|
11 | import os | |
12 | import re |
|
12 | import re | |
13 | import time |
|
13 | import time | |
14 |
|
14 | |||
15 | from ..i18n import _ |
|
15 | from ..i18n import _ | |
16 |
|
16 | |||
17 | from .common import ( |
|
17 | from .common import ( | |
18 | ErrorResponse, |
|
18 | ErrorResponse, | |
19 | HTTP_NOT_FOUND, |
|
19 | HTTP_NOT_FOUND, | |
20 | HTTP_OK, |
|
20 | HTTP_OK, | |
21 | HTTP_SERVER_ERROR, |
|
21 | HTTP_SERVER_ERROR, | |
22 | get_contact, |
|
22 | get_contact, | |
23 | get_mtime, |
|
23 | get_mtime, | |
24 | ismember, |
|
24 | ismember, | |
25 | paritygen, |
|
25 | paritygen, | |
26 | staticfile, |
|
26 | staticfile, | |
27 | ) |
|
27 | ) | |
28 | from .request import wsgirequest |
|
28 | from .request import wsgirequest | |
29 |
|
29 | |||
30 | from .. import ( |
|
30 | from .. import ( | |
31 | encoding, |
|
31 | encoding, | |
32 | error, |
|
32 | error, | |
33 | hg, |
|
33 | hg, | |
34 | profiling, |
|
34 | profiling, | |
35 | scmutil, |
|
35 | scmutil, | |
36 | templater, |
|
36 | templater, | |
37 | ui as uimod, |
|
37 | ui as uimod, | |
38 | util, |
|
38 | util, | |
39 | ) |
|
39 | ) | |
40 |
|
40 | |||
41 | from . import ( |
|
41 | from . import ( | |
42 | hgweb_mod, |
|
42 | hgweb_mod, | |
43 | webutil, |
|
43 | webutil, | |
44 | wsgicgi, |
|
44 | wsgicgi, | |
45 | ) |
|
45 | ) | |
46 |
|
46 | |||
def cleannames(items):
    """Normalize (name, path) pairs: names get forward slashes and no
    leading/trailing slash; paths are passed through untouched."""
    def _clean(name):
        return util.pconvert(name).strip('/')
    return [(_clean(name), path) for name, path in items]
49 |
|
49 | |||
def findrepos(paths):
    """Expand configured (virtual name, path) pairs into concrete repos.

    "foo = /bar/*" or "foo = /bar/**" lets every repo /bar/N in or below
    /bar/ be served as foo/N .  '*' will not search inside dirs with .hg
    (except .hg/patches), while '**' will search inside dirs with .hg
    (and thus also find subrepos).  Any other path is served verbatim
    under its prefix.
    """
    found = []
    for prefix, root in cleannames(paths):
        roothead, roottail = os.path.split(root)
        if roottail not in ('*', '**'):
            # plain repository path: serve it as-is
            found.append((prefix, root))
            continue
        recurse = roottail == '**'
        roothead = os.path.normpath(os.path.abspath(roothead))
        walked = scmutil.walkrepos(roothead, followsym=True, recurse=recurse)
        found.extend(urlrepos(prefix, roothead, walked))
    return found
|
66 | return repos | |
67 |
|
67 | |||
def urlrepos(prefix, roothead, paths):
    """yield url paths and filesystem paths from a list of repo paths

    >>> conv = lambda seq: [(v, util.pconvert(p)) for v,p in seq]
    >>> conv(urlrepos('hg', '/opt', ['/opt/r', '/opt/r/r', '/opt']))
    [('hg/r', '/opt/r'), ('hg/r/r', '/opt/r/r'), ('hg', '/opt')]
    >>> conv(urlrepos('', '/opt', ['/opt/r', '/opt/r/r', '/opt']))
    [('r', '/opt/r'), ('r/r', '/opt/r/r'), ('', '/opt')]
    """
    for path in paths:
        path = os.path.normpath(path)
        # url tail = the part of the path below roothead, slash-normalized
        tail = util.pconvert(path[len(roothead):]).lstrip('/')
        yield (prefix + '/' + tail).strip('/'), path
81 |
|
81 | |||
def geturlcgivars(baseurl, port):
    """
    Extract CGI variables from baseurl

    >>> geturlcgivars("http://host.org/base", "80")
    ('host.org', '80', '/base')
    >>> geturlcgivars("http://host.org:8000/base", "80")
    ('host.org', '8000', '/base')
    >>> geturlcgivars('/base', 8000)
    ('', '8000', '/base')
    >>> geturlcgivars("base", '8000')
    ('', '8000', '/base')
    >>> geturlcgivars("http://host", '8000')
    ('host', '8000', '/')
    >>> geturlcgivars("http://host/", '8000')
    ('host', '8000', '/')
    """
    parsed = util.url(baseurl)
    host = parsed.host or ''
    if parsed.port:
        # a port given explicitly in the base URL wins over the server's
        port = parsed.port
    path = parsed.path or ""
    if not path.startswith('/'):
        path = '/' + path

    return host, str(port), path
108 |
|
108 | |||
109 | class hgwebdir(object): |
|
109 | class hgwebdir(object): | |
110 | """HTTP server for multiple repositories. |
|
110 | """HTTP server for multiple repositories. | |
111 |
|
111 | |||
112 | Given a configuration, different repositories will be served depending |
|
112 | Given a configuration, different repositories will be served depending | |
113 | on the request path. |
|
113 | on the request path. | |
114 |
|
114 | |||
115 | Instances are typically used as WSGI applications. |
|
115 | Instances are typically used as WSGI applications. | |
116 | """ |
|
116 | """ | |
117 | def __init__(self, conf, baseui=None): |
|
117 | def __init__(self, conf, baseui=None): | |
118 | self.conf = conf |
|
118 | self.conf = conf | |
119 | self.baseui = baseui |
|
119 | self.baseui = baseui | |
120 | self.ui = None |
|
120 | self.ui = None | |
121 | self.lastrefresh = 0 |
|
121 | self.lastrefresh = 0 | |
122 | self.motd = None |
|
122 | self.motd = None | |
123 | self.refresh() |
|
123 | self.refresh() | |
124 |
|
124 | |||
125 | def refresh(self): |
|
125 | def refresh(self): | |
126 | refreshinterval = 20 |
|
126 | refreshinterval = 20 | |
127 | if self.ui: |
|
127 | if self.ui: | |
128 | refreshinterval = self.ui.configint('web', 'refreshinterval', |
|
128 | refreshinterval = self.ui.configint('web', 'refreshinterval', | |
129 | refreshinterval) |
|
129 | refreshinterval) | |
130 |
|
130 | |||
131 | # refreshinterval <= 0 means to always refresh. |
|
131 | # refreshinterval <= 0 means to always refresh. | |
132 | if (refreshinterval > 0 and |
|
132 | if (refreshinterval > 0 and | |
133 | self.lastrefresh + refreshinterval > time.time()): |
|
133 | self.lastrefresh + refreshinterval > time.time()): | |
134 | return |
|
134 | return | |
135 |
|
135 | |||
136 | if self.baseui: |
|
136 | if self.baseui: | |
137 | u = self.baseui.copy() |
|
137 | u = self.baseui.copy() | |
138 | else: |
|
138 | else: | |
139 | u = uimod.ui.load() |
|
139 | u = uimod.ui.load() | |
140 | u.setconfig('ui', 'report_untrusted', 'off', 'hgwebdir') |
|
140 | u.setconfig('ui', 'report_untrusted', 'off', 'hgwebdir') | |
141 | u.setconfig('ui', 'nontty', 'true', 'hgwebdir') |
|
141 | u.setconfig('ui', 'nontty', 'true', 'hgwebdir') | |
142 | # displaying bundling progress bar while serving feels wrong and may |
|
142 | # displaying bundling progress bar while serving feels wrong and may | |
143 | # break some wsgi implementations. |
|
143 | # break some wsgi implementations. | |
144 | u.setconfig('progress', 'disable', 'true', 'hgweb') |
|
144 | u.setconfig('progress', 'disable', 'true', 'hgweb') | |
145 |
|
145 | |||
146 | if not isinstance(self.conf, (dict, list, tuple)): |
|
146 | if not isinstance(self.conf, (dict, list, tuple)): | |
147 | map = {'paths': 'hgweb-paths'} |
|
147 | map = {'paths': 'hgweb-paths'} | |
148 | if not os.path.exists(self.conf): |
|
148 | if not os.path.exists(self.conf): | |
149 | raise error.Abort(_('config file %s not found!') % self.conf) |
|
149 | raise error.Abort(_('config file %s not found!') % self.conf) | |
150 | u.readconfig(self.conf, remap=map, trust=True) |
|
150 | u.readconfig(self.conf, remap=map, trust=True) | |
151 | paths = [] |
|
151 | paths = [] | |
152 | for name, ignored in u.configitems('hgweb-paths'): |
|
152 | for name, ignored in u.configitems('hgweb-paths'): | |
153 | for path in u.configlist('hgweb-paths', name): |
|
153 | for path in u.configlist('hgweb-paths', name): | |
154 | paths.append((name, path)) |
|
154 | paths.append((name, path)) | |
155 | elif isinstance(self.conf, (list, tuple)): |
|
155 | elif isinstance(self.conf, (list, tuple)): | |
156 | paths = self.conf |
|
156 | paths = self.conf | |
157 | elif isinstance(self.conf, dict): |
|
157 | elif isinstance(self.conf, dict): | |
158 | paths = self.conf.items() |
|
158 | paths = self.conf.items() | |
159 |
|
159 | |||
160 | repos = findrepos(paths) |
|
160 | repos = findrepos(paths) | |
161 | for prefix, root in u.configitems('collections'): |
|
161 | for prefix, root in u.configitems('collections'): | |
162 | prefix = util.pconvert(prefix) |
|
162 | prefix = util.pconvert(prefix) | |
163 | for path in scmutil.walkrepos(root, followsym=True): |
|
163 | for path in scmutil.walkrepos(root, followsym=True): | |
164 | repo = os.path.normpath(path) |
|
164 | repo = os.path.normpath(path) | |
165 | name = util.pconvert(repo) |
|
165 | name = util.pconvert(repo) | |
166 | if name.startswith(prefix): |
|
166 | if name.startswith(prefix): | |
167 | name = name[len(prefix):] |
|
167 | name = name[len(prefix):] | |
168 | repos.append((name.lstrip('/'), repo)) |
|
168 | repos.append((name.lstrip('/'), repo)) | |
169 |
|
169 | |||
170 | self.repos = repos |
|
170 | self.repos = repos | |
171 | self.ui = u |
|
171 | self.ui = u | |
172 | encoding.encoding = self.ui.config('web', 'encoding', |
|
172 | encoding.encoding = self.ui.config('web', 'encoding', | |
173 | encoding.encoding) |
|
173 | encoding.encoding) | |
174 | self.style = self.ui.config('web', 'style', 'paper') |
|
174 | self.style = self.ui.config('web', 'style', 'paper') | |
175 | self.templatepath = self.ui.config('web', 'templates', None) |
|
175 | self.templatepath = self.ui.config('web', 'templates', None) | |
176 | self.stripecount = self.ui.config('web', 'stripes', 1) |
|
176 | self.stripecount = self.ui.config('web', 'stripes', 1) | |
177 | if self.stripecount: |
|
177 | if self.stripecount: | |
178 | self.stripecount = int(self.stripecount) |
|
178 | self.stripecount = int(self.stripecount) | |
179 | self._baseurl = self.ui.config('web', 'baseurl') |
|
179 | self._baseurl = self.ui.config('web', 'baseurl') | |
180 | prefix = self.ui.config('web', 'prefix', '') |
|
180 | prefix = self.ui.config('web', 'prefix', '') | |
181 | if prefix.startswith('/'): |
|
181 | if prefix.startswith('/'): | |
182 | prefix = prefix[1:] |
|
182 | prefix = prefix[1:] | |
183 | if prefix.endswith('/'): |
|
183 | if prefix.endswith('/'): | |
184 | prefix = prefix[:-1] |
|
184 | prefix = prefix[:-1] | |
185 | self.prefix = prefix |
|
185 | self.prefix = prefix | |
186 | self.lastrefresh = time.time() |
|
186 | self.lastrefresh = time.time() | |
187 |
|
187 | |||
188 | def run(self): |
|
188 | def run(self): | |
189 |
if not |
|
189 | if not encoding.environ.get('GATEWAY_INTERFACE', | |
|
190 | '').startswith("CGI/1."): | |||
190 | raise RuntimeError("This function is only intended to be " |
|
191 | raise RuntimeError("This function is only intended to be " | |
191 | "called while running as a CGI script.") |
|
192 | "called while running as a CGI script.") | |
192 | wsgicgi.launch(self) |
|
193 | wsgicgi.launch(self) | |
193 |
|
194 | |||
194 | def __call__(self, env, respond): |
|
195 | def __call__(self, env, respond): | |
195 | req = wsgirequest(env, respond) |
|
196 | req = wsgirequest(env, respond) | |
196 | return self.run_wsgi(req) |
|
197 | return self.run_wsgi(req) | |
197 |
|
198 | |||
198 | def read_allowed(self, ui, req): |
|
199 | def read_allowed(self, ui, req): | |
199 | """Check allow_read and deny_read config options of a repo's ui object |
|
200 | """Check allow_read and deny_read config options of a repo's ui object | |
200 | to determine user permissions. By default, with neither option set (or |
|
201 | to determine user permissions. By default, with neither option set (or | |
201 | both empty), allow all users to read the repo. There are two ways a |
|
202 | both empty), allow all users to read the repo. There are two ways a | |
202 | user can be denied read access: (1) deny_read is not empty, and the |
|
203 | user can be denied read access: (1) deny_read is not empty, and the | |
203 | user is unauthenticated or deny_read contains user (or *), and (2) |
|
204 | user is unauthenticated or deny_read contains user (or *), and (2) | |
204 | allow_read is not empty and the user is not in allow_read. Return True |
|
205 | allow_read is not empty and the user is not in allow_read. Return True | |
205 | if user is allowed to read the repo, else return False.""" |
|
206 | if user is allowed to read the repo, else return False.""" | |
206 |
|
207 | |||
207 | user = req.env.get('REMOTE_USER') |
|
208 | user = req.env.get('REMOTE_USER') | |
208 |
|
209 | |||
209 | deny_read = ui.configlist('web', 'deny_read', untrusted=True) |
|
210 | deny_read = ui.configlist('web', 'deny_read', untrusted=True) | |
210 | if deny_read and (not user or ismember(ui, user, deny_read)): |
|
211 | if deny_read and (not user or ismember(ui, user, deny_read)): | |
211 | return False |
|
212 | return False | |
212 |
|
213 | |||
213 | allow_read = ui.configlist('web', 'allow_read', untrusted=True) |
|
214 | allow_read = ui.configlist('web', 'allow_read', untrusted=True) | |
214 | # by default, allow reading if no allow_read option has been set |
|
215 | # by default, allow reading if no allow_read option has been set | |
215 | if (not allow_read) or ismember(ui, user, allow_read): |
|
216 | if (not allow_read) or ismember(ui, user, allow_read): | |
216 | return True |
|
217 | return True | |
217 |
|
218 | |||
218 | return False |
|
219 | return False | |
219 |
|
220 | |||
220 | def run_wsgi(self, req): |
|
221 | def run_wsgi(self, req): | |
221 | with profiling.maybeprofile(self.ui): |
|
222 | with profiling.maybeprofile(self.ui): | |
222 | for r in self._runwsgi(req): |
|
223 | for r in self._runwsgi(req): | |
223 | yield r |
|
224 | yield r | |
224 |
|
225 | |||
225 | def _runwsgi(self, req): |
|
226 | def _runwsgi(self, req): | |
226 | try: |
|
227 | try: | |
227 | self.refresh() |
|
228 | self.refresh() | |
228 |
|
229 | |||
229 | virtual = req.env.get("PATH_INFO", "").strip('/') |
|
230 | virtual = req.env.get("PATH_INFO", "").strip('/') | |
230 | tmpl = self.templater(req) |
|
231 | tmpl = self.templater(req) | |
231 | ctype = tmpl('mimetype', encoding=encoding.encoding) |
|
232 | ctype = tmpl('mimetype', encoding=encoding.encoding) | |
232 | ctype = templater.stringify(ctype) |
|
233 | ctype = templater.stringify(ctype) | |
233 |
|
234 | |||
234 | # a static file |
|
235 | # a static file | |
235 | if virtual.startswith('static/') or 'static' in req.form: |
|
236 | if virtual.startswith('static/') or 'static' in req.form: | |
236 | if virtual.startswith('static/'): |
|
237 | if virtual.startswith('static/'): | |
237 | fname = virtual[7:] |
|
238 | fname = virtual[7:] | |
238 | else: |
|
239 | else: | |
239 | fname = req.form['static'][0] |
|
240 | fname = req.form['static'][0] | |
240 | static = self.ui.config("web", "static", None, |
|
241 | static = self.ui.config("web", "static", None, | |
241 | untrusted=False) |
|
242 | untrusted=False) | |
242 | if not static: |
|
243 | if not static: | |
243 | tp = self.templatepath or templater.templatepaths() |
|
244 | tp = self.templatepath or templater.templatepaths() | |
244 | if isinstance(tp, str): |
|
245 | if isinstance(tp, str): | |
245 | tp = [tp] |
|
246 | tp = [tp] | |
246 | static = [os.path.join(p, 'static') for p in tp] |
|
247 | static = [os.path.join(p, 'static') for p in tp] | |
247 | staticfile(static, fname, req) |
|
248 | staticfile(static, fname, req) | |
248 | return [] |
|
249 | return [] | |
249 |
|
250 | |||
250 | # top-level index |
|
251 | # top-level index | |
251 | elif not virtual: |
|
252 | elif not virtual: | |
252 | req.respond(HTTP_OK, ctype) |
|
253 | req.respond(HTTP_OK, ctype) | |
253 | return self.makeindex(req, tmpl) |
|
254 | return self.makeindex(req, tmpl) | |
254 |
|
255 | |||
255 | # nested indexes and hgwebs |
|
256 | # nested indexes and hgwebs | |
256 |
|
257 | |||
257 | repos = dict(self.repos) |
|
258 | repos = dict(self.repos) | |
258 | virtualrepo = virtual |
|
259 | virtualrepo = virtual | |
259 | while virtualrepo: |
|
260 | while virtualrepo: | |
260 | real = repos.get(virtualrepo) |
|
261 | real = repos.get(virtualrepo) | |
261 | if real: |
|
262 | if real: | |
262 | req.env['REPO_NAME'] = virtualrepo |
|
263 | req.env['REPO_NAME'] = virtualrepo | |
263 | try: |
|
264 | try: | |
264 | # ensure caller gets private copy of ui |
|
265 | # ensure caller gets private copy of ui | |
265 | repo = hg.repository(self.ui.copy(), real) |
|
266 | repo = hg.repository(self.ui.copy(), real) | |
266 | return hgweb_mod.hgweb(repo).run_wsgi(req) |
|
267 | return hgweb_mod.hgweb(repo).run_wsgi(req) | |
267 | except IOError as inst: |
|
268 | except IOError as inst: | |
268 | msg = inst.strerror |
|
269 | msg = inst.strerror | |
269 | raise ErrorResponse(HTTP_SERVER_ERROR, msg) |
|
270 | raise ErrorResponse(HTTP_SERVER_ERROR, msg) | |
270 | except error.RepoError as inst: |
|
271 | except error.RepoError as inst: | |
271 | raise ErrorResponse(HTTP_SERVER_ERROR, str(inst)) |
|
272 | raise ErrorResponse(HTTP_SERVER_ERROR, str(inst)) | |
272 |
|
273 | |||
273 | up = virtualrepo.rfind('/') |
|
274 | up = virtualrepo.rfind('/') | |
274 | if up < 0: |
|
275 | if up < 0: | |
275 | break |
|
276 | break | |
276 | virtualrepo = virtualrepo[:up] |
|
277 | virtualrepo = virtualrepo[:up] | |
277 |
|
278 | |||
278 | # browse subdirectories |
|
279 | # browse subdirectories | |
279 | subdir = virtual + '/' |
|
280 | subdir = virtual + '/' | |
280 | if [r for r in repos if r.startswith(subdir)]: |
|
281 | if [r for r in repos if r.startswith(subdir)]: | |
281 | req.respond(HTTP_OK, ctype) |
|
282 | req.respond(HTTP_OK, ctype) | |
282 | return self.makeindex(req, tmpl, subdir) |
|
283 | return self.makeindex(req, tmpl, subdir) | |
283 |
|
284 | |||
284 | # prefixes not found |
|
285 | # prefixes not found | |
285 | req.respond(HTTP_NOT_FOUND, ctype) |
|
286 | req.respond(HTTP_NOT_FOUND, ctype) | |
286 | return tmpl("notfound", repo=virtual) |
|
287 | return tmpl("notfound", repo=virtual) | |
287 |
|
288 | |||
288 | except ErrorResponse as err: |
|
289 | except ErrorResponse as err: | |
289 | req.respond(err, ctype) |
|
290 | req.respond(err, ctype) | |
290 | return tmpl('error', error=err.message or '') |
|
291 | return tmpl('error', error=err.message or '') | |
291 | finally: |
|
292 | finally: | |
292 | tmpl = None |
|
293 | tmpl = None | |
293 |
|
294 | |||
294 | def makeindex(self, req, tmpl, subdir=""): |
|
295 | def makeindex(self, req, tmpl, subdir=""): | |
295 |
|
296 | |||
296 | def archivelist(ui, nodeid, url): |
|
297 | def archivelist(ui, nodeid, url): | |
297 | allowed = ui.configlist("web", "allow_archive", untrusted=True) |
|
298 | allowed = ui.configlist("web", "allow_archive", untrusted=True) | |
298 | archives = [] |
|
299 | archives = [] | |
299 | for i in [('zip', '.zip'), ('gz', '.tar.gz'), ('bz2', '.tar.bz2')]: |
|
300 | for i in [('zip', '.zip'), ('gz', '.tar.gz'), ('bz2', '.tar.bz2')]: | |
300 | if i[0] in allowed or ui.configbool("web", "allow" + i[0], |
|
301 | if i[0] in allowed or ui.configbool("web", "allow" + i[0], | |
301 | untrusted=True): |
|
302 | untrusted=True): | |
302 | archives.append({"type" : i[0], "extension": i[1], |
|
303 | archives.append({"type" : i[0], "extension": i[1], | |
303 | "node": nodeid, "url": url}) |
|
304 | "node": nodeid, "url": url}) | |
304 | return archives |
|
305 | return archives | |
305 |
|
306 | |||
306 | def rawentries(subdir="", **map): |
|
307 | def rawentries(subdir="", **map): | |
307 |
|
308 | |||
308 | descend = self.ui.configbool('web', 'descend', True) |
|
309 | descend = self.ui.configbool('web', 'descend', True) | |
309 | collapse = self.ui.configbool('web', 'collapse', False) |
|
310 | collapse = self.ui.configbool('web', 'collapse', False) | |
310 | seenrepos = set() |
|
311 | seenrepos = set() | |
311 | seendirs = set() |
|
312 | seendirs = set() | |
312 | for name, path in self.repos: |
|
313 | for name, path in self.repos: | |
313 |
|
314 | |||
314 | if not name.startswith(subdir): |
|
315 | if not name.startswith(subdir): | |
315 | continue |
|
316 | continue | |
316 | name = name[len(subdir):] |
|
317 | name = name[len(subdir):] | |
317 | directory = False |
|
318 | directory = False | |
318 |
|
319 | |||
319 | if '/' in name: |
|
320 | if '/' in name: | |
320 | if not descend: |
|
321 | if not descend: | |
321 | continue |
|
322 | continue | |
322 |
|
323 | |||
323 | nameparts = name.split('/') |
|
324 | nameparts = name.split('/') | |
324 | rootname = nameparts[0] |
|
325 | rootname = nameparts[0] | |
325 |
|
326 | |||
326 | if not collapse: |
|
327 | if not collapse: | |
327 | pass |
|
328 | pass | |
328 | elif rootname in seendirs: |
|
329 | elif rootname in seendirs: | |
329 | continue |
|
330 | continue | |
330 | elif rootname in seenrepos: |
|
331 | elif rootname in seenrepos: | |
331 | pass |
|
332 | pass | |
332 | else: |
|
333 | else: | |
333 | directory = True |
|
334 | directory = True | |
334 | name = rootname |
|
335 | name = rootname | |
335 |
|
336 | |||
336 | # redefine the path to refer to the directory |
|
337 | # redefine the path to refer to the directory | |
337 | discarded = '/'.join(nameparts[1:]) |
|
338 | discarded = '/'.join(nameparts[1:]) | |
338 |
|
339 | |||
339 | # remove name parts plus accompanying slash |
|
340 | # remove name parts plus accompanying slash | |
340 | path = path[:-len(discarded) - 1] |
|
341 | path = path[:-len(discarded) - 1] | |
341 |
|
342 | |||
342 | try: |
|
343 | try: | |
343 | r = hg.repository(self.ui, path) |
|
344 | r = hg.repository(self.ui, path) | |
344 | directory = False |
|
345 | directory = False | |
345 | except (IOError, error.RepoError): |
|
346 | except (IOError, error.RepoError): | |
346 | pass |
|
347 | pass | |
347 |
|
348 | |||
348 | parts = [name] |
|
349 | parts = [name] | |
349 | if 'PATH_INFO' in req.env: |
|
350 | if 'PATH_INFO' in req.env: | |
350 | parts.insert(0, req.env['PATH_INFO'].rstrip('/')) |
|
351 | parts.insert(0, req.env['PATH_INFO'].rstrip('/')) | |
351 | if req.env['SCRIPT_NAME']: |
|
352 | if req.env['SCRIPT_NAME']: | |
352 | parts.insert(0, req.env['SCRIPT_NAME']) |
|
353 | parts.insert(0, req.env['SCRIPT_NAME']) | |
353 | url = re.sub(r'/+', '/', '/'.join(parts) + '/') |
|
354 | url = re.sub(r'/+', '/', '/'.join(parts) + '/') | |
354 |
|
355 | |||
355 | # show either a directory entry or a repository |
|
356 | # show either a directory entry or a repository | |
356 | if directory: |
|
357 | if directory: | |
357 | # get the directory's time information |
|
358 | # get the directory's time information | |
358 | try: |
|
359 | try: | |
359 | d = (get_mtime(path), util.makedate()[1]) |
|
360 | d = (get_mtime(path), util.makedate()[1]) | |
360 | except OSError: |
|
361 | except OSError: | |
361 | continue |
|
362 | continue | |
362 |
|
363 | |||
363 | # add '/' to the name to make it obvious that |
|
364 | # add '/' to the name to make it obvious that | |
364 | # the entry is a directory, not a regular repository |
|
365 | # the entry is a directory, not a regular repository | |
365 | row = {'contact': "", |
|
366 | row = {'contact': "", | |
366 | 'contact_sort': "", |
|
367 | 'contact_sort': "", | |
367 | 'name': name + '/', |
|
368 | 'name': name + '/', | |
368 | 'name_sort': name, |
|
369 | 'name_sort': name, | |
369 | 'url': url, |
|
370 | 'url': url, | |
370 | 'description': "", |
|
371 | 'description': "", | |
371 | 'description_sort': "", |
|
372 | 'description_sort': "", | |
372 | 'lastchange': d, |
|
373 | 'lastchange': d, | |
373 | 'lastchange_sort': d[1]-d[0], |
|
374 | 'lastchange_sort': d[1]-d[0], | |
374 | 'archives': [], |
|
375 | 'archives': [], | |
375 | 'isdirectory': True, |
|
376 | 'isdirectory': True, | |
376 | 'labels': [], |
|
377 | 'labels': [], | |
377 | } |
|
378 | } | |
378 |
|
379 | |||
379 | seendirs.add(name) |
|
380 | seendirs.add(name) | |
380 | yield row |
|
381 | yield row | |
381 | continue |
|
382 | continue | |
382 |
|
383 | |||
383 | u = self.ui.copy() |
|
384 | u = self.ui.copy() | |
384 | try: |
|
385 | try: | |
385 | u.readconfig(os.path.join(path, '.hg', 'hgrc')) |
|
386 | u.readconfig(os.path.join(path, '.hg', 'hgrc')) | |
386 | except Exception as e: |
|
387 | except Exception as e: | |
387 | u.warn(_('error reading %s/.hg/hgrc: %s\n') % (path, e)) |
|
388 | u.warn(_('error reading %s/.hg/hgrc: %s\n') % (path, e)) | |
388 | continue |
|
389 | continue | |
389 | def get(section, name, default=None): |
|
390 | def get(section, name, default=None): | |
390 | return u.config(section, name, default, untrusted=True) |
|
391 | return u.config(section, name, default, untrusted=True) | |
391 |
|
392 | |||
392 | if u.configbool("web", "hidden", untrusted=True): |
|
393 | if u.configbool("web", "hidden", untrusted=True): | |
393 | continue |
|
394 | continue | |
394 |
|
395 | |||
395 | if not self.read_allowed(u, req): |
|
396 | if not self.read_allowed(u, req): | |
396 | continue |
|
397 | continue | |
397 |
|
398 | |||
398 | # update time with local timezone |
|
399 | # update time with local timezone | |
399 | try: |
|
400 | try: | |
400 | r = hg.repository(self.ui, path) |
|
401 | r = hg.repository(self.ui, path) | |
401 | except IOError: |
|
402 | except IOError: | |
402 | u.warn(_('error accessing repository at %s\n') % path) |
|
403 | u.warn(_('error accessing repository at %s\n') % path) | |
403 | continue |
|
404 | continue | |
404 | except error.RepoError: |
|
405 | except error.RepoError: | |
405 | u.warn(_('error accessing repository at %s\n') % path) |
|
406 | u.warn(_('error accessing repository at %s\n') % path) | |
406 | continue |
|
407 | continue | |
407 | try: |
|
408 | try: | |
408 | d = (get_mtime(r.spath), util.makedate()[1]) |
|
409 | d = (get_mtime(r.spath), util.makedate()[1]) | |
409 | except OSError: |
|
410 | except OSError: | |
410 | continue |
|
411 | continue | |
411 |
|
412 | |||
412 | contact = get_contact(get) |
|
413 | contact = get_contact(get) | |
413 | description = get("web", "description", "") |
|
414 | description = get("web", "description", "") | |
414 | seenrepos.add(name) |
|
415 | seenrepos.add(name) | |
415 | name = get("web", "name", name) |
|
416 | name = get("web", "name", name) | |
416 | row = {'contact': contact or "unknown", |
|
417 | row = {'contact': contact or "unknown", | |
417 | 'contact_sort': contact.upper() or "unknown", |
|
418 | 'contact_sort': contact.upper() or "unknown", | |
418 | 'name': name, |
|
419 | 'name': name, | |
419 | 'name_sort': name, |
|
420 | 'name_sort': name, | |
420 | 'url': url, |
|
421 | 'url': url, | |
421 | 'description': description or "unknown", |
|
422 | 'description': description or "unknown", | |
422 | 'description_sort': description.upper() or "unknown", |
|
423 | 'description_sort': description.upper() or "unknown", | |
423 | 'lastchange': d, |
|
424 | 'lastchange': d, | |
424 | 'lastchange_sort': d[1]-d[0], |
|
425 | 'lastchange_sort': d[1]-d[0], | |
425 | 'archives': archivelist(u, "tip", url), |
|
426 | 'archives': archivelist(u, "tip", url), | |
426 | 'isdirectory': None, |
|
427 | 'isdirectory': None, | |
427 | 'labels': u.configlist('web', 'labels', untrusted=True), |
|
428 | 'labels': u.configlist('web', 'labels', untrusted=True), | |
428 | } |
|
429 | } | |
429 |
|
430 | |||
430 | yield row |
|
431 | yield row | |
431 |
|
432 | |||
432 | sortdefault = None, False |
|
433 | sortdefault = None, False | |
433 | def entries(sortcolumn="", descending=False, subdir="", **map): |
|
434 | def entries(sortcolumn="", descending=False, subdir="", **map): | |
434 | rows = rawentries(subdir=subdir, **map) |
|
435 | rows = rawentries(subdir=subdir, **map) | |
435 |
|
436 | |||
436 | if sortcolumn and sortdefault != (sortcolumn, descending): |
|
437 | if sortcolumn and sortdefault != (sortcolumn, descending): | |
437 | sortkey = '%s_sort' % sortcolumn |
|
438 | sortkey = '%s_sort' % sortcolumn | |
438 | rows = sorted(rows, key=lambda x: x[sortkey], |
|
439 | rows = sorted(rows, key=lambda x: x[sortkey], | |
439 | reverse=descending) |
|
440 | reverse=descending) | |
440 | for row, parity in zip(rows, paritygen(self.stripecount)): |
|
441 | for row, parity in zip(rows, paritygen(self.stripecount)): | |
441 | row['parity'] = parity |
|
442 | row['parity'] = parity | |
442 | yield row |
|
443 | yield row | |
443 |
|
444 | |||
444 | self.refresh() |
|
445 | self.refresh() | |
445 | sortable = ["name", "description", "contact", "lastchange"] |
|
446 | sortable = ["name", "description", "contact", "lastchange"] | |
446 | sortcolumn, descending = sortdefault |
|
447 | sortcolumn, descending = sortdefault | |
447 | if 'sort' in req.form: |
|
448 | if 'sort' in req.form: | |
448 | sortcolumn = req.form['sort'][0] |
|
449 | sortcolumn = req.form['sort'][0] | |
449 | descending = sortcolumn.startswith('-') |
|
450 | descending = sortcolumn.startswith('-') | |
450 | if descending: |
|
451 | if descending: | |
451 | sortcolumn = sortcolumn[1:] |
|
452 | sortcolumn = sortcolumn[1:] | |
452 | if sortcolumn not in sortable: |
|
453 | if sortcolumn not in sortable: | |
453 | sortcolumn = "" |
|
454 | sortcolumn = "" | |
454 |
|
455 | |||
455 | sort = [("sort_%s" % column, |
|
456 | sort = [("sort_%s" % column, | |
456 | "%s%s" % ((not descending and column == sortcolumn) |
|
457 | "%s%s" % ((not descending and column == sortcolumn) | |
457 | and "-" or "", column)) |
|
458 | and "-" or "", column)) | |
458 | for column in sortable] |
|
459 | for column in sortable] | |
459 |
|
460 | |||
460 | self.refresh() |
|
461 | self.refresh() | |
461 | self.updatereqenv(req.env) |
|
462 | self.updatereqenv(req.env) | |
462 |
|
463 | |||
463 | return tmpl("index", entries=entries, subdir=subdir, |
|
464 | return tmpl("index", entries=entries, subdir=subdir, | |
464 | pathdef=hgweb_mod.makebreadcrumb('/' + subdir, self.prefix), |
|
465 | pathdef=hgweb_mod.makebreadcrumb('/' + subdir, self.prefix), | |
465 | sortcolumn=sortcolumn, descending=descending, |
|
466 | sortcolumn=sortcolumn, descending=descending, | |
466 | **dict(sort)) |
|
467 | **dict(sort)) | |
467 |
|
468 | |||
468 | def templater(self, req): |
|
469 | def templater(self, req): | |
469 |
|
470 | |||
470 | def motd(**map): |
|
471 | def motd(**map): | |
471 | if self.motd is not None: |
|
472 | if self.motd is not None: | |
472 | yield self.motd |
|
473 | yield self.motd | |
473 | else: |
|
474 | else: | |
474 | yield config('web', 'motd', '') |
|
475 | yield config('web', 'motd', '') | |
475 |
|
476 | |||
476 | def config(section, name, default=None, untrusted=True): |
|
477 | def config(section, name, default=None, untrusted=True): | |
477 | return self.ui.config(section, name, default, untrusted) |
|
478 | return self.ui.config(section, name, default, untrusted) | |
478 |
|
479 | |||
479 | self.updatereqenv(req.env) |
|
480 | self.updatereqenv(req.env) | |
480 |
|
481 | |||
481 | url = req.env.get('SCRIPT_NAME', '') |
|
482 | url = req.env.get('SCRIPT_NAME', '') | |
482 | if not url.endswith('/'): |
|
483 | if not url.endswith('/'): | |
483 | url += '/' |
|
484 | url += '/' | |
484 |
|
485 | |||
485 | vars = {} |
|
486 | vars = {} | |
486 | styles = ( |
|
487 | styles = ( | |
487 | req.form.get('style', [None])[0], |
|
488 | req.form.get('style', [None])[0], | |
488 | config('web', 'style'), |
|
489 | config('web', 'style'), | |
489 | 'paper' |
|
490 | 'paper' | |
490 | ) |
|
491 | ) | |
491 | style, mapfile = templater.stylemap(styles, self.templatepath) |
|
492 | style, mapfile = templater.stylemap(styles, self.templatepath) | |
492 | if style == styles[0]: |
|
493 | if style == styles[0]: | |
493 | vars['style'] = style |
|
494 | vars['style'] = style | |
494 |
|
495 | |||
495 | start = url[-1] == '?' and '&' or '?' |
|
496 | start = url[-1] == '?' and '&' or '?' | |
496 | sessionvars = webutil.sessionvars(vars, start) |
|
497 | sessionvars = webutil.sessionvars(vars, start) | |
497 | logourl = config('web', 'logourl', 'https://mercurial-scm.org/') |
|
498 | logourl = config('web', 'logourl', 'https://mercurial-scm.org/') | |
498 | logoimg = config('web', 'logoimg', 'hglogo.png') |
|
499 | logoimg = config('web', 'logoimg', 'hglogo.png') | |
499 | staticurl = config('web', 'staticurl') or url + 'static/' |
|
500 | staticurl = config('web', 'staticurl') or url + 'static/' | |
500 | if not staticurl.endswith('/'): |
|
501 | if not staticurl.endswith('/'): | |
501 | staticurl += '/' |
|
502 | staticurl += '/' | |
502 |
|
503 | |||
503 | defaults = { |
|
504 | defaults = { | |
504 | "encoding": encoding.encoding, |
|
505 | "encoding": encoding.encoding, | |
505 | "motd": motd, |
|
506 | "motd": motd, | |
506 | "url": url, |
|
507 | "url": url, | |
507 | "logourl": logourl, |
|
508 | "logourl": logourl, | |
508 | "logoimg": logoimg, |
|
509 | "logoimg": logoimg, | |
509 | "staticurl": staticurl, |
|
510 | "staticurl": staticurl, | |
510 | "sessionvars": sessionvars, |
|
511 | "sessionvars": sessionvars, | |
511 | "style": style, |
|
512 | "style": style, | |
512 | } |
|
513 | } | |
513 | tmpl = templater.templater.frommapfile(mapfile, defaults=defaults) |
|
514 | tmpl = templater.templater.frommapfile(mapfile, defaults=defaults) | |
514 | return tmpl |
|
515 | return tmpl | |
515 |
|
516 | |||
516 | def updatereqenv(self, env): |
|
517 | def updatereqenv(self, env): | |
517 | if self._baseurl is not None: |
|
518 | if self._baseurl is not None: | |
518 | name, port, path = geturlcgivars(self._baseurl, env['SERVER_PORT']) |
|
519 | name, port, path = geturlcgivars(self._baseurl, env['SERVER_PORT']) | |
519 | env['SERVER_NAME'] = name |
|
520 | env['SERVER_NAME'] = name | |
520 | env['SERVER_PORT'] = port |
|
521 | env['SERVER_PORT'] = port | |
521 | env['SCRIPT_NAME'] = path |
|
522 | env['SCRIPT_NAME'] = path |
@@ -1,91 +1,90 | |||||
1 | # hgweb/wsgicgi.py - CGI->WSGI translator |
|
1 | # hgweb/wsgicgi.py - CGI->WSGI translator | |
2 | # |
|
2 | # | |
3 | # Copyright 2006 Eric Hopper <hopper@omnifarious.org> |
|
3 | # Copyright 2006 Eric Hopper <hopper@omnifarious.org> | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms of the |
|
5 | # This software may be used and distributed according to the terms of the | |
6 | # GNU General Public License version 2 or any later version. |
|
6 | # GNU General Public License version 2 or any later version. | |
7 | # |
|
7 | # | |
8 | # This was originally copied from the public domain code at |
|
8 | # This was originally copied from the public domain code at | |
9 | # http://www.python.org/dev/peps/pep-0333/#the-server-gateway-side |
|
9 | # http://www.python.org/dev/peps/pep-0333/#the-server-gateway-side | |
10 |
|
10 | |||
11 | from __future__ import absolute_import |
|
11 | from __future__ import absolute_import | |
12 |
|
12 | |||
13 | import os |
|
|||
14 |
|
||||
15 | from .. import ( |
|
13 | from .. import ( | |
|
14 | encoding, | |||
16 | util, |
|
15 | util, | |
17 | ) |
|
16 | ) | |
18 |
|
17 | |||
19 | from . import ( |
|
18 | from . import ( | |
20 | common, |
|
19 | common, | |
21 | ) |
|
20 | ) | |
22 |
|
21 | |||
23 | def launch(application): |
|
22 | def launch(application): | |
24 | util.setbinary(util.stdin) |
|
23 | util.setbinary(util.stdin) | |
25 | util.setbinary(util.stdout) |
|
24 | util.setbinary(util.stdout) | |
26 |
|
25 | |||
27 |
environ = dict( |
|
26 | environ = dict(encoding.environ.iteritems()) | |
28 | environ.setdefault('PATH_INFO', '') |
|
27 | environ.setdefault('PATH_INFO', '') | |
29 | if environ.get('SERVER_SOFTWARE', '').startswith('Microsoft-IIS'): |
|
28 | if environ.get('SERVER_SOFTWARE', '').startswith('Microsoft-IIS'): | |
30 | # IIS includes script_name in PATH_INFO |
|
29 | # IIS includes script_name in PATH_INFO | |
31 | scriptname = environ['SCRIPT_NAME'] |
|
30 | scriptname = environ['SCRIPT_NAME'] | |
32 | if environ['PATH_INFO'].startswith(scriptname): |
|
31 | if environ['PATH_INFO'].startswith(scriptname): | |
33 | environ['PATH_INFO'] = environ['PATH_INFO'][len(scriptname):] |
|
32 | environ['PATH_INFO'] = environ['PATH_INFO'][len(scriptname):] | |
34 |
|
33 | |||
35 | stdin = util.stdin |
|
34 | stdin = util.stdin | |
36 | if environ.get('HTTP_EXPECT', '').lower() == '100-continue': |
|
35 | if environ.get('HTTP_EXPECT', '').lower() == '100-continue': | |
37 | stdin = common.continuereader(stdin, util.stdout.write) |
|
36 | stdin = common.continuereader(stdin, util.stdout.write) | |
38 |
|
37 | |||
39 | environ['wsgi.input'] = stdin |
|
38 | environ['wsgi.input'] = stdin | |
40 | environ['wsgi.errors'] = util.stderr |
|
39 | environ['wsgi.errors'] = util.stderr | |
41 | environ['wsgi.version'] = (1, 0) |
|
40 | environ['wsgi.version'] = (1, 0) | |
42 | environ['wsgi.multithread'] = False |
|
41 | environ['wsgi.multithread'] = False | |
43 | environ['wsgi.multiprocess'] = True |
|
42 | environ['wsgi.multiprocess'] = True | |
44 | environ['wsgi.run_once'] = True |
|
43 | environ['wsgi.run_once'] = True | |
45 |
|
44 | |||
46 | if environ.get('HTTPS', 'off').lower() in ('on', '1', 'yes'): |
|
45 | if environ.get('HTTPS', 'off').lower() in ('on', '1', 'yes'): | |
47 | environ['wsgi.url_scheme'] = 'https' |
|
46 | environ['wsgi.url_scheme'] = 'https' | |
48 | else: |
|
47 | else: | |
49 | environ['wsgi.url_scheme'] = 'http' |
|
48 | environ['wsgi.url_scheme'] = 'http' | |
50 |
|
49 | |||
51 | headers_set = [] |
|
50 | headers_set = [] | |
52 | headers_sent = [] |
|
51 | headers_sent = [] | |
53 | out = util.stdout |
|
52 | out = util.stdout | |
54 |
|
53 | |||
55 | def write(data): |
|
54 | def write(data): | |
56 | if not headers_set: |
|
55 | if not headers_set: | |
57 | raise AssertionError("write() before start_response()") |
|
56 | raise AssertionError("write() before start_response()") | |
58 |
|
57 | |||
59 | elif not headers_sent: |
|
58 | elif not headers_sent: | |
60 | # Before the first output, send the stored headers |
|
59 | # Before the first output, send the stored headers | |
61 | status, response_headers = headers_sent[:] = headers_set |
|
60 | status, response_headers = headers_sent[:] = headers_set | |
62 | out.write('Status: %s\r\n' % status) |
|
61 | out.write('Status: %s\r\n' % status) | |
63 | for header in response_headers: |
|
62 | for header in response_headers: | |
64 | out.write('%s: %s\r\n' % header) |
|
63 | out.write('%s: %s\r\n' % header) | |
65 | out.write('\r\n') |
|
64 | out.write('\r\n') | |
66 |
|
65 | |||
67 | out.write(data) |
|
66 | out.write(data) | |
68 | out.flush() |
|
67 | out.flush() | |
69 |
|
68 | |||
70 | def start_response(status, response_headers, exc_info=None): |
|
69 | def start_response(status, response_headers, exc_info=None): | |
71 | if exc_info: |
|
70 | if exc_info: | |
72 | try: |
|
71 | try: | |
73 | if headers_sent: |
|
72 | if headers_sent: | |
74 | # Re-raise original exception if headers sent |
|
73 | # Re-raise original exception if headers sent | |
75 | raise exc_info[0](exc_info[1], exc_info[2]) |
|
74 | raise exc_info[0](exc_info[1], exc_info[2]) | |
76 | finally: |
|
75 | finally: | |
77 | exc_info = None # avoid dangling circular ref |
|
76 | exc_info = None # avoid dangling circular ref | |
78 | elif headers_set: |
|
77 | elif headers_set: | |
79 | raise AssertionError("Headers already set!") |
|
78 | raise AssertionError("Headers already set!") | |
80 |
|
79 | |||
81 | headers_set[:] = [status, response_headers] |
|
80 | headers_set[:] = [status, response_headers] | |
82 | return write |
|
81 | return write | |
83 |
|
82 | |||
84 | content = application(environ, start_response) |
|
83 | content = application(environ, start_response) | |
85 | try: |
|
84 | try: | |
86 | for chunk in content: |
|
85 | for chunk in content: | |
87 | write(chunk) |
|
86 | write(chunk) | |
88 | if not headers_sent: |
|
87 | if not headers_sent: | |
89 | write('') # send headers now if body was empty |
|
88 | write('') # send headers now if body was empty | |
90 | finally: |
|
89 | finally: | |
91 | getattr(content, 'close', lambda : None)() |
|
90 | getattr(content, 'close', lambda : None)() |
@@ -1,490 +1,491 | |||||
1 | # url.py - HTTP handling for mercurial |
|
1 | # url.py - HTTP handling for mercurial | |
2 | # |
|
2 | # | |
3 | # Copyright 2005, 2006, 2007, 2008 Matt Mackall <mpm@selenic.com> |
|
3 | # Copyright 2005, 2006, 2007, 2008 Matt Mackall <mpm@selenic.com> | |
4 | # Copyright 2006, 2007 Alexis S. L. Carvalho <alexis@cecm.usp.br> |
|
4 | # Copyright 2006, 2007 Alexis S. L. Carvalho <alexis@cecm.usp.br> | |
5 | # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com> |
|
5 | # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com> | |
6 | # |
|
6 | # | |
7 | # This software may be used and distributed according to the terms of the |
|
7 | # This software may be used and distributed according to the terms of the | |
8 | # GNU General Public License version 2 or any later version. |
|
8 | # GNU General Public License version 2 or any later version. | |
9 |
|
9 | |||
10 | from __future__ import absolute_import |
|
10 | from __future__ import absolute_import | |
11 |
|
11 | |||
12 | import base64 |
|
12 | import base64 | |
13 | import os |
|
13 | import os | |
14 | import socket |
|
14 | import socket | |
15 |
|
15 | |||
16 | from .i18n import _ |
|
16 | from .i18n import _ | |
17 | from . import ( |
|
17 | from . import ( | |
|
18 | encoding, | |||
18 | error, |
|
19 | error, | |
19 | httpconnection as httpconnectionmod, |
|
20 | httpconnection as httpconnectionmod, | |
20 | keepalive, |
|
21 | keepalive, | |
21 | sslutil, |
|
22 | sslutil, | |
22 | util, |
|
23 | util, | |
23 | ) |
|
24 | ) | |
24 |
|
25 | |||
25 | httplib = util.httplib |
|
26 | httplib = util.httplib | |
26 | stringio = util.stringio |
|
27 | stringio = util.stringio | |
27 | urlerr = util.urlerr |
|
28 | urlerr = util.urlerr | |
28 | urlreq = util.urlreq |
|
29 | urlreq = util.urlreq | |
29 |
|
30 | |||
30 | class passwordmgr(object): |
|
31 | class passwordmgr(object): | |
31 | def __init__(self, ui, passwddb): |
|
32 | def __init__(self, ui, passwddb): | |
32 | self.ui = ui |
|
33 | self.ui = ui | |
33 | self.passwddb = passwddb |
|
34 | self.passwddb = passwddb | |
34 |
|
35 | |||
35 | def add_password(self, realm, uri, user, passwd): |
|
36 | def add_password(self, realm, uri, user, passwd): | |
36 | return self.passwddb.add_password(realm, uri, user, passwd) |
|
37 | return self.passwddb.add_password(realm, uri, user, passwd) | |
37 |
|
38 | |||
38 | def find_user_password(self, realm, authuri): |
|
39 | def find_user_password(self, realm, authuri): | |
39 | authinfo = self.passwddb.find_user_password(realm, authuri) |
|
40 | authinfo = self.passwddb.find_user_password(realm, authuri) | |
40 | user, passwd = authinfo |
|
41 | user, passwd = authinfo | |
41 | if user and passwd: |
|
42 | if user and passwd: | |
42 | self._writedebug(user, passwd) |
|
43 | self._writedebug(user, passwd) | |
43 | return (user, passwd) |
|
44 | return (user, passwd) | |
44 |
|
45 | |||
45 | if not user or not passwd: |
|
46 | if not user or not passwd: | |
46 | res = httpconnectionmod.readauthforuri(self.ui, authuri, user) |
|
47 | res = httpconnectionmod.readauthforuri(self.ui, authuri, user) | |
47 | if res: |
|
48 | if res: | |
48 | group, auth = res |
|
49 | group, auth = res | |
49 | user, passwd = auth.get('username'), auth.get('password') |
|
50 | user, passwd = auth.get('username'), auth.get('password') | |
50 | self.ui.debug("using auth.%s.* for authentication\n" % group) |
|
51 | self.ui.debug("using auth.%s.* for authentication\n" % group) | |
51 | if not user or not passwd: |
|
52 | if not user or not passwd: | |
52 | u = util.url(authuri) |
|
53 | u = util.url(authuri) | |
53 | u.query = None |
|
54 | u.query = None | |
54 | if not self.ui.interactive(): |
|
55 | if not self.ui.interactive(): | |
55 | raise error.Abort(_('http authorization required for %s') % |
|
56 | raise error.Abort(_('http authorization required for %s') % | |
56 | util.hidepassword(str(u))) |
|
57 | util.hidepassword(str(u))) | |
57 |
|
58 | |||
58 | self.ui.write(_("http authorization required for %s\n") % |
|
59 | self.ui.write(_("http authorization required for %s\n") % | |
59 | util.hidepassword(str(u))) |
|
60 | util.hidepassword(str(u))) | |
60 | self.ui.write(_("realm: %s\n") % realm) |
|
61 | self.ui.write(_("realm: %s\n") % realm) | |
61 | if user: |
|
62 | if user: | |
62 | self.ui.write(_("user: %s\n") % user) |
|
63 | self.ui.write(_("user: %s\n") % user) | |
63 | else: |
|
64 | else: | |
64 | user = self.ui.prompt(_("user:"), default=None) |
|
65 | user = self.ui.prompt(_("user:"), default=None) | |
65 |
|
66 | |||
66 | if not passwd: |
|
67 | if not passwd: | |
67 | passwd = self.ui.getpass() |
|
68 | passwd = self.ui.getpass() | |
68 |
|
69 | |||
69 | self.passwddb.add_password(realm, authuri, user, passwd) |
|
70 | self.passwddb.add_password(realm, authuri, user, passwd) | |
70 | self._writedebug(user, passwd) |
|
71 | self._writedebug(user, passwd) | |
71 | return (user, passwd) |
|
72 | return (user, passwd) | |
72 |
|
73 | |||
73 | def _writedebug(self, user, passwd): |
|
74 | def _writedebug(self, user, passwd): | |
74 | msg = _('http auth: user %s, password %s\n') |
|
75 | msg = _('http auth: user %s, password %s\n') | |
75 | self.ui.debug(msg % (user, passwd and '*' * len(passwd) or 'not set')) |
|
76 | self.ui.debug(msg % (user, passwd and '*' * len(passwd) or 'not set')) | |
76 |
|
77 | |||
77 | def find_stored_password(self, authuri): |
|
78 | def find_stored_password(self, authuri): | |
78 | return self.passwddb.find_user_password(None, authuri) |
|
79 | return self.passwddb.find_user_password(None, authuri) | |
79 |
|
80 | |||
80 | class proxyhandler(urlreq.proxyhandler): |
|
81 | class proxyhandler(urlreq.proxyhandler): | |
81 | def __init__(self, ui): |
|
82 | def __init__(self, ui): | |
82 | proxyurl = ui.config("http_proxy", "host") or os.getenv('http_proxy') |
|
83 | proxyurl = ui.config("http_proxy", "host") or os.getenv('http_proxy') | |
83 | # XXX proxyauthinfo = None |
|
84 | # XXX proxyauthinfo = None | |
84 |
|
85 | |||
85 | if proxyurl: |
|
86 | if proxyurl: | |
86 | # proxy can be proper url or host[:port] |
|
87 | # proxy can be proper url or host[:port] | |
87 | if not (proxyurl.startswith('http:') or |
|
88 | if not (proxyurl.startswith('http:') or | |
88 | proxyurl.startswith('https:')): |
|
89 | proxyurl.startswith('https:')): | |
89 | proxyurl = 'http://' + proxyurl + '/' |
|
90 | proxyurl = 'http://' + proxyurl + '/' | |
90 | proxy = util.url(proxyurl) |
|
91 | proxy = util.url(proxyurl) | |
91 | if not proxy.user: |
|
92 | if not proxy.user: | |
92 | proxy.user = ui.config("http_proxy", "user") |
|
93 | proxy.user = ui.config("http_proxy", "user") | |
93 | proxy.passwd = ui.config("http_proxy", "passwd") |
|
94 | proxy.passwd = ui.config("http_proxy", "passwd") | |
94 |
|
95 | |||
95 | # see if we should use a proxy for this url |
|
96 | # see if we should use a proxy for this url | |
96 | no_list = ["localhost", "127.0.0.1"] |
|
97 | no_list = ["localhost", "127.0.0.1"] | |
97 | no_list.extend([p.lower() for |
|
98 | no_list.extend([p.lower() for | |
98 | p in ui.configlist("http_proxy", "no")]) |
|
99 | p in ui.configlist("http_proxy", "no")]) | |
99 | no_list.extend([p.strip().lower() for |
|
100 | no_list.extend([p.strip().lower() for | |
100 | p in os.getenv("no_proxy", '').split(',') |
|
101 | p in os.getenv("no_proxy", '').split(',') | |
101 | if p.strip()]) |
|
102 | if p.strip()]) | |
102 | # "http_proxy.always" config is for running tests on localhost |
|
103 | # "http_proxy.always" config is for running tests on localhost | |
103 | if ui.configbool("http_proxy", "always"): |
|
104 | if ui.configbool("http_proxy", "always"): | |
104 | self.no_list = [] |
|
105 | self.no_list = [] | |
105 | else: |
|
106 | else: | |
106 | self.no_list = no_list |
|
107 | self.no_list = no_list | |
107 |
|
108 | |||
108 | proxyurl = str(proxy) |
|
109 | proxyurl = str(proxy) | |
109 | proxies = {'http': proxyurl, 'https': proxyurl} |
|
110 | proxies = {'http': proxyurl, 'https': proxyurl} | |
110 | ui.debug('proxying through http://%s:%s\n' % |
|
111 | ui.debug('proxying through http://%s:%s\n' % | |
111 | (proxy.host, proxy.port)) |
|
112 | (proxy.host, proxy.port)) | |
112 | else: |
|
113 | else: | |
113 | proxies = {} |
|
114 | proxies = {} | |
114 |
|
115 | |||
115 | # urllib2 takes proxy values from the environment and those |
|
116 | # urllib2 takes proxy values from the environment and those | |
116 | # will take precedence if found. So, if there's a config entry |
|
117 | # will take precedence if found. So, if there's a config entry | |
117 | # defining a proxy, drop the environment ones |
|
118 | # defining a proxy, drop the environment ones | |
118 | if ui.config("http_proxy", "host"): |
|
119 | if ui.config("http_proxy", "host"): | |
119 | for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]: |
|
120 | for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]: | |
120 | try: |
|
121 | try: | |
121 |
if env in |
|
122 | if env in encoding.environ: | |
122 |
del |
|
123 | del encoding.environ[env] | |
123 | except OSError: |
|
124 | except OSError: | |
124 | pass |
|
125 | pass | |
125 |
|
126 | |||
126 | urlreq.proxyhandler.__init__(self, proxies) |
|
127 | urlreq.proxyhandler.__init__(self, proxies) | |
127 | self.ui = ui |
|
128 | self.ui = ui | |
128 |
|
129 | |||
129 | def proxy_open(self, req, proxy, type_): |
|
130 | def proxy_open(self, req, proxy, type_): | |
130 | host = req.get_host().split(':')[0] |
|
131 | host = req.get_host().split(':')[0] | |
131 | for e in self.no_list: |
|
132 | for e in self.no_list: | |
132 | if host == e: |
|
133 | if host == e: | |
133 | return None |
|
134 | return None | |
134 | if e.startswith('*.') and host.endswith(e[2:]): |
|
135 | if e.startswith('*.') and host.endswith(e[2:]): | |
135 | return None |
|
136 | return None | |
136 | if e.startswith('.') and host.endswith(e[1:]): |
|
137 | if e.startswith('.') and host.endswith(e[1:]): | |
137 | return None |
|
138 | return None | |
138 |
|
139 | |||
139 | return urlreq.proxyhandler.proxy_open(self, req, proxy, type_) |
|
140 | return urlreq.proxyhandler.proxy_open(self, req, proxy, type_) | |
140 |
|
141 | |||
141 | def _gen_sendfile(orgsend): |
|
142 | def _gen_sendfile(orgsend): | |
142 | def _sendfile(self, data): |
|
143 | def _sendfile(self, data): | |
143 | # send a file |
|
144 | # send a file | |
144 | if isinstance(data, httpconnectionmod.httpsendfile): |
|
145 | if isinstance(data, httpconnectionmod.httpsendfile): | |
145 | # if auth required, some data sent twice, so rewind here |
|
146 | # if auth required, some data sent twice, so rewind here | |
146 | data.seek(0) |
|
147 | data.seek(0) | |
147 | for chunk in util.filechunkiter(data): |
|
148 | for chunk in util.filechunkiter(data): | |
148 | orgsend(self, chunk) |
|
149 | orgsend(self, chunk) | |
149 | else: |
|
150 | else: | |
150 | orgsend(self, data) |
|
151 | orgsend(self, data) | |
151 | return _sendfile |
|
152 | return _sendfile | |
152 |
|
153 | |||
153 | has_https = util.safehasattr(urlreq, 'httpshandler') |
|
154 | has_https = util.safehasattr(urlreq, 'httpshandler') | |
154 |
|
155 | |||
155 | class httpconnection(keepalive.HTTPConnection): |
|
156 | class httpconnection(keepalive.HTTPConnection): | |
156 | # must be able to send big bundle as stream. |
|
157 | # must be able to send big bundle as stream. | |
157 | send = _gen_sendfile(keepalive.HTTPConnection.send) |
|
158 | send = _gen_sendfile(keepalive.HTTPConnection.send) | |
158 |
|
159 | |||
159 | def getresponse(self): |
|
160 | def getresponse(self): | |
160 | proxyres = getattr(self, 'proxyres', None) |
|
161 | proxyres = getattr(self, 'proxyres', None) | |
161 | if proxyres: |
|
162 | if proxyres: | |
162 | if proxyres.will_close: |
|
163 | if proxyres.will_close: | |
163 | self.close() |
|
164 | self.close() | |
164 | self.proxyres = None |
|
165 | self.proxyres = None | |
165 | return proxyres |
|
166 | return proxyres | |
166 | return keepalive.HTTPConnection.getresponse(self) |
|
167 | return keepalive.HTTPConnection.getresponse(self) | |
167 |
|
168 | |||
168 | # general transaction handler to support different ways to handle |
|
169 | # general transaction handler to support different ways to handle | |
169 | # HTTPS proxying before and after Python 2.6.3. |
|
170 | # HTTPS proxying before and after Python 2.6.3. | |
170 | def _generic_start_transaction(handler, h, req): |
|
171 | def _generic_start_transaction(handler, h, req): | |
171 | tunnel_host = getattr(req, '_tunnel_host', None) |
|
172 | tunnel_host = getattr(req, '_tunnel_host', None) | |
172 | if tunnel_host: |
|
173 | if tunnel_host: | |
173 | if tunnel_host[:7] not in ['http://', 'https:/']: |
|
174 | if tunnel_host[:7] not in ['http://', 'https:/']: | |
174 | tunnel_host = 'https://' + tunnel_host |
|
175 | tunnel_host = 'https://' + tunnel_host | |
175 | new_tunnel = True |
|
176 | new_tunnel = True | |
176 | else: |
|
177 | else: | |
177 | tunnel_host = req.get_selector() |
|
178 | tunnel_host = req.get_selector() | |
178 | new_tunnel = False |
|
179 | new_tunnel = False | |
179 |
|
180 | |||
180 | if new_tunnel or tunnel_host == req.get_full_url(): # has proxy |
|
181 | if new_tunnel or tunnel_host == req.get_full_url(): # has proxy | |
181 | u = util.url(tunnel_host) |
|
182 | u = util.url(tunnel_host) | |
182 | if new_tunnel or u.scheme == 'https': # only use CONNECT for HTTPS |
|
183 | if new_tunnel or u.scheme == 'https': # only use CONNECT for HTTPS | |
183 | h.realhostport = ':'.join([u.host, (u.port or '443')]) |
|
184 | h.realhostport = ':'.join([u.host, (u.port or '443')]) | |
184 | h.headers = req.headers.copy() |
|
185 | h.headers = req.headers.copy() | |
185 | h.headers.update(handler.parent.addheaders) |
|
186 | h.headers.update(handler.parent.addheaders) | |
186 | return |
|
187 | return | |
187 |
|
188 | |||
188 | h.realhostport = None |
|
189 | h.realhostport = None | |
189 | h.headers = None |
|
190 | h.headers = None | |
190 |
|
191 | |||
191 | def _generic_proxytunnel(self): |
|
192 | def _generic_proxytunnel(self): | |
192 | proxyheaders = dict( |
|
193 | proxyheaders = dict( | |
193 | [(x, self.headers[x]) for x in self.headers |
|
194 | [(x, self.headers[x]) for x in self.headers | |
194 | if x.lower().startswith('proxy-')]) |
|
195 | if x.lower().startswith('proxy-')]) | |
195 | self.send('CONNECT %s HTTP/1.0\r\n' % self.realhostport) |
|
196 | self.send('CONNECT %s HTTP/1.0\r\n' % self.realhostport) | |
196 | for header in proxyheaders.iteritems(): |
|
197 | for header in proxyheaders.iteritems(): | |
197 | self.send('%s: %s\r\n' % header) |
|
198 | self.send('%s: %s\r\n' % header) | |
198 | self.send('\r\n') |
|
199 | self.send('\r\n') | |
199 |
|
200 | |||
200 | # majority of the following code is duplicated from |
|
201 | # majority of the following code is duplicated from | |
201 | # httplib.HTTPConnection as there are no adequate places to |
|
202 | # httplib.HTTPConnection as there are no adequate places to | |
202 | # override functions to provide the needed functionality |
|
203 | # override functions to provide the needed functionality | |
203 | res = self.response_class(self.sock, |
|
204 | res = self.response_class(self.sock, | |
204 | strict=self.strict, |
|
205 | strict=self.strict, | |
205 | method=self._method) |
|
206 | method=self._method) | |
206 |
|
207 | |||
207 | while True: |
|
208 | while True: | |
208 | version, status, reason = res._read_status() |
|
209 | version, status, reason = res._read_status() | |
209 | if status != httplib.CONTINUE: |
|
210 | if status != httplib.CONTINUE: | |
210 | break |
|
211 | break | |
211 | # skip lines that are all whitespace |
|
212 | # skip lines that are all whitespace | |
212 | list(iter(lambda: res.fp.readline().strip(), '')) |
|
213 | list(iter(lambda: res.fp.readline().strip(), '')) | |
213 | res.status = status |
|
214 | res.status = status | |
214 | res.reason = reason.strip() |
|
215 | res.reason = reason.strip() | |
215 |
|
216 | |||
216 | if res.status == 200: |
|
217 | if res.status == 200: | |
217 | # skip lines until we find a blank line |
|
218 | # skip lines until we find a blank line | |
218 | list(iter(res.fp.readline, '\r\n')) |
|
219 | list(iter(res.fp.readline, '\r\n')) | |
219 | return True |
|
220 | return True | |
220 |
|
221 | |||
221 | if version == 'HTTP/1.0': |
|
222 | if version == 'HTTP/1.0': | |
222 | res.version = 10 |
|
223 | res.version = 10 | |
223 | elif version.startswith('HTTP/1.'): |
|
224 | elif version.startswith('HTTP/1.'): | |
224 | res.version = 11 |
|
225 | res.version = 11 | |
225 | elif version == 'HTTP/0.9': |
|
226 | elif version == 'HTTP/0.9': | |
226 | res.version = 9 |
|
227 | res.version = 9 | |
227 | else: |
|
228 | else: | |
228 | raise httplib.UnknownProtocol(version) |
|
229 | raise httplib.UnknownProtocol(version) | |
229 |
|
230 | |||
230 | if res.version == 9: |
|
231 | if res.version == 9: | |
231 | res.length = None |
|
232 | res.length = None | |
232 | res.chunked = 0 |
|
233 | res.chunked = 0 | |
233 | res.will_close = 1 |
|
234 | res.will_close = 1 | |
234 | res.msg = httplib.HTTPMessage(stringio()) |
|
235 | res.msg = httplib.HTTPMessage(stringio()) | |
235 | return False |
|
236 | return False | |
236 |
|
237 | |||
237 | res.msg = httplib.HTTPMessage(res.fp) |
|
238 | res.msg = httplib.HTTPMessage(res.fp) | |
238 | res.msg.fp = None |
|
239 | res.msg.fp = None | |
239 |
|
240 | |||
240 | # are we using the chunked-style of transfer encoding? |
|
241 | # are we using the chunked-style of transfer encoding? | |
241 | trenc = res.msg.getheader('transfer-encoding') |
|
242 | trenc = res.msg.getheader('transfer-encoding') | |
242 | if trenc and trenc.lower() == "chunked": |
|
243 | if trenc and trenc.lower() == "chunked": | |
243 | res.chunked = 1 |
|
244 | res.chunked = 1 | |
244 | res.chunk_left = None |
|
245 | res.chunk_left = None | |
245 | else: |
|
246 | else: | |
246 | res.chunked = 0 |
|
247 | res.chunked = 0 | |
247 |
|
248 | |||
248 | # will the connection close at the end of the response? |
|
249 | # will the connection close at the end of the response? | |
249 | res.will_close = res._check_close() |
|
250 | res.will_close = res._check_close() | |
250 |
|
251 | |||
251 | # do we have a Content-Length? |
|
252 | # do we have a Content-Length? | |
252 | # NOTE: RFC 2616, section 4.4, #3 says we ignore this if |
|
253 | # NOTE: RFC 2616, section 4.4, #3 says we ignore this if | |
253 | # transfer-encoding is "chunked" |
|
254 | # transfer-encoding is "chunked" | |
254 | length = res.msg.getheader('content-length') |
|
255 | length = res.msg.getheader('content-length') | |
255 | if length and not res.chunked: |
|
256 | if length and not res.chunked: | |
256 | try: |
|
257 | try: | |
257 | res.length = int(length) |
|
258 | res.length = int(length) | |
258 | except ValueError: |
|
259 | except ValueError: | |
259 | res.length = None |
|
260 | res.length = None | |
260 | else: |
|
261 | else: | |
261 | if res.length < 0: # ignore nonsensical negative lengths |
|
262 | if res.length < 0: # ignore nonsensical negative lengths | |
262 | res.length = None |
|
263 | res.length = None | |
263 | else: |
|
264 | else: | |
264 | res.length = None |
|
265 | res.length = None | |
265 |
|
266 | |||
266 | # does the body have a fixed length? (of zero) |
|
267 | # does the body have a fixed length? (of zero) | |
267 | if (status == httplib.NO_CONTENT or status == httplib.NOT_MODIFIED or |
|
268 | if (status == httplib.NO_CONTENT or status == httplib.NOT_MODIFIED or | |
268 | 100 <= status < 200 or # 1xx codes |
|
269 | 100 <= status < 200 or # 1xx codes | |
269 | res._method == 'HEAD'): |
|
270 | res._method == 'HEAD'): | |
270 | res.length = 0 |
|
271 | res.length = 0 | |
271 |
|
272 | |||
272 | # if the connection remains open, and we aren't using chunked, and |
|
273 | # if the connection remains open, and we aren't using chunked, and | |
273 | # a content-length was not provided, then assume that the connection |
|
274 | # a content-length was not provided, then assume that the connection | |
274 | # WILL close. |
|
275 | # WILL close. | |
275 | if (not res.will_close and |
|
276 | if (not res.will_close and | |
276 | not res.chunked and |
|
277 | not res.chunked and | |
277 | res.length is None): |
|
278 | res.length is None): | |
278 | res.will_close = 1 |
|
279 | res.will_close = 1 | |
279 |
|
280 | |||
280 | self.proxyres = res |
|
281 | self.proxyres = res | |
281 |
|
282 | |||
282 | return False |
|
283 | return False | |
283 |
|
284 | |||
284 | class httphandler(keepalive.HTTPHandler): |
|
285 | class httphandler(keepalive.HTTPHandler): | |
285 | def http_open(self, req): |
|
286 | def http_open(self, req): | |
286 | return self.do_open(httpconnection, req) |
|
287 | return self.do_open(httpconnection, req) | |
287 |
|
288 | |||
288 | def _start_transaction(self, h, req): |
|
289 | def _start_transaction(self, h, req): | |
289 | _generic_start_transaction(self, h, req) |
|
290 | _generic_start_transaction(self, h, req) | |
290 | return keepalive.HTTPHandler._start_transaction(self, h, req) |
|
291 | return keepalive.HTTPHandler._start_transaction(self, h, req) | |
291 |
|
292 | |||
292 | if has_https: |
|
293 | if has_https: | |
293 | class httpsconnection(httplib.HTTPConnection): |
|
294 | class httpsconnection(httplib.HTTPConnection): | |
294 | response_class = keepalive.HTTPResponse |
|
295 | response_class = keepalive.HTTPResponse | |
295 | default_port = httplib.HTTPS_PORT |
|
296 | default_port = httplib.HTTPS_PORT | |
296 | # must be able to send big bundle as stream. |
|
297 | # must be able to send big bundle as stream. | |
297 | send = _gen_sendfile(keepalive.safesend) |
|
298 | send = _gen_sendfile(keepalive.safesend) | |
298 | getresponse = keepalive.wrapgetresponse(httplib.HTTPConnection) |
|
299 | getresponse = keepalive.wrapgetresponse(httplib.HTTPConnection) | |
299 |
|
300 | |||
300 | def __init__(self, host, port=None, key_file=None, cert_file=None, |
|
301 | def __init__(self, host, port=None, key_file=None, cert_file=None, | |
301 | *args, **kwargs): |
|
302 | *args, **kwargs): | |
302 | httplib.HTTPConnection.__init__(self, host, port, *args, **kwargs) |
|
303 | httplib.HTTPConnection.__init__(self, host, port, *args, **kwargs) | |
303 | self.key_file = key_file |
|
304 | self.key_file = key_file | |
304 | self.cert_file = cert_file |
|
305 | self.cert_file = cert_file | |
305 |
|
306 | |||
306 | def connect(self): |
|
307 | def connect(self): | |
307 | self.sock = socket.create_connection((self.host, self.port)) |
|
308 | self.sock = socket.create_connection((self.host, self.port)) | |
308 |
|
309 | |||
309 | host = self.host |
|
310 | host = self.host | |
310 | if self.realhostport: # use CONNECT proxy |
|
311 | if self.realhostport: # use CONNECT proxy | |
311 | _generic_proxytunnel(self) |
|
312 | _generic_proxytunnel(self) | |
312 | host = self.realhostport.rsplit(':', 1)[0] |
|
313 | host = self.realhostport.rsplit(':', 1)[0] | |
313 | self.sock = sslutil.wrapsocket( |
|
314 | self.sock = sslutil.wrapsocket( | |
314 | self.sock, self.key_file, self.cert_file, ui=self.ui, |
|
315 | self.sock, self.key_file, self.cert_file, ui=self.ui, | |
315 | serverhostname=host) |
|
316 | serverhostname=host) | |
316 | sslutil.validatesocket(self.sock) |
|
317 | sslutil.validatesocket(self.sock) | |
317 |
|
318 | |||
318 | class httpshandler(keepalive.KeepAliveHandler, urlreq.httpshandler): |
|
319 | class httpshandler(keepalive.KeepAliveHandler, urlreq.httpshandler): | |
319 | def __init__(self, ui): |
|
320 | def __init__(self, ui): | |
320 | keepalive.KeepAliveHandler.__init__(self) |
|
321 | keepalive.KeepAliveHandler.__init__(self) | |
321 | urlreq.httpshandler.__init__(self) |
|
322 | urlreq.httpshandler.__init__(self) | |
322 | self.ui = ui |
|
323 | self.ui = ui | |
323 | self.pwmgr = passwordmgr(self.ui, |
|
324 | self.pwmgr = passwordmgr(self.ui, | |
324 | self.ui.httppasswordmgrdb) |
|
325 | self.ui.httppasswordmgrdb) | |
325 |
|
326 | |||
326 | def _start_transaction(self, h, req): |
|
327 | def _start_transaction(self, h, req): | |
327 | _generic_start_transaction(self, h, req) |
|
328 | _generic_start_transaction(self, h, req) | |
328 | return keepalive.KeepAliveHandler._start_transaction(self, h, req) |
|
329 | return keepalive.KeepAliveHandler._start_transaction(self, h, req) | |
329 |
|
330 | |||
330 | def https_open(self, req): |
|
331 | def https_open(self, req): | |
331 | # req.get_full_url() does not contain credentials and we may |
|
332 | # req.get_full_url() does not contain credentials and we may | |
332 | # need them to match the certificates. |
|
333 | # need them to match the certificates. | |
333 | url = req.get_full_url() |
|
334 | url = req.get_full_url() | |
334 | user, password = self.pwmgr.find_stored_password(url) |
|
335 | user, password = self.pwmgr.find_stored_password(url) | |
335 | res = httpconnectionmod.readauthforuri(self.ui, url, user) |
|
336 | res = httpconnectionmod.readauthforuri(self.ui, url, user) | |
336 | if res: |
|
337 | if res: | |
337 | group, auth = res |
|
338 | group, auth = res | |
338 | self.auth = auth |
|
339 | self.auth = auth | |
339 | self.ui.debug("using auth.%s.* for authentication\n" % group) |
|
340 | self.ui.debug("using auth.%s.* for authentication\n" % group) | |
340 | else: |
|
341 | else: | |
341 | self.auth = None |
|
342 | self.auth = None | |
342 | return self.do_open(self._makeconnection, req) |
|
343 | return self.do_open(self._makeconnection, req) | |
343 |
|
344 | |||
344 | def _makeconnection(self, host, port=None, *args, **kwargs): |
|
345 | def _makeconnection(self, host, port=None, *args, **kwargs): | |
345 | keyfile = None |
|
346 | keyfile = None | |
346 | certfile = None |
|
347 | certfile = None | |
347 |
|
348 | |||
348 | if len(args) >= 1: # key_file |
|
349 | if len(args) >= 1: # key_file | |
349 | keyfile = args[0] |
|
350 | keyfile = args[0] | |
350 | if len(args) >= 2: # cert_file |
|
351 | if len(args) >= 2: # cert_file | |
351 | certfile = args[1] |
|
352 | certfile = args[1] | |
352 | args = args[2:] |
|
353 | args = args[2:] | |
353 |
|
354 | |||
354 | # if the user has specified different key/cert files in |
|
355 | # if the user has specified different key/cert files in | |
355 | # hgrc, we prefer these |
|
356 | # hgrc, we prefer these | |
356 | if self.auth and 'key' in self.auth and 'cert' in self.auth: |
|
357 | if self.auth and 'key' in self.auth and 'cert' in self.auth: | |
357 | keyfile = self.auth['key'] |
|
358 | keyfile = self.auth['key'] | |
358 | certfile = self.auth['cert'] |
|
359 | certfile = self.auth['cert'] | |
359 |
|
360 | |||
360 | conn = httpsconnection(host, port, keyfile, certfile, *args, |
|
361 | conn = httpsconnection(host, port, keyfile, certfile, *args, | |
361 | **kwargs) |
|
362 | **kwargs) | |
362 | conn.ui = self.ui |
|
363 | conn.ui = self.ui | |
363 | return conn |
|
364 | return conn | |
364 |
|
365 | |||
365 | class httpdigestauthhandler(urlreq.httpdigestauthhandler): |
|
366 | class httpdigestauthhandler(urlreq.httpdigestauthhandler): | |
366 | def __init__(self, *args, **kwargs): |
|
367 | def __init__(self, *args, **kwargs): | |
367 | urlreq.httpdigestauthhandler.__init__(self, *args, **kwargs) |
|
368 | urlreq.httpdigestauthhandler.__init__(self, *args, **kwargs) | |
368 | self.retried_req = None |
|
369 | self.retried_req = None | |
369 |
|
370 | |||
370 | def reset_retry_count(self): |
|
371 | def reset_retry_count(self): | |
371 | # Python 2.6.5 will call this on 401 or 407 errors and thus loop |
|
372 | # Python 2.6.5 will call this on 401 or 407 errors and thus loop | |
372 | # forever. We disable reset_retry_count completely and reset in |
|
373 | # forever. We disable reset_retry_count completely and reset in | |
373 | # http_error_auth_reqed instead. |
|
374 | # http_error_auth_reqed instead. | |
374 | pass |
|
375 | pass | |
375 |
|
376 | |||
376 | def http_error_auth_reqed(self, auth_header, host, req, headers): |
|
377 | def http_error_auth_reqed(self, auth_header, host, req, headers): | |
377 | # Reset the retry counter once for each request. |
|
378 | # Reset the retry counter once for each request. | |
378 | if req is not self.retried_req: |
|
379 | if req is not self.retried_req: | |
379 | self.retried_req = req |
|
380 | self.retried_req = req | |
380 | self.retried = 0 |
|
381 | self.retried = 0 | |
381 | return urlreq.httpdigestauthhandler.http_error_auth_reqed( |
|
382 | return urlreq.httpdigestauthhandler.http_error_auth_reqed( | |
382 | self, auth_header, host, req, headers) |
|
383 | self, auth_header, host, req, headers) | |
383 |
|
384 | |||
384 | class httpbasicauthhandler(urlreq.httpbasicauthhandler): |
|
385 | class httpbasicauthhandler(urlreq.httpbasicauthhandler): | |
385 | def __init__(self, *args, **kwargs): |
|
386 | def __init__(self, *args, **kwargs): | |
386 | self.auth = None |
|
387 | self.auth = None | |
387 | urlreq.httpbasicauthhandler.__init__(self, *args, **kwargs) |
|
388 | urlreq.httpbasicauthhandler.__init__(self, *args, **kwargs) | |
388 | self.retried_req = None |
|
389 | self.retried_req = None | |
389 |
|
390 | |||
390 | def http_request(self, request): |
|
391 | def http_request(self, request): | |
391 | if self.auth: |
|
392 | if self.auth: | |
392 | request.add_unredirected_header(self.auth_header, self.auth) |
|
393 | request.add_unredirected_header(self.auth_header, self.auth) | |
393 |
|
394 | |||
394 | return request |
|
395 | return request | |
395 |
|
396 | |||
396 | def https_request(self, request): |
|
397 | def https_request(self, request): | |
397 | if self.auth: |
|
398 | if self.auth: | |
398 | request.add_unredirected_header(self.auth_header, self.auth) |
|
399 | request.add_unredirected_header(self.auth_header, self.auth) | |
399 |
|
400 | |||
400 | return request |
|
401 | return request | |
401 |
|
402 | |||
402 | def reset_retry_count(self): |
|
403 | def reset_retry_count(self): | |
403 | # Python 2.6.5 will call this on 401 or 407 errors and thus loop |
|
404 | # Python 2.6.5 will call this on 401 or 407 errors and thus loop | |
404 | # forever. We disable reset_retry_count completely and reset in |
|
405 | # forever. We disable reset_retry_count completely and reset in | |
405 | # http_error_auth_reqed instead. |
|
406 | # http_error_auth_reqed instead. | |
406 | pass |
|
407 | pass | |
407 |
|
408 | |||
408 | def http_error_auth_reqed(self, auth_header, host, req, headers): |
|
409 | def http_error_auth_reqed(self, auth_header, host, req, headers): | |
409 | # Reset the retry counter once for each request. |
|
410 | # Reset the retry counter once for each request. | |
410 | if req is not self.retried_req: |
|
411 | if req is not self.retried_req: | |
411 | self.retried_req = req |
|
412 | self.retried_req = req | |
412 | self.retried = 0 |
|
413 | self.retried = 0 | |
413 | return urlreq.httpbasicauthhandler.http_error_auth_reqed( |
|
414 | return urlreq.httpbasicauthhandler.http_error_auth_reqed( | |
414 | self, auth_header, host, req, headers) |
|
415 | self, auth_header, host, req, headers) | |
415 |
|
416 | |||
416 | def retry_http_basic_auth(self, host, req, realm): |
|
417 | def retry_http_basic_auth(self, host, req, realm): | |
417 | user, pw = self.passwd.find_user_password(realm, req.get_full_url()) |
|
418 | user, pw = self.passwd.find_user_password(realm, req.get_full_url()) | |
418 | if pw is not None: |
|
419 | if pw is not None: | |
419 | raw = "%s:%s" % (user, pw) |
|
420 | raw = "%s:%s" % (user, pw) | |
420 | auth = 'Basic %s' % base64.b64encode(raw).strip() |
|
421 | auth = 'Basic %s' % base64.b64encode(raw).strip() | |
421 | if req.get_header(self.auth_header, None) == auth: |
|
422 | if req.get_header(self.auth_header, None) == auth: | |
422 | return None |
|
423 | return None | |
423 | self.auth = auth |
|
424 | self.auth = auth | |
424 | req.add_unredirected_header(self.auth_header, auth) |
|
425 | req.add_unredirected_header(self.auth_header, auth) | |
425 | return self.parent.open(req) |
|
426 | return self.parent.open(req) | |
426 | else: |
|
427 | else: | |
427 | return None |
|
428 | return None | |
428 |
|
429 | |||
429 | handlerfuncs = [] |
|
430 | handlerfuncs = [] | |
430 |
|
431 | |||
431 | def opener(ui, authinfo=None): |
|
432 | def opener(ui, authinfo=None): | |
432 | ''' |
|
433 | ''' | |
433 | construct an opener suitable for urllib2 |
|
434 | construct an opener suitable for urllib2 | |
434 | authinfo will be added to the password manager |
|
435 | authinfo will be added to the password manager | |
435 | ''' |
|
436 | ''' | |
436 | # experimental config: ui.usehttp2 |
|
437 | # experimental config: ui.usehttp2 | |
437 | if ui.configbool('ui', 'usehttp2', False): |
|
438 | if ui.configbool('ui', 'usehttp2', False): | |
438 | handlers = [ |
|
439 | handlers = [ | |
439 | httpconnectionmod.http2handler( |
|
440 | httpconnectionmod.http2handler( | |
440 | ui, |
|
441 | ui, | |
441 | passwordmgr(ui, ui.httppasswordmgrdb)) |
|
442 | passwordmgr(ui, ui.httppasswordmgrdb)) | |
442 | ] |
|
443 | ] | |
443 | else: |
|
444 | else: | |
444 | handlers = [httphandler()] |
|
445 | handlers = [httphandler()] | |
445 | if has_https: |
|
446 | if has_https: | |
446 | handlers.append(httpshandler(ui)) |
|
447 | handlers.append(httpshandler(ui)) | |
447 |
|
448 | |||
448 | handlers.append(proxyhandler(ui)) |
|
449 | handlers.append(proxyhandler(ui)) | |
449 |
|
450 | |||
450 | passmgr = passwordmgr(ui, ui.httppasswordmgrdb) |
|
451 | passmgr = passwordmgr(ui, ui.httppasswordmgrdb) | |
451 | if authinfo is not None: |
|
452 | if authinfo is not None: | |
452 | realm, uris, user, passwd = authinfo |
|
453 | realm, uris, user, passwd = authinfo | |
453 | saveduser, savedpass = passmgr.find_stored_password(uris[0]) |
|
454 | saveduser, savedpass = passmgr.find_stored_password(uris[0]) | |
454 | if user != saveduser or passwd: |
|
455 | if user != saveduser or passwd: | |
455 | passmgr.add_password(realm, uris, user, passwd) |
|
456 | passmgr.add_password(realm, uris, user, passwd) | |
456 | ui.debug('http auth: user %s, password %s\n' % |
|
457 | ui.debug('http auth: user %s, password %s\n' % | |
457 | (user, passwd and '*' * len(passwd) or 'not set')) |
|
458 | (user, passwd and '*' * len(passwd) or 'not set')) | |
458 |
|
459 | |||
459 | handlers.extend((httpbasicauthhandler(passmgr), |
|
460 | handlers.extend((httpbasicauthhandler(passmgr), | |
460 | httpdigestauthhandler(passmgr))) |
|
461 | httpdigestauthhandler(passmgr))) | |
461 | handlers.extend([h(ui, passmgr) for h in handlerfuncs]) |
|
462 | handlers.extend([h(ui, passmgr) for h in handlerfuncs]) | |
462 | opener = urlreq.buildopener(*handlers) |
|
463 | opener = urlreq.buildopener(*handlers) | |
463 |
|
464 | |||
464 | # The user agent should should *NOT* be used by servers for e.g. |
|
465 | # The user agent should should *NOT* be used by servers for e.g. | |
465 | # protocol detection or feature negotiation: there are other |
|
466 | # protocol detection or feature negotiation: there are other | |
466 | # facilities for that. |
|
467 | # facilities for that. | |
467 | # |
|
468 | # | |
468 | # "mercurial/proto-1.0" was the original user agent string and |
|
469 | # "mercurial/proto-1.0" was the original user agent string and | |
469 | # exists for backwards compatibility reasons. |
|
470 | # exists for backwards compatibility reasons. | |
470 | # |
|
471 | # | |
471 | # The "(Mercurial %s)" string contains the distribution |
|
472 | # The "(Mercurial %s)" string contains the distribution | |
472 | # name and version. Other client implementations should choose their |
|
473 | # name and version. Other client implementations should choose their | |
473 | # own distribution name. Since servers should not be using the user |
|
474 | # own distribution name. Since servers should not be using the user | |
474 | # agent string for anything, clients should be able to define whatever |
|
475 | # agent string for anything, clients should be able to define whatever | |
475 | # user agent they deem appropriate. |
|
476 | # user agent they deem appropriate. | |
476 | agent = 'mercurial/proto-1.0 (Mercurial %s)' % util.version() |
|
477 | agent = 'mercurial/proto-1.0 (Mercurial %s)' % util.version() | |
477 | opener.addheaders = [('User-agent', agent)] |
|
478 | opener.addheaders = [('User-agent', agent)] | |
478 | opener.addheaders.append(('Accept', 'application/mercurial-0.1')) |
|
479 | opener.addheaders.append(('Accept', 'application/mercurial-0.1')) | |
479 | return opener |
|
480 | return opener | |
480 |
|
481 | |||
481 | def open(ui, url_, data=None): |
|
482 | def open(ui, url_, data=None): | |
482 | u = util.url(url_) |
|
483 | u = util.url(url_) | |
483 | if u.scheme: |
|
484 | if u.scheme: | |
484 | u.scheme = u.scheme.lower() |
|
485 | u.scheme = u.scheme.lower() | |
485 | url_, authinfo = u.authinfo() |
|
486 | url_, authinfo = u.authinfo() | |
486 | else: |
|
487 | else: | |
487 | path = util.normpath(os.path.abspath(url_)) |
|
488 | path = util.normpath(os.path.abspath(url_)) | |
488 | url_ = 'file://' + urlreq.pathname2url(path) |
|
489 | url_ = 'file://' + urlreq.pathname2url(path) | |
489 | authinfo = None |
|
490 | authinfo = None | |
490 | return opener(ui, authinfo).open(url_, data) |
|
491 | return opener(ui, authinfo).open(url_, data) |
@@ -1,479 +1,479 | |||||
1 | # windows.py - Windows utility function implementations for Mercurial |
|
1 | # windows.py - Windows utility function implementations for Mercurial | |
2 | # |
|
2 | # | |
3 | # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others |
|
3 | # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms of the |
|
5 | # This software may be used and distributed according to the terms of the | |
6 | # GNU General Public License version 2 or any later version. |
|
6 | # GNU General Public License version 2 or any later version. | |
7 |
|
7 | |||
8 | from __future__ import absolute_import |
|
8 | from __future__ import absolute_import | |
9 |
|
9 | |||
10 | import errno |
|
10 | import errno | |
11 | import msvcrt |
|
11 | import msvcrt | |
12 | import os |
|
12 | import os | |
13 | import re |
|
13 | import re | |
14 | import stat |
|
14 | import stat | |
15 | import sys |
|
15 | import sys | |
16 |
|
16 | |||
17 | from .i18n import _ |
|
17 | from .i18n import _ | |
18 | from . import ( |
|
18 | from . import ( | |
19 | encoding, |
|
19 | encoding, | |
20 | osutil, |
|
20 | osutil, | |
21 | pycompat, |
|
21 | pycompat, | |
22 | win32, |
|
22 | win32, | |
23 | ) |
|
23 | ) | |
24 |
|
24 | |||
25 | try: |
|
25 | try: | |
26 | import _winreg as winreg |
|
26 | import _winreg as winreg | |
27 | winreg.CloseKey |
|
27 | winreg.CloseKey | |
28 | except ImportError: |
|
28 | except ImportError: | |
29 | import winreg |
|
29 | import winreg | |
30 |
|
30 | |||
31 | executablepath = win32.executablepath |
|
31 | executablepath = win32.executablepath | |
32 | getuser = win32.getuser |
|
32 | getuser = win32.getuser | |
33 | hidewindow = win32.hidewindow |
|
33 | hidewindow = win32.hidewindow | |
34 | makedir = win32.makedir |
|
34 | makedir = win32.makedir | |
35 | nlinks = win32.nlinks |
|
35 | nlinks = win32.nlinks | |
36 | oslink = win32.oslink |
|
36 | oslink = win32.oslink | |
37 | samedevice = win32.samedevice |
|
37 | samedevice = win32.samedevice | |
38 | samefile = win32.samefile |
|
38 | samefile = win32.samefile | |
39 | setsignalhandler = win32.setsignalhandler |
|
39 | setsignalhandler = win32.setsignalhandler | |
40 | spawndetached = win32.spawndetached |
|
40 | spawndetached = win32.spawndetached | |
41 | split = os.path.split |
|
41 | split = os.path.split | |
42 | testpid = win32.testpid |
|
42 | testpid = win32.testpid | |
43 | unlink = win32.unlink |
|
43 | unlink = win32.unlink | |
44 |
|
44 | |||
45 | umask = 0o022 |
|
45 | umask = 0o022 | |
46 |
|
46 | |||
47 | class mixedfilemodewrapper(object): |
|
47 | class mixedfilemodewrapper(object): | |
48 | """Wraps a file handle when it is opened in read/write mode. |
|
48 | """Wraps a file handle when it is opened in read/write mode. | |
49 |
|
49 | |||
50 | fopen() and fdopen() on Windows have a specific-to-Windows requirement |
|
50 | fopen() and fdopen() on Windows have a specific-to-Windows requirement | |
51 | that files opened with mode r+, w+, or a+ make a call to a file positioning |
|
51 | that files opened with mode r+, w+, or a+ make a call to a file positioning | |
52 | function when switching between reads and writes. Without this extra call, |
|
52 | function when switching between reads and writes. Without this extra call, | |
53 | Python will raise a not very intuitive "IOError: [Errno 0] Error." |
|
53 | Python will raise a not very intuitive "IOError: [Errno 0] Error." | |
54 |
|
54 | |||
55 | This class wraps posixfile instances when the file is opened in read/write |
|
55 | This class wraps posixfile instances when the file is opened in read/write | |
56 | mode and automatically adds checks or inserts appropriate file positioning |
|
56 | mode and automatically adds checks or inserts appropriate file positioning | |
57 | calls when necessary. |
|
57 | calls when necessary. | |
58 | """ |
|
58 | """ | |
59 | OPNONE = 0 |
|
59 | OPNONE = 0 | |
60 | OPREAD = 1 |
|
60 | OPREAD = 1 | |
61 | OPWRITE = 2 |
|
61 | OPWRITE = 2 | |
62 |
|
62 | |||
63 | def __init__(self, fp): |
|
63 | def __init__(self, fp): | |
64 | object.__setattr__(self, '_fp', fp) |
|
64 | object.__setattr__(self, '_fp', fp) | |
65 | object.__setattr__(self, '_lastop', 0) |
|
65 | object.__setattr__(self, '_lastop', 0) | |
66 |
|
66 | |||
67 | def __getattr__(self, name): |
|
67 | def __getattr__(self, name): | |
68 | return getattr(self._fp, name) |
|
68 | return getattr(self._fp, name) | |
69 |
|
69 | |||
70 | def __setattr__(self, name, value): |
|
70 | def __setattr__(self, name, value): | |
71 | return self._fp.__setattr__(name, value) |
|
71 | return self._fp.__setattr__(name, value) | |
72 |
|
72 | |||
73 | def _noopseek(self): |
|
73 | def _noopseek(self): | |
74 | self._fp.seek(0, os.SEEK_CUR) |
|
74 | self._fp.seek(0, os.SEEK_CUR) | |
75 |
|
75 | |||
76 | def seek(self, *args, **kwargs): |
|
76 | def seek(self, *args, **kwargs): | |
77 | object.__setattr__(self, '_lastop', self.OPNONE) |
|
77 | object.__setattr__(self, '_lastop', self.OPNONE) | |
78 | return self._fp.seek(*args, **kwargs) |
|
78 | return self._fp.seek(*args, **kwargs) | |
79 |
|
79 | |||
80 | def write(self, d): |
|
80 | def write(self, d): | |
81 | if self._lastop == self.OPREAD: |
|
81 | if self._lastop == self.OPREAD: | |
82 | self._noopseek() |
|
82 | self._noopseek() | |
83 |
|
83 | |||
84 | object.__setattr__(self, '_lastop', self.OPWRITE) |
|
84 | object.__setattr__(self, '_lastop', self.OPWRITE) | |
85 | return self._fp.write(d) |
|
85 | return self._fp.write(d) | |
86 |
|
86 | |||
87 | def writelines(self, *args, **kwargs): |
|
87 | def writelines(self, *args, **kwargs): | |
88 | if self._lastop == self.OPREAD: |
|
88 | if self._lastop == self.OPREAD: | |
89 | self._noopeseek() |
|
89 | self._noopeseek() | |
90 |
|
90 | |||
91 | object.__setattr__(self, '_lastop', self.OPWRITE) |
|
91 | object.__setattr__(self, '_lastop', self.OPWRITE) | |
92 | return self._fp.writelines(*args, **kwargs) |
|
92 | return self._fp.writelines(*args, **kwargs) | |
93 |
|
93 | |||
94 | def read(self, *args, **kwargs): |
|
94 | def read(self, *args, **kwargs): | |
95 | if self._lastop == self.OPWRITE: |
|
95 | if self._lastop == self.OPWRITE: | |
96 | self._noopseek() |
|
96 | self._noopseek() | |
97 |
|
97 | |||
98 | object.__setattr__(self, '_lastop', self.OPREAD) |
|
98 | object.__setattr__(self, '_lastop', self.OPREAD) | |
99 | return self._fp.read(*args, **kwargs) |
|
99 | return self._fp.read(*args, **kwargs) | |
100 |
|
100 | |||
101 | def readline(self, *args, **kwargs): |
|
101 | def readline(self, *args, **kwargs): | |
102 | if self._lastop == self.OPWRITE: |
|
102 | if self._lastop == self.OPWRITE: | |
103 | self._noopseek() |
|
103 | self._noopseek() | |
104 |
|
104 | |||
105 | object.__setattr__(self, '_lastop', self.OPREAD) |
|
105 | object.__setattr__(self, '_lastop', self.OPREAD) | |
106 | return self._fp.readline(*args, **kwargs) |
|
106 | return self._fp.readline(*args, **kwargs) | |
107 |
|
107 | |||
108 | def readlines(self, *args, **kwargs): |
|
108 | def readlines(self, *args, **kwargs): | |
109 | if self._lastop == self.OPWRITE: |
|
109 | if self._lastop == self.OPWRITE: | |
110 | self._noopseek() |
|
110 | self._noopseek() | |
111 |
|
111 | |||
112 | object.__setattr__(self, '_lastop', self.OPREAD) |
|
112 | object.__setattr__(self, '_lastop', self.OPREAD) | |
113 | return self._fp.readlines(*args, **kwargs) |
|
113 | return self._fp.readlines(*args, **kwargs) | |
114 |
|
114 | |||
def posixfile(name, mode='r', buffering=-1):
    '''Open a file with even more POSIX-like semantics'''
    try:
        fp = osutil.posixfile(name, mode, buffering) # may raise WindowsError

        # The position when opening in append mode is implementation
        # defined, so make it consistent with other platforms, which
        # position at EOF.
        if 'a' in mode:
            fp.seek(0, os.SEEK_END)

        # Update ('+') modes mix reads and writes on one handle and
        # need explicit repositioning between the two; wrap the file.
        if '+' in mode:
            return mixedfilemodewrapper(fp)

        return fp
    except WindowsError as err:
        # convert to a friendlier exception
        raise IOError(err.errno, '%s: %s' % (name, err.strerror))
class winstdout(object):
    '''stdout on windows misbehaves if sent through a pipe'''

    def __init__(self, fp):
        self.fp = fp

    def __getattr__(self, key):
        # Everything we don't override is delegated to the real stream.
        return getattr(self.fp, key)

    def close(self):
        try:
            self.fp.close()
        except IOError:
            pass

    def write(self, s):
        """Write ``s`` in bounded chunks.

        This is a workaround for the "Not enough space" error raised
        when writing a large buffer to the console in a single call.
        """
        try:
            limit = 16000
            self.softspace = 0
            pos = 0
            total = len(s)
            while pos < total:
                self.fp.write(s[pos:pos + limit])
                pos += limit
        except IOError as inst:
            if inst.errno != 0:
                raise
            self.close()
            raise IOError(errno.EPIPE, 'Broken pipe')

    def flush(self):
        try:
            return self.fp.flush()
        except IOError as inst:
            if inst.errno != errno.EINVAL:
                raise
            self.close()
            raise IOError(errno.EPIPE, 'Broken pipe')
def _is_win_9x():
    '''return true if run on windows 95, 98 or me.'''
    try:
        # getwindowsversion()[3] == 1 identifies the 9x platform family.
        return sys.getwindowsversion()[3] == 1
    except AttributeError:
        # getwindowsversion is unavailable: fall back to sniffing the
        # configured shell; command.com implies the 9x family.
        return 'command' in encoding.environ.get('comspec', '')

def openhardlinks():
    '''Hardlinked files can be opened on anything newer than Windows 9x.'''
    return not _is_win_9x()
def parsepatchoutput(output_line):
    """Parse a "patching file ..." line emitted by patch(1) and return
    the file name.

    GNU-style backquotes around the name ("`name'") are stripped.
    Robustness fix: lines shorter than the fixed 14-character prefix now
    yield '' instead of raising IndexError (the original indexed pf[0]
    unconditionally).
    """
    pf = output_line[14:]  # skip the 'patching file ' prefix
    if pf.startswith('`'):  # startswith() is safe on an empty string
        pf = pf[1:-1]  # Remove the quotes
    return pf
def sshargs(sshcmd, host, user, port):
    '''Build argument list for ssh or Plink'''
    # Plink spells the port option -P where OpenSSH uses -p.
    if 'plink' in sshcmd.lower():
        pflag = '-P'
    else:
        pflag = '-p'
    args = host
    if user:
        args = "%s@%s" % (user, host)
    if port:
        args = "%s %s %s" % (args, pflag, port)
    return args
def setflags(f, l, x):
    # Symlink and executable flags cannot be set here; no-op stub.
    pass

def copymode(src, dst, mode=None):
    # Permission bits are not meaningful on this platform; no-op stub.
    pass

def checkexec(path):
    """The filesystem cannot record an executable bit."""
    return False

def checklink(path):
    """The filesystem does not support symlinks."""
    return False
def setbinary(fd):
    """Switch ``fd`` to binary mode via msvcrt.

    When run without console, pipes may expose an invalid fileno(),
    usually set to -1; such descriptors are left untouched.
    """
    fno = getattr(fd, 'fileno', None)
    if fno is None:
        return
    if fno() >= 0:
        msvcrt.setmode(fno(), os.O_BINARY)
def pconvert(path):
    """Convert an OS-native path to one using '/' separators."""
    return path.replace(pycompat.ossep, '/')

def localpath(path):
    """Convert a '/'-separated path to the native backslash form."""
    return '\\'.join(path.split('/'))

def normpath(path):
    """Normalize a path and return it with '/' separators."""
    return pconvert(os.path.normpath(path))

def normcase(path):
    return encoding.upper(path) # NTFS compares via upper()
# see posix.py for definitions
normcasespec = encoding.normcasespecs.upper
normcasefallback = encoding.upperfallback

def samestat(s1, s2):
    """Stat results cannot be compared for file identity here."""
    return False
# A sequence of backslashes is special iff it precedes a double quote:
# - if there's an even number of backslashes, the double quote is not
#   quoted (i.e. it ends the quoted region)
# - if there's an odd number of backslashes, the double quote is quoted
# - in both cases, every pair of backslashes is unquoted into a single
#   backslash
# (See http://msdn2.microsoft.com/en-us/library/a1y7w461.aspx )
# So, to quote a string, we must surround it in double quotes, double
# the number of backslashes that precede double quotes and add another
# backslash before every double quote (being careful with the double
# quote we've appended to the end)
_quotere = None
_needsshellquote = None
def shellquote(s):
    r"""Quote ``s`` for use on a cmd.exe command line.

    >>> shellquote(r'C:\Users\xyz')
    '"C:\\Users\\xyz"'
    >>> shellquote(r'C:\Users\xyz/mixed')
    '"C:\\Users\\xyz/mixed"'
    >>> # Would be safe not to quote too, since it is all double backslashes
    >>> shellquote(r'C:\\Users\\xyz')
    '"C:\\\\Users\\\\xyz"'
    >>> # But this must be quoted
    >>> shellquote(r'C:\\Users\\xyz/abc')
    '"C:\\\\Users\\\\xyz/abc"'
    """
    # Both regexes are compiled lazily and cached at module level.
    global _quotere, _needsshellquote
    if _quotere is None:
        _quotere = re.compile(r'(\\*)("|\\$)')
    if _needsshellquote is None:
        # ":" is also treated as "safe character", because it is used as a part
        # of path name on Windows.  "\" is also part of a path name, but isn't
        # safe because shlex.split() (kind of) treats it as an escape char and
        # drops it.  It will leave the next character, even if it is another
        # "\".
        _needsshellquote = re.compile(r'[^a-zA-Z0-9._:/-]').search
    if s and not _needsshellquote(s) and not _quotere.search(s):
        # "s" shouldn't have to be quoted
        return s
    return '"%s"' % _quotere.sub(r'\1\1\\\2', s)
def quotecommand(cmd):
    """Build a command string suitable for os.popen* calls."""
    if sys.version_info < (2, 7, 1):
        # Python versions since 2.7.1 do this extra quoting themselves
        return '"' + cmd + '"'
    return cmd
def popen(command, mode='r'):
    """os.popen with stderr routed to the null device.

    Work around "popen spawned process may not write to stdout under
    windows" -- http://bugs.python.org/issue1366
    """
    command += " 2> %s" % os.devnull
    return os.popen(quotecommand(command), mode)
def explainexit(code):
    """Return a (message, exitcode) pair describing a process exit."""
    return _("exited with status %d") % code, code
# if you change this stub into a real check, please try to implement the
# username and groupname functions above, too.
def isowner(st):
    """Ownership is not checked on this platform; always claim it."""
    return True
def findexe(command):
    '''Find executable for command searching like cmd.exe does.
    If command is a basename then PATH is searched for command.
    PATH isn't searched if command is an absolute or relative path.
    An extension from PATHEXT is found and added if not present.
    If command isn't found None is returned.'''
    pathext = encoding.environ.get('PATHEXT', '.COM;.EXE;.BAT;.CMD')
    pathexts = pathext.lower().split(pycompat.ospathsep)
    if os.path.splitext(command)[1].lower() in pathexts:
        # The command already carries a recognized extension; do not
        # try appending another one.
        pathexts = ['']

    def findexisting(pathcommand):
        'Will append extension (if needed) and return existing file'
        for ext in pathexts:
            executable = pathcommand + ext
            if os.path.exists(executable):
                return executable
        return None

    if pycompat.ossep in command:
        # Path-like command: look only at that location, never in PATH.
        return findexisting(command)

    for path in encoding.environ.get('PATH', '').split(pycompat.ospathsep):
        executable = findexisting(os.path.join(path, command))
        if executable is not None:
            return executable
    return findexisting(os.path.expanduser(os.path.expandvars(command)))
# Only regular files and symlinks are interesting to status logic.
_wantedkinds = set([stat.S_IFREG, stat.S_IFLNK])

def statfiles(files):
    '''Stat each file in files.  Yield each stat, or None if a file
    does not exist or has a type we don't care about.

    Cluster and cache stat per directory to minimize number of OS stat calls.'''
    dircache = {} # dirname -> filename -> status | None if file does not exist
    getkind = stat.S_IFMT
    for nf in files:
        nf = normcase(nf)
        dir, base = os.path.split(nf)
        if not dir:
            dir = '.'
        cache = dircache.get(dir, None)
        if cache is None:
            # First file seen in this directory: list it once and keep
            # the stats of every wanted entry.
            try:
                dmap = dict((normcase(n), s)
                            for n, k, s in osutil.listdir(dir, True)
                            if getkind(s.st_mode) in _wantedkinds)
            except OSError as err:
                # Python >= 2.5 returns ENOENT and adds winerror field
                # EINVAL is raised if dir is not a directory.
                if err.errno not in (errno.ENOENT, errno.EINVAL,
                                     errno.ENOTDIR):
                    raise
                dmap = {}
            cache = dircache.setdefault(dir, dmap)
        yield cache.get(base, None)
def username(uid=None):
    """Return the name of the user with the given uid.

    If uid is None, return the name of the current user."""
    # User names cannot be resolved on this platform.
    return None

def groupname(gid=None):
    """Return the name of the group with the given gid.

    If gid is None, return the name of the current group."""
    # Group names cannot be resolved on this platform.
    return None
def removedirs(name):
    """special version of os.removedirs that does not remove symlinked
    directories or junction points if they actually contain files"""
    if osutil.listdir(name):
        # Leaf directory is not empty: nothing to do.
        return
    os.rmdir(name)
    head, tail = os.path.split(name)
    if not tail:
        head, tail = os.path.split(head)
    # Walk upward, removing each parent that turns out to be empty.
    while head and tail:
        try:
            if osutil.listdir(head):
                return
            os.rmdir(head)
        except (ValueError, OSError):
            break
        head, tail = os.path.split(head)
def unlinkpath(f, ignoremissing=False):
    """unlink and remove the directory if it is empty"""
    try:
        unlink(f)
    except OSError as e:
        # A missing file is only tolerated when the caller asked for it.
        if not (ignoremissing and e.errno == errno.ENOENT):
            raise
    # try removing directories that might now be empty
    try:
        removedirs(os.path.dirname(f))
    except OSError:
        pass
def rename(src, dst):
    '''atomically rename file src to dst, replacing dst if it exists'''
    try:
        os.rename(src, dst)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise
        # Renaming onto an existing file failed with EEXIST: remove the
        # destination and retry once.
        unlink(dst)
        os.rename(src, dst)
def gethgcmd():
    """Return the command list that re-invokes the running interpreter
    with the current program name."""
    return [sys.executable] + sys.argv[:1]
def groupmembers(name):
    # Don't support groups on Windows for now
    raise KeyError

def isexec(f):
    """The executable bit does not exist on this filesystem."""
    return False
class cachestat(object):
    """Placeholder stat-cache entry; never considered cacheable here."""

    def __init__(self, path):
        # Nothing is recorded, since cacheable() is always False.
        pass

    def cacheable(self):
        return False
def lookupreg(key, valname=None, scope=None):
    ''' Look up a key/value name in the Windows registry.

    valname: value name. If unspecified, the default value for the key
    is used.
    scope: optionally specify scope for registry lookup, this can be
    a sequence of scopes to look up in order. Default (CURRENT_USER,
    LOCAL_MACHINE).
    '''
    if scope is None:
        scope = (winreg.HKEY_CURRENT_USER, winreg.HKEY_LOCAL_MACHINE)
    elif not isinstance(scope, (list, tuple)):
        scope = (scope,)
    for s in scope:
        try:
            val = winreg.QueryValueEx(winreg.OpenKey(s, key), valname)[0]
            # never let a Unicode string escape into the wild
            return encoding.tolocal(val.encode('UTF-8'))
        except EnvironmentError:
            # Key or value missing in this scope; try the next one.
            pass
# see posix.py for the matching definition of this flag
expandglobs = True
def statislink(st):
    '''check whether a stat result is a symlink'''
    # Symlinks are never reported on this platform.
    return False

def statisexec(st):
    '''check whether a stat result is an executable file'''
    # The executable bit is never reported on this platform.
    return False
def poll(fds):
    # see posix.py for description
    raise NotImplementedError()
def readpipe(pipe):
    """Read all available data from a pipe."""
    chunks = []
    while True:
        # Ask how many bytes are pending; stop once the pipe is drained.
        size = win32.peekpipe(pipe)
        if not size:
            break

        s = pipe.read(size)
        if not s:
            break
        chunks.append(s)

    return ''.join(chunks)
def bindunixsocket(sock, path):
    """Unix domain sockets are unavailable on this platform."""
    raise NotImplementedError('unsupported platform')
General Comments 0
You need to be logged in to leave comments.
Login now