Show More
@@ -1,1154 +1,1154 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2016 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2016 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | """ |
|
21 | """ | |
22 | Routes configuration |
|
22 | Routes configuration | |
23 |
|
23 | |||
24 | The more specific and detailed routes should be defined first so they |
|
24 | The more specific and detailed routes should be defined first so they | |
25 | may take precedent over the more generic routes. For more information |
|
25 | may take precedent over the more generic routes. For more information | |
26 | refer to the routes manual at http://routes.groovie.org/docs/ |
|
26 | refer to the routes manual at http://routes.groovie.org/docs/ | |
27 |
|
27 | |||
28 | IMPORTANT: if you change any routing here, make sure to take a look at lib/base.py |
|
28 | IMPORTANT: if you change any routing here, make sure to take a look at lib/base.py | |
29 | and _route_name variable which uses some of stored naming here to do redirects. |
|
29 | and _route_name variable which uses some of stored naming here to do redirects. | |
30 | """ |
|
30 | """ | |
31 | import os |
|
31 | import os | |
32 | import re |
|
32 | import re | |
33 | from routes import Mapper |
|
33 | from routes import Mapper | |
34 |
|
34 | |||
35 | from rhodecode.config import routing_links |
|
35 | from rhodecode.config import routing_links | |
36 |
|
36 | |||
# prefix for non repository related links needs to be prefixed with `/`
ADMIN_PREFIX = '/_admin'

# Default requirements for URL parts
URL_NAME_REQUIREMENTS = {
    # group name can have a slash in them, but they must not end with a slash
    'group_name': r'.*?[^/]',
    # repo names can have a slash in them, but they must not end with a slash
    'repo_name': r'.*?[^/]',
    # file path eats up everything at the end
    'f_path': r'.*',
    # reference types; the escaped ``\%\(...\)s`` alternatives let already
    # url-template-style values (e.g. '%(source_ref_type)s') pass through.
    # NOTE: raw strings are required here — '\%' and '\(' are invalid escape
    # sequences in a plain string literal (error from Python 3.12 onwards).
    'source_ref_type': r'(branch|book|tag|rev|\%\(source_ref_type\)s)',
    'target_ref_type': r'(branch|book|tag|rev|\%\(target_ref_type\)s)',
}
52 |
|
52 | |||
53 |
|
53 | |||
def add_route_requirements(route_path, requirements):
    r"""
    Inject regex requirements into a pyramid-style route path.

    Every ``{key}`` placeholder whose name appears in *requirements* is
    rewritten to the ``{key:regex}`` form that pyramid understands;
    placeholders without a requirement are left untouched.

    >>> add_route_requirements('/{action}/{id}', {'id': r'\d+'})
    '/{action}/{id:\d+}'

    """
    result = route_path
    for name, pattern in requirements.items():
        placeholder = '{%s}' % name
        constrained = '{%s:%s}' % (name, pattern)
        result = result.replace(placeholder, constrained)
    return result
65 |
|
65 | |||
66 |
|
66 | |||
class JSRoutesMapper(Mapper):
    """
    Wrapper for routes.Mapper to make pyroutes compatible url definitions
    """
    # only simple identifier-like route names may be exported to javascript
    _named_route_regex = re.compile(r'^[a-z-_0-9A-Z]+$')
    # matches a `{name}` style or a `:(name)` style url argument
    # NOTE: raw string is required — '\{' and '\(' are invalid escape
    # sequences in a plain string literal (error from Python 3.12 onwards)
    _argument_prog = re.compile(r'\{(.*?)\}|:\((.*)\)')

    def __init__(self, *args, **kw):
        super(JSRoutesMapper, self).__init__(*args, **kw)
        # names of routes registered with jsroute=True, consumed by jsroutes()
        self._jsroutes = []

    def connect(self, *args, **kw):
        """
        Wrapper for connect to take an extra argument jsroute=True

        :param jsroute: boolean, if True will add the route to the pyroutes list
        """
        if kw.pop('jsroute', False):
            if not self._named_route_regex.match(args[0]):
                raise Exception('only named routes can be added to pyroutes')
            self._jsroutes.append(args[0])

        super(JSRoutesMapper, self).connect(*args, **kw)

    def _extract_route_information(self, route):
        """
        Convert a route into tuple(name, path, args), eg:
            ('user_profile', '/profile/%(username)s', ['username'])
        """
        routepath = route.routepath

        def replace(matchobj):
            # group(1) is set for `{name}` arguments, group(2) for `:(name)`;
            # any `:regex` suffix inside `{name:regex}` is stripped
            if matchobj.group(1):
                return "%%(%s)s" % matchobj.group(1).split(':')[0]
            else:
                return "%%(%s)s" % matchobj.group(2)

        routepath = self._argument_prog.sub(replace, routepath)
        return (
            route.name,
            routepath,
            [(arg[0].split(':')[0] if arg[0] != '' else arg[1])
             for arg in self._argument_prog.findall(route.routepath)]
        )

    def jsroutes(self):
        """
        Return a list of pyroutes.js compatible routes
        """
        for route_name in self._jsroutes:
            yield self._extract_route_information(self._routenames[route_name])
116 |
|
116 | |||
117 |
|
117 | |||
118 | def make_map(config): |
|
118 | def make_map(config): | |
119 | """Create, configure and return the routes Mapper""" |
|
119 | """Create, configure and return the routes Mapper""" | |
120 | rmap = JSRoutesMapper(directory=config['pylons.paths']['controllers'], |
|
120 | rmap = JSRoutesMapper(directory=config['pylons.paths']['controllers'], | |
121 | always_scan=config['debug']) |
|
121 | always_scan=config['debug']) | |
122 | rmap.minimization = False |
|
122 | rmap.minimization = False | |
123 | rmap.explicit = False |
|
123 | rmap.explicit = False | |
124 |
|
124 | |||
125 | from rhodecode.lib.utils2 import str2bool |
|
125 | from rhodecode.lib.utils2 import str2bool | |
126 | from rhodecode.model import repo, repo_group |
|
126 | from rhodecode.model import repo, repo_group | |
127 |
|
127 | |||
128 | def check_repo(environ, match_dict): |
|
128 | def check_repo(environ, match_dict): | |
129 | """ |
|
129 | """ | |
130 | check for valid repository for proper 404 handling |
|
130 | check for valid repository for proper 404 handling | |
131 |
|
131 | |||
132 | :param environ: |
|
132 | :param environ: | |
133 | :param match_dict: |
|
133 | :param match_dict: | |
134 | """ |
|
134 | """ | |
135 | repo_name = match_dict.get('repo_name') |
|
135 | repo_name = match_dict.get('repo_name') | |
136 |
|
136 | |||
137 | if match_dict.get('f_path'): |
|
137 | if match_dict.get('f_path'): | |
138 | # fix for multiple initial slashes that causes errors |
|
138 | # fix for multiple initial slashes that causes errors | |
139 | match_dict['f_path'] = match_dict['f_path'].lstrip('/') |
|
139 | match_dict['f_path'] = match_dict['f_path'].lstrip('/') | |
140 | repo_model = repo.RepoModel() |
|
140 | repo_model = repo.RepoModel() | |
141 | by_name_match = repo_model.get_by_repo_name(repo_name) |
|
141 | by_name_match = repo_model.get_by_repo_name(repo_name) | |
142 | # if we match quickly from database, short circuit the operation, |
|
142 | # if we match quickly from database, short circuit the operation, | |
143 | # and validate repo based on the type. |
|
143 | # and validate repo based on the type. | |
144 | if by_name_match: |
|
144 | if by_name_match: | |
145 | return True |
|
145 | return True | |
146 |
|
146 | |||
147 | by_id_match = repo_model.get_repo_by_id(repo_name) |
|
147 | by_id_match = repo_model.get_repo_by_id(repo_name) | |
148 | if by_id_match: |
|
148 | if by_id_match: | |
149 | repo_name = by_id_match.repo_name |
|
149 | repo_name = by_id_match.repo_name | |
150 | match_dict['repo_name'] = repo_name |
|
150 | match_dict['repo_name'] = repo_name | |
151 | return True |
|
151 | return True | |
152 |
|
152 | |||
153 | return False |
|
153 | return False | |
154 |
|
154 | |||
155 | def check_group(environ, match_dict): |
|
155 | def check_group(environ, match_dict): | |
156 | """ |
|
156 | """ | |
157 | check for valid repository group path for proper 404 handling |
|
157 | check for valid repository group path for proper 404 handling | |
158 |
|
158 | |||
159 | :param environ: |
|
159 | :param environ: | |
160 | :param match_dict: |
|
160 | :param match_dict: | |
161 | """ |
|
161 | """ | |
162 | repo_group_name = match_dict.get('group_name') |
|
162 | repo_group_name = match_dict.get('group_name') | |
163 | repo_group_model = repo_group.RepoGroupModel() |
|
163 | repo_group_model = repo_group.RepoGroupModel() | |
164 | by_name_match = repo_group_model.get_by_group_name(repo_group_name) |
|
164 | by_name_match = repo_group_model.get_by_group_name(repo_group_name) | |
165 | if by_name_match: |
|
165 | if by_name_match: | |
166 | return True |
|
166 | return True | |
167 |
|
167 | |||
168 | return False |
|
168 | return False | |
169 |
|
169 | |||
170 | def check_user_group(environ, match_dict): |
|
170 | def check_user_group(environ, match_dict): | |
171 | """ |
|
171 | """ | |
172 | check for valid user group for proper 404 handling |
|
172 | check for valid user group for proper 404 handling | |
173 |
|
173 | |||
174 | :param environ: |
|
174 | :param environ: | |
175 | :param match_dict: |
|
175 | :param match_dict: | |
176 | """ |
|
176 | """ | |
177 | return True |
|
177 | return True | |
178 |
|
178 | |||
179 | def check_int(environ, match_dict): |
|
179 | def check_int(environ, match_dict): | |
180 | return match_dict.get('id').isdigit() |
|
180 | return match_dict.get('id').isdigit() | |
181 |
|
181 | |||
182 |
|
182 | |||
183 | #========================================================================== |
|
183 | #========================================================================== | |
184 | # CUSTOM ROUTES HERE |
|
184 | # CUSTOM ROUTES HERE | |
185 | #========================================================================== |
|
185 | #========================================================================== | |
186 |
|
186 | |||
187 | # MAIN PAGE |
|
187 | # MAIN PAGE | |
188 | rmap.connect('home', '/', controller='home', action='index', jsroute=True) |
|
188 | rmap.connect('home', '/', controller='home', action='index', jsroute=True) | |
189 | rmap.connect('goto_switcher_data', '/_goto_data', controller='home', |
|
189 | rmap.connect('goto_switcher_data', '/_goto_data', controller='home', | |
190 | action='goto_switcher_data') |
|
190 | action='goto_switcher_data') | |
191 | rmap.connect('repo_list_data', '/_repos', controller='home', |
|
191 | rmap.connect('repo_list_data', '/_repos', controller='home', | |
192 | action='repo_list_data') |
|
192 | action='repo_list_data') | |
193 |
|
193 | |||
194 | rmap.connect('user_autocomplete_data', '/_users', controller='home', |
|
194 | rmap.connect('user_autocomplete_data', '/_users', controller='home', | |
195 | action='user_autocomplete_data', jsroute=True) |
|
195 | action='user_autocomplete_data', jsroute=True) | |
196 | rmap.connect('user_group_autocomplete_data', '/_user_groups', controller='home', |
|
196 | rmap.connect('user_group_autocomplete_data', '/_user_groups', controller='home', | |
197 | action='user_group_autocomplete_data') |
|
197 | action='user_group_autocomplete_data') | |
198 |
|
198 | |||
199 | rmap.connect( |
|
199 | rmap.connect( | |
200 | 'user_profile', '/_profiles/{username}', controller='users', |
|
200 | 'user_profile', '/_profiles/{username}', controller='users', | |
201 | action='user_profile') |
|
201 | action='user_profile') | |
202 |
|
202 | |||
203 | # TODO: johbo: Static links, to be replaced by our redirection mechanism |
|
203 | # TODO: johbo: Static links, to be replaced by our redirection mechanism | |
204 | rmap.connect('rst_help', |
|
204 | rmap.connect('rst_help', | |
205 | 'http://docutils.sourceforge.net/docs/user/rst/quickref.html', |
|
205 | 'http://docutils.sourceforge.net/docs/user/rst/quickref.html', | |
206 | _static=True) |
|
206 | _static=True) | |
207 | rmap.connect('markdown_help', |
|
207 | rmap.connect('markdown_help', | |
208 | 'http://daringfireball.net/projects/markdown/syntax', |
|
208 | 'http://daringfireball.net/projects/markdown/syntax', | |
209 | _static=True) |
|
209 | _static=True) | |
210 | rmap.connect('rhodecode_official', 'https://rhodecode.com', _static=True) |
|
210 | rmap.connect('rhodecode_official', 'https://rhodecode.com', _static=True) | |
211 | rmap.connect('rhodecode_support', 'https://rhodecode.com/help/', _static=True) |
|
211 | rmap.connect('rhodecode_support', 'https://rhodecode.com/help/', _static=True) | |
212 | rmap.connect('rhodecode_translations', 'https://rhodecode.com/translate/enterprise', _static=True) |
|
212 | rmap.connect('rhodecode_translations', 'https://rhodecode.com/translate/enterprise', _static=True) | |
213 | # TODO: anderson - making this a static link since redirect won't play |
|
213 | # TODO: anderson - making this a static link since redirect won't play | |
214 | # nice with POST requests |
|
214 | # nice with POST requests | |
215 | rmap.connect('enterprise_license_convert_from_old', |
|
215 | rmap.connect('enterprise_license_convert_from_old', | |
216 | 'https://rhodecode.com/u/license-upgrade', |
|
216 | 'https://rhodecode.com/u/license-upgrade', | |
217 | _static=True) |
|
217 | _static=True) | |
218 |
|
218 | |||
219 | routing_links.connect_redirection_links(rmap) |
|
219 | routing_links.connect_redirection_links(rmap) | |
220 |
|
220 | |||
221 | rmap.connect('ping', '%s/ping' % (ADMIN_PREFIX,), controller='home', action='ping') |
|
221 | rmap.connect('ping', '%s/ping' % (ADMIN_PREFIX,), controller='home', action='ping') | |
222 | rmap.connect('error_test', '%s/error_test' % (ADMIN_PREFIX,), controller='home', action='error_test') |
|
222 | rmap.connect('error_test', '%s/error_test' % (ADMIN_PREFIX,), controller='home', action='error_test') | |
223 |
|
223 | |||
224 | # ADMIN REPOSITORY ROUTES |
|
224 | # ADMIN REPOSITORY ROUTES | |
225 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
225 | with rmap.submapper(path_prefix=ADMIN_PREFIX, | |
226 | controller='admin/repos') as m: |
|
226 | controller='admin/repos') as m: | |
227 | m.connect('repos', '/repos', |
|
227 | m.connect('repos', '/repos', | |
228 | action='create', conditions={'method': ['POST']}) |
|
228 | action='create', conditions={'method': ['POST']}) | |
229 | m.connect('repos', '/repos', |
|
229 | m.connect('repos', '/repos', | |
230 | action='index', conditions={'method': ['GET']}) |
|
230 | action='index', conditions={'method': ['GET']}) | |
231 | m.connect('new_repo', '/create_repository', jsroute=True, |
|
231 | m.connect('new_repo', '/create_repository', jsroute=True, | |
232 | action='create_repository', conditions={'method': ['GET']}) |
|
232 | action='create_repository', conditions={'method': ['GET']}) | |
233 | m.connect('/repos/{repo_name}', |
|
233 | m.connect('/repos/{repo_name}', | |
234 | action='update', conditions={'method': ['PUT'], |
|
234 | action='update', conditions={'method': ['PUT'], | |
235 | 'function': check_repo}, |
|
235 | 'function': check_repo}, | |
236 | requirements=URL_NAME_REQUIREMENTS) |
|
236 | requirements=URL_NAME_REQUIREMENTS) | |
237 | m.connect('delete_repo', '/repos/{repo_name}', |
|
237 | m.connect('delete_repo', '/repos/{repo_name}', | |
238 | action='delete', conditions={'method': ['DELETE']}, |
|
238 | action='delete', conditions={'method': ['DELETE']}, | |
239 | requirements=URL_NAME_REQUIREMENTS) |
|
239 | requirements=URL_NAME_REQUIREMENTS) | |
240 | m.connect('repo', '/repos/{repo_name}', |
|
240 | m.connect('repo', '/repos/{repo_name}', | |
241 | action='show', conditions={'method': ['GET'], |
|
241 | action='show', conditions={'method': ['GET'], | |
242 | 'function': check_repo}, |
|
242 | 'function': check_repo}, | |
243 | requirements=URL_NAME_REQUIREMENTS) |
|
243 | requirements=URL_NAME_REQUIREMENTS) | |
244 |
|
244 | |||
245 | # ADMIN REPOSITORY GROUPS ROUTES |
|
245 | # ADMIN REPOSITORY GROUPS ROUTES | |
246 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
246 | with rmap.submapper(path_prefix=ADMIN_PREFIX, | |
247 | controller='admin/repo_groups') as m: |
|
247 | controller='admin/repo_groups') as m: | |
248 | m.connect('repo_groups', '/repo_groups', |
|
248 | m.connect('repo_groups', '/repo_groups', | |
249 | action='create', conditions={'method': ['POST']}) |
|
249 | action='create', conditions={'method': ['POST']}) | |
250 | m.connect('repo_groups', '/repo_groups', |
|
250 | m.connect('repo_groups', '/repo_groups', | |
251 | action='index', conditions={'method': ['GET']}) |
|
251 | action='index', conditions={'method': ['GET']}) | |
252 | m.connect('new_repo_group', '/repo_groups/new', |
|
252 | m.connect('new_repo_group', '/repo_groups/new', | |
253 | action='new', conditions={'method': ['GET']}) |
|
253 | action='new', conditions={'method': ['GET']}) | |
254 | m.connect('update_repo_group', '/repo_groups/{group_name}', |
|
254 | m.connect('update_repo_group', '/repo_groups/{group_name}', | |
255 | action='update', conditions={'method': ['PUT'], |
|
255 | action='update', conditions={'method': ['PUT'], | |
256 | 'function': check_group}, |
|
256 | 'function': check_group}, | |
257 | requirements=URL_NAME_REQUIREMENTS) |
|
257 | requirements=URL_NAME_REQUIREMENTS) | |
258 |
|
258 | |||
259 | # EXTRAS REPO GROUP ROUTES |
|
259 | # EXTRAS REPO GROUP ROUTES | |
260 | m.connect('edit_repo_group', '/repo_groups/{group_name}/edit', |
|
260 | m.connect('edit_repo_group', '/repo_groups/{group_name}/edit', | |
261 | action='edit', |
|
261 | action='edit', | |
262 | conditions={'method': ['GET'], 'function': check_group}, |
|
262 | conditions={'method': ['GET'], 'function': check_group}, | |
263 | requirements=URL_NAME_REQUIREMENTS) |
|
263 | requirements=URL_NAME_REQUIREMENTS) | |
264 | m.connect('edit_repo_group', '/repo_groups/{group_name}/edit', |
|
264 | m.connect('edit_repo_group', '/repo_groups/{group_name}/edit', | |
265 | action='edit', |
|
265 | action='edit', | |
266 | conditions={'method': ['PUT'], 'function': check_group}, |
|
266 | conditions={'method': ['PUT'], 'function': check_group}, | |
267 | requirements=URL_NAME_REQUIREMENTS) |
|
267 | requirements=URL_NAME_REQUIREMENTS) | |
268 |
|
268 | |||
269 | m.connect('edit_repo_group_advanced', '/repo_groups/{group_name}/edit/advanced', |
|
269 | m.connect('edit_repo_group_advanced', '/repo_groups/{group_name}/edit/advanced', | |
270 | action='edit_repo_group_advanced', |
|
270 | action='edit_repo_group_advanced', | |
271 | conditions={'method': ['GET'], 'function': check_group}, |
|
271 | conditions={'method': ['GET'], 'function': check_group}, | |
272 | requirements=URL_NAME_REQUIREMENTS) |
|
272 | requirements=URL_NAME_REQUIREMENTS) | |
273 | m.connect('edit_repo_group_advanced', '/repo_groups/{group_name}/edit/advanced', |
|
273 | m.connect('edit_repo_group_advanced', '/repo_groups/{group_name}/edit/advanced', | |
274 | action='edit_repo_group_advanced', |
|
274 | action='edit_repo_group_advanced', | |
275 | conditions={'method': ['PUT'], 'function': check_group}, |
|
275 | conditions={'method': ['PUT'], 'function': check_group}, | |
276 | requirements=URL_NAME_REQUIREMENTS) |
|
276 | requirements=URL_NAME_REQUIREMENTS) | |
277 |
|
277 | |||
278 | m.connect('edit_repo_group_perms', '/repo_groups/{group_name}/edit/permissions', |
|
278 | m.connect('edit_repo_group_perms', '/repo_groups/{group_name}/edit/permissions', | |
279 | action='edit_repo_group_perms', |
|
279 | action='edit_repo_group_perms', | |
280 | conditions={'method': ['GET'], 'function': check_group}, |
|
280 | conditions={'method': ['GET'], 'function': check_group}, | |
281 | requirements=URL_NAME_REQUIREMENTS) |
|
281 | requirements=URL_NAME_REQUIREMENTS) | |
282 | m.connect('edit_repo_group_perms', '/repo_groups/{group_name}/edit/permissions', |
|
282 | m.connect('edit_repo_group_perms', '/repo_groups/{group_name}/edit/permissions', | |
283 | action='update_perms', |
|
283 | action='update_perms', | |
284 | conditions={'method': ['PUT'], 'function': check_group}, |
|
284 | conditions={'method': ['PUT'], 'function': check_group}, | |
285 | requirements=URL_NAME_REQUIREMENTS) |
|
285 | requirements=URL_NAME_REQUIREMENTS) | |
286 |
|
286 | |||
287 | m.connect('delete_repo_group', '/repo_groups/{group_name}', |
|
287 | m.connect('delete_repo_group', '/repo_groups/{group_name}', | |
288 | action='delete', conditions={'method': ['DELETE'], |
|
288 | action='delete', conditions={'method': ['DELETE'], | |
289 | 'function': check_group}, |
|
289 | 'function': check_group}, | |
290 | requirements=URL_NAME_REQUIREMENTS) |
|
290 | requirements=URL_NAME_REQUIREMENTS) | |
291 |
|
291 | |||
292 | # ADMIN USER ROUTES |
|
292 | # ADMIN USER ROUTES | |
293 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
293 | with rmap.submapper(path_prefix=ADMIN_PREFIX, | |
294 | controller='admin/users') as m: |
|
294 | controller='admin/users') as m: | |
295 | m.connect('users', '/users', |
|
295 | m.connect('users', '/users', | |
296 | action='create', conditions={'method': ['POST']}) |
|
296 | action='create', conditions={'method': ['POST']}) | |
297 | m.connect('users', '/users', |
|
297 | m.connect('users', '/users', | |
298 | action='index', conditions={'method': ['GET']}) |
|
298 | action='index', conditions={'method': ['GET']}) | |
299 | m.connect('new_user', '/users/new', |
|
299 | m.connect('new_user', '/users/new', | |
300 | action='new', conditions={'method': ['GET']}) |
|
300 | action='new', conditions={'method': ['GET']}) | |
301 | m.connect('update_user', '/users/{user_id}', |
|
301 | m.connect('update_user', '/users/{user_id}', | |
302 | action='update', conditions={'method': ['PUT']}) |
|
302 | action='update', conditions={'method': ['PUT']}) | |
303 | m.connect('delete_user', '/users/{user_id}', |
|
303 | m.connect('delete_user', '/users/{user_id}', | |
304 | action='delete', conditions={'method': ['DELETE']}) |
|
304 | action='delete', conditions={'method': ['DELETE']}) | |
305 | m.connect('edit_user', '/users/{user_id}/edit', |
|
305 | m.connect('edit_user', '/users/{user_id}/edit', | |
306 | action='edit', conditions={'method': ['GET']}) |
|
306 | action='edit', conditions={'method': ['GET']}) | |
307 | m.connect('user', '/users/{user_id}', |
|
307 | m.connect('user', '/users/{user_id}', | |
308 | action='show', conditions={'method': ['GET']}) |
|
308 | action='show', conditions={'method': ['GET']}) | |
309 | m.connect('force_password_reset_user', '/users/{user_id}/password_reset', |
|
309 | m.connect('force_password_reset_user', '/users/{user_id}/password_reset', | |
310 | action='reset_password', conditions={'method': ['POST']}) |
|
310 | action='reset_password', conditions={'method': ['POST']}) | |
311 | m.connect('create_personal_repo_group', '/users/{user_id}/create_repo_group', |
|
311 | m.connect('create_personal_repo_group', '/users/{user_id}/create_repo_group', | |
312 | action='create_personal_repo_group', conditions={'method': ['POST']}) |
|
312 | action='create_personal_repo_group', conditions={'method': ['POST']}) | |
313 |
|
313 | |||
314 | # EXTRAS USER ROUTES |
|
314 | # EXTRAS USER ROUTES | |
315 | m.connect('edit_user_advanced', '/users/{user_id}/edit/advanced', |
|
315 | m.connect('edit_user_advanced', '/users/{user_id}/edit/advanced', | |
316 | action='edit_advanced', conditions={'method': ['GET']}) |
|
316 | action='edit_advanced', conditions={'method': ['GET']}) | |
317 | m.connect('edit_user_advanced', '/users/{user_id}/edit/advanced', |
|
317 | m.connect('edit_user_advanced', '/users/{user_id}/edit/advanced', | |
318 | action='update_advanced', conditions={'method': ['PUT']}) |
|
318 | action='update_advanced', conditions={'method': ['PUT']}) | |
319 |
|
319 | |||
320 | m.connect('edit_user_auth_tokens', '/users/{user_id}/edit/auth_tokens', |
|
320 | m.connect('edit_user_auth_tokens', '/users/{user_id}/edit/auth_tokens', | |
321 | action='edit_auth_tokens', conditions={'method': ['GET']}) |
|
321 | action='edit_auth_tokens', conditions={'method': ['GET']}) | |
322 | m.connect('edit_user_auth_tokens', '/users/{user_id}/edit/auth_tokens', |
|
322 | m.connect('edit_user_auth_tokens', '/users/{user_id}/edit/auth_tokens', | |
323 | action='add_auth_token', conditions={'method': ['PUT']}) |
|
323 | action='add_auth_token', conditions={'method': ['PUT']}) | |
324 | m.connect('edit_user_auth_tokens', '/users/{user_id}/edit/auth_tokens', |
|
324 | m.connect('edit_user_auth_tokens', '/users/{user_id}/edit/auth_tokens', | |
325 | action='delete_auth_token', conditions={'method': ['DELETE']}) |
|
325 | action='delete_auth_token', conditions={'method': ['DELETE']}) | |
326 |
|
326 | |||
327 | m.connect('edit_user_global_perms', '/users/{user_id}/edit/global_permissions', |
|
327 | m.connect('edit_user_global_perms', '/users/{user_id}/edit/global_permissions', | |
328 | action='edit_global_perms', conditions={'method': ['GET']}) |
|
328 | action='edit_global_perms', conditions={'method': ['GET']}) | |
329 | m.connect('edit_user_global_perms', '/users/{user_id}/edit/global_permissions', |
|
329 | m.connect('edit_user_global_perms', '/users/{user_id}/edit/global_permissions', | |
330 | action='update_global_perms', conditions={'method': ['PUT']}) |
|
330 | action='update_global_perms', conditions={'method': ['PUT']}) | |
331 |
|
331 | |||
332 | m.connect('edit_user_perms_summary', '/users/{user_id}/edit/permissions_summary', |
|
332 | m.connect('edit_user_perms_summary', '/users/{user_id}/edit/permissions_summary', | |
333 | action='edit_perms_summary', conditions={'method': ['GET']}) |
|
333 | action='edit_perms_summary', conditions={'method': ['GET']}) | |
334 |
|
334 | |||
335 | m.connect('edit_user_emails', '/users/{user_id}/edit/emails', |
|
335 | m.connect('edit_user_emails', '/users/{user_id}/edit/emails', | |
336 | action='edit_emails', conditions={'method': ['GET']}) |
|
336 | action='edit_emails', conditions={'method': ['GET']}) | |
337 | m.connect('edit_user_emails', '/users/{user_id}/edit/emails', |
|
337 | m.connect('edit_user_emails', '/users/{user_id}/edit/emails', | |
338 | action='add_email', conditions={'method': ['PUT']}) |
|
338 | action='add_email', conditions={'method': ['PUT']}) | |
339 | m.connect('edit_user_emails', '/users/{user_id}/edit/emails', |
|
339 | m.connect('edit_user_emails', '/users/{user_id}/edit/emails', | |
340 | action='delete_email', conditions={'method': ['DELETE']}) |
|
340 | action='delete_email', conditions={'method': ['DELETE']}) | |
341 |
|
341 | |||
342 | m.connect('edit_user_ips', '/users/{user_id}/edit/ips', |
|
342 | m.connect('edit_user_ips', '/users/{user_id}/edit/ips', | |
343 | action='edit_ips', conditions={'method': ['GET']}) |
|
343 | action='edit_ips', conditions={'method': ['GET']}) | |
344 | m.connect('edit_user_ips', '/users/{user_id}/edit/ips', |
|
344 | m.connect('edit_user_ips', '/users/{user_id}/edit/ips', | |
345 | action='add_ip', conditions={'method': ['PUT']}) |
|
345 | action='add_ip', conditions={'method': ['PUT']}) | |
346 | m.connect('edit_user_ips', '/users/{user_id}/edit/ips', |
|
346 | m.connect('edit_user_ips', '/users/{user_id}/edit/ips', | |
347 | action='delete_ip', conditions={'method': ['DELETE']}) |
|
347 | action='delete_ip', conditions={'method': ['DELETE']}) | |
348 |
|
348 | |||
349 | # ADMIN USER GROUPS REST ROUTES |
|
349 | # ADMIN USER GROUPS REST ROUTES | |
350 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
350 | with rmap.submapper(path_prefix=ADMIN_PREFIX, | |
351 | controller='admin/user_groups') as m: |
|
351 | controller='admin/user_groups') as m: | |
352 | m.connect('users_groups', '/user_groups', |
|
352 | m.connect('users_groups', '/user_groups', | |
353 | action='create', conditions={'method': ['POST']}) |
|
353 | action='create', conditions={'method': ['POST']}) | |
354 | m.connect('users_groups', '/user_groups', |
|
354 | m.connect('users_groups', '/user_groups', | |
355 | action='index', conditions={'method': ['GET']}) |
|
355 | action='index', conditions={'method': ['GET']}) | |
356 | m.connect('new_users_group', '/user_groups/new', |
|
356 | m.connect('new_users_group', '/user_groups/new', | |
357 | action='new', conditions={'method': ['GET']}) |
|
357 | action='new', conditions={'method': ['GET']}) | |
358 | m.connect('update_users_group', '/user_groups/{user_group_id}', |
|
358 | m.connect('update_users_group', '/user_groups/{user_group_id}', | |
359 | action='update', conditions={'method': ['PUT']}) |
|
359 | action='update', conditions={'method': ['PUT']}) | |
360 | m.connect('delete_users_group', '/user_groups/{user_group_id}', |
|
360 | m.connect('delete_users_group', '/user_groups/{user_group_id}', | |
361 | action='delete', conditions={'method': ['DELETE']}) |
|
361 | action='delete', conditions={'method': ['DELETE']}) | |
362 | m.connect('edit_users_group', '/user_groups/{user_group_id}/edit', |
|
362 | m.connect('edit_users_group', '/user_groups/{user_group_id}/edit', | |
363 | action='edit', conditions={'method': ['GET']}, |
|
363 | action='edit', conditions={'method': ['GET']}, | |
364 | function=check_user_group) |
|
364 | function=check_user_group) | |
365 |
|
365 | |||
366 | # EXTRAS USER GROUP ROUTES |
|
366 | # EXTRAS USER GROUP ROUTES | |
367 | m.connect('edit_user_group_global_perms', |
|
367 | m.connect('edit_user_group_global_perms', | |
368 | '/user_groups/{user_group_id}/edit/global_permissions', |
|
368 | '/user_groups/{user_group_id}/edit/global_permissions', | |
369 | action='edit_global_perms', conditions={'method': ['GET']}) |
|
369 | action='edit_global_perms', conditions={'method': ['GET']}) | |
370 | m.connect('edit_user_group_global_perms', |
|
370 | m.connect('edit_user_group_global_perms', | |
371 | '/user_groups/{user_group_id}/edit/global_permissions', |
|
371 | '/user_groups/{user_group_id}/edit/global_permissions', | |
372 | action='update_global_perms', conditions={'method': ['PUT']}) |
|
372 | action='update_global_perms', conditions={'method': ['PUT']}) | |
373 | m.connect('edit_user_group_perms_summary', |
|
373 | m.connect('edit_user_group_perms_summary', | |
374 | '/user_groups/{user_group_id}/edit/permissions_summary', |
|
374 | '/user_groups/{user_group_id}/edit/permissions_summary', | |
375 | action='edit_perms_summary', conditions={'method': ['GET']}) |
|
375 | action='edit_perms_summary', conditions={'method': ['GET']}) | |
376 |
|
376 | |||
377 | m.connect('edit_user_group_perms', |
|
377 | m.connect('edit_user_group_perms', | |
378 | '/user_groups/{user_group_id}/edit/permissions', |
|
378 | '/user_groups/{user_group_id}/edit/permissions', | |
379 | action='edit_perms', conditions={'method': ['GET']}) |
|
379 | action='edit_perms', conditions={'method': ['GET']}) | |
380 | m.connect('edit_user_group_perms', |
|
380 | m.connect('edit_user_group_perms', | |
381 | '/user_groups/{user_group_id}/edit/permissions', |
|
381 | '/user_groups/{user_group_id}/edit/permissions', | |
382 | action='update_perms', conditions={'method': ['PUT']}) |
|
382 | action='update_perms', conditions={'method': ['PUT']}) | |
383 |
|
383 | |||
384 | m.connect('edit_user_group_advanced', |
|
384 | m.connect('edit_user_group_advanced', | |
385 | '/user_groups/{user_group_id}/edit/advanced', |
|
385 | '/user_groups/{user_group_id}/edit/advanced', | |
386 | action='edit_advanced', conditions={'method': ['GET']}) |
|
386 | action='edit_advanced', conditions={'method': ['GET']}) | |
387 |
|
387 | |||
388 | m.connect('edit_user_group_members', |
|
388 | m.connect('edit_user_group_members', | |
389 | '/user_groups/{user_group_id}/edit/members', jsroute=True, |
|
389 | '/user_groups/{user_group_id}/edit/members', jsroute=True, | |
390 | action='edit_members', conditions={'method': ['GET']}) |
|
390 | action='edit_members', conditions={'method': ['GET']}) | |
391 |
|
391 | |||
392 | # ADMIN PERMISSIONS ROUTES |
|
392 | # ADMIN PERMISSIONS ROUTES | |
393 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
393 | with rmap.submapper(path_prefix=ADMIN_PREFIX, | |
394 | controller='admin/permissions') as m: |
|
394 | controller='admin/permissions') as m: | |
395 | m.connect('admin_permissions_application', '/permissions/application', |
|
395 | m.connect('admin_permissions_application', '/permissions/application', | |
396 | action='permission_application_update', conditions={'method': ['POST']}) |
|
396 | action='permission_application_update', conditions={'method': ['POST']}) | |
397 | m.connect('admin_permissions_application', '/permissions/application', |
|
397 | m.connect('admin_permissions_application', '/permissions/application', | |
398 | action='permission_application', conditions={'method': ['GET']}) |
|
398 | action='permission_application', conditions={'method': ['GET']}) | |
399 |
|
399 | |||
400 | m.connect('admin_permissions_global', '/permissions/global', |
|
400 | m.connect('admin_permissions_global', '/permissions/global', | |
401 | action='permission_global_update', conditions={'method': ['POST']}) |
|
401 | action='permission_global_update', conditions={'method': ['POST']}) | |
402 | m.connect('admin_permissions_global', '/permissions/global', |
|
402 | m.connect('admin_permissions_global', '/permissions/global', | |
403 | action='permission_global', conditions={'method': ['GET']}) |
|
403 | action='permission_global', conditions={'method': ['GET']}) | |
404 |
|
404 | |||
405 | m.connect('admin_permissions_object', '/permissions/object', |
|
405 | m.connect('admin_permissions_object', '/permissions/object', | |
406 | action='permission_objects_update', conditions={'method': ['POST']}) |
|
406 | action='permission_objects_update', conditions={'method': ['POST']}) | |
407 | m.connect('admin_permissions_object', '/permissions/object', |
|
407 | m.connect('admin_permissions_object', '/permissions/object', | |
408 | action='permission_objects', conditions={'method': ['GET']}) |
|
408 | action='permission_objects', conditions={'method': ['GET']}) | |
409 |
|
409 | |||
410 | m.connect('admin_permissions_ips', '/permissions/ips', |
|
410 | m.connect('admin_permissions_ips', '/permissions/ips', | |
411 | action='permission_ips', conditions={'method': ['POST']}) |
|
411 | action='permission_ips', conditions={'method': ['POST']}) | |
412 | m.connect('admin_permissions_ips', '/permissions/ips', |
|
412 | m.connect('admin_permissions_ips', '/permissions/ips', | |
413 | action='permission_ips', conditions={'method': ['GET']}) |
|
413 | action='permission_ips', conditions={'method': ['GET']}) | |
414 |
|
414 | |||
415 | m.connect('admin_permissions_overview', '/permissions/overview', |
|
415 | m.connect('admin_permissions_overview', '/permissions/overview', | |
416 | action='permission_perms', conditions={'method': ['GET']}) |
|
416 | action='permission_perms', conditions={'method': ['GET']}) | |
417 |
|
417 | |||
418 | # ADMIN DEFAULTS REST ROUTES |
|
418 | # ADMIN DEFAULTS REST ROUTES | |
419 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
419 | with rmap.submapper(path_prefix=ADMIN_PREFIX, | |
420 | controller='admin/defaults') as m: |
|
420 | controller='admin/defaults') as m: | |
421 | m.connect('admin_defaults_repositories', '/defaults/repositories', |
|
421 | m.connect('admin_defaults_repositories', '/defaults/repositories', | |
422 | action='update_repository_defaults', conditions={'method': ['POST']}) |
|
422 | action='update_repository_defaults', conditions={'method': ['POST']}) | |
423 | m.connect('admin_defaults_repositories', '/defaults/repositories', |
|
423 | m.connect('admin_defaults_repositories', '/defaults/repositories', | |
424 | action='index', conditions={'method': ['GET']}) |
|
424 | action='index', conditions={'method': ['GET']}) | |
425 |
|
425 | |||
426 | # ADMIN DEBUG STYLE ROUTES |
|
426 | # ADMIN DEBUG STYLE ROUTES | |
427 | if str2bool(config.get('debug_style')): |
|
427 | if str2bool(config.get('debug_style')): | |
428 | with rmap.submapper(path_prefix=ADMIN_PREFIX + '/debug_style', |
|
428 | with rmap.submapper(path_prefix=ADMIN_PREFIX + '/debug_style', | |
429 | controller='debug_style') as m: |
|
429 | controller='debug_style') as m: | |
430 | m.connect('debug_style_home', '', |
|
430 | m.connect('debug_style_home', '', | |
431 | action='index', conditions={'method': ['GET']}) |
|
431 | action='index', conditions={'method': ['GET']}) | |
432 | m.connect('debug_style_template', '/t/{t_path}', |
|
432 | m.connect('debug_style_template', '/t/{t_path}', | |
433 | action='template', conditions={'method': ['GET']}) |
|
433 | action='template', conditions={'method': ['GET']}) | |
434 |
|
434 | |||
435 | # ADMIN SETTINGS ROUTES |
|
435 | # ADMIN SETTINGS ROUTES | |
436 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
436 | with rmap.submapper(path_prefix=ADMIN_PREFIX, | |
437 | controller='admin/settings') as m: |
|
437 | controller='admin/settings') as m: | |
438 |
|
438 | |||
439 | # default |
|
439 | # default | |
440 | m.connect('admin_settings', '/settings', |
|
440 | m.connect('admin_settings', '/settings', | |
441 | action='settings_global_update', |
|
441 | action='settings_global_update', | |
442 | conditions={'method': ['POST']}) |
|
442 | conditions={'method': ['POST']}) | |
443 | m.connect('admin_settings', '/settings', |
|
443 | m.connect('admin_settings', '/settings', | |
444 | action='settings_global', conditions={'method': ['GET']}) |
|
444 | action='settings_global', conditions={'method': ['GET']}) | |
445 |
|
445 | |||
446 | m.connect('admin_settings_vcs', '/settings/vcs', |
|
446 | m.connect('admin_settings_vcs', '/settings/vcs', | |
447 | action='settings_vcs_update', |
|
447 | action='settings_vcs_update', | |
448 | conditions={'method': ['POST']}) |
|
448 | conditions={'method': ['POST']}) | |
449 | m.connect('admin_settings_vcs', '/settings/vcs', |
|
449 | m.connect('admin_settings_vcs', '/settings/vcs', | |
450 | action='settings_vcs', |
|
450 | action='settings_vcs', | |
451 | conditions={'method': ['GET']}) |
|
451 | conditions={'method': ['GET']}) | |
452 | m.connect('admin_settings_vcs', '/settings/vcs', |
|
452 | m.connect('admin_settings_vcs', '/settings/vcs', | |
453 | action='delete_svn_pattern', |
|
453 | action='delete_svn_pattern', | |
454 | conditions={'method': ['DELETE']}) |
|
454 | conditions={'method': ['DELETE']}) | |
455 |
|
455 | |||
456 | m.connect('admin_settings_mapping', '/settings/mapping', |
|
456 | m.connect('admin_settings_mapping', '/settings/mapping', | |
457 | action='settings_mapping_update', |
|
457 | action='settings_mapping_update', | |
458 | conditions={'method': ['POST']}) |
|
458 | conditions={'method': ['POST']}) | |
459 | m.connect('admin_settings_mapping', '/settings/mapping', |
|
459 | m.connect('admin_settings_mapping', '/settings/mapping', | |
460 | action='settings_mapping', conditions={'method': ['GET']}) |
|
460 | action='settings_mapping', conditions={'method': ['GET']}) | |
461 |
|
461 | |||
462 | m.connect('admin_settings_global', '/settings/global', |
|
462 | m.connect('admin_settings_global', '/settings/global', | |
463 | action='settings_global_update', |
|
463 | action='settings_global_update', | |
464 | conditions={'method': ['POST']}) |
|
464 | conditions={'method': ['POST']}) | |
465 | m.connect('admin_settings_global', '/settings/global', |
|
465 | m.connect('admin_settings_global', '/settings/global', | |
466 | action='settings_global', conditions={'method': ['GET']}) |
|
466 | action='settings_global', conditions={'method': ['GET']}) | |
467 |
|
467 | |||
468 | m.connect('admin_settings_visual', '/settings/visual', |
|
468 | m.connect('admin_settings_visual', '/settings/visual', | |
469 | action='settings_visual_update', |
|
469 | action='settings_visual_update', | |
470 | conditions={'method': ['POST']}) |
|
470 | conditions={'method': ['POST']}) | |
471 | m.connect('admin_settings_visual', '/settings/visual', |
|
471 | m.connect('admin_settings_visual', '/settings/visual', | |
472 | action='settings_visual', conditions={'method': ['GET']}) |
|
472 | action='settings_visual', conditions={'method': ['GET']}) | |
473 |
|
473 | |||
474 | m.connect('admin_settings_issuetracker', |
|
474 | m.connect('admin_settings_issuetracker', | |
475 | '/settings/issue-tracker', action='settings_issuetracker', |
|
475 | '/settings/issue-tracker', action='settings_issuetracker', | |
476 | conditions={'method': ['GET']}) |
|
476 | conditions={'method': ['GET']}) | |
477 | m.connect('admin_settings_issuetracker_save', |
|
477 | m.connect('admin_settings_issuetracker_save', | |
478 | '/settings/issue-tracker/save', |
|
478 | '/settings/issue-tracker/save', | |
479 | action='settings_issuetracker_save', |
|
479 | action='settings_issuetracker_save', | |
480 | conditions={'method': ['POST']}) |
|
480 | conditions={'method': ['POST']}) | |
481 | m.connect('admin_issuetracker_test', '/settings/issue-tracker/test', |
|
481 | m.connect('admin_issuetracker_test', '/settings/issue-tracker/test', | |
482 | action='settings_issuetracker_test', |
|
482 | action='settings_issuetracker_test', | |
483 | conditions={'method': ['POST']}) |
|
483 | conditions={'method': ['POST']}) | |
484 | m.connect('admin_issuetracker_delete', |
|
484 | m.connect('admin_issuetracker_delete', | |
485 | '/settings/issue-tracker/delete', |
|
485 | '/settings/issue-tracker/delete', | |
486 | action='settings_issuetracker_delete', |
|
486 | action='settings_issuetracker_delete', | |
487 | conditions={'method': ['DELETE']}) |
|
487 | conditions={'method': ['DELETE']}) | |
488 |
|
488 | |||
489 | m.connect('admin_settings_email', '/settings/email', |
|
489 | m.connect('admin_settings_email', '/settings/email', | |
490 | action='settings_email_update', |
|
490 | action='settings_email_update', | |
491 | conditions={'method': ['POST']}) |
|
491 | conditions={'method': ['POST']}) | |
492 | m.connect('admin_settings_email', '/settings/email', |
|
492 | m.connect('admin_settings_email', '/settings/email', | |
493 | action='settings_email', conditions={'method': ['GET']}) |
|
493 | action='settings_email', conditions={'method': ['GET']}) | |
494 |
|
494 | |||
495 | m.connect('admin_settings_hooks', '/settings/hooks', |
|
495 | m.connect('admin_settings_hooks', '/settings/hooks', | |
496 | action='settings_hooks_update', |
|
496 | action='settings_hooks_update', | |
497 | conditions={'method': ['POST', 'DELETE']}) |
|
497 | conditions={'method': ['POST', 'DELETE']}) | |
498 | m.connect('admin_settings_hooks', '/settings/hooks', |
|
498 | m.connect('admin_settings_hooks', '/settings/hooks', | |
499 | action='settings_hooks', conditions={'method': ['GET']}) |
|
499 | action='settings_hooks', conditions={'method': ['GET']}) | |
500 |
|
500 | |||
501 | m.connect('admin_settings_search', '/settings/search', |
|
501 | m.connect('admin_settings_search', '/settings/search', | |
502 | action='settings_search', conditions={'method': ['GET']}) |
|
502 | action='settings_search', conditions={'method': ['GET']}) | |
503 |
|
503 | |||
504 | m.connect('admin_settings_system', '/settings/system', |
|
504 | m.connect('admin_settings_system', '/settings/system', | |
505 | action='settings_system', conditions={'method': ['GET']}) |
|
505 | action='settings_system', conditions={'method': ['GET']}) | |
506 |
|
506 | |||
507 | m.connect('admin_settings_system_update', '/settings/system/updates', |
|
507 | m.connect('admin_settings_system_update', '/settings/system/updates', | |
508 | action='settings_system_update', conditions={'method': ['GET']}) |
|
508 | action='settings_system_update', conditions={'method': ['GET']}) | |
509 |
|
509 | |||
510 | m.connect('admin_settings_supervisor', '/settings/supervisor', |
|
510 | m.connect('admin_settings_supervisor', '/settings/supervisor', | |
511 | action='settings_supervisor', conditions={'method': ['GET']}) |
|
511 | action='settings_supervisor', conditions={'method': ['GET']}) | |
512 | m.connect('admin_settings_supervisor_log', '/settings/supervisor/{procid}/log', |
|
512 | m.connect('admin_settings_supervisor_log', '/settings/supervisor/{procid}/log', | |
513 | action='settings_supervisor_log', conditions={'method': ['GET']}) |
|
513 | action='settings_supervisor_log', conditions={'method': ['GET']}) | |
514 |
|
514 | |||
515 | m.connect('admin_settings_labs', '/settings/labs', |
|
515 | m.connect('admin_settings_labs', '/settings/labs', | |
516 | action='settings_labs_update', |
|
516 | action='settings_labs_update', | |
517 | conditions={'method': ['POST']}) |
|
517 | conditions={'method': ['POST']}) | |
518 | m.connect('admin_settings_labs', '/settings/labs', |
|
518 | m.connect('admin_settings_labs', '/settings/labs', | |
519 | action='settings_labs', conditions={'method': ['GET']}) |
|
519 | action='settings_labs', conditions={'method': ['GET']}) | |
520 |
|
520 | |||
521 | # ADMIN MY ACCOUNT |
|
521 | # ADMIN MY ACCOUNT | |
522 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
522 | with rmap.submapper(path_prefix=ADMIN_PREFIX, | |
523 | controller='admin/my_account') as m: |
|
523 | controller='admin/my_account') as m: | |
524 |
|
524 | |||
525 | m.connect('my_account', '/my_account', |
|
525 | m.connect('my_account', '/my_account', | |
526 | action='my_account', conditions={'method': ['GET']}) |
|
526 | action='my_account', conditions={'method': ['GET']}) | |
527 | m.connect('my_account_edit', '/my_account/edit', |
|
527 | m.connect('my_account_edit', '/my_account/edit', | |
528 | action='my_account_edit', conditions={'method': ['GET']}) |
|
528 | action='my_account_edit', conditions={'method': ['GET']}) | |
529 | m.connect('my_account', '/my_account', |
|
529 | m.connect('my_account', '/my_account', | |
530 | action='my_account_update', conditions={'method': ['POST']}) |
|
530 | action='my_account_update', conditions={'method': ['POST']}) | |
531 |
|
531 | |||
532 | m.connect('my_account_password', '/my_account/password', |
|
532 | m.connect('my_account_password', '/my_account/password', | |
533 | action='my_account_password', conditions={'method': ['GET']}) |
|
533 | action='my_account_password', conditions={'method': ['GET']}) | |
534 | m.connect('my_account_password', '/my_account/password', |
|
534 | m.connect('my_account_password', '/my_account/password', | |
535 | action='my_account_password_update', conditions={'method': ['POST']}) |
|
535 | action='my_account_password_update', conditions={'method': ['POST']}) | |
536 |
|
536 | |||
537 | m.connect('my_account_repos', '/my_account/repos', |
|
537 | m.connect('my_account_repos', '/my_account/repos', | |
538 | action='my_account_repos', conditions={'method': ['GET']}) |
|
538 | action='my_account_repos', conditions={'method': ['GET']}) | |
539 |
|
539 | |||
540 | m.connect('my_account_watched', '/my_account/watched', |
|
540 | m.connect('my_account_watched', '/my_account/watched', | |
541 | action='my_account_watched', conditions={'method': ['GET']}) |
|
541 | action='my_account_watched', conditions={'method': ['GET']}) | |
542 |
|
542 | |||
543 | m.connect('my_account_pullrequests', '/my_account/pull_requests', |
|
543 | m.connect('my_account_pullrequests', '/my_account/pull_requests', | |
544 | action='my_account_pullrequests', conditions={'method': ['GET']}) |
|
544 | action='my_account_pullrequests', conditions={'method': ['GET']}) | |
545 |
|
545 | |||
546 | m.connect('my_account_perms', '/my_account/perms', |
|
546 | m.connect('my_account_perms', '/my_account/perms', | |
547 | action='my_account_perms', conditions={'method': ['GET']}) |
|
547 | action='my_account_perms', conditions={'method': ['GET']}) | |
548 |
|
548 | |||
549 | m.connect('my_account_emails', '/my_account/emails', |
|
549 | m.connect('my_account_emails', '/my_account/emails', | |
550 | action='my_account_emails', conditions={'method': ['GET']}) |
|
550 | action='my_account_emails', conditions={'method': ['GET']}) | |
551 | m.connect('my_account_emails', '/my_account/emails', |
|
551 | m.connect('my_account_emails', '/my_account/emails', | |
552 | action='my_account_emails_add', conditions={'method': ['POST']}) |
|
552 | action='my_account_emails_add', conditions={'method': ['POST']}) | |
553 | m.connect('my_account_emails', '/my_account/emails', |
|
553 | m.connect('my_account_emails', '/my_account/emails', | |
554 | action='my_account_emails_delete', conditions={'method': ['DELETE']}) |
|
554 | action='my_account_emails_delete', conditions={'method': ['DELETE']}) | |
555 |
|
555 | |||
556 | m.connect('my_account_auth_tokens', '/my_account/auth_tokens', |
|
556 | m.connect('my_account_auth_tokens', '/my_account/auth_tokens', | |
557 | action='my_account_auth_tokens', conditions={'method': ['GET']}) |
|
557 | action='my_account_auth_tokens', conditions={'method': ['GET']}) | |
558 | m.connect('my_account_auth_tokens', '/my_account/auth_tokens', |
|
558 | m.connect('my_account_auth_tokens', '/my_account/auth_tokens', | |
559 | action='my_account_auth_tokens_add', conditions={'method': ['POST']}) |
|
559 | action='my_account_auth_tokens_add', conditions={'method': ['POST']}) | |
560 | m.connect('my_account_auth_tokens', '/my_account/auth_tokens', |
|
560 | m.connect('my_account_auth_tokens', '/my_account/auth_tokens', | |
561 | action='my_account_auth_tokens_delete', conditions={'method': ['DELETE']}) |
|
561 | action='my_account_auth_tokens_delete', conditions={'method': ['DELETE']}) | |
562 |
|
562 | |||
563 | # NOTIFICATION REST ROUTES |
|
563 | # NOTIFICATION REST ROUTES | |
564 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
564 | with rmap.submapper(path_prefix=ADMIN_PREFIX, | |
565 | controller='admin/notifications') as m: |
|
565 | controller='admin/notifications') as m: | |
566 | m.connect('notifications', '/notifications', |
|
566 | m.connect('notifications', '/notifications', | |
567 | action='index', conditions={'method': ['GET']}) |
|
567 | action='index', conditions={'method': ['GET']}) | |
568 | m.connect('notifications_mark_all_read', '/notifications/mark_all_read', |
|
568 | m.connect('notifications_mark_all_read', '/notifications/mark_all_read', | |
569 | action='mark_all_read', conditions={'method': ['POST']}) |
|
569 | action='mark_all_read', conditions={'method': ['POST']}) | |
570 |
|
570 | |||
571 | m.connect('/notifications/{notification_id}', |
|
571 | m.connect('/notifications/{notification_id}', | |
572 | action='update', conditions={'method': ['PUT']}) |
|
572 | action='update', conditions={'method': ['PUT']}) | |
573 | m.connect('/notifications/{notification_id}', |
|
573 | m.connect('/notifications/{notification_id}', | |
574 | action='delete', conditions={'method': ['DELETE']}) |
|
574 | action='delete', conditions={'method': ['DELETE']}) | |
575 | m.connect('notification', '/notifications/{notification_id}', |
|
575 | m.connect('notification', '/notifications/{notification_id}', | |
576 | action='show', conditions={'method': ['GET']}) |
|
576 | action='show', conditions={'method': ['GET']}) | |
577 |
|
577 | |||
578 | # ADMIN GIST |
|
578 | # ADMIN GIST | |
579 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
579 | with rmap.submapper(path_prefix=ADMIN_PREFIX, | |
580 | controller='admin/gists') as m: |
|
580 | controller='admin/gists') as m: | |
581 | m.connect('gists', '/gists', |
|
581 | m.connect('gists', '/gists', | |
582 | action='create', conditions={'method': ['POST']}) |
|
582 | action='create', conditions={'method': ['POST']}) | |
583 | m.connect('gists', '/gists', jsroute=True, |
|
583 | m.connect('gists', '/gists', jsroute=True, | |
584 | action='index', conditions={'method': ['GET']}) |
|
584 | action='index', conditions={'method': ['GET']}) | |
585 | m.connect('new_gist', '/gists/new', jsroute=True, |
|
585 | m.connect('new_gist', '/gists/new', jsroute=True, | |
586 | action='new', conditions={'method': ['GET']}) |
|
586 | action='new', conditions={'method': ['GET']}) | |
587 |
|
587 | |||
588 | m.connect('/gists/{gist_id}', |
|
588 | m.connect('/gists/{gist_id}', | |
589 | action='delete', conditions={'method': ['DELETE']}) |
|
589 | action='delete', conditions={'method': ['DELETE']}) | |
590 | m.connect('edit_gist', '/gists/{gist_id}/edit', |
|
590 | m.connect('edit_gist', '/gists/{gist_id}/edit', | |
591 | action='edit_form', conditions={'method': ['GET']}) |
|
591 | action='edit_form', conditions={'method': ['GET']}) | |
592 | m.connect('edit_gist', '/gists/{gist_id}/edit', |
|
592 | m.connect('edit_gist', '/gists/{gist_id}/edit', | |
593 | action='edit', conditions={'method': ['POST']}) |
|
593 | action='edit', conditions={'method': ['POST']}) | |
594 | m.connect( |
|
594 | m.connect( | |
595 | 'edit_gist_check_revision', '/gists/{gist_id}/edit/check_revision', |
|
595 | 'edit_gist_check_revision', '/gists/{gist_id}/edit/check_revision', | |
596 | action='check_revision', conditions={'method': ['GET']}) |
|
596 | action='check_revision', conditions={'method': ['GET']}) | |
597 |
|
597 | |||
598 | m.connect('gist', '/gists/{gist_id}', |
|
598 | m.connect('gist', '/gists/{gist_id}', | |
599 | action='show', conditions={'method': ['GET']}) |
|
599 | action='show', conditions={'method': ['GET']}) | |
600 | m.connect('gist_rev', '/gists/{gist_id}/{revision}', |
|
600 | m.connect('gist_rev', '/gists/{gist_id}/{revision}', | |
601 | revision='tip', |
|
601 | revision='tip', | |
602 | action='show', conditions={'method': ['GET']}) |
|
602 | action='show', conditions={'method': ['GET']}) | |
603 | m.connect('formatted_gist', '/gists/{gist_id}/{revision}/{format}', |
|
603 | m.connect('formatted_gist', '/gists/{gist_id}/{revision}/{format}', | |
604 | revision='tip', |
|
604 | revision='tip', | |
605 | action='show', conditions={'method': ['GET']}) |
|
605 | action='show', conditions={'method': ['GET']}) | |
606 | m.connect('formatted_gist_file', '/gists/{gist_id}/{revision}/{format}/{f_path}', |
|
606 | m.connect('formatted_gist_file', '/gists/{gist_id}/{revision}/{format}/{f_path}', | |
607 | revision='tip', |
|
607 | revision='tip', | |
608 | action='show', conditions={'method': ['GET']}, |
|
608 | action='show', conditions={'method': ['GET']}, | |
609 | requirements=URL_NAME_REQUIREMENTS) |
|
609 | requirements=URL_NAME_REQUIREMENTS) | |
610 |
|
610 | |||
611 | # ADMIN MAIN PAGES |
|
611 | # ADMIN MAIN PAGES | |
612 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
612 | with rmap.submapper(path_prefix=ADMIN_PREFIX, | |
613 | controller='admin/admin') as m: |
|
613 | controller='admin/admin') as m: | |
614 | m.connect('admin_home', '', action='index') |
|
614 | m.connect('admin_home', '', action='index') | |
615 | m.connect('admin_add_repo', '/add_repo/{new_repo:[a-z0-9\. _-]*}', |
|
615 | m.connect('admin_add_repo', '/add_repo/{new_repo:[a-z0-9\. _-]*}', | |
616 | action='add_repo') |
|
616 | action='add_repo') | |
617 | m.connect( |
|
617 | m.connect( | |
618 | 'pull_requests_global_0', '/pull_requests/{pull_request_id:[0-9]+}', |
|
618 | 'pull_requests_global_0', '/pull_requests/{pull_request_id:[0-9]+}', | |
619 | action='pull_requests') |
|
619 | action='pull_requests') | |
620 | m.connect( |
|
620 | m.connect( | |
621 | 'pull_requests_global', '/pull-requests/{pull_request_id:[0-9]+}', |
|
621 | 'pull_requests_global', '/pull-requests/{pull_request_id:[0-9]+}', | |
622 | action='pull_requests') |
|
622 | action='pull_requests') | |
623 |
|
623 | |||
624 |
|
624 | |||
625 | # USER JOURNAL |
|
625 | # USER JOURNAL | |
626 | rmap.connect('journal', '%s/journal' % (ADMIN_PREFIX,), |
|
626 | rmap.connect('journal', '%s/journal' % (ADMIN_PREFIX,), | |
627 | controller='journal', action='index') |
|
627 | controller='journal', action='index') | |
628 | rmap.connect('journal_rss', '%s/journal/rss' % (ADMIN_PREFIX,), |
|
628 | rmap.connect('journal_rss', '%s/journal/rss' % (ADMIN_PREFIX,), | |
629 | controller='journal', action='journal_rss') |
|
629 | controller='journal', action='journal_rss') | |
630 | rmap.connect('journal_atom', '%s/journal/atom' % (ADMIN_PREFIX,), |
|
630 | rmap.connect('journal_atom', '%s/journal/atom' % (ADMIN_PREFIX,), | |
631 | controller='journal', action='journal_atom') |
|
631 | controller='journal', action='journal_atom') | |
632 |
|
632 | |||
633 | rmap.connect('public_journal', '%s/public_journal' % (ADMIN_PREFIX,), |
|
633 | rmap.connect('public_journal', '%s/public_journal' % (ADMIN_PREFIX,), | |
634 | controller='journal', action='public_journal') |
|
634 | controller='journal', action='public_journal') | |
635 |
|
635 | |||
636 | rmap.connect('public_journal_rss', '%s/public_journal/rss' % (ADMIN_PREFIX,), |
|
636 | rmap.connect('public_journal_rss', '%s/public_journal/rss' % (ADMIN_PREFIX,), | |
637 | controller='journal', action='public_journal_rss') |
|
637 | controller='journal', action='public_journal_rss') | |
638 |
|
638 | |||
639 | rmap.connect('public_journal_rss_old', '%s/public_journal_rss' % (ADMIN_PREFIX,), |
|
639 | rmap.connect('public_journal_rss_old', '%s/public_journal_rss' % (ADMIN_PREFIX,), | |
640 | controller='journal', action='public_journal_rss') |
|
640 | controller='journal', action='public_journal_rss') | |
641 |
|
641 | |||
642 | rmap.connect('public_journal_atom', |
|
642 | rmap.connect('public_journal_atom', | |
643 | '%s/public_journal/atom' % (ADMIN_PREFIX,), controller='journal', |
|
643 | '%s/public_journal/atom' % (ADMIN_PREFIX,), controller='journal', | |
644 | action='public_journal_atom') |
|
644 | action='public_journal_atom') | |
645 |
|
645 | |||
646 | rmap.connect('public_journal_atom_old', |
|
646 | rmap.connect('public_journal_atom_old', | |
647 | '%s/public_journal_atom' % (ADMIN_PREFIX,), controller='journal', |
|
647 | '%s/public_journal_atom' % (ADMIN_PREFIX,), controller='journal', | |
648 | action='public_journal_atom') |
|
648 | action='public_journal_atom') | |
649 |
|
649 | |||
650 | rmap.connect('toggle_following', '%s/toggle_following' % (ADMIN_PREFIX,), |
|
650 | rmap.connect('toggle_following', '%s/toggle_following' % (ADMIN_PREFIX,), | |
651 | controller='journal', action='toggle_following', jsroute=True, |
|
651 | controller='journal', action='toggle_following', jsroute=True, | |
652 | conditions={'method': ['POST']}) |
|
652 | conditions={'method': ['POST']}) | |
653 |
|
653 | |||
654 | # FULL TEXT SEARCH |
|
654 | # FULL TEXT SEARCH | |
655 | rmap.connect('search', '%s/search' % (ADMIN_PREFIX,), |
|
655 | rmap.connect('search', '%s/search' % (ADMIN_PREFIX,), | |
656 | controller='search') |
|
656 | controller='search') | |
657 | rmap.connect('search_repo_home', '/{repo_name}/search', |
|
657 | rmap.connect('search_repo_home', '/{repo_name}/search', | |
658 | controller='search', |
|
658 | controller='search', | |
659 | action='index', |
|
659 | action='index', | |
660 | conditions={'function': check_repo}, |
|
660 | conditions={'function': check_repo}, | |
661 | requirements=URL_NAME_REQUIREMENTS) |
|
661 | requirements=URL_NAME_REQUIREMENTS) | |
662 |
|
662 | |||
663 | # FEEDS |
|
663 | # FEEDS | |
664 | rmap.connect('rss_feed_home', '/{repo_name}/feed/rss', |
|
664 | rmap.connect('rss_feed_home', '/{repo_name}/feed/rss', | |
665 | controller='feed', action='rss', |
|
665 | controller='feed', action='rss', | |
666 | conditions={'function': check_repo}, |
|
666 | conditions={'function': check_repo}, | |
667 | requirements=URL_NAME_REQUIREMENTS) |
|
667 | requirements=URL_NAME_REQUIREMENTS) | |
668 |
|
668 | |||
669 | rmap.connect('atom_feed_home', '/{repo_name}/feed/atom', |
|
669 | rmap.connect('atom_feed_home', '/{repo_name}/feed/atom', | |
670 | controller='feed', action='atom', |
|
670 | controller='feed', action='atom', | |
671 | conditions={'function': check_repo}, |
|
671 | conditions={'function': check_repo}, | |
672 | requirements=URL_NAME_REQUIREMENTS) |
|
672 | requirements=URL_NAME_REQUIREMENTS) | |
673 |
|
673 | |||
674 | #========================================================================== |
|
674 | #========================================================================== | |
675 | # REPOSITORY ROUTES |
|
675 | # REPOSITORY ROUTES | |
676 | #========================================================================== |
|
676 | #========================================================================== | |
677 |
|
677 | |||
678 | rmap.connect('repo_creating_home', '/{repo_name}/repo_creating', |
|
678 | rmap.connect('repo_creating_home', '/{repo_name}/repo_creating', | |
679 | controller='admin/repos', action='repo_creating', |
|
679 | controller='admin/repos', action='repo_creating', | |
680 | requirements=URL_NAME_REQUIREMENTS) |
|
680 | requirements=URL_NAME_REQUIREMENTS) | |
681 | rmap.connect('repo_check_home', '/{repo_name}/crepo_check', |
|
681 | rmap.connect('repo_check_home', '/{repo_name}/crepo_check', | |
682 | controller='admin/repos', action='repo_check', |
|
682 | controller='admin/repos', action='repo_check', | |
683 | requirements=URL_NAME_REQUIREMENTS) |
|
683 | requirements=URL_NAME_REQUIREMENTS) | |
684 |
|
684 | |||
685 | rmap.connect('repo_stats', '/{repo_name}/repo_stats/{commit_id}', |
|
685 | rmap.connect('repo_stats', '/{repo_name}/repo_stats/{commit_id}', | |
686 | controller='summary', action='repo_stats', |
|
686 | controller='summary', action='repo_stats', | |
687 | conditions={'function': check_repo}, |
|
687 | conditions={'function': check_repo}, | |
688 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
688 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) | |
689 |
|
689 | |||
690 | rmap.connect('repo_refs_data', '/{repo_name}/refs-data', |
|
690 | rmap.connect('repo_refs_data', '/{repo_name}/refs-data', | |
691 | controller='summary', action='repo_refs_data', jsroute=True, |
|
691 | controller='summary', action='repo_refs_data', jsroute=True, | |
692 | requirements=URL_NAME_REQUIREMENTS) |
|
692 | requirements=URL_NAME_REQUIREMENTS) | |
693 | rmap.connect('repo_refs_changelog_data', '/{repo_name}/refs-data-changelog', |
|
693 | rmap.connect('repo_refs_changelog_data', '/{repo_name}/refs-data-changelog', | |
694 | controller='summary', action='repo_refs_changelog_data', |
|
694 | controller='summary', action='repo_refs_changelog_data', | |
695 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
695 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) | |
696 |
|
696 | |||
697 | rmap.connect('changeset_home', '/{repo_name}/changeset/{revision}', |
|
697 | rmap.connect('changeset_home', '/{repo_name}/changeset/{revision}', | |
698 | controller='changeset', revision='tip', jsroute=True, |
|
698 | controller='changeset', revision='tip', jsroute=True, | |
699 | conditions={'function': check_repo}, |
|
699 | conditions={'function': check_repo}, | |
700 | requirements=URL_NAME_REQUIREMENTS) |
|
700 | requirements=URL_NAME_REQUIREMENTS) | |
701 | rmap.connect('changeset_children', '/{repo_name}/changeset_children/{revision}', |
|
701 | rmap.connect('changeset_children', '/{repo_name}/changeset_children/{revision}', | |
702 | controller='changeset', revision='tip', action='changeset_children', |
|
702 | controller='changeset', revision='tip', action='changeset_children', | |
703 | conditions={'function': check_repo}, |
|
703 | conditions={'function': check_repo}, | |
704 | requirements=URL_NAME_REQUIREMENTS) |
|
704 | requirements=URL_NAME_REQUIREMENTS) | |
705 | rmap.connect('changeset_parents', '/{repo_name}/changeset_parents/{revision}', |
|
705 | rmap.connect('changeset_parents', '/{repo_name}/changeset_parents/{revision}', | |
706 | controller='changeset', revision='tip', action='changeset_parents', |
|
706 | controller='changeset', revision='tip', action='changeset_parents', | |
707 | conditions={'function': check_repo}, |
|
707 | conditions={'function': check_repo}, | |
708 | requirements=URL_NAME_REQUIREMENTS) |
|
708 | requirements=URL_NAME_REQUIREMENTS) | |
709 |
|
709 | |||
710 | # repo edit options |
|
710 | # repo edit options | |
711 | rmap.connect('edit_repo', '/{repo_name}/settings', jsroute=True, |
|
711 | rmap.connect('edit_repo', '/{repo_name}/settings', jsroute=True, | |
712 | controller='admin/repos', action='edit', |
|
712 | controller='admin/repos', action='edit', | |
713 | conditions={'method': ['GET'], 'function': check_repo}, |
|
713 | conditions={'method': ['GET'], 'function': check_repo}, | |
714 | requirements=URL_NAME_REQUIREMENTS) |
|
714 | requirements=URL_NAME_REQUIREMENTS) | |
715 |
|
715 | |||
716 | rmap.connect('edit_repo_perms', '/{repo_name}/settings/permissions', |
|
716 | rmap.connect('edit_repo_perms', '/{repo_name}/settings/permissions', | |
717 | jsroute=True, |
|
717 | jsroute=True, | |
718 | controller='admin/repos', action='edit_permissions', |
|
718 | controller='admin/repos', action='edit_permissions', | |
719 | conditions={'method': ['GET'], 'function': check_repo}, |
|
719 | conditions={'method': ['GET'], 'function': check_repo}, | |
720 | requirements=URL_NAME_REQUIREMENTS) |
|
720 | requirements=URL_NAME_REQUIREMENTS) | |
721 | rmap.connect('edit_repo_perms_update', '/{repo_name}/settings/permissions', |
|
721 | rmap.connect('edit_repo_perms_update', '/{repo_name}/settings/permissions', | |
722 | controller='admin/repos', action='edit_permissions_update', |
|
722 | controller='admin/repos', action='edit_permissions_update', | |
723 | conditions={'method': ['PUT'], 'function': check_repo}, |
|
723 | conditions={'method': ['PUT'], 'function': check_repo}, | |
724 | requirements=URL_NAME_REQUIREMENTS) |
|
724 | requirements=URL_NAME_REQUIREMENTS) | |
725 |
|
725 | |||
726 | rmap.connect('edit_repo_fields', '/{repo_name}/settings/fields', |
|
726 | rmap.connect('edit_repo_fields', '/{repo_name}/settings/fields', | |
727 | controller='admin/repos', action='edit_fields', |
|
727 | controller='admin/repos', action='edit_fields', | |
728 | conditions={'method': ['GET'], 'function': check_repo}, |
|
728 | conditions={'method': ['GET'], 'function': check_repo}, | |
729 | requirements=URL_NAME_REQUIREMENTS) |
|
729 | requirements=URL_NAME_REQUIREMENTS) | |
730 | rmap.connect('create_repo_fields', '/{repo_name}/settings/fields/new', |
|
730 | rmap.connect('create_repo_fields', '/{repo_name}/settings/fields/new', | |
731 | controller='admin/repos', action='create_repo_field', |
|
731 | controller='admin/repos', action='create_repo_field', | |
732 | conditions={'method': ['PUT'], 'function': check_repo}, |
|
732 | conditions={'method': ['PUT'], 'function': check_repo}, | |
733 | requirements=URL_NAME_REQUIREMENTS) |
|
733 | requirements=URL_NAME_REQUIREMENTS) | |
734 | rmap.connect('delete_repo_fields', '/{repo_name}/settings/fields/{field_id}', |
|
734 | rmap.connect('delete_repo_fields', '/{repo_name}/settings/fields/{field_id}', | |
735 | controller='admin/repos', action='delete_repo_field', |
|
735 | controller='admin/repos', action='delete_repo_field', | |
736 | conditions={'method': ['DELETE'], 'function': check_repo}, |
|
736 | conditions={'method': ['DELETE'], 'function': check_repo}, | |
737 | requirements=URL_NAME_REQUIREMENTS) |
|
737 | requirements=URL_NAME_REQUIREMENTS) | |
738 |
|
738 | |||
739 | rmap.connect('edit_repo_advanced', '/{repo_name}/settings/advanced', |
|
739 | rmap.connect('edit_repo_advanced', '/{repo_name}/settings/advanced', | |
740 | controller='admin/repos', action='edit_advanced', |
|
740 | controller='admin/repos', action='edit_advanced', | |
741 | conditions={'method': ['GET'], 'function': check_repo}, |
|
741 | conditions={'method': ['GET'], 'function': check_repo}, | |
742 | requirements=URL_NAME_REQUIREMENTS) |
|
742 | requirements=URL_NAME_REQUIREMENTS) | |
743 |
|
743 | |||
744 | rmap.connect('edit_repo_advanced_locking', '/{repo_name}/settings/advanced/locking', |
|
744 | rmap.connect('edit_repo_advanced_locking', '/{repo_name}/settings/advanced/locking', | |
745 | controller='admin/repos', action='edit_advanced_locking', |
|
745 | controller='admin/repos', action='edit_advanced_locking', | |
746 | conditions={'method': ['PUT'], 'function': check_repo}, |
|
746 | conditions={'method': ['PUT'], 'function': check_repo}, | |
747 | requirements=URL_NAME_REQUIREMENTS) |
|
747 | requirements=URL_NAME_REQUIREMENTS) | |
748 | rmap.connect('toggle_locking', '/{repo_name}/settings/advanced/locking_toggle', |
|
748 | rmap.connect('toggle_locking', '/{repo_name}/settings/advanced/locking_toggle', | |
749 | controller='admin/repos', action='toggle_locking', |
|
749 | controller='admin/repos', action='toggle_locking', | |
750 | conditions={'method': ['GET'], 'function': check_repo}, |
|
750 | conditions={'method': ['GET'], 'function': check_repo}, | |
751 | requirements=URL_NAME_REQUIREMENTS) |
|
751 | requirements=URL_NAME_REQUIREMENTS) | |
752 |
|
752 | |||
753 | rmap.connect('edit_repo_advanced_journal', '/{repo_name}/settings/advanced/journal', |
|
753 | rmap.connect('edit_repo_advanced_journal', '/{repo_name}/settings/advanced/journal', | |
754 | controller='admin/repos', action='edit_advanced_journal', |
|
754 | controller='admin/repos', action='edit_advanced_journal', | |
755 | conditions={'method': ['PUT'], 'function': check_repo}, |
|
755 | conditions={'method': ['PUT'], 'function': check_repo}, | |
756 | requirements=URL_NAME_REQUIREMENTS) |
|
756 | requirements=URL_NAME_REQUIREMENTS) | |
757 |
|
757 | |||
758 | rmap.connect('edit_repo_advanced_fork', '/{repo_name}/settings/advanced/fork', |
|
758 | rmap.connect('edit_repo_advanced_fork', '/{repo_name}/settings/advanced/fork', | |
759 | controller='admin/repos', action='edit_advanced_fork', |
|
759 | controller='admin/repos', action='edit_advanced_fork', | |
760 | conditions={'method': ['PUT'], 'function': check_repo}, |
|
760 | conditions={'method': ['PUT'], 'function': check_repo}, | |
761 | requirements=URL_NAME_REQUIREMENTS) |
|
761 | requirements=URL_NAME_REQUIREMENTS) | |
762 |
|
762 | |||
763 | rmap.connect('edit_repo_caches', '/{repo_name}/settings/caches', |
|
763 | rmap.connect('edit_repo_caches', '/{repo_name}/settings/caches', | |
764 | controller='admin/repos', action='edit_caches_form', |
|
764 | controller='admin/repos', action='edit_caches_form', | |
765 | conditions={'method': ['GET'], 'function': check_repo}, |
|
765 | conditions={'method': ['GET'], 'function': check_repo}, | |
766 | requirements=URL_NAME_REQUIREMENTS) |
|
766 | requirements=URL_NAME_REQUIREMENTS) | |
767 | rmap.connect('edit_repo_caches', '/{repo_name}/settings/caches', |
|
767 | rmap.connect('edit_repo_caches', '/{repo_name}/settings/caches', | |
768 | controller='admin/repos', action='edit_caches', |
|
768 | controller='admin/repos', action='edit_caches', | |
769 | conditions={'method': ['PUT'], 'function': check_repo}, |
|
769 | conditions={'method': ['PUT'], 'function': check_repo}, | |
770 | requirements=URL_NAME_REQUIREMENTS) |
|
770 | requirements=URL_NAME_REQUIREMENTS) | |
771 |
|
771 | |||
772 | rmap.connect('edit_repo_remote', '/{repo_name}/settings/remote', |
|
772 | rmap.connect('edit_repo_remote', '/{repo_name}/settings/remote', | |
773 | controller='admin/repos', action='edit_remote_form', |
|
773 | controller='admin/repos', action='edit_remote_form', | |
774 | conditions={'method': ['GET'], 'function': check_repo}, |
|
774 | conditions={'method': ['GET'], 'function': check_repo}, | |
775 | requirements=URL_NAME_REQUIREMENTS) |
|
775 | requirements=URL_NAME_REQUIREMENTS) | |
776 | rmap.connect('edit_repo_remote', '/{repo_name}/settings/remote', |
|
776 | rmap.connect('edit_repo_remote', '/{repo_name}/settings/remote', | |
777 | controller='admin/repos', action='edit_remote', |
|
777 | controller='admin/repos', action='edit_remote', | |
778 | conditions={'method': ['PUT'], 'function': check_repo}, |
|
778 | conditions={'method': ['PUT'], 'function': check_repo}, | |
779 | requirements=URL_NAME_REQUIREMENTS) |
|
779 | requirements=URL_NAME_REQUIREMENTS) | |
780 |
|
780 | |||
781 | rmap.connect('edit_repo_statistics', '/{repo_name}/settings/statistics', |
|
781 | rmap.connect('edit_repo_statistics', '/{repo_name}/settings/statistics', | |
782 | controller='admin/repos', action='edit_statistics_form', |
|
782 | controller='admin/repos', action='edit_statistics_form', | |
783 | conditions={'method': ['GET'], 'function': check_repo}, |
|
783 | conditions={'method': ['GET'], 'function': check_repo}, | |
784 | requirements=URL_NAME_REQUIREMENTS) |
|
784 | requirements=URL_NAME_REQUIREMENTS) | |
785 | rmap.connect('edit_repo_statistics', '/{repo_name}/settings/statistics', |
|
785 | rmap.connect('edit_repo_statistics', '/{repo_name}/settings/statistics', | |
786 | controller='admin/repos', action='edit_statistics', |
|
786 | controller='admin/repos', action='edit_statistics', | |
787 | conditions={'method': ['PUT'], 'function': check_repo}, |
|
787 | conditions={'method': ['PUT'], 'function': check_repo}, | |
788 | requirements=URL_NAME_REQUIREMENTS) |
|
788 | requirements=URL_NAME_REQUIREMENTS) | |
789 | rmap.connect('repo_settings_issuetracker', |
|
789 | rmap.connect('repo_settings_issuetracker', | |
790 | '/{repo_name}/settings/issue-tracker', |
|
790 | '/{repo_name}/settings/issue-tracker', | |
791 | controller='admin/repos', action='repo_issuetracker', |
|
791 | controller='admin/repos', action='repo_issuetracker', | |
792 | conditions={'method': ['GET'], 'function': check_repo}, |
|
792 | conditions={'method': ['GET'], 'function': check_repo}, | |
793 | requirements=URL_NAME_REQUIREMENTS) |
|
793 | requirements=URL_NAME_REQUIREMENTS) | |
794 | rmap.connect('repo_issuetracker_test', |
|
794 | rmap.connect('repo_issuetracker_test', | |
795 | '/{repo_name}/settings/issue-tracker/test', |
|
795 | '/{repo_name}/settings/issue-tracker/test', | |
796 | controller='admin/repos', action='repo_issuetracker_test', |
|
796 | controller='admin/repos', action='repo_issuetracker_test', | |
797 | conditions={'method': ['POST'], 'function': check_repo}, |
|
797 | conditions={'method': ['POST'], 'function': check_repo}, | |
798 | requirements=URL_NAME_REQUIREMENTS) |
|
798 | requirements=URL_NAME_REQUIREMENTS) | |
799 | rmap.connect('repo_issuetracker_delete', |
|
799 | rmap.connect('repo_issuetracker_delete', | |
800 | '/{repo_name}/settings/issue-tracker/delete', |
|
800 | '/{repo_name}/settings/issue-tracker/delete', | |
801 | controller='admin/repos', action='repo_issuetracker_delete', |
|
801 | controller='admin/repos', action='repo_issuetracker_delete', | |
802 | conditions={'method': ['DELETE'], 'function': check_repo}, |
|
802 | conditions={'method': ['DELETE'], 'function': check_repo}, | |
803 | requirements=URL_NAME_REQUIREMENTS) |
|
803 | requirements=URL_NAME_REQUIREMENTS) | |
804 | rmap.connect('repo_issuetracker_save', |
|
804 | rmap.connect('repo_issuetracker_save', | |
805 | '/{repo_name}/settings/issue-tracker/save', |
|
805 | '/{repo_name}/settings/issue-tracker/save', | |
806 | controller='admin/repos', action='repo_issuetracker_save', |
|
806 | controller='admin/repos', action='repo_issuetracker_save', | |
807 | conditions={'method': ['POST'], 'function': check_repo}, |
|
807 | conditions={'method': ['POST'], 'function': check_repo}, | |
808 | requirements=URL_NAME_REQUIREMENTS) |
|
808 | requirements=URL_NAME_REQUIREMENTS) | |
809 | rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs', |
|
809 | rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs', | |
810 | controller='admin/repos', action='repo_settings_vcs_update', |
|
810 | controller='admin/repos', action='repo_settings_vcs_update', | |
811 | conditions={'method': ['POST'], 'function': check_repo}, |
|
811 | conditions={'method': ['POST'], 'function': check_repo}, | |
812 | requirements=URL_NAME_REQUIREMENTS) |
|
812 | requirements=URL_NAME_REQUIREMENTS) | |
813 | rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs', |
|
813 | rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs', | |
814 | controller='admin/repos', action='repo_settings_vcs', |
|
814 | controller='admin/repos', action='repo_settings_vcs', | |
815 | conditions={'method': ['GET'], 'function': check_repo}, |
|
815 | conditions={'method': ['GET'], 'function': check_repo}, | |
816 | requirements=URL_NAME_REQUIREMENTS) |
|
816 | requirements=URL_NAME_REQUIREMENTS) | |
817 | rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs', |
|
817 | rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs', | |
818 | controller='admin/repos', action='repo_delete_svn_pattern', |
|
818 | controller='admin/repos', action='repo_delete_svn_pattern', | |
819 | conditions={'method': ['DELETE'], 'function': check_repo}, |
|
819 | conditions={'method': ['DELETE'], 'function': check_repo}, | |
820 | requirements=URL_NAME_REQUIREMENTS) |
|
820 | requirements=URL_NAME_REQUIREMENTS) | |
821 |
|
821 | |||
822 | # still working url for backward compat. |
|
822 | # still working url for backward compat. | |
823 | rmap.connect('raw_changeset_home_depraced', |
|
823 | rmap.connect('raw_changeset_home_depraced', | |
824 | '/{repo_name}/raw-changeset/{revision}', |
|
824 | '/{repo_name}/raw-changeset/{revision}', | |
825 | controller='changeset', action='changeset_raw', |
|
825 | controller='changeset', action='changeset_raw', | |
826 | revision='tip', conditions={'function': check_repo}, |
|
826 | revision='tip', conditions={'function': check_repo}, | |
827 | requirements=URL_NAME_REQUIREMENTS) |
|
827 | requirements=URL_NAME_REQUIREMENTS) | |
828 |
|
828 | |||
829 | # new URLs |
|
829 | # new URLs | |
830 | rmap.connect('changeset_raw_home', |
|
830 | rmap.connect('changeset_raw_home', | |
831 | '/{repo_name}/changeset-diff/{revision}', |
|
831 | '/{repo_name}/changeset-diff/{revision}', | |
832 | controller='changeset', action='changeset_raw', |
|
832 | controller='changeset', action='changeset_raw', | |
833 | revision='tip', conditions={'function': check_repo}, |
|
833 | revision='tip', conditions={'function': check_repo}, | |
834 | requirements=URL_NAME_REQUIREMENTS) |
|
834 | requirements=URL_NAME_REQUIREMENTS) | |
835 |
|
835 | |||
836 | rmap.connect('changeset_patch_home', |
|
836 | rmap.connect('changeset_patch_home', | |
837 | '/{repo_name}/changeset-patch/{revision}', |
|
837 | '/{repo_name}/changeset-patch/{revision}', | |
838 | controller='changeset', action='changeset_patch', |
|
838 | controller='changeset', action='changeset_patch', | |
839 | revision='tip', conditions={'function': check_repo}, |
|
839 | revision='tip', conditions={'function': check_repo}, | |
840 | requirements=URL_NAME_REQUIREMENTS) |
|
840 | requirements=URL_NAME_REQUIREMENTS) | |
841 |
|
841 | |||
842 | rmap.connect('changeset_download_home', |
|
842 | rmap.connect('changeset_download_home', | |
843 | '/{repo_name}/changeset-download/{revision}', |
|
843 | '/{repo_name}/changeset-download/{revision}', | |
844 | controller='changeset', action='changeset_download', |
|
844 | controller='changeset', action='changeset_download', | |
845 | revision='tip', conditions={'function': check_repo}, |
|
845 | revision='tip', conditions={'function': check_repo}, | |
846 | requirements=URL_NAME_REQUIREMENTS) |
|
846 | requirements=URL_NAME_REQUIREMENTS) | |
847 |
|
847 | |||
848 | rmap.connect('changeset_comment', |
|
848 | rmap.connect('changeset_comment', | |
849 | '/{repo_name}/changeset/{revision}/comment', jsroute=True, |
|
849 | '/{repo_name}/changeset/{revision}/comment', jsroute=True, | |
850 | controller='changeset', revision='tip', action='comment', |
|
850 | controller='changeset', revision='tip', action='comment', | |
851 | conditions={'function': check_repo}, |
|
851 | conditions={'function': check_repo}, | |
852 | requirements=URL_NAME_REQUIREMENTS) |
|
852 | requirements=URL_NAME_REQUIREMENTS) | |
853 |
|
853 | |||
854 | rmap.connect('changeset_comment_preview', |
|
854 | rmap.connect('changeset_comment_preview', | |
855 | '/{repo_name}/changeset/comment/preview', jsroute=True, |
|
855 | '/{repo_name}/changeset/comment/preview', jsroute=True, | |
856 | controller='changeset', action='preview_comment', |
|
856 | controller='changeset', action='preview_comment', | |
857 | conditions={'function': check_repo, 'method': ['POST']}, |
|
857 | conditions={'function': check_repo, 'method': ['POST']}, | |
858 | requirements=URL_NAME_REQUIREMENTS) |
|
858 | requirements=URL_NAME_REQUIREMENTS) | |
859 |
|
859 | |||
860 | rmap.connect('changeset_comment_delete', |
|
860 | rmap.connect('changeset_comment_delete', | |
861 | '/{repo_name}/changeset/comment/{comment_id}/delete', |
|
861 | '/{repo_name}/changeset/comment/{comment_id}/delete', | |
862 | controller='changeset', action='delete_comment', |
|
862 | controller='changeset', action='delete_comment', | |
863 | conditions={'function': check_repo, 'method': ['DELETE']}, |
|
863 | conditions={'function': check_repo, 'method': ['DELETE']}, | |
864 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
864 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) | |
865 |
|
865 | |||
866 | rmap.connect('changeset_info', '/changeset_info/{repo_name}/{revision}', |
|
866 | rmap.connect('changeset_info', '/changeset_info/{repo_name}/{revision}', | |
867 | controller='changeset', action='changeset_info', |
|
867 | controller='changeset', action='changeset_info', | |
868 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
868 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) | |
869 |
|
869 | |||
870 | rmap.connect('compare_home', |
|
870 | rmap.connect('compare_home', | |
871 | '/{repo_name}/compare', |
|
871 | '/{repo_name}/compare', | |
872 | controller='compare', action='index', |
|
872 | controller='compare', action='index', | |
873 | conditions={'function': check_repo}, |
|
873 | conditions={'function': check_repo}, | |
874 | requirements=URL_NAME_REQUIREMENTS) |
|
874 | requirements=URL_NAME_REQUIREMENTS) | |
875 |
|
875 | |||
876 | rmap.connect('compare_url', |
|
876 | rmap.connect('compare_url', | |
877 | '/{repo_name}/compare/{source_ref_type}@{source_ref:.*?}...{target_ref_type}@{target_ref:.*?}', |
|
877 | '/{repo_name}/compare/{source_ref_type}@{source_ref:.*?}...{target_ref_type}@{target_ref:.*?}', | |
878 | controller='compare', action='compare', |
|
878 | controller='compare', action='compare', | |
879 | conditions={'function': check_repo}, |
|
879 | conditions={'function': check_repo}, | |
880 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
880 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) | |
881 |
|
881 | |||
882 | rmap.connect('pullrequest_home', |
|
882 | rmap.connect('pullrequest_home', | |
883 | '/{repo_name}/pull-request/new', controller='pullrequests', |
|
883 | '/{repo_name}/pull-request/new', controller='pullrequests', | |
884 | action='index', conditions={'function': check_repo, |
|
884 | action='index', conditions={'function': check_repo, | |
885 | 'method': ['GET']}, |
|
885 | 'method': ['GET']}, | |
886 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
886 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) | |
887 |
|
887 | |||
888 | rmap.connect('pullrequest', |
|
888 | rmap.connect('pullrequest', | |
889 | '/{repo_name}/pull-request/new', controller='pullrequests', |
|
889 | '/{repo_name}/pull-request/new', controller='pullrequests', | |
890 | action='create', conditions={'function': check_repo, |
|
890 | action='create', conditions={'function': check_repo, | |
891 | 'method': ['POST']}, |
|
891 | 'method': ['POST']}, | |
892 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
892 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) | |
893 |
|
893 | |||
894 | rmap.connect('pullrequest_repo_refs', |
|
894 | rmap.connect('pullrequest_repo_refs', | |
895 | '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}', |
|
895 | '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}', | |
896 | controller='pullrequests', |
|
896 | controller='pullrequests', | |
897 | action='get_repo_refs', |
|
897 | action='get_repo_refs', | |
898 | conditions={'function': check_repo, 'method': ['GET']}, |
|
898 | conditions={'function': check_repo, 'method': ['GET']}, | |
899 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
899 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) | |
900 |
|
900 | |||
901 | rmap.connect('pullrequest_repo_destinations', |
|
901 | rmap.connect('pullrequest_repo_destinations', | |
902 | '/{repo_name}/pull-request/repo-destinations', |
|
902 | '/{repo_name}/pull-request/repo-destinations', | |
903 | controller='pullrequests', |
|
903 | controller='pullrequests', | |
904 | action='get_repo_destinations', |
|
904 | action='get_repo_destinations', | |
905 | conditions={'function': check_repo, 'method': ['GET']}, |
|
905 | conditions={'function': check_repo, 'method': ['GET']}, | |
906 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
906 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) | |
907 |
|
907 | |||
908 | rmap.connect('pullrequest_show', |
|
908 | rmap.connect('pullrequest_show', | |
909 | '/{repo_name}/pull-request/{pull_request_id}', |
|
909 | '/{repo_name}/pull-request/{pull_request_id}', | |
910 | controller='pullrequests', |
|
910 | controller='pullrequests', | |
911 | action='show', conditions={'function': check_repo, |
|
911 | action='show', conditions={'function': check_repo, | |
912 | 'method': ['GET']}, |
|
912 | 'method': ['GET']}, | |
913 | requirements=URL_NAME_REQUIREMENTS) |
|
913 | requirements=URL_NAME_REQUIREMENTS) | |
914 |
|
914 | |||
915 | rmap.connect('pullrequest_update', |
|
915 | rmap.connect('pullrequest_update', | |
916 | '/{repo_name}/pull-request/{pull_request_id}', |
|
916 | '/{repo_name}/pull-request/{pull_request_id}', | |
917 | controller='pullrequests', |
|
917 | controller='pullrequests', | |
918 | action='update', conditions={'function': check_repo, |
|
918 | action='update', conditions={'function': check_repo, | |
919 | 'method': ['PUT']}, |
|
919 | 'method': ['PUT']}, | |
920 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
920 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) | |
921 |
|
921 | |||
922 | rmap.connect('pullrequest_merge', |
|
922 | rmap.connect('pullrequest_merge', | |
923 | '/{repo_name}/pull-request/{pull_request_id}', |
|
923 | '/{repo_name}/pull-request/{pull_request_id}', | |
924 | controller='pullrequests', |
|
924 | controller='pullrequests', | |
925 | action='merge', conditions={'function': check_repo, |
|
925 | action='merge', conditions={'function': check_repo, | |
926 | 'method': ['POST']}, |
|
926 | 'method': ['POST']}, | |
927 | requirements=URL_NAME_REQUIREMENTS) |
|
927 | requirements=URL_NAME_REQUIREMENTS) | |
928 |
|
928 | |||
929 | rmap.connect('pullrequest_delete', |
|
929 | rmap.connect('pullrequest_delete', | |
930 | '/{repo_name}/pull-request/{pull_request_id}', |
|
930 | '/{repo_name}/pull-request/{pull_request_id}', | |
931 | controller='pullrequests', |
|
931 | controller='pullrequests', | |
932 | action='delete', conditions={'function': check_repo, |
|
932 | action='delete', conditions={'function': check_repo, | |
933 | 'method': ['DELETE']}, |
|
933 | 'method': ['DELETE']}, | |
934 | requirements=URL_NAME_REQUIREMENTS) |
|
934 | requirements=URL_NAME_REQUIREMENTS) | |
935 |
|
935 | |||
936 | rmap.connect('pullrequest_show_all', |
|
936 | rmap.connect('pullrequest_show_all', | |
937 | '/{repo_name}/pull-request', |
|
937 | '/{repo_name}/pull-request', | |
938 | controller='pullrequests', |
|
938 | controller='pullrequests', | |
939 | action='show_all', conditions={'function': check_repo, |
|
939 | action='show_all', conditions={'function': check_repo, | |
940 | 'method': ['GET']}, |
|
940 | 'method': ['GET']}, | |
941 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
941 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) | |
942 |
|
942 | |||
943 | rmap.connect('pullrequest_comment', |
|
943 | rmap.connect('pullrequest_comment', | |
944 | '/{repo_name}/pull-request-comment/{pull_request_id}', |
|
944 | '/{repo_name}/pull-request-comment/{pull_request_id}', | |
945 | controller='pullrequests', |
|
945 | controller='pullrequests', | |
946 | action='comment', conditions={'function': check_repo, |
|
946 | action='comment', conditions={'function': check_repo, | |
947 | 'method': ['POST']}, |
|
947 | 'method': ['POST']}, | |
948 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
948 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) | |
949 |
|
949 | |||
950 | rmap.connect('pullrequest_comment_delete', |
|
950 | rmap.connect('pullrequest_comment_delete', | |
951 | '/{repo_name}/pull-request-comment/{comment_id}/delete', |
|
951 | '/{repo_name}/pull-request-comment/{comment_id}/delete', | |
952 | controller='pullrequests', action='delete_comment', |
|
952 | controller='pullrequests', action='delete_comment', | |
953 | conditions={'function': check_repo, 'method': ['DELETE']}, |
|
953 | conditions={'function': check_repo, 'method': ['DELETE']}, | |
954 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
954 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) | |
955 |
|
955 | |||
956 | rmap.connect('summary_home_explicit', '/{repo_name}/summary', |
|
956 | rmap.connect('summary_home_explicit', '/{repo_name}/summary', | |
957 | controller='summary', conditions={'function': check_repo}, |
|
957 | controller='summary', conditions={'function': check_repo}, | |
958 | requirements=URL_NAME_REQUIREMENTS) |
|
958 | requirements=URL_NAME_REQUIREMENTS) | |
959 |
|
959 | |||
960 | rmap.connect('branches_home', '/{repo_name}/branches', |
|
960 | rmap.connect('branches_home', '/{repo_name}/branches', | |
961 | controller='branches', conditions={'function': check_repo}, |
|
961 | controller='branches', conditions={'function': check_repo}, | |
962 | requirements=URL_NAME_REQUIREMENTS) |
|
962 | requirements=URL_NAME_REQUIREMENTS) | |
963 |
|
963 | |||
964 | rmap.connect('tags_home', '/{repo_name}/tags', |
|
964 | rmap.connect('tags_home', '/{repo_name}/tags', | |
965 | controller='tags', conditions={'function': check_repo}, |
|
965 | controller='tags', conditions={'function': check_repo}, | |
966 | requirements=URL_NAME_REQUIREMENTS) |
|
966 | requirements=URL_NAME_REQUIREMENTS) | |
967 |
|
967 | |||
968 | rmap.connect('bookmarks_home', '/{repo_name}/bookmarks', |
|
968 | rmap.connect('bookmarks_home', '/{repo_name}/bookmarks', | |
969 | controller='bookmarks', conditions={'function': check_repo}, |
|
969 | controller='bookmarks', conditions={'function': check_repo}, | |
970 | requirements=URL_NAME_REQUIREMENTS) |
|
970 | requirements=URL_NAME_REQUIREMENTS) | |
971 |
|
971 | |||
972 | rmap.connect('changelog_home', '/{repo_name}/changelog', jsroute=True, |
|
972 | rmap.connect('changelog_home', '/{repo_name}/changelog', jsroute=True, | |
973 | controller='changelog', conditions={'function': check_repo}, |
|
973 | controller='changelog', conditions={'function': check_repo}, | |
974 | requirements=URL_NAME_REQUIREMENTS) |
|
974 | requirements=URL_NAME_REQUIREMENTS) | |
975 |
|
975 | |||
976 | rmap.connect('changelog_summary_home', '/{repo_name}/changelog_summary', |
|
976 | rmap.connect('changelog_summary_home', '/{repo_name}/changelog_summary', | |
977 | controller='changelog', action='changelog_summary', |
|
977 | controller='changelog', action='changelog_summary', | |
978 | conditions={'function': check_repo}, |
|
978 | conditions={'function': check_repo}, | |
979 | requirements=URL_NAME_REQUIREMENTS) |
|
979 | requirements=URL_NAME_REQUIREMENTS) | |
980 |
|
980 | |||
981 | rmap.connect('changelog_file_home', |
|
981 | rmap.connect('changelog_file_home', | |
982 | '/{repo_name}/changelog/{revision}/{f_path}', |
|
982 | '/{repo_name}/changelog/{revision}/{f_path}', | |
983 | controller='changelog', f_path=None, |
|
983 | controller='changelog', f_path=None, | |
984 | conditions={'function': check_repo}, |
|
984 | conditions={'function': check_repo}, | |
985 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
985 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) | |
986 |
|
986 | |||
987 | rmap.connect('changelog_details', '/{repo_name}/changelog_details/{cs}', |
|
987 | rmap.connect('changelog_details', '/{repo_name}/changelog_details/{cs}', | |
988 | controller='changelog', action='changelog_details', |
|
988 | controller='changelog', action='changelog_details', | |
989 | conditions={'function': check_repo}, |
|
989 | conditions={'function': check_repo}, | |
990 | requirements=URL_NAME_REQUIREMENTS) |
|
990 | requirements=URL_NAME_REQUIREMENTS) | |
991 |
|
991 | |||
992 | rmap.connect('files_home', '/{repo_name}/files/{revision}/{f_path}', |
|
992 | rmap.connect('files_home', '/{repo_name}/files/{revision}/{f_path}', | |
993 | controller='files', revision='tip', f_path='', |
|
993 | controller='files', revision='tip', f_path='', | |
994 | conditions={'function': check_repo}, |
|
994 | conditions={'function': check_repo}, | |
995 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
995 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) | |
996 |
|
996 | |||
997 | rmap.connect('files_home_simple_catchrev', |
|
997 | rmap.connect('files_home_simple_catchrev', | |
998 | '/{repo_name}/files/{revision}', |
|
998 | '/{repo_name}/files/{revision}', | |
999 | controller='files', revision='tip', f_path='', |
|
999 | controller='files', revision='tip', f_path='', | |
1000 | conditions={'function': check_repo}, |
|
1000 | conditions={'function': check_repo}, | |
1001 | requirements=URL_NAME_REQUIREMENTS) |
|
1001 | requirements=URL_NAME_REQUIREMENTS) | |
1002 |
|
1002 | |||
1003 | rmap.connect('files_home_simple_catchall', |
|
1003 | rmap.connect('files_home_simple_catchall', | |
1004 | '/{repo_name}/files', |
|
1004 | '/{repo_name}/files', | |
1005 | controller='files', revision='tip', f_path='', |
|
1005 | controller='files', revision='tip', f_path='', | |
1006 | conditions={'function': check_repo}, |
|
1006 | conditions={'function': check_repo}, | |
1007 | requirements=URL_NAME_REQUIREMENTS) |
|
1007 | requirements=URL_NAME_REQUIREMENTS) | |
1008 |
|
1008 | |||
1009 | rmap.connect('files_history_home', |
|
1009 | rmap.connect('files_history_home', | |
1010 | '/{repo_name}/history/{revision}/{f_path}', |
|
1010 | '/{repo_name}/history/{revision}/{f_path}', | |
1011 | controller='files', action='history', revision='tip', f_path='', |
|
1011 | controller='files', action='history', revision='tip', f_path='', | |
1012 | conditions={'function': check_repo}, |
|
1012 | conditions={'function': check_repo}, | |
1013 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
1013 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) | |
1014 |
|
1014 | |||
1015 | rmap.connect('files_authors_home', |
|
1015 | rmap.connect('files_authors_home', | |
1016 | '/{repo_name}/authors/{revision}/{f_path}', |
|
1016 | '/{repo_name}/authors/{revision}/{f_path}', | |
1017 | controller='files', action='authors', revision='tip', f_path='', |
|
1017 | controller='files', action='authors', revision='tip', f_path='', | |
1018 | conditions={'function': check_repo}, |
|
1018 | conditions={'function': check_repo}, | |
1019 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
1019 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) | |
1020 |
|
1020 | |||
1021 | rmap.connect('files_diff_home', '/{repo_name}/diff/{f_path}', |
|
1021 | rmap.connect('files_diff_home', '/{repo_name}/diff/{f_path}', | |
1022 | controller='files', action='diff', f_path='', |
|
1022 | controller='files', action='diff', f_path='', | |
1023 | conditions={'function': check_repo}, |
|
1023 | conditions={'function': check_repo}, | |
1024 | requirements=URL_NAME_REQUIREMENTS) |
|
1024 | requirements=URL_NAME_REQUIREMENTS) | |
1025 |
|
1025 | |||
1026 | rmap.connect('files_diff_2way_home', |
|
1026 | rmap.connect('files_diff_2way_home', | |
1027 | '/{repo_name}/diff-2way/{f_path}', |
|
1027 | '/{repo_name}/diff-2way/{f_path}', | |
1028 | controller='files', action='diff_2way', f_path='', |
|
1028 | controller='files', action='diff_2way', f_path='', | |
1029 | conditions={'function': check_repo}, |
|
1029 | conditions={'function': check_repo}, | |
1030 | requirements=URL_NAME_REQUIREMENTS) |
|
1030 | requirements=URL_NAME_REQUIREMENTS) | |
1031 |
|
1031 | |||
1032 | rmap.connect('files_rawfile_home', |
|
1032 | rmap.connect('files_rawfile_home', | |
1033 | '/{repo_name}/rawfile/{revision}/{f_path}', |
|
1033 | '/{repo_name}/rawfile/{revision}/{f_path}', | |
1034 | controller='files', action='rawfile', revision='tip', |
|
1034 | controller='files', action='rawfile', revision='tip', | |
1035 | f_path='', conditions={'function': check_repo}, |
|
1035 | f_path='', conditions={'function': check_repo}, | |
1036 | requirements=URL_NAME_REQUIREMENTS) |
|
1036 | requirements=URL_NAME_REQUIREMENTS) | |
1037 |
|
1037 | |||
1038 | rmap.connect('files_raw_home', |
|
1038 | rmap.connect('files_raw_home', | |
1039 | '/{repo_name}/raw/{revision}/{f_path}', |
|
1039 | '/{repo_name}/raw/{revision}/{f_path}', | |
1040 | controller='files', action='raw', revision='tip', f_path='', |
|
1040 | controller='files', action='raw', revision='tip', f_path='', | |
1041 | conditions={'function': check_repo}, |
|
1041 | conditions={'function': check_repo}, | |
1042 | requirements=URL_NAME_REQUIREMENTS) |
|
1042 | requirements=URL_NAME_REQUIREMENTS) | |
1043 |
|
1043 | |||
1044 | rmap.connect('files_render_home', |
|
1044 | rmap.connect('files_render_home', | |
1045 | '/{repo_name}/render/{revision}/{f_path}', |
|
1045 | '/{repo_name}/render/{revision}/{f_path}', | |
1046 | controller='files', action='index', revision='tip', f_path='', |
|
1046 | controller='files', action='index', revision='tip', f_path='', | |
1047 | rendered=True, conditions={'function': check_repo}, |
|
1047 | rendered=True, conditions={'function': check_repo}, | |
1048 | requirements=URL_NAME_REQUIREMENTS) |
|
1048 | requirements=URL_NAME_REQUIREMENTS) | |
1049 |
|
1049 | |||
1050 | rmap.connect('files_annotate_home', |
|
1050 | rmap.connect('files_annotate_home', | |
1051 | '/{repo_name}/annotate/{revision}/{f_path}', |
|
1051 | '/{repo_name}/annotate/{revision}/{f_path}', | |
1052 | controller='files', action='index', revision='tip', |
|
1052 | controller='files', action='index', revision='tip', | |
1053 | f_path='', annotate=True, conditions={'function': check_repo}, |
|
1053 | f_path='', annotate=True, conditions={'function': check_repo}, | |
1054 | requirements=URL_NAME_REQUIREMENTS) |
|
1054 | requirements=URL_NAME_REQUIREMENTS) | |
1055 |
|
1055 | |||
1056 | rmap.connect('files_edit', |
|
1056 | rmap.connect('files_edit', | |
1057 | '/{repo_name}/edit/{revision}/{f_path}', |
|
1057 | '/{repo_name}/edit/{revision}/{f_path}', | |
1058 | controller='files', action='edit', revision='tip', |
|
1058 | controller='files', action='edit', revision='tip', | |
1059 | f_path='', |
|
1059 | f_path='', | |
1060 | conditions={'function': check_repo, 'method': ['POST']}, |
|
1060 | conditions={'function': check_repo, 'method': ['POST']}, | |
1061 | requirements=URL_NAME_REQUIREMENTS) |
|
1061 | requirements=URL_NAME_REQUIREMENTS) | |
1062 |
|
1062 | |||
1063 | rmap.connect('files_edit_home', |
|
1063 | rmap.connect('files_edit_home', | |
1064 | '/{repo_name}/edit/{revision}/{f_path}', |
|
1064 | '/{repo_name}/edit/{revision}/{f_path}', | |
1065 | controller='files', action='edit_home', revision='tip', |
|
1065 | controller='files', action='edit_home', revision='tip', | |
1066 | f_path='', conditions={'function': check_repo}, |
|
1066 | f_path='', conditions={'function': check_repo}, | |
1067 | requirements=URL_NAME_REQUIREMENTS) |
|
1067 | requirements=URL_NAME_REQUIREMENTS) | |
1068 |
|
1068 | |||
1069 | rmap.connect('files_add', |
|
1069 | rmap.connect('files_add', | |
1070 | '/{repo_name}/add/{revision}/{f_path}', |
|
1070 | '/{repo_name}/add/{revision}/{f_path}', | |
1071 | controller='files', action='add', revision='tip', |
|
1071 | controller='files', action='add', revision='tip', | |
1072 | f_path='', |
|
1072 | f_path='', | |
1073 | conditions={'function': check_repo, 'method': ['POST']}, |
|
1073 | conditions={'function': check_repo, 'method': ['POST']}, | |
1074 | requirements=URL_NAME_REQUIREMENTS) |
|
1074 | requirements=URL_NAME_REQUIREMENTS) | |
1075 |
|
1075 | |||
1076 | rmap.connect('files_add_home', |
|
1076 | rmap.connect('files_add_home', | |
1077 | '/{repo_name}/add/{revision}/{f_path}', |
|
1077 | '/{repo_name}/add/{revision}/{f_path}', | |
1078 | controller='files', action='add_home', revision='tip', |
|
1078 | controller='files', action='add_home', revision='tip', | |
1079 | f_path='', conditions={'function': check_repo}, |
|
1079 | f_path='', conditions={'function': check_repo}, | |
1080 | requirements=URL_NAME_REQUIREMENTS) |
|
1080 | requirements=URL_NAME_REQUIREMENTS) | |
1081 |
|
1081 | |||
1082 | rmap.connect('files_delete', |
|
1082 | rmap.connect('files_delete', | |
1083 | '/{repo_name}/delete/{revision}/{f_path}', |
|
1083 | '/{repo_name}/delete/{revision}/{f_path}', | |
1084 | controller='files', action='delete', revision='tip', |
|
1084 | controller='files', action='delete', revision='tip', | |
1085 | f_path='', |
|
1085 | f_path='', | |
1086 | conditions={'function': check_repo, 'method': ['POST']}, |
|
1086 | conditions={'function': check_repo, 'method': ['POST']}, | |
1087 | requirements=URL_NAME_REQUIREMENTS) |
|
1087 | requirements=URL_NAME_REQUIREMENTS) | |
1088 |
|
1088 | |||
1089 | rmap.connect('files_delete_home', |
|
1089 | rmap.connect('files_delete_home', | |
1090 | '/{repo_name}/delete/{revision}/{f_path}', |
|
1090 | '/{repo_name}/delete/{revision}/{f_path}', | |
1091 | controller='files', action='delete_home', revision='tip', |
|
1091 | controller='files', action='delete_home', revision='tip', | |
1092 | f_path='', conditions={'function': check_repo}, |
|
1092 | f_path='', conditions={'function': check_repo}, | |
1093 | requirements=URL_NAME_REQUIREMENTS) |
|
1093 | requirements=URL_NAME_REQUIREMENTS) | |
1094 |
|
1094 | |||
1095 | rmap.connect('files_archive_home', '/{repo_name}/archive/{fname}', |
|
1095 | rmap.connect('files_archive_home', '/{repo_name}/archive/{fname}', | |
1096 | controller='files', action='archivefile', |
|
1096 | controller='files', action='archivefile', | |
1097 | conditions={'function': check_repo}, |
|
1097 | conditions={'function': check_repo}, | |
1098 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
1098 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) | |
1099 |
|
1099 | |||
1100 | rmap.connect('files_nodelist_home', |
|
1100 | rmap.connect('files_nodelist_home', | |
1101 | '/{repo_name}/nodelist/{revision}/{f_path}', |
|
1101 | '/{repo_name}/nodelist/{revision}/{f_path}', | |
1102 | controller='files', action='nodelist', |
|
1102 | controller='files', action='nodelist', | |
1103 | conditions={'function': check_repo}, |
|
1103 | conditions={'function': check_repo}, | |
1104 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
1104 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) | |
1105 |
|
1105 | |||
1106 |
rmap.connect('files_ |
|
1106 | rmap.connect('files_nodetree_full', | |
1107 |
'/{repo_name}/ |
|
1107 | '/{repo_name}/nodetree_full/{commit_id}/{f_path}', | |
1108 |
controller='files', action=' |
|
1108 | controller='files', action='nodetree_full', | |
1109 | conditions={'function': check_repo}, |
|
1109 | conditions={'function': check_repo}, | |
1110 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
1110 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) | |
1111 |
|
1111 | |||
1112 | rmap.connect('repo_fork_create_home', '/{repo_name}/fork', |
|
1112 | rmap.connect('repo_fork_create_home', '/{repo_name}/fork', | |
1113 | controller='forks', action='fork_create', |
|
1113 | controller='forks', action='fork_create', | |
1114 | conditions={'function': check_repo, 'method': ['POST']}, |
|
1114 | conditions={'function': check_repo, 'method': ['POST']}, | |
1115 | requirements=URL_NAME_REQUIREMENTS) |
|
1115 | requirements=URL_NAME_REQUIREMENTS) | |
1116 |
|
1116 | |||
1117 | rmap.connect('repo_fork_home', '/{repo_name}/fork', |
|
1117 | rmap.connect('repo_fork_home', '/{repo_name}/fork', | |
1118 | controller='forks', action='fork', |
|
1118 | controller='forks', action='fork', | |
1119 | conditions={'function': check_repo}, |
|
1119 | conditions={'function': check_repo}, | |
1120 | requirements=URL_NAME_REQUIREMENTS) |
|
1120 | requirements=URL_NAME_REQUIREMENTS) | |
1121 |
|
1121 | |||
1122 | rmap.connect('repo_forks_home', '/{repo_name}/forks', |
|
1122 | rmap.connect('repo_forks_home', '/{repo_name}/forks', | |
1123 | controller='forks', action='forks', |
|
1123 | controller='forks', action='forks', | |
1124 | conditions={'function': check_repo}, |
|
1124 | conditions={'function': check_repo}, | |
1125 | requirements=URL_NAME_REQUIREMENTS) |
|
1125 | requirements=URL_NAME_REQUIREMENTS) | |
1126 |
|
1126 | |||
1127 | rmap.connect('repo_followers_home', '/{repo_name}/followers', |
|
1127 | rmap.connect('repo_followers_home', '/{repo_name}/followers', | |
1128 | controller='followers', action='followers', |
|
1128 | controller='followers', action='followers', | |
1129 | conditions={'function': check_repo}, |
|
1129 | conditions={'function': check_repo}, | |
1130 | requirements=URL_NAME_REQUIREMENTS) |
|
1130 | requirements=URL_NAME_REQUIREMENTS) | |
1131 |
|
1131 | |||
1132 | # must be here for proper group/repo catching pattern |
|
1132 | # must be here for proper group/repo catching pattern | |
1133 | _connect_with_slash( |
|
1133 | _connect_with_slash( | |
1134 | rmap, 'repo_group_home', '/{group_name}', |
|
1134 | rmap, 'repo_group_home', '/{group_name}', | |
1135 | controller='home', action='index_repo_group', |
|
1135 | controller='home', action='index_repo_group', | |
1136 | conditions={'function': check_group}, |
|
1136 | conditions={'function': check_group}, | |
1137 | requirements=URL_NAME_REQUIREMENTS) |
|
1137 | requirements=URL_NAME_REQUIREMENTS) | |
1138 |
|
1138 | |||
1139 | # catch all, at the end |
|
1139 | # catch all, at the end | |
1140 | _connect_with_slash( |
|
1140 | _connect_with_slash( | |
1141 | rmap, 'summary_home', '/{repo_name}', jsroute=True, |
|
1141 | rmap, 'summary_home', '/{repo_name}', jsroute=True, | |
1142 | controller='summary', action='index', |
|
1142 | controller='summary', action='index', | |
1143 | conditions={'function': check_repo}, |
|
1143 | conditions={'function': check_repo}, | |
1144 | requirements=URL_NAME_REQUIREMENTS) |
|
1144 | requirements=URL_NAME_REQUIREMENTS) | |
1145 |
|
1145 | |||
1146 | return rmap |
|
1146 | return rmap | |
1147 |
|
1147 | |||
1148 |
|
1148 | |||
1149 | def _connect_with_slash(mapper, name, path, *args, **kwargs): |
|
1149 | def _connect_with_slash(mapper, name, path, *args, **kwargs): | |
1150 | """ |
|
1150 | """ | |
1151 | Connect a route with an optional trailing slash in `path`. |
|
1151 | Connect a route with an optional trailing slash in `path`. | |
1152 | """ |
|
1152 | """ | |
1153 | mapper.connect(name + '_slash', path + '/', *args, **kwargs) |
|
1153 | mapper.connect(name + '_slash', path + '/', *args, **kwargs) | |
1154 | mapper.connect(name, path, *args, **kwargs) |
|
1154 | mapper.connect(name, path, *args, **kwargs) |
@@ -1,1114 +1,1114 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2016 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2016 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | """ |
|
21 | """ | |
22 | Files controller for RhodeCode Enterprise |
|
22 | Files controller for RhodeCode Enterprise | |
23 | """ |
|
23 | """ | |
24 |
|
24 | |||
25 | import itertools |
|
25 | import itertools | |
26 | import logging |
|
26 | import logging | |
27 | import os |
|
27 | import os | |
28 | import shutil |
|
28 | import shutil | |
29 | import tempfile |
|
29 | import tempfile | |
30 |
|
30 | |||
31 | from pylons import request, response, tmpl_context as c, url |
|
31 | from pylons import request, response, tmpl_context as c, url | |
32 | from pylons.i18n.translation import _ |
|
32 | from pylons.i18n.translation import _ | |
33 | from pylons.controllers.util import redirect |
|
33 | from pylons.controllers.util import redirect | |
34 | from webob.exc import HTTPNotFound, HTTPBadRequest |
|
34 | from webob.exc import HTTPNotFound, HTTPBadRequest | |
35 |
|
35 | |||
36 | from rhodecode.controllers.utils import parse_path_ref |
|
36 | from rhodecode.controllers.utils import parse_path_ref | |
37 | from rhodecode.lib import diffs, helpers as h, caches |
|
37 | from rhodecode.lib import diffs, helpers as h, caches | |
38 | from rhodecode.lib.compat import OrderedDict |
|
38 | from rhodecode.lib.compat import OrderedDict | |
39 | from rhodecode.lib.utils import jsonify, action_logger |
|
39 | from rhodecode.lib.utils import jsonify, action_logger | |
40 | from rhodecode.lib.utils2 import ( |
|
40 | from rhodecode.lib.utils2 import ( | |
41 | convert_line_endings, detect_mode, safe_str, str2bool) |
|
41 | convert_line_endings, detect_mode, safe_str, str2bool) | |
42 | from rhodecode.lib.auth import ( |
|
42 | from rhodecode.lib.auth import ( | |
43 | LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired, XHRRequired) |
|
43 | LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired, XHRRequired) | |
44 | from rhodecode.lib.base import BaseRepoController, render |
|
44 | from rhodecode.lib.base import BaseRepoController, render | |
45 | from rhodecode.lib.vcs import path as vcspath |
|
45 | from rhodecode.lib.vcs import path as vcspath | |
46 | from rhodecode.lib.vcs.backends.base import EmptyCommit |
|
46 | from rhodecode.lib.vcs.backends.base import EmptyCommit | |
47 | from rhodecode.lib.vcs.conf import settings |
|
47 | from rhodecode.lib.vcs.conf import settings | |
48 | from rhodecode.lib.vcs.exceptions import ( |
|
48 | from rhodecode.lib.vcs.exceptions import ( | |
49 | RepositoryError, CommitDoesNotExistError, EmptyRepositoryError, |
|
49 | RepositoryError, CommitDoesNotExistError, EmptyRepositoryError, | |
50 | ImproperArchiveTypeError, VCSError, NodeAlreadyExistsError, |
|
50 | ImproperArchiveTypeError, VCSError, NodeAlreadyExistsError, | |
51 | NodeDoesNotExistError, CommitError, NodeError) |
|
51 | NodeDoesNotExistError, CommitError, NodeError) | |
52 | from rhodecode.lib.vcs.nodes import FileNode |
|
52 | from rhodecode.lib.vcs.nodes import FileNode | |
53 |
|
53 | |||
54 | from rhodecode.model.repo import RepoModel |
|
54 | from rhodecode.model.repo import RepoModel | |
55 | from rhodecode.model.scm import ScmModel |
|
55 | from rhodecode.model.scm import ScmModel | |
56 | from rhodecode.model.db import Repository |
|
56 | from rhodecode.model.db import Repository | |
57 |
|
57 | |||
58 | from rhodecode.controllers.changeset import ( |
|
58 | from rhodecode.controllers.changeset import ( | |
59 | _ignorews_url, _context_url, get_line_ctx, get_ignore_ws) |
|
59 | _ignorews_url, _context_url, get_line_ctx, get_ignore_ws) | |
60 | from rhodecode.lib.exceptions import NonRelativePathError |
|
60 | from rhodecode.lib.exceptions import NonRelativePathError | |
61 |
|
61 | |||
62 | log = logging.getLogger(__name__) |
|
62 | log = logging.getLogger(__name__) | |
63 |
|
63 | |||
64 |
|
64 | |||
65 | class FilesController(BaseRepoController): |
|
65 | class FilesController(BaseRepoController): | |
66 |
|
66 | |||
67 | def __before__(self): |
|
67 | def __before__(self): | |
68 | super(FilesController, self).__before__() |
|
68 | super(FilesController, self).__before__() | |
69 | c.cut_off_limit = self.cut_off_limit_file |
|
69 | c.cut_off_limit = self.cut_off_limit_file | |
70 |
|
70 | |||
71 | def _get_default_encoding(self): |
|
71 | def _get_default_encoding(self): | |
72 | enc_list = getattr(c, 'default_encodings', []) |
|
72 | enc_list = getattr(c, 'default_encodings', []) | |
73 | return enc_list[0] if enc_list else 'UTF-8' |
|
73 | return enc_list[0] if enc_list else 'UTF-8' | |
74 |
|
74 | |||
75 | def __get_commit_or_redirect(self, commit_id, repo_name, |
|
75 | def __get_commit_or_redirect(self, commit_id, repo_name, | |
76 | redirect_after=True): |
|
76 | redirect_after=True): | |
77 | """ |
|
77 | """ | |
78 | This is a safe way to get commit. If an error occurs it redirects to |
|
78 | This is a safe way to get commit. If an error occurs it redirects to | |
79 | tip with proper message |
|
79 | tip with proper message | |
80 |
|
80 | |||
81 | :param commit_id: id of commit to fetch |
|
81 | :param commit_id: id of commit to fetch | |
82 | :param repo_name: repo name to redirect after |
|
82 | :param repo_name: repo name to redirect after | |
83 | :param redirect_after: toggle redirection |
|
83 | :param redirect_after: toggle redirection | |
84 | """ |
|
84 | """ | |
85 | try: |
|
85 | try: | |
86 | return c.rhodecode_repo.get_commit(commit_id) |
|
86 | return c.rhodecode_repo.get_commit(commit_id) | |
87 | except EmptyRepositoryError: |
|
87 | except EmptyRepositoryError: | |
88 | if not redirect_after: |
|
88 | if not redirect_after: | |
89 | return None |
|
89 | return None | |
90 | url_ = url('files_add_home', |
|
90 | url_ = url('files_add_home', | |
91 | repo_name=c.repo_name, |
|
91 | repo_name=c.repo_name, | |
92 | revision=0, f_path='', anchor='edit') |
|
92 | revision=0, f_path='', anchor='edit') | |
93 | if h.HasRepoPermissionAny( |
|
93 | if h.HasRepoPermissionAny( | |
94 | 'repository.write', 'repository.admin')(c.repo_name): |
|
94 | 'repository.write', 'repository.admin')(c.repo_name): | |
95 | add_new = h.link_to( |
|
95 | add_new = h.link_to( | |
96 | _('Click here to add a new file.'), |
|
96 | _('Click here to add a new file.'), | |
97 | url_, class_="alert-link") |
|
97 | url_, class_="alert-link") | |
98 | else: |
|
98 | else: | |
99 | add_new = "" |
|
99 | add_new = "" | |
100 | h.flash(h.literal( |
|
100 | h.flash(h.literal( | |
101 | _('There are no files yet. %s') % add_new), category='warning') |
|
101 | _('There are no files yet. %s') % add_new), category='warning') | |
102 | redirect(h.url('summary_home', repo_name=repo_name)) |
|
102 | redirect(h.url('summary_home', repo_name=repo_name)) | |
103 | except (CommitDoesNotExistError, LookupError): |
|
103 | except (CommitDoesNotExistError, LookupError): | |
104 | msg = _('No such commit exists for this repository') |
|
104 | msg = _('No such commit exists for this repository') | |
105 | h.flash(msg, category='error') |
|
105 | h.flash(msg, category='error') | |
106 | raise HTTPNotFound() |
|
106 | raise HTTPNotFound() | |
107 | except RepositoryError as e: |
|
107 | except RepositoryError as e: | |
108 | h.flash(safe_str(e), category='error') |
|
108 | h.flash(safe_str(e), category='error') | |
109 | raise HTTPNotFound() |
|
109 | raise HTTPNotFound() | |
110 |
|
110 | |||
111 | def __get_filenode_or_redirect(self, repo_name, commit, path): |
|
111 | def __get_filenode_or_redirect(self, repo_name, commit, path): | |
112 | """ |
|
112 | """ | |
113 | Returns file_node, if error occurs or given path is directory, |
|
113 | Returns file_node, if error occurs or given path is directory, | |
114 | it'll redirect to top level path |
|
114 | it'll redirect to top level path | |
115 |
|
115 | |||
116 | :param repo_name: repo_name |
|
116 | :param repo_name: repo_name | |
117 | :param commit: given commit |
|
117 | :param commit: given commit | |
118 | :param path: path to lookup |
|
118 | :param path: path to lookup | |
119 | """ |
|
119 | """ | |
120 | try: |
|
120 | try: | |
121 | file_node = commit.get_node(path) |
|
121 | file_node = commit.get_node(path) | |
122 | if file_node.is_dir(): |
|
122 | if file_node.is_dir(): | |
123 | raise RepositoryError('The given path is a directory') |
|
123 | raise RepositoryError('The given path is a directory') | |
124 | except CommitDoesNotExistError: |
|
124 | except CommitDoesNotExistError: | |
125 | msg = _('No such commit exists for this repository') |
|
125 | msg = _('No such commit exists for this repository') | |
126 | log.exception(msg) |
|
126 | log.exception(msg) | |
127 | h.flash(msg, category='error') |
|
127 | h.flash(msg, category='error') | |
128 | raise HTTPNotFound() |
|
128 | raise HTTPNotFound() | |
129 | except RepositoryError as e: |
|
129 | except RepositoryError as e: | |
130 | h.flash(safe_str(e), category='error') |
|
130 | h.flash(safe_str(e), category='error') | |
131 | raise HTTPNotFound() |
|
131 | raise HTTPNotFound() | |
132 |
|
132 | |||
133 | return file_node |
|
133 | return file_node | |
134 |
|
134 | |||
135 | def __get_tree_cache_manager(self, repo_name, namespace_type): |
|
135 | def __get_tree_cache_manager(self, repo_name, namespace_type): | |
136 | _namespace = caches.get_repo_namespace_key(namespace_type, repo_name) |
|
136 | _namespace = caches.get_repo_namespace_key(namespace_type, repo_name) | |
137 | return caches.get_cache_manager('repo_cache_long', _namespace) |
|
137 | return caches.get_cache_manager('repo_cache_long', _namespace) | |
138 |
|
138 | |||
139 |
def _get_tree_at_commit(self, repo_name, commit_id, f_path |
|
139 | def _get_tree_at_commit(self, repo_name, commit_id, f_path, | |
|
140 | full_load=False, force=False): | |||
140 | def _cached_tree(): |
|
141 | def _cached_tree(): | |
141 | log.debug('Generating cached file tree for %s, %s, %s', |
|
142 | log.debug('Generating cached file tree for %s, %s, %s', | |
142 | repo_name, commit_id, f_path) |
|
143 | repo_name, commit_id, f_path) | |
|
144 | c.full_load = full_load | |||
143 | return render('files/files_browser_tree.html') |
|
145 | return render('files/files_browser_tree.html') | |
144 |
|
146 | |||
145 | cache_manager = self.__get_tree_cache_manager( |
|
147 | cache_manager = self.__get_tree_cache_manager( | |
146 | repo_name, caches.FILE_TREE) |
|
148 | repo_name, caches.FILE_TREE) | |
147 |
|
149 | |||
148 | cache_key = caches.compute_key_from_params( |
|
150 | cache_key = caches.compute_key_from_params( | |
149 | repo_name, commit_id, f_path) |
|
151 | repo_name, commit_id, f_path) | |
150 |
|
152 | |||
|
153 | if force: | |||
|
154 | # we want to force recompute of caches | |||
|
155 | cache_manager.remove_value(cache_key) | |||
|
156 | ||||
151 | return cache_manager.get(cache_key, createfunc=_cached_tree) |
|
157 | return cache_manager.get(cache_key, createfunc=_cached_tree) | |
152 |
|
158 | |||
153 | def _get_nodelist_at_commit(self, repo_name, commit_id, f_path): |
|
159 | def _get_nodelist_at_commit(self, repo_name, commit_id, f_path): | |
154 | def _cached_nodes(): |
|
160 | def _cached_nodes(): | |
155 | log.debug('Generating cached nodelist for %s, %s, %s', |
|
161 | log.debug('Generating cached nodelist for %s, %s, %s', | |
156 | repo_name, commit_id, f_path) |
|
162 | repo_name, commit_id, f_path) | |
157 | _d, _f = ScmModel().get_nodes( |
|
163 | _d, _f = ScmModel().get_nodes( | |
158 | repo_name, commit_id, f_path, flat=False) |
|
164 | repo_name, commit_id, f_path, flat=False) | |
159 | return _d + _f |
|
165 | return _d + _f | |
160 |
|
166 | |||
161 | cache_manager = self.__get_tree_cache_manager( |
|
167 | cache_manager = self.__get_tree_cache_manager( | |
162 | repo_name, caches.FILE_SEARCH_TREE_META) |
|
168 | repo_name, caches.FILE_SEARCH_TREE_META) | |
163 |
|
169 | |||
164 | cache_key = caches.compute_key_from_params( |
|
170 | cache_key = caches.compute_key_from_params( | |
165 | repo_name, commit_id, f_path) |
|
171 | repo_name, commit_id, f_path) | |
166 | return cache_manager.get(cache_key, createfunc=_cached_nodes) |
|
172 | return cache_manager.get(cache_key, createfunc=_cached_nodes) | |
167 |
|
173 | |||
168 | def _get_metadata_at_commit(self, repo_name, commit, dir_node): |
|
|||
169 | def _cached_metadata(): |
|
|||
170 | log.debug('Generating cached metadata for %s, %s, %s', |
|
|||
171 | repo_name, commit.raw_id, safe_str(dir_node.path)) |
|
|||
172 |
|
||||
173 | data = ScmModel().get_dirnode_metadata(commit, dir_node) |
|
|||
174 | return data |
|
|||
175 |
|
||||
176 | cache_manager = self.__get_tree_cache_manager( |
|
|||
177 | repo_name, caches.FILE_TREE_META) |
|
|||
178 |
|
||||
179 | cache_key = caches.compute_key_from_params( |
|
|||
180 | repo_name, commit.raw_id, safe_str(dir_node.path)) |
|
|||
181 |
|
||||
182 | return cache_manager.get(cache_key, createfunc=_cached_metadata) |
|
|||
183 |
|
||||
    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    def index(
            self, repo_name, revision, f_path, annotate=False, rendered=False):
        """
        Main file-browsing view: render either a single file or a
        directory tree at the given revision.

        Populates template state on ``c`` (commit, branch, prev/next
        navigation URLs, file node or cached tree) and renders the full
        page, or only the PJAX fragment for ``X-PJAX`` requests.

        :param repo_name: repository name
        :param revision: commit id / ref to display
        :param f_path: path inside the repository
        :param annotate: render the file in annotation mode
        :param rendered: force markup rendering (e.g. .rst/.md preview)
        """
        commit_id = revision

        # redirect to given commit_id from form if given
        get_commit_id = request.GET.get('at_rev', None)
        if get_commit_id:
            self.__get_commit_or_redirect(get_commit_id, repo_name)

        c.commit = self.__get_commit_or_redirect(commit_id, repo_name)
        c.branch = request.GET.get('branch', None)
        c.f_path = f_path
        c.annotate = annotate
        # default is false, but .rst/.md files later are autorendered, we can
        # overwrite autorendering by setting this GET flag
        c.renderer = rendered or not request.GET.get('no-render', False)

        # prev link
        try:
            prev_commit = c.commit.prev(c.branch)
            c.prev_commit = prev_commit
            c.url_prev = url('files_home', repo_name=c.repo_name,
                             revision=prev_commit.raw_id, f_path=f_path)
            if c.branch:
                c.url_prev += '?branch=%s' % c.branch
        except (CommitDoesNotExistError, VCSError):
            # no previous commit on this branch — disable the link
            c.url_prev = '#'
            c.prev_commit = EmptyCommit()

        # next link
        try:
            next_commit = c.commit.next(c.branch)
            c.next_commit = next_commit
            c.url_next = url('files_home', repo_name=c.repo_name,
                             revision=next_commit.raw_id, f_path=f_path)
            if c.branch:
                c.url_next += '?branch=%s' % c.branch
        except (CommitDoesNotExistError, VCSError):
            # no next commit on this branch — disable the link
            c.url_next = '#'
            c.next_commit = EmptyCommit()

        # files or dirs
        try:
            c.file = c.commit.get_node(f_path)
            c.file_author = True
            c.file_tree = ''
            if c.file.is_file():
                # only auto-render when the filename maps to a renderer
                c.renderer = (
                    c.renderer and h.renderer_from_filename(c.file.path))
                c.file_last_commit = c.file.last_commit

                c.on_branch_head = self._is_valid_head(
                    commit_id, c.rhodecode_repo)
                c.branch_or_raw_id = c.commit.branch or c.commit.raw_id

                author = c.file_last_commit.author
                c.authors = [(h.email(author),
                              h.person(author, 'username_or_name_or_email'))]
            else:
                # directory: render the (cached) browser tree instead
                c.authors = []
                c.file_tree = self._get_tree_at_commit(
                    repo_name, c.commit.raw_id, f_path)

        except RepositoryError as e:
            h.flash(safe_str(e), category='error')
            raise HTTPNotFound()

        if request.environ.get('HTTP_X_PJAX'):
            # partial page update requested by the frontend
            return render('files/files_pjax.html')

        return render('files/files.html')
257 |
|
248 | |||
258 | @LoginRequired() |
|
249 | @LoginRequired() | |
259 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
250 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', | |
260 | 'repository.admin') |
|
251 | 'repository.admin') | |
261 | @jsonify |
|
252 | @jsonify | |
262 | def history(self, repo_name, revision, f_path): |
|
253 | def history(self, repo_name, revision, f_path): | |
263 | commit = self.__get_commit_or_redirect(revision, repo_name) |
|
254 | commit = self.__get_commit_or_redirect(revision, repo_name) | |
264 | f_path = f_path |
|
255 | f_path = f_path | |
265 | _file = commit.get_node(f_path) |
|
256 | _file = commit.get_node(f_path) | |
266 | if _file.is_file(): |
|
257 | if _file.is_file(): | |
267 | file_history, _hist = self._get_node_history(commit, f_path) |
|
258 | file_history, _hist = self._get_node_history(commit, f_path) | |
268 |
|
259 | |||
269 | res = [] |
|
260 | res = [] | |
270 | for obj in file_history: |
|
261 | for obj in file_history: | |
271 | res.append({ |
|
262 | res.append({ | |
272 | 'text': obj[1], |
|
263 | 'text': obj[1], | |
273 | 'children': [{'id': o[0], 'text': o[1]} for o in obj[0]] |
|
264 | 'children': [{'id': o[0], 'text': o[1]} for o in obj[0]] | |
274 | }) |
|
265 | }) | |
275 |
|
266 | |||
276 | data = { |
|
267 | data = { | |
277 | 'more': False, |
|
268 | 'more': False, | |
278 | 'results': res |
|
269 | 'results': res | |
279 | } |
|
270 | } | |
280 | return data |
|
271 | return data | |
281 |
|
272 | |||
    @LoginRequired()
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
                                   'repository.admin')
    def authors(self, repo_name, revision, f_path):
        """
        Render the "file authors" box for a file node.

        Collects the distinct authors either from annotation data (when
        ``?annotate=1``) or from the node's commit history, and exposes
        them as ``c.authors``. NOTE(review): when *f_path* is a
        directory the ``is_file()`` branch is skipped and the action
        implicitly returns None — presumably the caller never requests
        directories here; verify against the routing/templates.
        """
        commit = self.__get_commit_or_redirect(revision, repo_name)
        file_node = commit.get_node(f_path)
        if file_node.is_file():
            c.file_last_commit = file_node.last_commit
            if request.GET.get('annotate') == '1':
                # use _hist from annotation if annotation mode is on
                commit_ids = set(x[1] for x in file_node.annotate)
                # lazy generator: commits are fetched while iterating below
                _hist = (
                    c.rhodecode_repo.get_commit(commit_id)
                    for commit_id in commit_ids)
            else:
                _f_history, _hist = self._get_node_history(commit, f_path)
            c.file_author = False
            c.authors = []
            # de-duplicate authors across the collected commits
            for author in set(commit.author for commit in _hist):
                c.authors.append((
                    h.email(author),
                    h.person(author, 'username_or_name_or_email')))
            return render('files/file_authors_box.html')
305 |
|
296 | |||
306 | @LoginRequired() |
|
297 | @LoginRequired() | |
307 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
298 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', | |
308 | 'repository.admin') |
|
299 | 'repository.admin') | |
309 | def rawfile(self, repo_name, revision, f_path): |
|
300 | def rawfile(self, repo_name, revision, f_path): | |
310 | """ |
|
301 | """ | |
311 | Action for download as raw |
|
302 | Action for download as raw | |
312 | """ |
|
303 | """ | |
313 | commit = self.__get_commit_or_redirect(revision, repo_name) |
|
304 | commit = self.__get_commit_or_redirect(revision, repo_name) | |
314 | file_node = self.__get_filenode_or_redirect(repo_name, commit, f_path) |
|
305 | file_node = self.__get_filenode_or_redirect(repo_name, commit, f_path) | |
315 |
|
306 | |||
316 | response.content_disposition = 'attachment; filename=%s' % \ |
|
307 | response.content_disposition = 'attachment; filename=%s' % \ | |
317 | safe_str(f_path.split(Repository.NAME_SEP)[-1]) |
|
308 | safe_str(f_path.split(Repository.NAME_SEP)[-1]) | |
318 |
|
309 | |||
319 | response.content_type = file_node.mimetype |
|
310 | response.content_type = file_node.mimetype | |
320 | charset = self._get_default_encoding() |
|
311 | charset = self._get_default_encoding() | |
321 | if charset: |
|
312 | if charset: | |
322 | response.charset = charset |
|
313 | response.charset = charset | |
323 |
|
314 | |||
324 | return file_node.content |
|
315 | return file_node.content | |
325 |
|
316 | |||
    @LoginRequired()
    @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
                                   'repository.admin')
    def raw(self, repo_name, revision, f_path):
        """
        Action for show as raw, some mimetypes are "rendered",
        those include images, icons.

        Mimetypes not in the whitelist below are served either as
        ``application/octet-stream`` attachments (binary) or forced to
        ``text/plain`` inline (text) to avoid serving unsafe content
        such as HTML or SVG directly.
        """
        commit = self.__get_commit_or_redirect(revision, repo_name)
        file_node = self.__get_filenode_or_redirect(repo_name, commit, f_path)

        raw_mimetype_mapping = {
            # map original mimetype to a mimetype used for "show as raw"
            # you can also provide a content-disposition to override the
            # default "attachment" disposition.
            # orig_type: (new_type, new_dispo)

            # show images inline:
            # Do not re-add SVG: it is unsafe and permits XSS attacks. One can
            # for example render an SVG with javascript inside or even render
            # HTML.
            'image/x-icon': ('image/x-icon', 'inline'),
            'image/png': ('image/png', 'inline'),
            'image/gif': ('image/gif', 'inline'),
            'image/jpeg': ('image/jpeg', 'inline'),
        }

        mimetype = file_node.mimetype
        try:
            mimetype, dispo = raw_mimetype_mapping[mimetype]
        except KeyError:
            # we don't know anything special about this, handle it safely
            if file_node.is_binary:
                # do same as download raw for binary files
                mimetype, dispo = 'application/octet-stream', 'attachment'
            else:
                # do not just use the original mimetype, but force text/plain,
                # otherwise it would serve text/html and that might be unsafe.
                # Note: underlying vcs library fakes text/plain mimetype if the
                # mimetype can not be determined and it thinks it is not
                # binary.This might lead to erroneous text display in some
                # cases, but helps in other cases, like with text files
                # without extension.
                mimetype, dispo = 'text/plain', 'inline'

        if dispo == 'attachment':
            # name the download after the last path segment
            dispo = 'attachment; filename=%s' % safe_str(
                f_path.split(os.sep)[-1])

        response.content_disposition = dispo
        response.content_type = mimetype
        charset = self._get_default_encoding()
        if charset:
            response.charset = charset
        return file_node.content
381 |
|
372 | |||
    @CSRFRequired()
    @LoginRequired()
    @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
    def delete(self, repo_name, revision, f_path):
        """
        POST handler that commits the deletion of a single file.

        Guards: refuses when the repository is locked by another user,
        or when *revision* is not a valid branch head — both flash a
        warning and redirect to tip. On success or commit failure it
        redirects to the tip changeset page.
        """
        commit_id = revision

        repo = c.rhodecode_db_repo
        if repo.enable_locking and repo.locked[0]:
            h.flash(_('This repository has been locked by %s on %s')
                    % (h.person_by_id(repo.locked[0]),
                       h.format_date(h.time_to_datetime(repo.locked[1]))),
                    'warning')
            return redirect(h.url('files_home',
                                  repo_name=repo_name, revision='tip'))

        if not self._is_valid_head(commit_id, repo.scm_instance()):
            h.flash(_('You can only delete files with revision '
                      'being a valid branch '), category='warning')
            return redirect(h.url('files_home',
                                  repo_name=repo_name, revision='tip',
                                  f_path=f_path))

        c.commit = self.__get_commit_or_redirect(commit_id, repo_name)
        c.file = self.__get_filenode_or_redirect(repo_name, c.commit, f_path)

        c.default_message = _(
            'Deleted file %s via RhodeCode Enterprise') % (f_path)
        c.f_path = f_path
        node_path = f_path
        author = c.rhodecode_user.full_contact
        # fall back to the default message when none was posted
        message = request.POST.get('message') or c.default_message
        try:
            nodes = {
                node_path: {
                    'content': ''
                }
            }
            self.scm_model.delete_nodes(
                user=c.rhodecode_user.user_id, repo=c.rhodecode_db_repo,
                message=message,
                nodes=nodes,
                parent_commit=c.commit,
                author=author,
            )

            h.flash(_('Successfully deleted file %s') % f_path,
                    category='success')
        except Exception:
            # best-effort: surface the failure via flash, then redirect
            msg = _('Error occurred during commit')
            log.exception(msg)
            h.flash(msg, category='error')
        return redirect(url('changeset_home',
                            repo_name=c.repo_name, revision='tip'))
435 |
|
426 | |||
    @LoginRequired()
    @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
    def delete_home(self, repo_name, revision, f_path):
        """
        GET view that renders the delete-confirmation page for a file.

        Applies the same guards as :meth:`delete` (repository lock,
        valid branch head) and then renders ``files/files_delete.html``.
        """
        commit_id = revision

        repo = c.rhodecode_db_repo
        if repo.enable_locking and repo.locked[0]:
            h.flash(_('This repository has been locked by %s on %s')
                    % (h.person_by_id(repo.locked[0]),
                       h.format_date(h.time_to_datetime(repo.locked[1]))),
                    'warning')
            return redirect(h.url('files_home',
                                  repo_name=repo_name, revision='tip'))

        if not self._is_valid_head(commit_id, repo.scm_instance()):
            h.flash(_('You can only delete files with revision '
                      'being a valid branch '), category='warning')
            return redirect(h.url('files_home',
                                  repo_name=repo_name, revision='tip',
                                  f_path=f_path))

        c.commit = self.__get_commit_or_redirect(commit_id, repo_name)
        c.file = self.__get_filenode_or_redirect(repo_name, c.commit, f_path)

        c.default_message = _(
            'Deleted file %s via RhodeCode Enterprise') % (f_path)
        c.f_path = f_path

        return render('files/files_delete.html')
465 |
|
456 | |||
    @CSRFRequired()
    @LoginRequired()
    @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
    def edit(self, repo_name, revision, f_path):
        """
        POST handler that commits an in-browser edit of a file.

        Guards: repository lock, valid branch head, and binary files
        (redirected back to the file view — no textarea editing). If
        neither content nor filename changed, flashes 'No changes' and
        redirects. Otherwise commits via ``ScmModel().update_nodes`` and
        redirects to the tip changeset page.
        """
        commit_id = revision

        repo = c.rhodecode_db_repo
        if repo.enable_locking and repo.locked[0]:
            h.flash(_('This repository has been locked by %s on %s')
                    % (h.person_by_id(repo.locked[0]),
                       h.format_date(h.time_to_datetime(repo.locked[1]))),
                    'warning')
            return redirect(h.url('files_home',
                                  repo_name=repo_name, revision='tip'))

        if not self._is_valid_head(commit_id, repo.scm_instance()):
            h.flash(_('You can only edit files with revision '
                      'being a valid branch '), category='warning')
            return redirect(h.url('files_home',
                                  repo_name=repo_name, revision='tip',
                                  f_path=f_path))

        c.commit = self.__get_commit_or_redirect(commit_id, repo_name)
        c.file = self.__get_filenode_or_redirect(repo_name, c.commit, f_path)

        if c.file.is_binary:
            # binary content cannot be edited in the browser
            return redirect(url('files_home', repo_name=c.repo_name,
                            revision=c.commit.raw_id, f_path=f_path))
        c.default_message = _(
            'Edited file %s via RhodeCode Enterprise') % (f_path)
        c.f_path = f_path
        old_content = c.file.content
        # keepends=1: preserve line terminators so the mode can be detected
        sl = old_content.splitlines(1)
        first_line = sl[0] if sl else ''

        # modes: 0 - Unix, 1 - Mac, 2 - DOS
        mode = detect_mode(first_line, 0)
        # normalize the posted content to the file's original line endings
        content = convert_line_endings(request.POST.get('content', ''), mode)

        message = request.POST.get('message') or c.default_message
        org_f_path = c.file.unicode_path
        filename = request.POST['filename']
        org_filename = c.file.name

        if content == old_content and filename == org_filename:
            h.flash(_('No changes'), category='warning')
            return redirect(url('changeset_home', repo_name=c.repo_name,
                                revision='tip'))
        try:
            mapping = {
                org_f_path: {
                    'org_filename': org_f_path,
                    'filename': os.path.join(c.file.dir_path, filename),
                    'content': content,
                    'lexer': '',
                    'op': 'mod',
                }
            }

            ScmModel().update_nodes(
                user=c.rhodecode_user.user_id,
                repo=c.rhodecode_db_repo,
                message=message,
                nodes=mapping,
                parent_commit=c.commit,
            )

            h.flash(_('Successfully committed to %s') % f_path,
                    category='success')
        except Exception:
            # best-effort: surface the failure via flash, then redirect
            msg = _('Error occurred during commit')
            log.exception(msg)
            h.flash(msg, category='error')
        return redirect(url('changeset_home',
                            repo_name=c.repo_name, revision='tip'))
541 |
|
532 | |||
542 | @LoginRequired() |
|
533 | @LoginRequired() | |
543 | @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin') |
|
534 | @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin') | |
544 | def edit_home(self, repo_name, revision, f_path): |
|
535 | def edit_home(self, repo_name, revision, f_path): | |
545 | commit_id = revision |
|
536 | commit_id = revision | |
546 |
|
537 | |||
547 | repo = c.rhodecode_db_repo |
|
538 | repo = c.rhodecode_db_repo | |
548 | if repo.enable_locking and repo.locked[0]: |
|
539 | if repo.enable_locking and repo.locked[0]: | |
549 | h.flash(_('This repository has been locked by %s on %s') |
|
540 | h.flash(_('This repository has been locked by %s on %s') | |
550 | % (h.person_by_id(repo.locked[0]), |
|
541 | % (h.person_by_id(repo.locked[0]), | |
551 | h.format_date(h.time_to_datetime(repo.locked[1]))), |
|
542 | h.format_date(h.time_to_datetime(repo.locked[1]))), | |
552 | 'warning') |
|
543 | 'warning') | |
553 | return redirect(h.url('files_home', |
|
544 | return redirect(h.url('files_home', | |
554 | repo_name=repo_name, revision='tip')) |
|
545 | repo_name=repo_name, revision='tip')) | |
555 |
|
546 | |||
556 | if not self._is_valid_head(commit_id, repo.scm_instance()): |
|
547 | if not self._is_valid_head(commit_id, repo.scm_instance()): | |
557 | h.flash(_('You can only edit files with revision ' |
|
548 | h.flash(_('You can only edit files with revision ' | |
558 | 'being a valid branch '), category='warning') |
|
549 | 'being a valid branch '), category='warning') | |
559 | return redirect(h.url('files_home', |
|
550 | return redirect(h.url('files_home', | |
560 | repo_name=repo_name, revision='tip', |
|
551 | repo_name=repo_name, revision='tip', | |
561 | f_path=f_path)) |
|
552 | f_path=f_path)) | |
562 |
|
553 | |||
563 | c.commit = self.__get_commit_or_redirect(commit_id, repo_name) |
|
554 | c.commit = self.__get_commit_or_redirect(commit_id, repo_name) | |
564 | c.file = self.__get_filenode_or_redirect(repo_name, c.commit, f_path) |
|
555 | c.file = self.__get_filenode_or_redirect(repo_name, c.commit, f_path) | |
565 |
|
556 | |||
566 | if c.file.is_binary: |
|
557 | if c.file.is_binary: | |
567 | return redirect(url('files_home', repo_name=c.repo_name, |
|
558 | return redirect(url('files_home', repo_name=c.repo_name, | |
568 | revision=c.commit.raw_id, f_path=f_path)) |
|
559 | revision=c.commit.raw_id, f_path=f_path)) | |
569 | c.default_message = _( |
|
560 | c.default_message = _( | |
570 | 'Edited file %s via RhodeCode Enterprise') % (f_path) |
|
561 | 'Edited file %s via RhodeCode Enterprise') % (f_path) | |
571 | c.f_path = f_path |
|
562 | c.f_path = f_path | |
572 |
|
563 | |||
573 | return render('files/files_edit.html') |
|
564 | return render('files/files_edit.html') | |
574 |
|
565 | |||
575 | def _is_valid_head(self, commit_id, repo): |
|
566 | def _is_valid_head(self, commit_id, repo): | |
576 | # check if commit is a branch identifier- basically we cannot |
|
567 | # check if commit is a branch identifier- basically we cannot | |
577 | # create multiple heads via file editing |
|
568 | # create multiple heads via file editing | |
578 | valid_heads = repo.branches.keys() + repo.branches.values() |
|
569 | valid_heads = repo.branches.keys() + repo.branches.values() | |
579 |
|
570 | |||
580 | if h.is_svn(repo) and not repo.is_empty(): |
|
571 | if h.is_svn(repo) and not repo.is_empty(): | |
581 | # Note: Subversion only has one head, we add it here in case there |
|
572 | # Note: Subversion only has one head, we add it here in case there | |
582 | # is no branch matched. |
|
573 | # is no branch matched. | |
583 | valid_heads.append(repo.get_commit(commit_idx=-1).raw_id) |
|
574 | valid_heads.append(repo.get_commit(commit_idx=-1).raw_id) | |
584 |
|
575 | |||
585 | # check if commit is a branch name or branch hash |
|
576 | # check if commit is a branch name or branch hash | |
586 | return commit_id in valid_heads |
|
577 | return commit_id in valid_heads | |
587 |
|
578 | |||
588 | @CSRFRequired() |
|
579 | @CSRFRequired() | |
589 | @LoginRequired() |
|
580 | @LoginRequired() | |
590 | @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin') |
|
581 | @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin') | |
591 | def add(self, repo_name, revision, f_path): |
|
582 | def add(self, repo_name, revision, f_path): | |
592 | repo = Repository.get_by_repo_name(repo_name) |
|
583 | repo = Repository.get_by_repo_name(repo_name) | |
593 | if repo.enable_locking and repo.locked[0]: |
|
584 | if repo.enable_locking and repo.locked[0]: | |
594 | h.flash(_('This repository has been locked by %s on %s') |
|
585 | h.flash(_('This repository has been locked by %s on %s') | |
595 | % (h.person_by_id(repo.locked[0]), |
|
586 | % (h.person_by_id(repo.locked[0]), | |
596 | h.format_date(h.time_to_datetime(repo.locked[1]))), |
|
587 | h.format_date(h.time_to_datetime(repo.locked[1]))), | |
597 | 'warning') |
|
588 | 'warning') | |
598 | return redirect(h.url('files_home', |
|
589 | return redirect(h.url('files_home', | |
599 | repo_name=repo_name, revision='tip')) |
|
590 | repo_name=repo_name, revision='tip')) | |
600 |
|
591 | |||
601 | r_post = request.POST |
|
592 | r_post = request.POST | |
602 |
|
593 | |||
603 | c.commit = self.__get_commit_or_redirect( |
|
594 | c.commit = self.__get_commit_or_redirect( | |
604 | revision, repo_name, redirect_after=False) |
|
595 | revision, repo_name, redirect_after=False) | |
605 | if c.commit is None: |
|
596 | if c.commit is None: | |
606 | c.commit = EmptyCommit(alias=c.rhodecode_repo.alias) |
|
597 | c.commit = EmptyCommit(alias=c.rhodecode_repo.alias) | |
607 | c.default_message = (_('Added file via RhodeCode Enterprise')) |
|
598 | c.default_message = (_('Added file via RhodeCode Enterprise')) | |
608 | c.f_path = f_path |
|
599 | c.f_path = f_path | |
609 | unix_mode = 0 |
|
600 | unix_mode = 0 | |
610 | content = convert_line_endings(r_post.get('content', ''), unix_mode) |
|
601 | content = convert_line_endings(r_post.get('content', ''), unix_mode) | |
611 |
|
602 | |||
612 | message = r_post.get('message') or c.default_message |
|
603 | message = r_post.get('message') or c.default_message | |
613 | filename = r_post.get('filename') |
|
604 | filename = r_post.get('filename') | |
614 | location = r_post.get('location', '') # dir location |
|
605 | location = r_post.get('location', '') # dir location | |
615 | file_obj = r_post.get('upload_file', None) |
|
606 | file_obj = r_post.get('upload_file', None) | |
616 |
|
607 | |||
617 | if file_obj is not None and hasattr(file_obj, 'filename'): |
|
608 | if file_obj is not None and hasattr(file_obj, 'filename'): | |
618 | filename = file_obj.filename |
|
609 | filename = file_obj.filename | |
619 | content = file_obj.file |
|
610 | content = file_obj.file | |
620 |
|
611 | |||
621 | if hasattr(content, 'file'): |
|
612 | if hasattr(content, 'file'): | |
622 | # non posix systems store real file under file attr |
|
613 | # non posix systems store real file under file attr | |
623 | content = content.file |
|
614 | content = content.file | |
624 |
|
615 | |||
625 | # If there's no commit, redirect to repo summary |
|
616 | # If there's no commit, redirect to repo summary | |
626 | if type(c.commit) is EmptyCommit: |
|
617 | if type(c.commit) is EmptyCommit: | |
627 | redirect_url = "summary_home" |
|
618 | redirect_url = "summary_home" | |
628 | else: |
|
619 | else: | |
629 | redirect_url = "changeset_home" |
|
620 | redirect_url = "changeset_home" | |
630 |
|
621 | |||
631 | if not filename: |
|
622 | if not filename: | |
632 | h.flash(_('No filename'), category='warning') |
|
623 | h.flash(_('No filename'), category='warning') | |
633 | return redirect(url(redirect_url, repo_name=c.repo_name, |
|
624 | return redirect(url(redirect_url, repo_name=c.repo_name, | |
634 | revision='tip')) |
|
625 | revision='tip')) | |
635 |
|
626 | |||
636 | # extract the location from filename, |
|
627 | # extract the location from filename, | |
637 | # allows using foo/bar.txt syntax to create subdirectories |
|
628 | # allows using foo/bar.txt syntax to create subdirectories | |
638 | subdir_loc = filename.rsplit('/', 1) |
|
629 | subdir_loc = filename.rsplit('/', 1) | |
639 | if len(subdir_loc) == 2: |
|
630 | if len(subdir_loc) == 2: | |
640 | location = os.path.join(location, subdir_loc[0]) |
|
631 | location = os.path.join(location, subdir_loc[0]) | |
641 |
|
632 | |||
642 | # strip all crap out of file, just leave the basename |
|
633 | # strip all crap out of file, just leave the basename | |
643 | filename = os.path.basename(filename) |
|
634 | filename = os.path.basename(filename) | |
644 | node_path = os.path.join(location, filename) |
|
635 | node_path = os.path.join(location, filename) | |
645 | author = c.rhodecode_user.full_contact |
|
636 | author = c.rhodecode_user.full_contact | |
646 |
|
637 | |||
647 | try: |
|
638 | try: | |
648 | nodes = { |
|
639 | nodes = { | |
649 | node_path: { |
|
640 | node_path: { | |
650 | 'content': content |
|
641 | 'content': content | |
651 | } |
|
642 | } | |
652 | } |
|
643 | } | |
653 | self.scm_model.create_nodes( |
|
644 | self.scm_model.create_nodes( | |
654 | user=c.rhodecode_user.user_id, |
|
645 | user=c.rhodecode_user.user_id, | |
655 | repo=c.rhodecode_db_repo, |
|
646 | repo=c.rhodecode_db_repo, | |
656 | message=message, |
|
647 | message=message, | |
657 | nodes=nodes, |
|
648 | nodes=nodes, | |
658 | parent_commit=c.commit, |
|
649 | parent_commit=c.commit, | |
659 | author=author, |
|
650 | author=author, | |
660 | ) |
|
651 | ) | |
661 |
|
652 | |||
662 | h.flash(_('Successfully committed to %s') % node_path, |
|
653 | h.flash(_('Successfully committed to %s') % node_path, | |
663 | category='success') |
|
654 | category='success') | |
664 | except NonRelativePathError as e: |
|
655 | except NonRelativePathError as e: | |
665 | h.flash(_( |
|
656 | h.flash(_( | |
666 | 'The location specified must be a relative path and must not ' |
|
657 | 'The location specified must be a relative path and must not ' | |
667 | 'contain .. in the path'), category='warning') |
|
658 | 'contain .. in the path'), category='warning') | |
668 | return redirect(url('changeset_home', repo_name=c.repo_name, |
|
659 | return redirect(url('changeset_home', repo_name=c.repo_name, | |
669 | revision='tip')) |
|
660 | revision='tip')) | |
670 | except (NodeError, NodeAlreadyExistsError) as e: |
|
661 | except (NodeError, NodeAlreadyExistsError) as e: | |
671 | h.flash(_(e), category='error') |
|
662 | h.flash(_(e), category='error') | |
672 | except Exception: |
|
663 | except Exception: | |
673 | msg = _('Error occurred during commit') |
|
664 | msg = _('Error occurred during commit') | |
674 | log.exception(msg) |
|
665 | log.exception(msg) | |
675 | h.flash(msg, category='error') |
|
666 | h.flash(msg, category='error') | |
676 | return redirect(url('changeset_home', |
|
667 | return redirect(url('changeset_home', | |
677 | repo_name=c.repo_name, revision='tip')) |
|
668 | repo_name=c.repo_name, revision='tip')) | |
678 |
|
669 | |||
679 | @LoginRequired() |
|
670 | @LoginRequired() | |
680 | @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin') |
|
671 | @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin') | |
681 | def add_home(self, repo_name, revision, f_path): |
|
672 | def add_home(self, repo_name, revision, f_path): | |
682 |
|
673 | |||
683 | repo = Repository.get_by_repo_name(repo_name) |
|
674 | repo = Repository.get_by_repo_name(repo_name) | |
684 | if repo.enable_locking and repo.locked[0]: |
|
675 | if repo.enable_locking and repo.locked[0]: | |
685 | h.flash(_('This repository has been locked by %s on %s') |
|
676 | h.flash(_('This repository has been locked by %s on %s') | |
686 | % (h.person_by_id(repo.locked[0]), |
|
677 | % (h.person_by_id(repo.locked[0]), | |
687 | h.format_date(h.time_to_datetime(repo.locked[1]))), |
|
678 | h.format_date(h.time_to_datetime(repo.locked[1]))), | |
688 | 'warning') |
|
679 | 'warning') | |
689 | return redirect(h.url('files_home', |
|
680 | return redirect(h.url('files_home', | |
690 | repo_name=repo_name, revision='tip')) |
|
681 | repo_name=repo_name, revision='tip')) | |
691 |
|
682 | |||
692 | c.commit = self.__get_commit_or_redirect( |
|
683 | c.commit = self.__get_commit_or_redirect( | |
693 | revision, repo_name, redirect_after=False) |
|
684 | revision, repo_name, redirect_after=False) | |
694 | if c.commit is None: |
|
685 | if c.commit is None: | |
695 | c.commit = EmptyCommit(alias=c.rhodecode_repo.alias) |
|
686 | c.commit = EmptyCommit(alias=c.rhodecode_repo.alias) | |
696 | c.default_message = (_('Added file via RhodeCode Enterprise')) |
|
687 | c.default_message = (_('Added file via RhodeCode Enterprise')) | |
697 | c.f_path = f_path |
|
688 | c.f_path = f_path | |
698 |
|
689 | |||
699 | return render('files/files_add.html') |
|
690 | return render('files/files_add.html') | |
700 |
|
691 | |||
701 | @LoginRequired() |
|
692 | @LoginRequired() | |
702 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
693 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', | |
703 | 'repository.admin') |
|
694 | 'repository.admin') | |
704 | def archivefile(self, repo_name, fname): |
|
695 | def archivefile(self, repo_name, fname): | |
705 | fileformat = None |
|
696 | fileformat = None | |
706 | commit_id = None |
|
697 | commit_id = None | |
707 | ext = None |
|
698 | ext = None | |
708 | subrepos = request.GET.get('subrepos') == 'true' |
|
699 | subrepos = request.GET.get('subrepos') == 'true' | |
709 |
|
700 | |||
710 | for a_type, ext_data in settings.ARCHIVE_SPECS.items(): |
|
701 | for a_type, ext_data in settings.ARCHIVE_SPECS.items(): | |
711 | archive_spec = fname.split(ext_data[1]) |
|
702 | archive_spec = fname.split(ext_data[1]) | |
712 | if len(archive_spec) == 2 and archive_spec[1] == '': |
|
703 | if len(archive_spec) == 2 and archive_spec[1] == '': | |
713 | fileformat = a_type or ext_data[1] |
|
704 | fileformat = a_type or ext_data[1] | |
714 | commit_id = archive_spec[0] |
|
705 | commit_id = archive_spec[0] | |
715 | ext = ext_data[1] |
|
706 | ext = ext_data[1] | |
716 |
|
707 | |||
717 | dbrepo = RepoModel().get_by_repo_name(repo_name) |
|
708 | dbrepo = RepoModel().get_by_repo_name(repo_name) | |
718 | if not dbrepo.enable_downloads: |
|
709 | if not dbrepo.enable_downloads: | |
719 | return _('Downloads disabled') |
|
710 | return _('Downloads disabled') | |
720 |
|
711 | |||
721 | try: |
|
712 | try: | |
722 | commit = c.rhodecode_repo.get_commit(commit_id) |
|
713 | commit = c.rhodecode_repo.get_commit(commit_id) | |
723 | content_type = settings.ARCHIVE_SPECS[fileformat][0] |
|
714 | content_type = settings.ARCHIVE_SPECS[fileformat][0] | |
724 | except CommitDoesNotExistError: |
|
715 | except CommitDoesNotExistError: | |
725 | return _('Unknown revision %s') % commit_id |
|
716 | return _('Unknown revision %s') % commit_id | |
726 | except EmptyRepositoryError: |
|
717 | except EmptyRepositoryError: | |
727 | return _('Empty repository') |
|
718 | return _('Empty repository') | |
728 | except KeyError: |
|
719 | except KeyError: | |
729 | return _('Unknown archive type') |
|
720 | return _('Unknown archive type') | |
730 |
|
721 | |||
731 | # archive cache |
|
722 | # archive cache | |
732 | from rhodecode import CONFIG |
|
723 | from rhodecode import CONFIG | |
733 |
|
724 | |||
734 | archive_name = '%s-%s%s%s' % ( |
|
725 | archive_name = '%s-%s%s%s' % ( | |
735 | safe_str(repo_name.replace('/', '_')), |
|
726 | safe_str(repo_name.replace('/', '_')), | |
736 | '-sub' if subrepos else '', |
|
727 | '-sub' if subrepos else '', | |
737 | safe_str(commit.short_id), ext) |
|
728 | safe_str(commit.short_id), ext) | |
738 |
|
729 | |||
739 | use_cached_archive = False |
|
730 | use_cached_archive = False | |
740 | archive_cache_enabled = CONFIG.get( |
|
731 | archive_cache_enabled = CONFIG.get( | |
741 | 'archive_cache_dir') and not request.GET.get('no_cache') |
|
732 | 'archive_cache_dir') and not request.GET.get('no_cache') | |
742 |
|
733 | |||
743 | if archive_cache_enabled: |
|
734 | if archive_cache_enabled: | |
744 | # check if we it's ok to write |
|
735 | # check if we it's ok to write | |
745 | if not os.path.isdir(CONFIG['archive_cache_dir']): |
|
736 | if not os.path.isdir(CONFIG['archive_cache_dir']): | |
746 | os.makedirs(CONFIG['archive_cache_dir']) |
|
737 | os.makedirs(CONFIG['archive_cache_dir']) | |
747 | cached_archive_path = os.path.join( |
|
738 | cached_archive_path = os.path.join( | |
748 | CONFIG['archive_cache_dir'], archive_name) |
|
739 | CONFIG['archive_cache_dir'], archive_name) | |
749 | if os.path.isfile(cached_archive_path): |
|
740 | if os.path.isfile(cached_archive_path): | |
750 | log.debug('Found cached archive in %s', cached_archive_path) |
|
741 | log.debug('Found cached archive in %s', cached_archive_path) | |
751 | fd, archive = None, cached_archive_path |
|
742 | fd, archive = None, cached_archive_path | |
752 | use_cached_archive = True |
|
743 | use_cached_archive = True | |
753 | else: |
|
744 | else: | |
754 | log.debug('Archive %s is not yet cached', archive_name) |
|
745 | log.debug('Archive %s is not yet cached', archive_name) | |
755 |
|
746 | |||
756 | if not use_cached_archive: |
|
747 | if not use_cached_archive: | |
757 | # generate new archive |
|
748 | # generate new archive | |
758 | fd, archive = tempfile.mkstemp() |
|
749 | fd, archive = tempfile.mkstemp() | |
759 | log.debug('Creating new temp archive in %s' % (archive,)) |
|
750 | log.debug('Creating new temp archive in %s' % (archive,)) | |
760 | try: |
|
751 | try: | |
761 | commit.archive_repo(archive, kind=fileformat, subrepos=subrepos) |
|
752 | commit.archive_repo(archive, kind=fileformat, subrepos=subrepos) | |
762 | except ImproperArchiveTypeError: |
|
753 | except ImproperArchiveTypeError: | |
763 | return _('Unknown archive type') |
|
754 | return _('Unknown archive type') | |
764 | if archive_cache_enabled: |
|
755 | if archive_cache_enabled: | |
765 | # if we generated the archive and we have cache enabled |
|
756 | # if we generated the archive and we have cache enabled | |
766 | # let's use this for future |
|
757 | # let's use this for future | |
767 | log.debug('Storing new archive in %s' % (cached_archive_path,)) |
|
758 | log.debug('Storing new archive in %s' % (cached_archive_path,)) | |
768 | shutil.move(archive, cached_archive_path) |
|
759 | shutil.move(archive, cached_archive_path) | |
769 | archive = cached_archive_path |
|
760 | archive = cached_archive_path | |
770 |
|
761 | |||
771 | def get_chunked_archive(archive): |
|
762 | def get_chunked_archive(archive): | |
772 | with open(archive, 'rb') as stream: |
|
763 | with open(archive, 'rb') as stream: | |
773 | while True: |
|
764 | while True: | |
774 | data = stream.read(16 * 1024) |
|
765 | data = stream.read(16 * 1024) | |
775 | if not data: |
|
766 | if not data: | |
776 | if fd: # fd means we used temporary file |
|
767 | if fd: # fd means we used temporary file | |
777 | os.close(fd) |
|
768 | os.close(fd) | |
778 | if not archive_cache_enabled: |
|
769 | if not archive_cache_enabled: | |
779 | log.debug('Destroying temp archive %s', archive) |
|
770 | log.debug('Destroying temp archive %s', archive) | |
780 | os.remove(archive) |
|
771 | os.remove(archive) | |
781 | break |
|
772 | break | |
782 | yield data |
|
773 | yield data | |
783 |
|
774 | |||
784 | # store download action |
|
775 | # store download action | |
785 | action_logger(user=c.rhodecode_user, |
|
776 | action_logger(user=c.rhodecode_user, | |
786 | action='user_downloaded_archive:%s' % archive_name, |
|
777 | action='user_downloaded_archive:%s' % archive_name, | |
787 | repo=repo_name, ipaddr=self.ip_addr, commit=True) |
|
778 | repo=repo_name, ipaddr=self.ip_addr, commit=True) | |
788 | response.content_disposition = str( |
|
779 | response.content_disposition = str( | |
789 | 'attachment; filename=%s' % archive_name) |
|
780 | 'attachment; filename=%s' % archive_name) | |
790 | response.content_type = str(content_type) |
|
781 | response.content_type = str(content_type) | |
791 |
|
782 | |||
792 | return get_chunked_archive(archive) |
|
783 | return get_chunked_archive(archive) | |
793 |
|
784 | |||
794 | @LoginRequired() |
|
785 | @LoginRequired() | |
795 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
786 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', | |
796 | 'repository.admin') |
|
787 | 'repository.admin') | |
797 | def diff(self, repo_name, f_path): |
|
788 | def diff(self, repo_name, f_path): | |
798 | ignore_whitespace = request.GET.get('ignorews') == '1' |
|
789 | ignore_whitespace = request.GET.get('ignorews') == '1' | |
799 | line_context = request.GET.get('context', 3) |
|
790 | line_context = request.GET.get('context', 3) | |
800 | diff1 = request.GET.get('diff1', '') |
|
791 | diff1 = request.GET.get('diff1', '') | |
801 |
|
792 | |||
802 | path1, diff1 = parse_path_ref(diff1, default_path=f_path) |
|
793 | path1, diff1 = parse_path_ref(diff1, default_path=f_path) | |
803 |
|
794 | |||
804 | diff2 = request.GET.get('diff2', '') |
|
795 | diff2 = request.GET.get('diff2', '') | |
805 | c.action = request.GET.get('diff') |
|
796 | c.action = request.GET.get('diff') | |
806 | c.no_changes = diff1 == diff2 |
|
797 | c.no_changes = diff1 == diff2 | |
807 | c.f_path = f_path |
|
798 | c.f_path = f_path | |
808 | c.big_diff = False |
|
799 | c.big_diff = False | |
809 | c.ignorews_url = _ignorews_url |
|
800 | c.ignorews_url = _ignorews_url | |
810 | c.context_url = _context_url |
|
801 | c.context_url = _context_url | |
811 | c.changes = OrderedDict() |
|
802 | c.changes = OrderedDict() | |
812 | c.changes[diff2] = [] |
|
803 | c.changes[diff2] = [] | |
813 |
|
804 | |||
814 | if not any((diff1, diff2)): |
|
805 | if not any((diff1, diff2)): | |
815 | h.flash( |
|
806 | h.flash( | |
816 | 'Need query parameter "diff1" or "diff2" to generate a diff.', |
|
807 | 'Need query parameter "diff1" or "diff2" to generate a diff.', | |
817 | category='error') |
|
808 | category='error') | |
818 | raise HTTPBadRequest() |
|
809 | raise HTTPBadRequest() | |
819 |
|
810 | |||
820 | # special case if we want a show commit_id only, it's impl here |
|
811 | # special case if we want a show commit_id only, it's impl here | |
821 | # to reduce JS and callbacks |
|
812 | # to reduce JS and callbacks | |
822 |
|
813 | |||
823 | if request.GET.get('show_rev') and diff1: |
|
814 | if request.GET.get('show_rev') and diff1: | |
824 | if str2bool(request.GET.get('annotate', 'False')): |
|
815 | if str2bool(request.GET.get('annotate', 'False')): | |
825 | _url = url('files_annotate_home', repo_name=c.repo_name, |
|
816 | _url = url('files_annotate_home', repo_name=c.repo_name, | |
826 | revision=diff1, f_path=path1) |
|
817 | revision=diff1, f_path=path1) | |
827 | else: |
|
818 | else: | |
828 | _url = url('files_home', repo_name=c.repo_name, |
|
819 | _url = url('files_home', repo_name=c.repo_name, | |
829 | revision=diff1, f_path=path1) |
|
820 | revision=diff1, f_path=path1) | |
830 |
|
821 | |||
831 | return redirect(_url) |
|
822 | return redirect(_url) | |
832 |
|
823 | |||
833 | try: |
|
824 | try: | |
834 | node1 = self._get_file_node(diff1, path1) |
|
825 | node1 = self._get_file_node(diff1, path1) | |
835 | node2 = self._get_file_node(diff2, f_path) |
|
826 | node2 = self._get_file_node(diff2, f_path) | |
836 | except (RepositoryError, NodeError): |
|
827 | except (RepositoryError, NodeError): | |
837 | log.exception("Exception while trying to get node from repository") |
|
828 | log.exception("Exception while trying to get node from repository") | |
838 | return redirect(url( |
|
829 | return redirect(url( | |
839 | 'files_home', repo_name=c.repo_name, f_path=f_path)) |
|
830 | 'files_home', repo_name=c.repo_name, f_path=f_path)) | |
840 |
|
831 | |||
841 | if all(isinstance(node.commit, EmptyCommit) |
|
832 | if all(isinstance(node.commit, EmptyCommit) | |
842 | for node in (node1, node2)): |
|
833 | for node in (node1, node2)): | |
843 | raise HTTPNotFound |
|
834 | raise HTTPNotFound | |
844 |
|
835 | |||
845 | c.commit_1 = node1.commit |
|
836 | c.commit_1 = node1.commit | |
846 | c.commit_2 = node2.commit |
|
837 | c.commit_2 = node2.commit | |
847 |
|
838 | |||
848 | if c.action == 'download': |
|
839 | if c.action == 'download': | |
849 | _diff = diffs.get_gitdiff(node1, node2, |
|
840 | _diff = diffs.get_gitdiff(node1, node2, | |
850 | ignore_whitespace=ignore_whitespace, |
|
841 | ignore_whitespace=ignore_whitespace, | |
851 | context=line_context) |
|
842 | context=line_context) | |
852 | diff = diffs.DiffProcessor(_diff, format='gitdiff') |
|
843 | diff = diffs.DiffProcessor(_diff, format='gitdiff') | |
853 |
|
844 | |||
854 | diff_name = '%s_vs_%s.diff' % (diff1, diff2) |
|
845 | diff_name = '%s_vs_%s.diff' % (diff1, diff2) | |
855 | response.content_type = 'text/plain' |
|
846 | response.content_type = 'text/plain' | |
856 | response.content_disposition = ( |
|
847 | response.content_disposition = ( | |
857 | 'attachment; filename=%s' % (diff_name,) |
|
848 | 'attachment; filename=%s' % (diff_name,) | |
858 | ) |
|
849 | ) | |
859 | charset = self._get_default_encoding() |
|
850 | charset = self._get_default_encoding() | |
860 | if charset: |
|
851 | if charset: | |
861 | response.charset = charset |
|
852 | response.charset = charset | |
862 | return diff.as_raw() |
|
853 | return diff.as_raw() | |
863 |
|
854 | |||
864 | elif c.action == 'raw': |
|
855 | elif c.action == 'raw': | |
865 | _diff = diffs.get_gitdiff(node1, node2, |
|
856 | _diff = diffs.get_gitdiff(node1, node2, | |
866 | ignore_whitespace=ignore_whitespace, |
|
857 | ignore_whitespace=ignore_whitespace, | |
867 | context=line_context) |
|
858 | context=line_context) | |
868 | diff = diffs.DiffProcessor(_diff, format='gitdiff') |
|
859 | diff = diffs.DiffProcessor(_diff, format='gitdiff') | |
869 | response.content_type = 'text/plain' |
|
860 | response.content_type = 'text/plain' | |
870 | charset = self._get_default_encoding() |
|
861 | charset = self._get_default_encoding() | |
871 | if charset: |
|
862 | if charset: | |
872 | response.charset = charset |
|
863 | response.charset = charset | |
873 | return diff.as_raw() |
|
864 | return diff.as_raw() | |
874 |
|
865 | |||
875 | else: |
|
866 | else: | |
876 | fid = h.FID(diff2, node2.path) |
|
867 | fid = h.FID(diff2, node2.path) | |
877 | line_context_lcl = get_line_ctx(fid, request.GET) |
|
868 | line_context_lcl = get_line_ctx(fid, request.GET) | |
878 | ign_whitespace_lcl = get_ignore_ws(fid, request.GET) |
|
869 | ign_whitespace_lcl = get_ignore_ws(fid, request.GET) | |
879 |
|
870 | |||
880 | __, commit1, commit2, diff, st, data = diffs.wrapped_diff( |
|
871 | __, commit1, commit2, diff, st, data = diffs.wrapped_diff( | |
881 | filenode_old=node1, |
|
872 | filenode_old=node1, | |
882 | filenode_new=node2, |
|
873 | filenode_new=node2, | |
883 | diff_limit=self.cut_off_limit_diff, |
|
874 | diff_limit=self.cut_off_limit_diff, | |
884 | file_limit=self.cut_off_limit_file, |
|
875 | file_limit=self.cut_off_limit_file, | |
885 | show_full_diff=request.GET.get('fulldiff'), |
|
876 | show_full_diff=request.GET.get('fulldiff'), | |
886 | ignore_whitespace=ign_whitespace_lcl, |
|
877 | ignore_whitespace=ign_whitespace_lcl, | |
887 | line_context=line_context_lcl,) |
|
878 | line_context=line_context_lcl,) | |
888 |
|
879 | |||
889 | c.lines_added = data['stats']['added'] if data else 0 |
|
880 | c.lines_added = data['stats']['added'] if data else 0 | |
890 | c.lines_deleted = data['stats']['deleted'] if data else 0 |
|
881 | c.lines_deleted = data['stats']['deleted'] if data else 0 | |
891 | c.files = [data] |
|
882 | c.files = [data] | |
892 | c.commit_ranges = [c.commit_1, c.commit_2] |
|
883 | c.commit_ranges = [c.commit_1, c.commit_2] | |
893 | c.ancestor = None |
|
884 | c.ancestor = None | |
894 | c.statuses = [] |
|
885 | c.statuses = [] | |
895 | c.target_repo = c.rhodecode_db_repo |
|
886 | c.target_repo = c.rhodecode_db_repo | |
896 | c.filename1 = node1.path |
|
887 | c.filename1 = node1.path | |
897 | c.filename = node2.path |
|
888 | c.filename = node2.path | |
898 | c.binary_file = node1.is_binary or node2.is_binary |
|
889 | c.binary_file = node1.is_binary or node2.is_binary | |
899 | operation = data['operation'] if data else '' |
|
890 | operation = data['operation'] if data else '' | |
900 |
|
891 | |||
901 | commit_changes = { |
|
892 | commit_changes = { | |
902 | # TODO: it's passing the old file to the diff to keep the |
|
893 | # TODO: it's passing the old file to the diff to keep the | |
903 | # standard but this is not being used for this template, |
|
894 | # standard but this is not being used for this template, | |
904 | # but might need both files in the future or a more standard |
|
895 | # but might need both files in the future or a more standard | |
905 | # way to work with that |
|
896 | # way to work with that | |
906 | 'fid': [commit1, commit2, operation, |
|
897 | 'fid': [commit1, commit2, operation, | |
907 | c.filename, diff, st, data] |
|
898 | c.filename, diff, st, data] | |
908 | } |
|
899 | } | |
909 |
|
900 | |||
910 | c.changes = commit_changes |
|
901 | c.changes = commit_changes | |
911 |
|
902 | |||
912 | return render('files/file_diff.html') |
|
903 | return render('files/file_diff.html') | |
913 |
|
904 | |||
914 | @LoginRequired() |
|
905 | @LoginRequired() | |
915 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
906 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', | |
916 | 'repository.admin') |
|
907 | 'repository.admin') | |
917 | def diff_2way(self, repo_name, f_path): |
|
908 | def diff_2way(self, repo_name, f_path): | |
918 | diff1 = request.GET.get('diff1', '') |
|
909 | diff1 = request.GET.get('diff1', '') | |
919 | diff2 = request.GET.get('diff2', '') |
|
910 | diff2 = request.GET.get('diff2', '') | |
920 |
|
911 | |||
921 | nodes = [] |
|
912 | nodes = [] | |
922 | unknown_commits = [] |
|
913 | unknown_commits = [] | |
923 | for commit in [diff1, diff2]: |
|
914 | for commit in [diff1, diff2]: | |
924 | try: |
|
915 | try: | |
925 | nodes.append(self._get_file_node(commit, f_path)) |
|
916 | nodes.append(self._get_file_node(commit, f_path)) | |
926 | except (RepositoryError, NodeError): |
|
917 | except (RepositoryError, NodeError): | |
927 | log.exception('%(commit)s does not exist' % {'commit': commit}) |
|
918 | log.exception('%(commit)s does not exist' % {'commit': commit}) | |
928 | unknown_commits.append(commit) |
|
919 | unknown_commits.append(commit) | |
929 | h.flash(h.literal( |
|
920 | h.flash(h.literal( | |
930 | _('Commit %(commit)s does not exist.') % {'commit': commit} |
|
921 | _('Commit %(commit)s does not exist.') % {'commit': commit} | |
931 | ), category='error') |
|
922 | ), category='error') | |
932 |
|
923 | |||
933 | if unknown_commits: |
|
924 | if unknown_commits: | |
934 | return redirect(url('files_home', repo_name=c.repo_name, |
|
925 | return redirect(url('files_home', repo_name=c.repo_name, | |
935 | f_path=f_path)) |
|
926 | f_path=f_path)) | |
936 |
|
927 | |||
937 | if all(isinstance(node.commit, EmptyCommit) for node in nodes): |
|
928 | if all(isinstance(node.commit, EmptyCommit) for node in nodes): | |
938 | raise HTTPNotFound |
|
929 | raise HTTPNotFound | |
939 |
|
930 | |||
940 | node1, node2 = nodes |
|
931 | node1, node2 = nodes | |
941 |
|
932 | |||
942 | f_gitdiff = diffs.get_gitdiff(node1, node2, ignore_whitespace=False) |
|
933 | f_gitdiff = diffs.get_gitdiff(node1, node2, ignore_whitespace=False) | |
943 | diff_processor = diffs.DiffProcessor(f_gitdiff, format='gitdiff') |
|
934 | diff_processor = diffs.DiffProcessor(f_gitdiff, format='gitdiff') | |
944 | diff_data = diff_processor.prepare() |
|
935 | diff_data = diff_processor.prepare() | |
945 |
|
936 | |||
946 | if not diff_data or diff_data[0]['raw_diff'] == '': |
|
937 | if not diff_data or diff_data[0]['raw_diff'] == '': | |
947 | h.flash(h.literal(_('%(file_path)s has not changed ' |
|
938 | h.flash(h.literal(_('%(file_path)s has not changed ' | |
948 | 'between %(commit_1)s and %(commit_2)s.') % { |
|
939 | 'between %(commit_1)s and %(commit_2)s.') % { | |
949 | 'file_path': f_path, |
|
940 | 'file_path': f_path, | |
950 | 'commit_1': node1.commit.id, |
|
941 | 'commit_1': node1.commit.id, | |
951 | 'commit_2': node2.commit.id |
|
942 | 'commit_2': node2.commit.id | |
952 | }), category='error') |
|
943 | }), category='error') | |
953 | return redirect(url('files_home', repo_name=c.repo_name, |
|
944 | return redirect(url('files_home', repo_name=c.repo_name, | |
954 | f_path=f_path)) |
|
945 | f_path=f_path)) | |
955 |
|
946 | |||
956 | c.diff_data = diff_data[0] |
|
947 | c.diff_data = diff_data[0] | |
957 | c.FID = h.FID(diff2, node2.path) |
|
948 | c.FID = h.FID(diff2, node2.path) | |
958 | # cleanup some unneeded data |
|
949 | # cleanup some unneeded data | |
959 | del c.diff_data['raw_diff'] |
|
950 | del c.diff_data['raw_diff'] | |
960 | del c.diff_data['chunks'] |
|
951 | del c.diff_data['chunks'] | |
961 |
|
952 | |||
962 | c.node1 = node1 |
|
953 | c.node1 = node1 | |
963 | c.commit_1 = node1.commit |
|
954 | c.commit_1 = node1.commit | |
964 | c.node2 = node2 |
|
955 | c.node2 = node2 | |
965 | c.commit_2 = node2.commit |
|
956 | c.commit_2 = node2.commit | |
966 |
|
957 | |||
967 | return render('files/diff_2way.html') |
|
958 | return render('files/diff_2way.html') | |
968 |
|
959 | |||
969 | def _get_file_node(self, commit_id, f_path): |
|
960 | def _get_file_node(self, commit_id, f_path): | |
970 | if commit_id not in ['', None, 'None', '0' * 12, '0' * 40]: |
|
961 | if commit_id not in ['', None, 'None', '0' * 12, '0' * 40]: | |
971 | commit = c.rhodecode_repo.get_commit(commit_id=commit_id) |
|
962 | commit = c.rhodecode_repo.get_commit(commit_id=commit_id) | |
972 | try: |
|
963 | try: | |
973 | node = commit.get_node(f_path) |
|
964 | node = commit.get_node(f_path) | |
974 | if node.is_dir(): |
|
965 | if node.is_dir(): | |
975 | raise NodeError('%s path is a %s not a file' |
|
966 | raise NodeError('%s path is a %s not a file' | |
976 | % (node, type(node))) |
|
967 | % (node, type(node))) | |
977 | except NodeDoesNotExistError: |
|
968 | except NodeDoesNotExistError: | |
978 | commit = EmptyCommit( |
|
969 | commit = EmptyCommit( | |
979 | commit_id=commit_id, |
|
970 | commit_id=commit_id, | |
980 | idx=commit.idx, |
|
971 | idx=commit.idx, | |
981 | repo=commit.repository, |
|
972 | repo=commit.repository, | |
982 | alias=commit.repository.alias, |
|
973 | alias=commit.repository.alias, | |
983 | message=commit.message, |
|
974 | message=commit.message, | |
984 | author=commit.author, |
|
975 | author=commit.author, | |
985 | date=commit.date) |
|
976 | date=commit.date) | |
986 | node = FileNode(f_path, '', commit=commit) |
|
977 | node = FileNode(f_path, '', commit=commit) | |
987 | else: |
|
978 | else: | |
988 | commit = EmptyCommit( |
|
979 | commit = EmptyCommit( | |
989 | repo=c.rhodecode_repo, |
|
980 | repo=c.rhodecode_repo, | |
990 | alias=c.rhodecode_repo.alias) |
|
981 | alias=c.rhodecode_repo.alias) | |
991 | node = FileNode(f_path, '', commit=commit) |
|
982 | node = FileNode(f_path, '', commit=commit) | |
992 | return node |
|
983 | return node | |
993 |
|
984 | |||
994 | def _get_node_history(self, commit, f_path, commits=None): |
|
985 | def _get_node_history(self, commit, f_path, commits=None): | |
995 | """ |
|
986 | """ | |
996 | get commit history for given node |
|
987 | get commit history for given node | |
997 |
|
988 | |||
998 | :param commit: commit to calculate history |
|
989 | :param commit: commit to calculate history | |
999 | :param f_path: path for node to calculate history for |
|
990 | :param f_path: path for node to calculate history for | |
1000 | :param commits: if passed don't calculate history and take |
|
991 | :param commits: if passed don't calculate history and take | |
1001 | commits defined in this list |
|
992 | commits defined in this list | |
1002 | """ |
|
993 | """ | |
1003 | # calculate history based on tip |
|
994 | # calculate history based on tip | |
1004 | tip = c.rhodecode_repo.get_commit() |
|
995 | tip = c.rhodecode_repo.get_commit() | |
1005 | if commits is None: |
|
996 | if commits is None: | |
1006 | pre_load = ["author", "branch"] |
|
997 | pre_load = ["author", "branch"] | |
1007 | try: |
|
998 | try: | |
1008 | commits = tip.get_file_history(f_path, pre_load=pre_load) |
|
999 | commits = tip.get_file_history(f_path, pre_load=pre_load) | |
1009 | except (NodeDoesNotExistError, CommitError): |
|
1000 | except (NodeDoesNotExistError, CommitError): | |
1010 | # this node is not present at tip! |
|
1001 | # this node is not present at tip! | |
1011 | commits = commit.get_file_history(f_path, pre_load=pre_load) |
|
1002 | commits = commit.get_file_history(f_path, pre_load=pre_load) | |
1012 |
|
1003 | |||
1013 | history = [] |
|
1004 | history = [] | |
1014 | commits_group = ([], _("Changesets")) |
|
1005 | commits_group = ([], _("Changesets")) | |
1015 | for commit in commits: |
|
1006 | for commit in commits: | |
1016 | branch = ' (%s)' % commit.branch if commit.branch else '' |
|
1007 | branch = ' (%s)' % commit.branch if commit.branch else '' | |
1017 | n_desc = 'r%s:%s%s' % (commit.idx, commit.short_id, branch) |
|
1008 | n_desc = 'r%s:%s%s' % (commit.idx, commit.short_id, branch) | |
1018 | commits_group[0].append((commit.raw_id, n_desc,)) |
|
1009 | commits_group[0].append((commit.raw_id, n_desc,)) | |
1019 | history.append(commits_group) |
|
1010 | history.append(commits_group) | |
1020 |
|
1011 | |||
1021 | symbolic_reference = self._symbolic_reference |
|
1012 | symbolic_reference = self._symbolic_reference | |
1022 |
|
1013 | |||
1023 | if c.rhodecode_repo.alias == 'svn': |
|
1014 | if c.rhodecode_repo.alias == 'svn': | |
1024 | adjusted_f_path = self._adjust_file_path_for_svn( |
|
1015 | adjusted_f_path = self._adjust_file_path_for_svn( | |
1025 | f_path, c.rhodecode_repo) |
|
1016 | f_path, c.rhodecode_repo) | |
1026 | if adjusted_f_path != f_path: |
|
1017 | if adjusted_f_path != f_path: | |
1027 | log.debug( |
|
1018 | log.debug( | |
1028 | 'Recognized svn tag or branch in file "%s", using svn ' |
|
1019 | 'Recognized svn tag or branch in file "%s", using svn ' | |
1029 | 'specific symbolic references', f_path) |
|
1020 | 'specific symbolic references', f_path) | |
1030 | f_path = adjusted_f_path |
|
1021 | f_path = adjusted_f_path | |
1031 | symbolic_reference = self._symbolic_reference_svn |
|
1022 | symbolic_reference = self._symbolic_reference_svn | |
1032 |
|
1023 | |||
1033 | branches = self._create_references( |
|
1024 | branches = self._create_references( | |
1034 | c.rhodecode_repo.branches, symbolic_reference, f_path) |
|
1025 | c.rhodecode_repo.branches, symbolic_reference, f_path) | |
1035 | branches_group = (branches, _("Branches")) |
|
1026 | branches_group = (branches, _("Branches")) | |
1036 |
|
1027 | |||
1037 | tags = self._create_references( |
|
1028 | tags = self._create_references( | |
1038 | c.rhodecode_repo.tags, symbolic_reference, f_path) |
|
1029 | c.rhodecode_repo.tags, symbolic_reference, f_path) | |
1039 | tags_group = (tags, _("Tags")) |
|
1030 | tags_group = (tags, _("Tags")) | |
1040 |
|
1031 | |||
1041 | history.append(branches_group) |
|
1032 | history.append(branches_group) | |
1042 | history.append(tags_group) |
|
1033 | history.append(tags_group) | |
1043 |
|
1034 | |||
1044 | return history, commits |
|
1035 | return history, commits | |
1045 |
|
1036 | |||
1046 | def _adjust_file_path_for_svn(self, f_path, repo): |
|
1037 | def _adjust_file_path_for_svn(self, f_path, repo): | |
1047 | """ |
|
1038 | """ | |
1048 | Computes the relative path of `f_path`. |
|
1039 | Computes the relative path of `f_path`. | |
1049 |
|
1040 | |||
1050 | This is mainly based on prefix matching of the recognized tags and |
|
1041 | This is mainly based on prefix matching of the recognized tags and | |
1051 | branches in the underlying repository. |
|
1042 | branches in the underlying repository. | |
1052 | """ |
|
1043 | """ | |
1053 | tags_and_branches = itertools.chain( |
|
1044 | tags_and_branches = itertools.chain( | |
1054 | repo.branches.iterkeys(), |
|
1045 | repo.branches.iterkeys(), | |
1055 | repo.tags.iterkeys()) |
|
1046 | repo.tags.iterkeys()) | |
1056 | tags_and_branches = sorted(tags_and_branches, key=len, reverse=True) |
|
1047 | tags_and_branches = sorted(tags_and_branches, key=len, reverse=True) | |
1057 |
|
1048 | |||
1058 | for name in tags_and_branches: |
|
1049 | for name in tags_and_branches: | |
1059 | if f_path.startswith(name + '/'): |
|
1050 | if f_path.startswith(name + '/'): | |
1060 | f_path = vcspath.relpath(f_path, name) |
|
1051 | f_path = vcspath.relpath(f_path, name) | |
1061 | break |
|
1052 | break | |
1062 | return f_path |
|
1053 | return f_path | |
1063 |
|
1054 | |||
1064 | def _create_references( |
|
1055 | def _create_references( | |
1065 | self, branches_or_tags, symbolic_reference, f_path): |
|
1056 | self, branches_or_tags, symbolic_reference, f_path): | |
1066 | items = [] |
|
1057 | items = [] | |
1067 | for name, commit_id in branches_or_tags.items(): |
|
1058 | for name, commit_id in branches_or_tags.items(): | |
1068 | sym_ref = symbolic_reference(commit_id, name, f_path) |
|
1059 | sym_ref = symbolic_reference(commit_id, name, f_path) | |
1069 | items.append((sym_ref, name)) |
|
1060 | items.append((sym_ref, name)) | |
1070 | return items |
|
1061 | return items | |
1071 |
|
1062 | |||
1072 | def _symbolic_reference(self, commit_id, name, f_path): |
|
1063 | def _symbolic_reference(self, commit_id, name, f_path): | |
1073 | return commit_id |
|
1064 | return commit_id | |
1074 |
|
1065 | |||
1075 | def _symbolic_reference_svn(self, commit_id, name, f_path): |
|
1066 | def _symbolic_reference_svn(self, commit_id, name, f_path): | |
1076 | new_f_path = vcspath.join(name, f_path) |
|
1067 | new_f_path = vcspath.join(name, f_path) | |
1077 | return u'%s@%s' % (new_f_path, commit_id) |
|
1068 | return u'%s@%s' % (new_f_path, commit_id) | |
1078 |
|
1069 | |||
1079 | @LoginRequired() |
|
1070 | @LoginRequired() | |
1080 | @XHRRequired() |
|
1071 | @XHRRequired() | |
1081 | @HasRepoPermissionAnyDecorator( |
|
1072 | @HasRepoPermissionAnyDecorator( | |
1082 | 'repository.read', 'repository.write', 'repository.admin') |
|
1073 | 'repository.read', 'repository.write', 'repository.admin') | |
1083 | @jsonify |
|
1074 | @jsonify | |
1084 | def nodelist(self, repo_name, revision, f_path): |
|
1075 | def nodelist(self, repo_name, revision, f_path): | |
1085 | commit = self.__get_commit_or_redirect(revision, repo_name) |
|
1076 | commit = self.__get_commit_or_redirect(revision, repo_name) | |
1086 |
|
1077 | |||
1087 | metadata = self._get_nodelist_at_commit( |
|
1078 | metadata = self._get_nodelist_at_commit( | |
1088 | repo_name, commit.raw_id, f_path) |
|
1079 | repo_name, commit.raw_id, f_path) | |
1089 | return {'nodes': metadata} |
|
1080 | return {'nodes': metadata} | |
1090 |
|
1081 | |||
1091 | @LoginRequired() |
|
1082 | @LoginRequired() | |
1092 | @XHRRequired() |
|
1083 | @XHRRequired() | |
1093 | @HasRepoPermissionAnyDecorator( |
|
1084 | @HasRepoPermissionAnyDecorator( | |
1094 | 'repository.read', 'repository.write', 'repository.admin') |
|
1085 | 'repository.read', 'repository.write', 'repository.admin') | |
1095 | @jsonify |
|
1086 | def nodetree_full(self, repo_name, commit_id, f_path): | |
1096 | def metadata_list(self, repo_name, revision, f_path): |
|
|||
1097 | """ |
|
1087 | """ | |
1098 |
Returns |
|
1088 | Returns rendered html of file tree that contains commit date, | |
1099 |
a |
|
1089 | author, revision for the specified combination of | |
|
1090 | repo, commit_id and file path | |||
1100 |
|
1091 | |||
1101 | :param repo_name: name of the repository |
|
1092 | :param repo_name: name of the repository | |
1102 |
:param |
|
1093 | :param commit_id: commit_id of file tree | |
1103 | :param f_path: file path of the requested directory |
|
1094 | :param f_path: file path of the requested directory | |
1104 | """ |
|
1095 | """ | |
1105 |
|
1096 | |||
1106 |
commit = self.__get_commit_or_redirect( |
|
1097 | commit = self.__get_commit_or_redirect(commit_id, repo_name) | |
1107 | try: |
|
1098 | try: | |
1108 |
|
|
1099 | dir_node = commit.get_node(f_path) | |
1109 | except RepositoryError as e: |
|
1100 | except RepositoryError as e: | |
1110 |
return |
|
1101 | return 'error {}'.format(safe_str(e)) | |
|
1102 | ||||
|
1103 | if dir_node.is_file(): | |||
|
1104 | return '' | |||
1111 |
|
1105 | |||
1112 | metadata = self._get_metadata_at_commit( |
|
1106 | c.file = dir_node | |
1113 | repo_name, commit, file_node) |
|
1107 | c.commit = commit | |
1114 | return {'metadata': metadata} |
|
1108 | ||
|
1109 | # using force=True here, make a little trick. We flush the cache and | |||
|
1110 | # compute it using the same key as without full_load, so the fully | |||
|
1111 | # loaded cached tree is now returned instead of partial | |||
|
1112 | return self._get_tree_at_commit( | |||
|
1113 | repo_name, commit.raw_id, dir_node.path, full_load=True, | |||
|
1114 | force=True) |
@@ -1,226 +1,226 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2015-2016 RhodeCode GmbH |
|
3 | # Copyright (C) 2015-2016 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 |
|
21 | |||
22 | import beaker |
|
22 | import beaker | |
23 | import logging |
|
23 | import logging | |
24 |
|
24 | |||
25 | from beaker.cache import _cache_decorate, cache_regions, region_invalidate |
|
25 | from beaker.cache import _cache_decorate, cache_regions, region_invalidate | |
26 |
|
26 | |||
27 | from rhodecode.lib.utils import safe_str, md5 |
|
27 | from rhodecode.lib.utils import safe_str, md5 | |
28 | from rhodecode.model.db import Session, CacheKey, IntegrityError |
|
28 | from rhodecode.model.db import Session, CacheKey, IntegrityError | |
29 |
|
29 | |||
30 | log = logging.getLogger(__name__) |
|
30 | log = logging.getLogger(__name__) | |
31 |
|
31 | |||
32 | FILE_TREE = 'cache_file_tree' |
|
32 | FILE_TREE = 'cache_file_tree' | |
33 | FILE_TREE_META = 'cache_file_tree_metadata' |
|
33 | FILE_TREE_META = 'cache_file_tree_metadata' | |
34 | FILE_SEARCH_TREE_META = 'cache_file_search_metadata' |
|
34 | FILE_SEARCH_TREE_META = 'cache_file_search_metadata' | |
35 | SUMMARY_STATS = 'cache_summary_stats' |
|
35 | SUMMARY_STATS = 'cache_summary_stats' | |
36 |
|
36 | |||
37 | # This list of caches gets purged when invalidation happens |
|
37 | # This list of caches gets purged when invalidation happens | |
38 |
USED_REPO_CACHES = (FILE_TREE, FILE_ |
|
38 | USED_REPO_CACHES = (FILE_TREE, FILE_SEARCH_TREE_META) | |
39 |
|
39 | |||
40 | DEFAULT_CACHE_MANAGER_CONFIG = { |
|
40 | DEFAULT_CACHE_MANAGER_CONFIG = { | |
41 | 'type': 'memorylru_base', |
|
41 | 'type': 'memorylru_base', | |
42 | 'max_items': 10240, |
|
42 | 'max_items': 10240, | |
43 | 'key_length': 256, |
|
43 | 'key_length': 256, | |
44 | 'enabled': True |
|
44 | 'enabled': True | |
45 | } |
|
45 | } | |
46 |
|
46 | |||
47 |
|
47 | |||
48 | def configure_cache_region( |
|
48 | def configure_cache_region( | |
49 | region_name, region_kw, default_cache_kw, default_expire=60): |
|
49 | region_name, region_kw, default_cache_kw, default_expire=60): | |
50 | default_type = default_cache_kw.get('type', 'memory') |
|
50 | default_type = default_cache_kw.get('type', 'memory') | |
51 | default_lock_dir = default_cache_kw.get('lock_dir') |
|
51 | default_lock_dir = default_cache_kw.get('lock_dir') | |
52 | default_data_dir = default_cache_kw.get('data_dir') |
|
52 | default_data_dir = default_cache_kw.get('data_dir') | |
53 |
|
53 | |||
54 | region_kw['lock_dir'] = region_kw.get('lock_dir', default_lock_dir) |
|
54 | region_kw['lock_dir'] = region_kw.get('lock_dir', default_lock_dir) | |
55 | region_kw['data_dir'] = region_kw.get('data_dir', default_data_dir) |
|
55 | region_kw['data_dir'] = region_kw.get('data_dir', default_data_dir) | |
56 | region_kw['type'] = region_kw.get('type', default_type) |
|
56 | region_kw['type'] = region_kw.get('type', default_type) | |
57 | region_kw['expire'] = int(region_kw.get('expire', default_expire)) |
|
57 | region_kw['expire'] = int(region_kw.get('expire', default_expire)) | |
58 |
|
58 | |||
59 | beaker.cache.cache_regions[region_name] = region_kw |
|
59 | beaker.cache.cache_regions[region_name] = region_kw | |
60 |
|
60 | |||
61 |
|
61 | |||
62 | def get_cache_manager(region_name, cache_name, custom_ttl=None): |
|
62 | def get_cache_manager(region_name, cache_name, custom_ttl=None): | |
63 | """ |
|
63 | """ | |
64 | Creates a Beaker cache manager. Such instance can be used like that:: |
|
64 | Creates a Beaker cache manager. Such instance can be used like that:: | |
65 |
|
65 | |||
66 | _namespace = caches.get_repo_namespace_key(caches.XXX, repo_name) |
|
66 | _namespace = caches.get_repo_namespace_key(caches.XXX, repo_name) | |
67 | cache_manager = caches.get_cache_manager('repo_cache_long', _namespace) |
|
67 | cache_manager = caches.get_cache_manager('repo_cache_long', _namespace) | |
68 | _cache_key = caches.compute_key_from_params(repo_name, commit.raw_id) |
|
68 | _cache_key = caches.compute_key_from_params(repo_name, commit.raw_id) | |
69 | def heavy_compute(): |
|
69 | def heavy_compute(): | |
70 | ... |
|
70 | ... | |
71 | result = cache_manager.get(_cache_key, createfunc=heavy_compute) |
|
71 | result = cache_manager.get(_cache_key, createfunc=heavy_compute) | |
72 |
|
72 | |||
73 | :param region_name: region from ini file |
|
73 | :param region_name: region from ini file | |
74 | :param cache_name: custom cache name, usually prefix+repo_name. eg |
|
74 | :param cache_name: custom cache name, usually prefix+repo_name. eg | |
75 | file_switcher_repo1 |
|
75 | file_switcher_repo1 | |
76 | :param custom_ttl: override .ini file timeout on this cache |
|
76 | :param custom_ttl: override .ini file timeout on this cache | |
77 | :return: instance of cache manager |
|
77 | :return: instance of cache manager | |
78 | """ |
|
78 | """ | |
79 |
|
79 | |||
80 | cache_config = cache_regions.get(region_name, DEFAULT_CACHE_MANAGER_CONFIG) |
|
80 | cache_config = cache_regions.get(region_name, DEFAULT_CACHE_MANAGER_CONFIG) | |
81 | if custom_ttl: |
|
81 | if custom_ttl: | |
82 | log.debug('Updating region %s with custom ttl: %s', |
|
82 | log.debug('Updating region %s with custom ttl: %s', | |
83 | region_name, custom_ttl) |
|
83 | region_name, custom_ttl) | |
84 | cache_config.update({'expire': custom_ttl}) |
|
84 | cache_config.update({'expire': custom_ttl}) | |
85 |
|
85 | |||
86 | return beaker.cache.Cache._get_cache(cache_name, cache_config) |
|
86 | return beaker.cache.Cache._get_cache(cache_name, cache_config) | |
87 |
|
87 | |||
88 |
|
88 | |||
89 | def clear_cache_manager(cache_manager): |
|
89 | def clear_cache_manager(cache_manager): | |
90 | """ |
|
90 | """ | |
91 | namespace = 'foobar' |
|
91 | namespace = 'foobar' | |
92 | cache_manager = get_cache_manager('repo_cache_long', namespace) |
|
92 | cache_manager = get_cache_manager('repo_cache_long', namespace) | |
93 | clear_cache_manager(cache_manager) |
|
93 | clear_cache_manager(cache_manager) | |
94 | """ |
|
94 | """ | |
95 |
|
95 | |||
96 | log.debug('Clearing all values for cache manager %s', cache_manager) |
|
96 | log.debug('Clearing all values for cache manager %s', cache_manager) | |
97 | cache_manager.clear() |
|
97 | cache_manager.clear() | |
98 |
|
98 | |||
99 |
|
99 | |||
100 | def clear_repo_caches(repo_name): |
|
100 | def clear_repo_caches(repo_name): | |
101 | # invalidate cache manager for this repo |
|
101 | # invalidate cache manager for this repo | |
102 | for prefix in USED_REPO_CACHES: |
|
102 | for prefix in USED_REPO_CACHES: | |
103 | namespace = get_repo_namespace_key(prefix, repo_name) |
|
103 | namespace = get_repo_namespace_key(prefix, repo_name) | |
104 | cache_manager = get_cache_manager('repo_cache_long', namespace) |
|
104 | cache_manager = get_cache_manager('repo_cache_long', namespace) | |
105 | clear_cache_manager(cache_manager) |
|
105 | clear_cache_manager(cache_manager) | |
106 |
|
106 | |||
107 |
|
107 | |||
108 | def compute_key_from_params(*args): |
|
108 | def compute_key_from_params(*args): | |
109 | """ |
|
109 | """ | |
110 | Helper to compute key from given params to be used in cache manager |
|
110 | Helper to compute key from given params to be used in cache manager | |
111 | """ |
|
111 | """ | |
112 | return md5("_".join(map(safe_str, args))) |
|
112 | return md5("_".join(map(safe_str, args))) | |
113 |
|
113 | |||
114 |
|
114 | |||
115 | def get_repo_namespace_key(prefix, repo_name): |
|
115 | def get_repo_namespace_key(prefix, repo_name): | |
116 | return '{0}_{1}'.format(prefix, compute_key_from_params(repo_name)) |
|
116 | return '{0}_{1}'.format(prefix, compute_key_from_params(repo_name)) | |
117 |
|
117 | |||
118 |
|
118 | |||
119 | def conditional_cache(region, prefix, condition, func): |
|
119 | def conditional_cache(region, prefix, condition, func): | |
120 | """ |
|
120 | """ | |
121 | Conditional caching function use like:: |
|
121 | Conditional caching function use like:: | |
122 | def _c(arg): |
|
122 | def _c(arg): | |
123 | # heavy computation function |
|
123 | # heavy computation function | |
124 | return data |
|
124 | return data | |
125 |
|
125 | |||
126 | # depending on the condition the compute is wrapped in cache or not |
|
126 | # depending on the condition the compute is wrapped in cache or not | |
127 | compute = conditional_cache('short_term', 'cache_desc', |
|
127 | compute = conditional_cache('short_term', 'cache_desc', | |
128 | condition=True, func=func) |
|
128 | condition=True, func=func) | |
129 | return compute(arg) |
|
129 | return compute(arg) | |
130 |
|
130 | |||
131 | :param region: name of cache region |
|
131 | :param region: name of cache region | |
132 | :param prefix: cache region prefix |
|
132 | :param prefix: cache region prefix | |
133 | :param condition: condition for cache to be triggered, and |
|
133 | :param condition: condition for cache to be triggered, and | |
134 | return data cached |
|
134 | return data cached | |
135 | :param func: wrapped heavy function to compute |
|
135 | :param func: wrapped heavy function to compute | |
136 |
|
136 | |||
137 | """ |
|
137 | """ | |
138 | wrapped = func |
|
138 | wrapped = func | |
139 | if condition: |
|
139 | if condition: | |
140 | log.debug('conditional_cache: True, wrapping call of ' |
|
140 | log.debug('conditional_cache: True, wrapping call of ' | |
141 | 'func: %s into %s region cache', region, func) |
|
141 | 'func: %s into %s region cache', region, func) | |
142 | cached_region = _cache_decorate((prefix,), None, None, region) |
|
142 | cached_region = _cache_decorate((prefix,), None, None, region) | |
143 | wrapped = cached_region(func) |
|
143 | wrapped = cached_region(func) | |
144 | return wrapped |
|
144 | return wrapped | |
145 |
|
145 | |||
146 |
|
146 | |||
147 | class ActiveRegionCache(object): |
|
147 | class ActiveRegionCache(object): | |
148 | def __init__(self, context): |
|
148 | def __init__(self, context): | |
149 | self.context = context |
|
149 | self.context = context | |
150 |
|
150 | |||
151 | def invalidate(self, *args, **kwargs): |
|
151 | def invalidate(self, *args, **kwargs): | |
152 | return False |
|
152 | return False | |
153 |
|
153 | |||
154 | def compute(self): |
|
154 | def compute(self): | |
155 | log.debug('Context cache: getting obj %s from cache', self.context) |
|
155 | log.debug('Context cache: getting obj %s from cache', self.context) | |
156 | return self.context.compute_func(self.context.cache_key) |
|
156 | return self.context.compute_func(self.context.cache_key) | |
157 |
|
157 | |||
158 |
|
158 | |||
159 | class FreshRegionCache(ActiveRegionCache): |
|
159 | class FreshRegionCache(ActiveRegionCache): | |
160 | def invalidate(self): |
|
160 | def invalidate(self): | |
161 | log.debug('Context cache: invalidating cache for %s', self.context) |
|
161 | log.debug('Context cache: invalidating cache for %s', self.context) | |
162 | region_invalidate( |
|
162 | region_invalidate( | |
163 | self.context.compute_func, None, self.context.cache_key) |
|
163 | self.context.compute_func, None, self.context.cache_key) | |
164 | return True |
|
164 | return True | |
165 |
|
165 | |||
166 |
|
166 | |||
167 | class InvalidationContext(object): |
|
167 | class InvalidationContext(object): | |
168 | def __repr__(self): |
|
168 | def __repr__(self): | |
169 | return '<InvalidationContext:{}[{}]>'.format( |
|
169 | return '<InvalidationContext:{}[{}]>'.format( | |
170 | safe_str(self.repo_name), safe_str(self.cache_type)) |
|
170 | safe_str(self.repo_name), safe_str(self.cache_type)) | |
171 |
|
171 | |||
172 | def __init__(self, compute_func, repo_name, cache_type, |
|
172 | def __init__(self, compute_func, repo_name, cache_type, | |
173 | raise_exception=False): |
|
173 | raise_exception=False): | |
174 | self.compute_func = compute_func |
|
174 | self.compute_func = compute_func | |
175 | self.repo_name = repo_name |
|
175 | self.repo_name = repo_name | |
176 | self.cache_type = cache_type |
|
176 | self.cache_type = cache_type | |
177 | self.cache_key = compute_key_from_params( |
|
177 | self.cache_key = compute_key_from_params( | |
178 | repo_name, cache_type) |
|
178 | repo_name, cache_type) | |
179 | self.raise_exception = raise_exception |
|
179 | self.raise_exception = raise_exception | |
180 |
|
180 | |||
181 | def get_cache_obj(self): |
|
181 | def get_cache_obj(self): | |
182 | cache_key = CacheKey.get_cache_key( |
|
182 | cache_key = CacheKey.get_cache_key( | |
183 | self.repo_name, self.cache_type) |
|
183 | self.repo_name, self.cache_type) | |
184 | cache_obj = CacheKey.get_active_cache(cache_key) |
|
184 | cache_obj = CacheKey.get_active_cache(cache_key) | |
185 | if not cache_obj: |
|
185 | if not cache_obj: | |
186 | cache_obj = CacheKey(cache_key, self.repo_name) |
|
186 | cache_obj = CacheKey(cache_key, self.repo_name) | |
187 | return cache_obj |
|
187 | return cache_obj | |
188 |
|
188 | |||
189 | def __enter__(self): |
|
189 | def __enter__(self): | |
190 | """ |
|
190 | """ | |
191 | Test if current object is valid, and return CacheRegion function |
|
191 | Test if current object is valid, and return CacheRegion function | |
192 | that does invalidation and calculation |
|
192 | that does invalidation and calculation | |
193 | """ |
|
193 | """ | |
194 |
|
194 | |||
195 | self.cache_obj = self.get_cache_obj() |
|
195 | self.cache_obj = self.get_cache_obj() | |
196 | if self.cache_obj.cache_active: |
|
196 | if self.cache_obj.cache_active: | |
197 | # means our cache obj is existing and marked as it's |
|
197 | # means our cache obj is existing and marked as it's | |
198 | # cache is not outdated, we return BaseInvalidator |
|
198 | # cache is not outdated, we return BaseInvalidator | |
199 | self.skip_cache_active_change = True |
|
199 | self.skip_cache_active_change = True | |
200 | return ActiveRegionCache(self) |
|
200 | return ActiveRegionCache(self) | |
201 |
|
201 | |||
202 | # the key is either not existing or set to False, we return |
|
202 | # the key is either not existing or set to False, we return | |
203 | # the real invalidator which re-computes value. We additionally set |
|
203 | # the real invalidator which re-computes value. We additionally set | |
204 | # the flag to actually update the Database objects |
|
204 | # the flag to actually update the Database objects | |
205 | self.skip_cache_active_change = False |
|
205 | self.skip_cache_active_change = False | |
206 | return FreshRegionCache(self) |
|
206 | return FreshRegionCache(self) | |
207 |
|
207 | |||
208 | def __exit__(self, exc_type, exc_val, exc_tb): |
|
208 | def __exit__(self, exc_type, exc_val, exc_tb): | |
209 |
|
209 | |||
210 | if self.skip_cache_active_change: |
|
210 | if self.skip_cache_active_change: | |
211 | return |
|
211 | return | |
212 |
|
212 | |||
213 | try: |
|
213 | try: | |
214 | self.cache_obj.cache_active = True |
|
214 | self.cache_obj.cache_active = True | |
215 | Session().add(self.cache_obj) |
|
215 | Session().add(self.cache_obj) | |
216 | Session().commit() |
|
216 | Session().commit() | |
217 | except IntegrityError: |
|
217 | except IntegrityError: | |
218 | # if we catch integrity error, it means we inserted this object |
|
218 | # if we catch integrity error, it means we inserted this object | |
219 | # assumption is that's really an edge race-condition case and |
|
219 | # assumption is that's really an edge race-condition case and | |
220 | # it's safe is to skip it |
|
220 | # it's safe is to skip it | |
221 | Session().rollback() |
|
221 | Session().rollback() | |
222 | except Exception: |
|
222 | except Exception: | |
223 | log.exception('Failed to commit on cache key update') |
|
223 | log.exception('Failed to commit on cache key update') | |
224 | Session().rollback() |
|
224 | Session().rollback() | |
225 | if self.raise_exception: |
|
225 | if self.raise_exception: | |
226 | raise |
|
226 | raise |
@@ -1,50 +1,51 b'' | |||||
1 |
|
1 | |||
2 | /****************************************************************************** |
|
2 | /****************************************************************************** | |
3 | * * |
|
3 | * * | |
4 | * DO NOT CHANGE THIS FILE MANUALLY * |
|
4 | * DO NOT CHANGE THIS FILE MANUALLY * | |
5 | * * |
|
5 | * * | |
6 | * * |
|
6 | * * | |
7 |
* This file is automatically generated when the app starts up |
|
7 | * This file is automatically generated when the app starts up with * | |
|
8 | * generate_js_files = true * | |||
8 | * * |
|
9 | * * | |
9 | * To add a route here pass jsroute=True to the route definition in the app * |
|
10 | * To add a route here pass jsroute=True to the route definition in the app * | |
10 | * * |
|
11 | * * | |
11 | ******************************************************************************/ |
|
12 | ******************************************************************************/ | |
12 | function registerRCRoutes() { |
|
13 | function registerRCRoutes() { | |
13 | // routes registration |
|
14 | // routes registration | |
14 | pyroutes.register('home', '/', []); |
|
15 | pyroutes.register('home', '/', []); | |
15 | pyroutes.register('user_autocomplete_data', '/_users', []); |
|
16 | pyroutes.register('user_autocomplete_data', '/_users', []); | |
16 | pyroutes.register('new_repo', '/_admin/create_repository', []); |
|
17 | pyroutes.register('new_repo', '/_admin/create_repository', []); | |
17 | pyroutes.register('edit_user_group_members', '/_admin/user_groups/%(user_group_id)s/edit/members', ['user_group_id']); |
|
18 | pyroutes.register('edit_user_group_members', '/_admin/user_groups/%(user_group_id)s/edit/members', ['user_group_id']); | |
18 | pyroutes.register('gists', '/_admin/gists', []); |
|
19 | pyroutes.register('gists', '/_admin/gists', []); | |
19 | pyroutes.register('new_gist', '/_admin/gists/new', []); |
|
20 | pyroutes.register('new_gist', '/_admin/gists/new', []); | |
20 | pyroutes.register('toggle_following', '/_admin/toggle_following', []); |
|
21 | pyroutes.register('toggle_following', '/_admin/toggle_following', []); | |
21 | pyroutes.register('repo_stats', '/%(repo_name)s/repo_stats/%(commit_id)s', ['repo_name', 'commit_id']); |
|
22 | pyroutes.register('repo_stats', '/%(repo_name)s/repo_stats/%(commit_id)s', ['repo_name', 'commit_id']); | |
22 | pyroutes.register('repo_refs_data', '/%(repo_name)s/refs-data', ['repo_name']); |
|
23 | pyroutes.register('repo_refs_data', '/%(repo_name)s/refs-data', ['repo_name']); | |
23 | pyroutes.register('repo_refs_changelog_data', '/%(repo_name)s/refs-data-changelog', ['repo_name']); |
|
24 | pyroutes.register('repo_refs_changelog_data', '/%(repo_name)s/refs-data-changelog', ['repo_name']); | |
24 | pyroutes.register('changeset_home', '/%(repo_name)s/changeset/%(revision)s', ['repo_name', 'revision']); |
|
25 | pyroutes.register('changeset_home', '/%(repo_name)s/changeset/%(revision)s', ['repo_name', 'revision']); | |
25 | pyroutes.register('edit_repo', '/%(repo_name)s/settings', ['repo_name']); |
|
26 | pyroutes.register('edit_repo', '/%(repo_name)s/settings', ['repo_name']); | |
26 | pyroutes.register('edit_repo_perms', '/%(repo_name)s/settings/permissions', ['repo_name']); |
|
27 | pyroutes.register('edit_repo_perms', '/%(repo_name)s/settings/permissions', ['repo_name']); | |
27 | pyroutes.register('changeset_comment', '/%(repo_name)s/changeset/%(revision)s/comment', ['repo_name', 'revision']); |
|
28 | pyroutes.register('changeset_comment', '/%(repo_name)s/changeset/%(revision)s/comment', ['repo_name', 'revision']); | |
28 | pyroutes.register('changeset_comment_preview', '/%(repo_name)s/changeset/comment/preview', ['repo_name']); |
|
29 | pyroutes.register('changeset_comment_preview', '/%(repo_name)s/changeset/comment/preview', ['repo_name']); | |
29 | pyroutes.register('changeset_comment_delete', '/%(repo_name)s/changeset/comment/%(comment_id)s/delete', ['repo_name', 'comment_id']); |
|
30 | pyroutes.register('changeset_comment_delete', '/%(repo_name)s/changeset/comment/%(comment_id)s/delete', ['repo_name', 'comment_id']); | |
30 | pyroutes.register('changeset_info', '/changeset_info/%(repo_name)s/%(revision)s', ['repo_name', 'revision']); |
|
31 | pyroutes.register('changeset_info', '/changeset_info/%(repo_name)s/%(revision)s', ['repo_name', 'revision']); | |
31 | pyroutes.register('compare_url', '/%(repo_name)s/compare/%(source_ref_type)s@%(source_ref)s...%(target_ref_type)s@%(target_ref)s', ['repo_name', 'source_ref_type', 'source_ref', 'target_ref_type', 'target_ref']); |
|
32 | pyroutes.register('compare_url', '/%(repo_name)s/compare/%(source_ref_type)s@%(source_ref)s...%(target_ref_type)s@%(target_ref)s', ['repo_name', 'source_ref_type', 'source_ref', 'target_ref_type', 'target_ref']); | |
32 | pyroutes.register('pullrequest_home', '/%(repo_name)s/pull-request/new', ['repo_name']); |
|
33 | pyroutes.register('pullrequest_home', '/%(repo_name)s/pull-request/new', ['repo_name']); | |
33 | pyroutes.register('pullrequest', '/%(repo_name)s/pull-request/new', ['repo_name']); |
|
34 | pyroutes.register('pullrequest', '/%(repo_name)s/pull-request/new', ['repo_name']); | |
34 | pyroutes.register('pullrequest_repo_refs', '/%(repo_name)s/pull-request/refs/%(target_repo_name)s', ['repo_name', 'target_repo_name']); |
|
35 | pyroutes.register('pullrequest_repo_refs', '/%(repo_name)s/pull-request/refs/%(target_repo_name)s', ['repo_name', 'target_repo_name']); | |
35 | pyroutes.register('pullrequest_repo_destinations', '/%(repo_name)s/pull-request/repo-destinations', ['repo_name']); |
|
36 | pyroutes.register('pullrequest_repo_destinations', '/%(repo_name)s/pull-request/repo-destinations', ['repo_name']); | |
36 | pyroutes.register('pullrequest_update', '/%(repo_name)s/pull-request/%(pull_request_id)s', ['repo_name', 'pull_request_id']); |
|
37 | pyroutes.register('pullrequest_update', '/%(repo_name)s/pull-request/%(pull_request_id)s', ['repo_name', 'pull_request_id']); | |
37 | pyroutes.register('pullrequest_show_all', '/%(repo_name)s/pull-request', ['repo_name']); |
|
38 | pyroutes.register('pullrequest_show_all', '/%(repo_name)s/pull-request', ['repo_name']); | |
38 | pyroutes.register('pullrequest_comment', '/%(repo_name)s/pull-request-comment/%(pull_request_id)s', ['repo_name', 'pull_request_id']); |
|
39 | pyroutes.register('pullrequest_comment', '/%(repo_name)s/pull-request-comment/%(pull_request_id)s', ['repo_name', 'pull_request_id']); | |
39 | pyroutes.register('pullrequest_comment_delete', '/%(repo_name)s/pull-request-comment/%(comment_id)s/delete', ['repo_name', 'comment_id']); |
|
40 | pyroutes.register('pullrequest_comment_delete', '/%(repo_name)s/pull-request-comment/%(comment_id)s/delete', ['repo_name', 'comment_id']); | |
40 | pyroutes.register('changelog_home', '/%(repo_name)s/changelog', ['repo_name']); |
|
41 | pyroutes.register('changelog_home', '/%(repo_name)s/changelog', ['repo_name']); | |
41 | pyroutes.register('changelog_file_home', '/%(repo_name)s/changelog/%(revision)s/%(f_path)s', ['repo_name', 'revision', 'f_path']); |
|
42 | pyroutes.register('changelog_file_home', '/%(repo_name)s/changelog/%(revision)s/%(f_path)s', ['repo_name', 'revision', 'f_path']); | |
42 | pyroutes.register('files_home', '/%(repo_name)s/files/%(revision)s/%(f_path)s', ['repo_name', 'revision', 'f_path']); |
|
43 | pyroutes.register('files_home', '/%(repo_name)s/files/%(revision)s/%(f_path)s', ['repo_name', 'revision', 'f_path']); | |
43 | pyroutes.register('files_history_home', '/%(repo_name)s/history/%(revision)s/%(f_path)s', ['repo_name', 'revision', 'f_path']); |
|
44 | pyroutes.register('files_history_home', '/%(repo_name)s/history/%(revision)s/%(f_path)s', ['repo_name', 'revision', 'f_path']); | |
44 | pyroutes.register('files_authors_home', '/%(repo_name)s/authors/%(revision)s/%(f_path)s', ['repo_name', 'revision', 'f_path']); |
|
45 | pyroutes.register('files_authors_home', '/%(repo_name)s/authors/%(revision)s/%(f_path)s', ['repo_name', 'revision', 'f_path']); | |
45 | pyroutes.register('files_archive_home', '/%(repo_name)s/archive/%(fname)s', ['repo_name', 'fname']); |
|
46 | pyroutes.register('files_archive_home', '/%(repo_name)s/archive/%(fname)s', ['repo_name', 'fname']); | |
46 | pyroutes.register('files_nodelist_home', '/%(repo_name)s/nodelist/%(revision)s/%(f_path)s', ['repo_name', 'revision', 'f_path']); |
|
47 | pyroutes.register('files_nodelist_home', '/%(repo_name)s/nodelist/%(revision)s/%(f_path)s', ['repo_name', 'revision', 'f_path']); | |
47 |
pyroutes.register('files_ |
|
48 | pyroutes.register('files_nodetree_full', '/%(repo_name)s/nodetree_full/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); | |
48 | pyroutes.register('summary_home_slash', '/%(repo_name)s/', ['repo_name']); |
|
49 | pyroutes.register('summary_home_slash', '/%(repo_name)s/', ['repo_name']); | |
49 | pyroutes.register('summary_home', '/%(repo_name)s', ['repo_name']); |
|
50 | pyroutes.register('summary_home', '/%(repo_name)s', ['repo_name']); | |
50 | } |
|
51 | } |
@@ -1,333 +1,322 b'' | |||||
1 | <%inherit file="/base/base.html"/> |
|
1 | <%inherit file="/base/base.html"/> | |
2 |
|
2 | |||
3 | <%def name="title(*args)"> |
|
3 | <%def name="title(*args)"> | |
4 | ${_('%s Files') % c.repo_name} |
|
4 | ${_('%s Files') % c.repo_name} | |
5 | %if hasattr(c,'file'): |
|
5 | %if hasattr(c,'file'): | |
6 | · ${h.safe_unicode(c.file.path) or '\\'} |
|
6 | · ${h.safe_unicode(c.file.path) or '\\'} | |
7 | %endif |
|
7 | %endif | |
8 |
|
8 | |||
9 | %if c.rhodecode_name: |
|
9 | %if c.rhodecode_name: | |
10 | · ${h.branding(c.rhodecode_name)} |
|
10 | · ${h.branding(c.rhodecode_name)} | |
11 | %endif |
|
11 | %endif | |
12 | </%def> |
|
12 | </%def> | |
13 |
|
13 | |||
14 | <%def name="breadcrumbs_links()"> |
|
14 | <%def name="breadcrumbs_links()"> | |
15 | ${_('Files')} |
|
15 | ${_('Files')} | |
16 | %if c.file: |
|
16 | %if c.file: | |
17 | @ ${h.show_id(c.commit)} |
|
17 | @ ${h.show_id(c.commit)} | |
18 | %endif |
|
18 | %endif | |
19 | </%def> |
|
19 | </%def> | |
20 |
|
20 | |||
21 | <%def name="menu_bar_nav()"> |
|
21 | <%def name="menu_bar_nav()"> | |
22 | ${self.menu_items(active='repositories')} |
|
22 | ${self.menu_items(active='repositories')} | |
23 | </%def> |
|
23 | </%def> | |
24 |
|
24 | |||
25 | <%def name="menu_bar_subnav()"> |
|
25 | <%def name="menu_bar_subnav()"> | |
26 | ${self.repo_menu(active='files')} |
|
26 | ${self.repo_menu(active='files')} | |
27 | </%def> |
|
27 | </%def> | |
28 |
|
28 | |||
29 | <%def name="main()"> |
|
29 | <%def name="main()"> | |
30 | <div class="title"> |
|
30 | <div class="title"> | |
31 | ${self.repo_page_title(c.rhodecode_db_repo)} |
|
31 | ${self.repo_page_title(c.rhodecode_db_repo)} | |
32 | </div> |
|
32 | </div> | |
33 |
|
33 | |||
34 | <div id="pjax-container" class="summary"> |
|
34 | <div id="pjax-container" class="summary"> | |
35 | <div id="files_data"> |
|
35 | <div id="files_data"> | |
36 | <%include file='files_pjax.html'/> |
|
36 | <%include file='files_pjax.html'/> | |
37 | </div> |
|
37 | </div> | |
38 | </div> |
|
38 | </div> | |
39 | <script> |
|
39 | <script> | |
40 | var curState = { |
|
40 | var curState = { | |
41 | commit_id: "${c.commit.raw_id}" |
|
41 | commit_id: "${c.commit.raw_id}" | |
42 | }; |
|
42 | }; | |
43 |
|
43 | |||
44 | var getState = function(context) { |
|
44 | var getState = function(context) { | |
45 | var url = $(location).attr('href'); |
|
45 | var url = $(location).attr('href'); | |
46 | var _base_url = '${h.url("files_home",repo_name=c.repo_name,revision='',f_path='')}'; |
|
46 | var _base_url = '${h.url("files_home",repo_name=c.repo_name,revision='',f_path='')}'; | |
47 | var _annotate_url = '${h.url("files_annotate_home",repo_name=c.repo_name,revision='',f_path='')}'; |
|
47 | var _annotate_url = '${h.url("files_annotate_home",repo_name=c.repo_name,revision='',f_path='')}'; | |
48 | _base_url = _base_url.replace('//', '/'); |
|
48 | _base_url = _base_url.replace('//', '/'); | |
49 | _annotate_url = _annotate_url.replace('//', '/'); |
|
49 | _annotate_url = _annotate_url.replace('//', '/'); | |
50 |
|
50 | |||
51 | //extract f_path from url. |
|
51 | //extract f_path from url. | |
52 | var parts = url.split(_base_url); |
|
52 | var parts = url.split(_base_url); | |
53 | if (parts.length != 2) { |
|
53 | if (parts.length != 2) { | |
54 | parts = url.split(_annotate_url); |
|
54 | parts = url.split(_annotate_url); | |
55 | if (parts.length != 2) { |
|
55 | if (parts.length != 2) { | |
56 | var rev = "tip"; |
|
56 | var rev = "tip"; | |
57 | var f_path = ""; |
|
57 | var f_path = ""; | |
58 | } else { |
|
58 | } else { | |
59 | var parts2 = parts[1].split('/'); |
|
59 | var parts2 = parts[1].split('/'); | |
60 | var rev = parts2.shift(); // pop the first element which is the revision |
|
60 | var rev = parts2.shift(); // pop the first element which is the revision | |
61 | var f_path = parts2.join('/'); |
|
61 | var f_path = parts2.join('/'); | |
62 | } |
|
62 | } | |
63 |
|
63 | |||
64 | } else { |
|
64 | } else { | |
65 | var parts2 = parts[1].split('/'); |
|
65 | var parts2 = parts[1].split('/'); | |
66 | var rev = parts2.shift(); // pop the first element which is the revision |
|
66 | var rev = parts2.shift(); // pop the first element which is the revision | |
67 | var f_path = parts2.join('/'); |
|
67 | var f_path = parts2.join('/'); | |
68 | } |
|
68 | } | |
69 |
|
69 | |||
70 | var _node_list_url = pyroutes.url('files_nodelist_home', |
|
70 | var _node_list_url = pyroutes.url('files_nodelist_home', | |
71 | {repo_name: templateContext.repo_name, |
|
71 | {repo_name: templateContext.repo_name, | |
72 | revision: rev, f_path: f_path}); |
|
72 | revision: rev, f_path: f_path}); | |
73 | var _url_base = pyroutes.url('files_home', |
|
73 | var _url_base = pyroutes.url('files_home', | |
74 | {repo_name: templateContext.repo_name, |
|
74 | {repo_name: templateContext.repo_name, | |
75 | revision: rev, f_path:'__FPATH__'}); |
|
75 | revision: rev, f_path:'__FPATH__'}); | |
76 | return { |
|
76 | return { | |
77 | url: url, |
|
77 | url: url, | |
78 | f_path: f_path, |
|
78 | f_path: f_path, | |
79 | rev: rev, |
|
79 | rev: rev, | |
80 | commit_id: curState.commit_id, |
|
80 | commit_id: curState.commit_id, | |
81 | node_list_url: _node_list_url, |
|
81 | node_list_url: _node_list_url, | |
82 | url_base: _url_base |
|
82 | url_base: _url_base | |
83 | }; |
|
83 | }; | |
84 | }; |
|
84 | }; | |
85 |
|
85 | |||
86 | var metadataRequest = null; |
|
86 | var metadataRequest = null; | |
87 | var getFilesMetadata = function() { |
|
87 | var getFilesMetadata = function() { | |
88 | if (metadataRequest && metadataRequest.readyState != 4) { |
|
88 | if (metadataRequest && metadataRequest.readyState != 4) { | |
89 | metadataRequest.abort(); |
|
89 | metadataRequest.abort(); | |
90 | } |
|
90 | } | |
91 | if (source_page) { |
|
91 | if (source_page) { | |
92 | return false; |
|
92 | return false; | |
93 | } |
|
93 | } | |
|
94 | ||||
|
95 | if ($('#file-tree-wrapper').hasClass('full-load')) { | |||
|
96 | // in case our HTML wrapper has full-load class we don't | |||
|
97 | // trigger the async load of metadata | |||
|
98 | return false; | |||
|
99 | } | |||
|
100 | ||||
94 | var state = getState('metadata'); |
|
101 | var state = getState('metadata'); | |
95 | var url_data = { |
|
102 | var url_data = { | |
96 | 'repo_name': templateContext.repo_name, |
|
103 | 'repo_name': templateContext.repo_name, | |
97 |
' |
|
104 | 'commit_id': state.commit_id, | |
98 | 'f_path': state.f_path |
|
105 | 'f_path': state.f_path | |
99 | }; |
|
106 | }; | |
100 |
|
107 | |||
101 |
var url = pyroutes.url('files_ |
|
108 | var url = pyroutes.url('files_nodetree_full', url_data); | |
102 |
|
109 | |||
103 | metadataRequest = $.ajax({url: url}); |
|
110 | metadataRequest = $.ajax({url: url}); | |
104 |
|
111 | |||
105 | metadataRequest.done(function(data) { |
|
112 | metadataRequest.done(function(data) { | |
106 | var data = data.metadata; |
|
113 | $('#file-tree').html(data); | |
107 | var dataLength = data.length; |
|
|||
108 | for (var i = 0; i < dataLength; i++) { |
|
|||
109 | var rowData = data[i]; |
|
|||
110 | var name = rowData.name.replace('\\', '\\\\'); |
|
|||
111 |
|
||||
112 | $('td[title="size-' + name + '"]').html(rowData.size); |
|
|||
113 | var timeComponent = AgeModule.createTimeComponent( |
|
|||
114 | rowData.modified_ts, rowData.modified_at); |
|
|||
115 | $('td[title="modified_at-' + name + '"]').html(timeComponent); |
|
|||
116 |
|
||||
117 | $('td[title="revision-' + name + '"]').html( |
|
|||
118 | '<div class="tooltip" title="{0}"><pre>r{1}:{2}</pre></div>'.format( |
|
|||
119 | data[i].message, data[i].revision, data[i].short_id)); |
|
|||
120 | $('td[title="author-' + name + '"]').html( |
|
|||
121 | '<span title="{0}">{1}</span>'.format( |
|
|||
122 | data[i].author, data[i].user_profile)); |
|
|||
123 | } |
|
|||
124 | timeagoActivate(); |
|
114 | timeagoActivate(); | |
125 | }); |
|
115 | }); | |
126 | metadataRequest.fail(function (data, textStatus, errorThrown) { |
|
116 | metadataRequest.fail(function (data, textStatus, errorThrown) { | |
127 | console.log(data); |
|
117 | console.log(data); | |
128 | if (data.status != 0) { |
|
118 | if (data.status != 0) { | |
129 | alert("Error while fetching metadata.\nError code {0} ({1}).Please consider reloading the page".format(data.status,data.statusText)); |
|
119 | alert("Error while fetching metadata.\nError code {0} ({1}).Please consider reloading the page".format(data.status,data.statusText)); | |
130 | } |
|
120 | } | |
131 | }); |
|
121 | }); | |
132 | }; |
|
122 | }; | |
133 |
|
123 | |||
134 | var callbacks = function() { |
|
124 | var callbacks = function() { | |
135 | var state = getState('callbacks'); |
|
125 | var state = getState('callbacks'); | |
136 | timeagoActivate(); |
|
126 | timeagoActivate(); | |
137 |
|
127 | |||
138 | // used for history, and switch to |
|
128 | // used for history, and switch to | |
139 | var initialCommitData = { |
|
129 | var initialCommitData = { | |
140 | id: null, |
|
130 | id: null, | |
141 |
text: |
|
131 | text: '${_("Switch To Commit")}', | |
142 | type: 'sha', |
|
132 | type: 'sha', | |
143 | raw_id: null, |
|
133 | raw_id: null, | |
144 | files_url: null |
|
134 | files_url: null | |
145 | }; |
|
135 | }; | |
146 |
|
136 | |||
147 | if ($('#trimmed_message_box').height() < 50) { |
|
137 | if ($('#trimmed_message_box').height() < 50) { | |
148 | $('#message_expand').hide(); |
|
138 | $('#message_expand').hide(); | |
149 | } |
|
139 | } | |
150 |
|
140 | |||
151 | $('#message_expand').on('click', function(e) { |
|
141 | $('#message_expand').on('click', function(e) { | |
152 | $('#trimmed_message_box').css('max-height', 'none'); |
|
142 | $('#trimmed_message_box').css('max-height', 'none'); | |
153 | $(this).hide(); |
|
143 | $(this).hide(); | |
154 | }); |
|
144 | }); | |
155 |
|
145 | |||
156 |
|
146 | |||
157 | if (source_page) { |
|
147 | if (source_page) { | |
158 | // variants for with source code, not tree view |
|
148 | // variants for with source code, not tree view | |
159 |
|
149 | |||
160 | if (location.href.indexOf('#') != -1) { |
|
150 | if (location.href.indexOf('#') != -1) { | |
161 | page_highlights = location.href.substring(location.href.indexOf('#') + 1).split('L'); |
|
151 | page_highlights = location.href.substring(location.href.indexOf('#') + 1).split('L'); | |
162 | if (page_highlights.length == 2) { |
|
152 | if (page_highlights.length == 2) { | |
163 | highlight_ranges = page_highlights[1].split(","); |
|
153 | highlight_ranges = page_highlights[1].split(","); | |
164 |
|
154 | |||
165 | var h_lines = []; |
|
155 | var h_lines = []; | |
166 | for (pos in highlight_ranges) { |
|
156 | for (pos in highlight_ranges) { | |
167 | var _range = highlight_ranges[pos].split('-'); |
|
157 | var _range = highlight_ranges[pos].split('-'); | |
168 | if (_range.length == 2) { |
|
158 | if (_range.length == 2) { | |
169 | var start = parseInt(_range[0]); |
|
159 | var start = parseInt(_range[0]); | |
170 | var end = parseInt(_range[1]); |
|
160 | var end = parseInt(_range[1]); | |
171 | if (start < end) { |
|
161 | if (start < end) { | |
172 | for (var i = start; i <= end; i++) { |
|
162 | for (var i = start; i <= end; i++) { | |
173 | h_lines.push(i); |
|
163 | h_lines.push(i); | |
174 | } |
|
164 | } | |
175 | } |
|
165 | } | |
176 | } |
|
166 | } | |
177 | else { |
|
167 | else { | |
178 | h_lines.push(parseInt(highlight_ranges[pos])); |
|
168 | h_lines.push(parseInt(highlight_ranges[pos])); | |
179 | } |
|
169 | } | |
180 | } |
|
170 | } | |
181 |
|
171 | |||
182 | for (pos in h_lines) { |
|
172 | for (pos in h_lines) { | |
183 | // @comment-highlight-color |
|
173 | // @comment-highlight-color | |
184 | $('#L' + h_lines[pos]).css('background-color', '#ffd887'); |
|
174 | $('#L' + h_lines[pos]).css('background-color', '#ffd887'); | |
185 | } |
|
175 | } | |
186 |
|
176 | |||
187 | var _first_line = $('#L' + h_lines[0]).get(0); |
|
177 | var _first_line = $('#L' + h_lines[0]).get(0); | |
188 | if (_first_line) { |
|
178 | if (_first_line) { | |
189 | var line = $('#L' + h_lines[0]); |
|
179 | var line = $('#L' + h_lines[0]); | |
190 | offsetScroll(line, 70); |
|
180 | offsetScroll(line, 70); | |
191 | } |
|
181 | } | |
192 | } |
|
182 | } | |
193 | } |
|
183 | } | |
194 |
|
184 | |||
195 | // select code link event |
|
185 | // select code link event | |
196 | $("#hlcode").mouseup(getSelectionLink); |
|
186 | $("#hlcode").mouseup(getSelectionLink); | |
197 |
|
187 | |||
198 | // file history select2 |
|
188 | // file history select2 | |
199 | select2FileHistorySwitcher('#diff1', initialCommitData, state); |
|
189 | select2FileHistorySwitcher('#diff1', initialCommitData, state); | |
200 | $('#diff1').on('change', function(e) { |
|
190 | $('#diff1').on('change', function(e) { | |
201 | $('#diff').removeClass('disabled').removeAttr("disabled"); |
|
191 | $('#diff').removeClass('disabled').removeAttr("disabled"); | |
202 | $('#show_rev').removeClass('disabled').removeAttr("disabled"); |
|
192 | $('#show_rev').removeClass('disabled').removeAttr("disabled"); | |
203 | }); |
|
193 | }); | |
204 |
|
194 | |||
205 | // show more authors |
|
195 | // show more authors | |
206 | $('#show_authors').on('click', function(e) { |
|
196 | $('#show_authors').on('click', function(e) { | |
207 | e.preventDefault(); |
|
197 | e.preventDefault(); | |
208 | var url = pyroutes.url('files_authors_home', |
|
198 | var url = pyroutes.url('files_authors_home', | |
209 | {'repo_name': templateContext.repo_name, |
|
199 | {'repo_name': templateContext.repo_name, | |
210 | 'revision': state.rev, 'f_path': state.f_path}); |
|
200 | 'revision': state.rev, 'f_path': state.f_path}); | |
211 |
|
201 | |||
212 | $.pjax({ |
|
202 | $.pjax({ | |
213 | url: url, |
|
203 | url: url, | |
214 | data: 'annotate=${"1" if c.annotate else "0"}', |
|
204 | data: 'annotate=${"1" if c.annotate else "0"}', | |
215 | container: '#file_authors', |
|
205 | container: '#file_authors', | |
216 | push: false, |
|
206 | push: false, | |
217 | timeout: pjaxTimeout |
|
207 | timeout: pjaxTimeout | |
218 | }).complete(function(){ |
|
208 | }).complete(function(){ | |
219 | $('#show_authors').hide(); |
|
209 | $('#show_authors').hide(); | |
220 | }) |
|
210 | }) | |
221 | }); |
|
211 | }); | |
222 |
|
212 | |||
223 | // load file short history |
|
213 | // load file short history | |
224 | $('#file_history_overview').on('click', function(e) { |
|
214 | $('#file_history_overview').on('click', function(e) { | |
225 | e.preventDefault(); |
|
215 | e.preventDefault(); | |
226 | path = state.f_path; |
|
216 | path = state.f_path; | |
227 | if (path.indexOf("#") >= 0) { |
|
217 | if (path.indexOf("#") >= 0) { | |
228 | path = path.slice(0, path.indexOf("#")); |
|
218 | path = path.slice(0, path.indexOf("#")); | |
229 | } |
|
219 | } | |
230 | var url = pyroutes.url('changelog_file_home', |
|
220 | var url = pyroutes.url('changelog_file_home', | |
231 | {'repo_name': templateContext.repo_name, |
|
221 | {'repo_name': templateContext.repo_name, | |
232 | 'revision': state.rev, 'f_path': path, 'limit': 6}); |
|
222 | 'revision': state.rev, 'f_path': path, 'limit': 6}); | |
233 | $('#file_history_container').show(); |
|
223 | $('#file_history_container').show(); | |
234 | $('#file_history_container').html('<div class="file-history-inner">{0}</div>'.format(_gettext('Loading ...'))); |
|
224 | $('#file_history_container').html('<div class="file-history-inner">{0}</div>'.format(_gettext('Loading ...'))); | |
235 |
|
225 | |||
236 | $.pjax({ |
|
226 | $.pjax({ | |
237 | url: url, |
|
227 | url: url, | |
238 | container: '#file_history_container', |
|
228 | container: '#file_history_container', | |
239 | push: false, |
|
229 | push: false, | |
240 | timeout: pjaxTimeout |
|
230 | timeout: pjaxTimeout | |
241 | }) |
|
231 | }) | |
242 | }); |
|
232 | }); | |
243 |
|
233 | |||
244 | } |
|
234 | } | |
245 | else { |
|
235 | else { | |
246 | getFilesMetadata(); |
|
236 | getFilesMetadata(); | |
247 |
|
237 | |||
248 | // fuzzy file filter |
|
238 | // fuzzy file filter | |
249 | fileBrowserListeners(state.node_list_url, state.url_base); |
|
239 | fileBrowserListeners(state.node_list_url, state.url_base); | |
250 |
|
240 | |||
251 | // switch to widget |
|
241 | // switch to widget | |
252 | select2RefSwitcher('#refs_filter', initialCommitData); |
|
242 | select2RefSwitcher('#refs_filter', initialCommitData); | |
253 | $('#refs_filter').on('change', function(e) { |
|
243 | $('#refs_filter').on('change', function(e) { | |
254 | var data = $('#refs_filter').select2('data'); |
|
244 | var data = $('#refs_filter').select2('data'); | |
255 | curState.commit_id = data.raw_id; |
|
245 | curState.commit_id = data.raw_id; | |
256 | $.pjax({url: data.files_url, container: '#pjax-container', timeout: pjaxTimeout}); |
|
246 | $.pjax({url: data.files_url, container: '#pjax-container', timeout: pjaxTimeout}); | |
257 | }); |
|
247 | }); | |
258 |
|
248 | |||
259 | $("#prev_commit_link").on('click', function(e) { |
|
249 | $("#prev_commit_link").on('click', function(e) { | |
260 | var data = $(this).data(); |
|
250 | var data = $(this).data(); | |
261 | curState.commit_id = data.commitId; |
|
251 | curState.commit_id = data.commitId; | |
262 | }); |
|
252 | }); | |
263 |
|
253 | |||
264 | $("#next_commit_link").on('click', function(e) { |
|
254 | $("#next_commit_link").on('click', function(e) { | |
265 | var data = $(this).data(); |
|
255 | var data = $(this).data(); | |
266 | curState.commit_id = data.commitId; |
|
256 | curState.commit_id = data.commitId; | |
267 | }); |
|
257 | }); | |
268 |
|
258 | |||
269 | $('#at_rev').on("keypress", function(e) { |
|
259 | $('#at_rev').on("keypress", function(e) { | |
270 | /* ENTER PRESSED */ |
|
260 | /* ENTER PRESSED */ | |
271 | if (e.keyCode === 13) { |
|
261 | if (e.keyCode === 13) { | |
272 | var rev = $('#at_rev').val(); |
|
262 | var rev = $('#at_rev').val(); | |
273 | // explicit reload page here. with pjax entering bad input |
|
263 | // explicit reload page here. with pjax entering bad input | |
274 | // produces not so nice results |
|
264 | // produces not so nice results | |
275 | window.location = pyroutes.url('files_home', |
|
265 | window.location = pyroutes.url('files_home', | |
276 | {'repo_name': templateContext.repo_name, |
|
266 | {'repo_name': templateContext.repo_name, | |
277 | 'revision': rev, 'f_path': state.f_path}); |
|
267 | 'revision': rev, 'f_path': state.f_path}); | |
278 | } |
|
268 | } | |
279 | }); |
|
269 | }); | |
280 | } |
|
270 | } | |
281 | }; |
|
271 | }; | |
282 |
|
272 | |||
283 | var pjaxTimeout = 5000; |
|
273 | var pjaxTimeout = 5000; | |
284 |
|
274 | |||
285 | $(document).pjax(".pjax-link", "#pjax-container", { |
|
275 | $(document).pjax(".pjax-link", "#pjax-container", { | |
286 | "fragment": "#pjax-content", |
|
276 | "fragment": "#pjax-content", | |
287 | "maxCacheLength": 1000, |
|
277 | "maxCacheLength": 1000, | |
288 | "timeout": pjaxTimeout |
|
278 | "timeout": pjaxTimeout | |
289 | }); |
|
279 | }); | |
290 |
|
280 | |||
291 | // define global back/forward states |
|
281 | // define global back/forward states | |
292 | var isPjaxPopState = false; |
|
282 | var isPjaxPopState = false; | |
293 | $(document).on('pjax:popstate', function() { |
|
283 | $(document).on('pjax:popstate', function() { | |
294 | isPjaxPopState = true; |
|
284 | isPjaxPopState = true; | |
295 | }); |
|
285 | }); | |
296 |
|
286 | |||
297 | $(document).on('pjax:end', function(xhr, options) { |
|
287 | $(document).on('pjax:end', function(xhr, options) { | |
298 | if (isPjaxPopState) { |
|
288 | if (isPjaxPopState) { | |
299 | isPjaxPopState = false; |
|
289 | isPjaxPopState = false; | |
300 | callbacks(); |
|
290 | callbacks(); | |
301 | _NODEFILTER.resetFilter(); |
|
291 | _NODEFILTER.resetFilter(); | |
302 | } |
|
292 | } | |
303 |
|
293 | |||
304 | // run callback for tracking if defined for google analytics etc. |
|
294 | // run callback for tracking if defined for google analytics etc. | |
305 | // this is used to trigger tracking on pjax |
|
295 | // this is used to trigger tracking on pjax | |
306 | if (typeof window.rhodecode_statechange_callback !== 'undefined') { |
|
296 | if (typeof window.rhodecode_statechange_callback !== 'undefined') { | |
307 | var state = getState('statechange'); |
|
297 | var state = getState('statechange'); | |
308 | rhodecode_statechange_callback(state.url, null) |
|
298 | rhodecode_statechange_callback(state.url, null) | |
309 | } |
|
299 | } | |
310 | }); |
|
300 | }); | |
311 |
|
301 | |||
312 | $(document).on('pjax:success', function(event, xhr, options) { |
|
302 | $(document).on('pjax:success', function(event, xhr, options) { | |
313 | if (event.target.id == "file_history_container") { |
|
303 | if (event.target.id == "file_history_container") { | |
314 | $('#file_history_overview').hide(); |
|
304 | $('#file_history_overview').hide(); | |
315 | $('#file_history_overview_full').show(); |
|
305 | $('#file_history_overview_full').show(); | |
316 | timeagoActivate(); |
|
306 | timeagoActivate(); | |
317 | } else { |
|
307 | } else { | |
318 | callbacks(); |
|
308 | callbacks(); | |
319 | } |
|
309 | } | |
320 | }); |
|
310 | }); | |
321 |
|
311 | |||
322 | $(document).ready(function() { |
|
312 | $(document).ready(function() { | |
323 | callbacks(); |
|
313 | callbacks(); | |
324 | var search_GET = "${request.GET.get('search','')}"; |
|
314 | var search_GET = "${request.GET.get('search','')}"; | |
325 | if (search_GET == "1") { |
|
315 | if (search_GET == "1") { | |
326 | _NODEFILTER.initFilter(); |
|
316 | _NODEFILTER.initFilter(); | |
327 | } |
|
317 | } | |
328 | }); |
|
318 | }); | |
329 |
|
319 | |||
330 | </script> |
|
320 | </script> | |
331 |
|
321 | |||
332 |
|
||||
333 | </%def> |
|
322 | </%def> |
@@ -1,51 +1,53 b'' | |||||
1 |
|
1 | |||
2 | <div id="codeblock" class="browserblock"> |
|
2 | <div id="codeblock" class="browserblock"> | |
3 | <div class="browser-header"> |
|
3 | <div class="browser-header"> | |
4 | <div class="browser-nav"> |
|
4 | <div class="browser-nav"> | |
5 | ${h.form(h.url.current(), method='GET', id='at_rev_form')} |
|
5 | ${h.form(h.url.current(), method='GET', id='at_rev_form')} | |
6 | <div class="info_box"> |
|
6 | <div class="info_box"> | |
7 | ${h.hidden('refs_filter')} |
|
7 | ${h.hidden('refs_filter')} | |
8 | <div class="info_box_elem previous"> |
|
8 | <div class="info_box_elem previous"> | |
9 | <a id="prev_commit_link" data-commit-id="${c.prev_commit.raw_id}" class="pjax-link ${'disabled' if c.url_prev == '#' else ''}" href="${c.url_prev}" title="${_('Previous commit')}"><i class="icon-chevron-left"></i></a> |
|
9 | <a id="prev_commit_link" data-commit-id="${c.prev_commit.raw_id}" class="pjax-link ${'disabled' if c.url_prev == '#' else ''}" href="${c.url_prev}" title="${_('Previous commit')}"><i class="icon-chevron-left"></i></a> | |
10 | </div> |
|
10 | </div> | |
11 | <div class="info_box_elem">${h.text('at_rev',value=c.commit.revision)}</div> |
|
11 | <div class="info_box_elem">${h.text('at_rev',value=c.commit.revision)}</div> | |
12 | <div class="info_box_elem next"> |
|
12 | <div class="info_box_elem next"> | |
13 | <a id="next_commit_link" data-commit-id="${c.next_commit.raw_id}" class="pjax-link ${'disabled' if c.url_next == '#' else ''}" href="${c.url_next}" title="${_('Next commit')}"><i class="icon-chevron-right"></i></a> |
|
13 | <a id="next_commit_link" data-commit-id="${c.next_commit.raw_id}" class="pjax-link ${'disabled' if c.url_next == '#' else ''}" href="${c.url_next}" title="${_('Next commit')}"><i class="icon-chevron-right"></i></a> | |
14 | </div> |
|
14 | </div> | |
15 | </div> |
|
15 | </div> | |
16 | ${h.end_form()} |
|
16 | ${h.end_form()} | |
17 |
|
17 | |||
18 | <div id="search_activate_id" class="search_activate"> |
|
18 | <div id="search_activate_id" class="search_activate"> | |
19 | <a class="btn btn-default" id="filter_activate" href="javascript:void(0)">${_('Search File List')}</a> |
|
19 | <a class="btn btn-default" id="filter_activate" href="javascript:void(0)">${_('Search File List')}</a> | |
20 | </div> |
|
20 | </div> | |
21 | <div id="search_deactivate_id" class="search_activate hidden"> |
|
21 | <div id="search_deactivate_id" class="search_activate hidden"> | |
22 | <a class="btn btn-default" id="filter_deactivate" href="javascript:void(0)">${_('Close File List')}</a> |
|
22 | <a class="btn btn-default" id="filter_deactivate" href="javascript:void(0)">${_('Close File List')}</a> | |
23 | </div> |
|
23 | </div> | |
24 | % if h.HasRepoPermissionAny('repository.write','repository.admin')(c.repo_name): |
|
24 | % if h.HasRepoPermissionAny('repository.write','repository.admin')(c.repo_name): | |
25 | <div title="${_('Add New File')}" class="btn btn-primary new-file"> |
|
25 | <div title="${_('Add New File')}" class="btn btn-primary new-file"> | |
26 | <a href="${h.url('files_add_home',repo_name=c.repo_name,revision=c.commit.raw_id,f_path=c.f_path, anchor='edit')}"> |
|
26 | <a href="${h.url('files_add_home',repo_name=c.repo_name,revision=c.commit.raw_id,f_path=c.f_path, anchor='edit')}"> | |
27 | ${_('Add File')}</a> |
|
27 | ${_('Add File')}</a> | |
28 | </div> |
|
28 | </div> | |
29 | % endif |
|
29 | % endif | |
30 | </div> |
|
30 | </div> | |
31 |
|
31 | |||
32 | <div class="browser-search"> |
|
32 | <div class="browser-search"> | |
33 | <div class="node-filter"> |
|
33 | <div class="node-filter"> | |
34 | <div class="node_filter_box hidden" id="node_filter_box_loading" >${_('Loading file list...')}</div> |
|
34 | <div class="node_filter_box hidden" id="node_filter_box_loading" >${_('Loading file list...')}</div> | |
35 | <div class="node_filter_box hidden" id="node_filter_box" > |
|
35 | <div class="node_filter_box hidden" id="node_filter_box" > | |
36 | <div class="node-filter-path">${h.get_last_path_part(c.file)}/</div> |
|
36 | <div class="node-filter-path">${h.get_last_path_part(c.file)}/</div> | |
37 | <div class="node-filter-input"> |
|
37 | <div class="node-filter-input"> | |
38 | <input class="init" type="text" name="filter" size="25" id="node_filter" autocomplete="off"> |
|
38 | <input class="init" type="text" name="filter" size="25" id="node_filter" autocomplete="off"> | |
39 | </div> |
|
39 | </div> | |
40 | </div> |
|
40 | </div> | |
41 | </div> |
|
41 | </div> | |
42 | </div> |
|
42 | </div> | |
43 | </div> |
|
43 | </div> | |
44 | ## file tree is computed from caches, and filled in |
|
44 | ## file tree is computed from caches, and filled in | |
|
45 | <div id="file-tree"> | |||
45 | ${c.file_tree} |
|
46 | ${c.file_tree} | |
|
47 | </div> | |||
46 |
|
48 | |||
47 | </div> |
|
49 | </div> | |
48 |
|
50 | |||
49 | <script> |
|
51 | <script> | |
50 | var source_page = false; |
|
52 | var source_page = false; | |
51 | </script> |
|
53 | </script> |
@@ -1,60 +1,78 b'' | |||||
1 | <div class="browser-body"> |
|
1 | <div id="file-tree-wrapper" class="browser-body ${'full-load' if c.full_load else ''}"> | |
2 | <table class="code-browser rctable"> |
|
2 | <table class="code-browser rctable"> | |
3 | <thead> |
|
3 | <thead> | |
4 | <tr> |
|
4 | <tr> | |
5 | <th>${_('Name')}</th> |
|
5 | <th>${_('Name')}</th> | |
6 | <th>${_('Size')}</th> |
|
6 | <th>${_('Size')}</th> | |
7 | <th>${_('Modified')}</th> |
|
7 | <th>${_('Modified')}</th> | |
8 | <th>${_('Last Commit')}</th> |
|
8 | <th>${_('Last Commit')}</th> | |
9 | <th>${_('Author')}</th> |
|
9 | <th>${_('Author')}</th> | |
10 | </tr> |
|
10 | </tr> | |
11 | </thead> |
|
11 | </thead> | |
12 |
|
12 | |||
13 | <tbody id="tbody"> |
|
13 | <tbody id="tbody"> | |
14 | %if c.file.parent: |
|
14 | %if c.file.parent: | |
15 | <tr class="parity0"> |
|
15 | <tr class="parity0"> | |
16 | <td class="td-componentname"> |
|
16 | <td class="td-componentname"> | |
17 | <a href="${h.url('files_home',repo_name=c.repo_name,revision=c.commit.raw_id,f_path=c.file.parent.path)}" class="pjax-link"> |
|
17 | <a href="${h.url('files_home',repo_name=c.repo_name,revision=c.commit.raw_id,f_path=c.file.parent.path)}" class="pjax-link"> | |
18 | <i class="icon-folder"></i>.. |
|
18 | <i class="icon-folder"></i>.. | |
19 | </a> |
|
19 | </a> | |
20 | </td> |
|
20 | </td> | |
21 | <td></td> |
|
21 | <td></td> | |
22 | <td></td> |
|
22 | <td></td> | |
23 | <td></td> |
|
23 | <td></td> | |
24 | <td></td> |
|
24 | <td></td> | |
25 | </tr> |
|
25 | </tr> | |
26 | %endif |
|
26 | %endif | |
27 | %for cnt,node in enumerate(c.file): |
|
27 | %for cnt,node in enumerate(c.file): | |
28 | <tr class="parity${cnt%2}"> |
|
28 | <tr class="parity${cnt%2}"> | |
29 | <td class="td-componentname"> |
|
29 | <td class="td-componentname"> | |
30 | %if node.is_submodule(): |
|
30 | %if node.is_submodule(): | |
31 | <span class="submodule-dir"> |
|
31 | <span class="submodule-dir"> | |
32 | ${h.link_to_if( |
|
32 | ${h.link_to_if( | |
33 | node.url.startswith('http://') or node.url.startswith('https://'), |
|
33 | node.url.startswith('http://') or node.url.startswith('https://'), | |
34 | node.name,node.url)} |
|
34 | node.name, node.url)} | |
35 | </span> |
|
35 | </span> | |
36 | %else: |
|
36 | %else: | |
37 | <a href="${h.url('files_home',repo_name=c.repo_name,revision=c.commit.raw_id,f_path=h.safe_unicode(node.path))}" class="pjax-link"> |
|
37 | <a href="${h.url('files_home',repo_name=c.repo_name,revision=c.commit.raw_id,f_path=h.safe_unicode(node.path))}" class="pjax-link"> | |
38 | <i class="${'icon-file browser-file' if node.is_file() else 'icon-folder browser-dir'}"></i>${node.name} |
|
38 | <i class="${'icon-file browser-file' if node.is_file() else 'icon-folder browser-dir'}"></i>${node.name} | |
39 | </a> |
|
39 | </a> | |
40 | %endif |
|
40 | %endif | |
41 | </td> |
|
41 | </td> | |
42 | %if node.is_file(): |
|
42 | %if node.is_file(): | |
43 |
<td class="td-size" |
|
43 | <td class="td-size" data-attr-name="size"> | |
44 | <td class="td-time" title="${'modified_at-%s' % node.name}"> |
|
44 | % if c.full_load: | |
45 | <span class="browser-loading">${_('Loading...')}</span> |
|
45 | <span data-size="${node.size}">${h.format_byte_size_binary(node.size)}</span> | |
|
46 | % else: | |||
|
47 | ${_('Loading ...')} | |||
|
48 | % endif | |||
|
49 | </td> | |||
|
50 | <td class="td-time" data-attr-name="modified_at"> | |||
|
51 | % if c.full_load: | |||
|
52 | <span data-date="${node.last_commit.date}">${h.age_component(node.last_commit.date)}</span> | |||
|
53 | % endif | |||
46 | </td> |
|
54 | </td> | |
47 |
<td class="td-hash" |
|
55 | <td class="td-hash" data-attr-name="commit_id"> | |
48 | <td class="td-user" title="${'author-%s' % node.name}"></td> |
|
56 | % if c.full_load: | |
|
57 | <div class="tooltip" title="${node.last_commit.message}"> | |||
|
58 | <pre data-commit-id="${node.last_commit.raw_id}">r${node.last_commit.revision}:${node.last_commit.short_id}</pre> | |||
|
59 | </div> | |||
|
60 | % endif | |||
|
61 | </td> | |||
|
62 | <td class="td-user" data-attr-name="author"> | |||
|
63 | % if c.full_load: | |||
|
64 | <span data-author="${node.last_commit.author}" title="${node.last_commit.author}">${h.gravatar_with_user(node.last_commit.author)|n}</span> | |||
|
65 | % endif | |||
|
66 | </td> | |||
49 | %else: |
|
67 | %else: | |
50 | <td></td> |
|
68 | <td></td> | |
51 | <td></td> |
|
69 | <td></td> | |
52 | <td></td> |
|
70 | <td></td> | |
53 | <td></td> |
|
71 | <td></td> | |
54 | %endif |
|
72 | %endif | |
55 | </tr> |
|
73 | </tr> | |
56 | %endfor |
|
74 | %endfor | |
57 | </tbody> |
|
75 | </tbody> | |
58 | <tbody id="tbody_filtered"></tbody> |
|
76 | <tbody id="tbody_filtered"></tbody> | |
59 | </table> |
|
77 | </table> | |
60 | </div> No newline at end of file |
|
78 | </div> |
@@ -1,939 +1,942 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2016 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2016 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import os |
|
21 | import os | |
22 |
|
22 | |||
23 | import mock |
|
23 | import mock | |
24 | import pytest |
|
24 | import pytest | |
25 |
|
25 | |||
26 | from rhodecode.controllers.files import FilesController |
|
26 | from rhodecode.controllers.files import FilesController | |
27 | from rhodecode.lib import helpers as h |
|
27 | from rhodecode.lib import helpers as h | |
28 | from rhodecode.lib.compat import OrderedDict |
|
28 | from rhodecode.lib.compat import OrderedDict | |
29 | from rhodecode.lib.ext_json import json |
|
29 | from rhodecode.lib.ext_json import json | |
30 | from rhodecode.lib.vcs import nodes |
|
30 | from rhodecode.lib.vcs import nodes | |
31 | from rhodecode.lib.vcs.conf import settings |
|
31 | from rhodecode.lib.vcs.conf import settings | |
32 | from rhodecode.tests import ( |
|
32 | from rhodecode.tests import ( | |
33 | url, assert_session_flash, assert_not_in_session_flash) |
|
33 | url, assert_session_flash, assert_not_in_session_flash) | |
34 | from rhodecode.tests.fixture import Fixture |
|
34 | from rhodecode.tests.fixture import Fixture | |
35 | from rhodecode.tests.utils import AssertResponse |
|
35 | from rhodecode.tests.utils import AssertResponse | |
36 |
|
36 | |||
37 | fixture = Fixture() |
|
37 | fixture = Fixture() | |
38 |
|
38 | |||
39 | NODE_HISTORY = { |
|
39 | NODE_HISTORY = { | |
40 | 'hg': json.loads(fixture.load_resource('hg_node_history_response.json')), |
|
40 | 'hg': json.loads(fixture.load_resource('hg_node_history_response.json')), | |
41 | 'git': json.loads(fixture.load_resource('git_node_history_response.json')), |
|
41 | 'git': json.loads(fixture.load_resource('git_node_history_response.json')), | |
42 | 'svn': json.loads(fixture.load_resource('svn_node_history_response.json')), |
|
42 | 'svn': json.loads(fixture.load_resource('svn_node_history_response.json')), | |
43 | } |
|
43 | } | |
44 |
|
44 | |||
45 |
|
45 | |||
46 | @pytest.mark.usefixtures("app") |
|
46 | @pytest.mark.usefixtures("app") | |
47 | class TestFilesController: |
|
47 | class TestFilesController: | |
48 |
|
48 | |||
49 | def test_index(self, backend): |
|
49 | def test_index(self, backend): | |
50 | response = self.app.get(url( |
|
50 | response = self.app.get(url( | |
51 | controller='files', action='index', |
|
51 | controller='files', action='index', | |
52 | repo_name=backend.repo_name, revision='tip', f_path='/')) |
|
52 | repo_name=backend.repo_name, revision='tip', f_path='/')) | |
53 | commit = backend.repo.get_commit() |
|
53 | commit = backend.repo.get_commit() | |
54 |
|
54 | |||
55 | params = { |
|
55 | params = { | |
56 | 'repo_name': backend.repo_name, |
|
56 | 'repo_name': backend.repo_name, | |
57 |
' |
|
57 | 'commit_id': commit.raw_id, | |
58 | 'date': commit.date |
|
58 | 'date': commit.date | |
59 | } |
|
59 | } | |
60 | assert_dirs_in_response(response, ['docs', 'vcs'], params) |
|
60 | assert_dirs_in_response(response, ['docs', 'vcs'], params) | |
61 | files = [ |
|
61 | files = [ | |
62 | '.gitignore', |
|
62 | '.gitignore', | |
63 | '.hgignore', |
|
63 | '.hgignore', | |
64 | '.hgtags', |
|
64 | '.hgtags', | |
65 | # TODO: missing in Git |
|
65 | # TODO: missing in Git | |
66 | # '.travis.yml', |
|
66 | # '.travis.yml', | |
67 | 'MANIFEST.in', |
|
67 | 'MANIFEST.in', | |
68 | 'README.rst', |
|
68 | 'README.rst', | |
69 | # TODO: File is missing in svn repository |
|
69 | # TODO: File is missing in svn repository | |
70 | # 'run_test_and_report.sh', |
|
70 | # 'run_test_and_report.sh', | |
71 | 'setup.cfg', |
|
71 | 'setup.cfg', | |
72 | 'setup.py', |
|
72 | 'setup.py', | |
73 | 'test_and_report.sh', |
|
73 | 'test_and_report.sh', | |
74 | 'tox.ini', |
|
74 | 'tox.ini', | |
75 | ] |
|
75 | ] | |
76 | assert_files_in_response(response, files, params) |
|
76 | assert_files_in_response(response, files, params) | |
77 | assert_timeago_in_response(response, files, params) |
|
77 | assert_timeago_in_response(response, files, params) | |
78 |
|
78 | |||
79 | def test_index_links_submodules_with_absolute_url(self, backend_hg): |
|
79 | def test_index_links_submodules_with_absolute_url(self, backend_hg): | |
80 | repo = backend_hg['subrepos'] |
|
80 | repo = backend_hg['subrepos'] | |
81 | response = self.app.get(url( |
|
81 | response = self.app.get(url( | |
82 | controller='files', action='index', |
|
82 | controller='files', action='index', | |
83 | repo_name=repo.repo_name, revision='tip', f_path='/')) |
|
83 | repo_name=repo.repo_name, revision='tip', f_path='/')) | |
84 | assert_response = AssertResponse(response) |
|
84 | assert_response = AssertResponse(response) | |
85 | assert_response.contains_one_link( |
|
85 | assert_response.contains_one_link( | |
86 | 'absolute-path @ 000000000000', 'http://example.com/absolute-path') |
|
86 | 'absolute-path @ 000000000000', 'http://example.com/absolute-path') | |
87 |
|
87 | |||
88 | def test_index_links_submodules_with_absolute_url_subpaths( |
|
88 | def test_index_links_submodules_with_absolute_url_subpaths( | |
89 | self, backend_hg): |
|
89 | self, backend_hg): | |
90 | repo = backend_hg['subrepos'] |
|
90 | repo = backend_hg['subrepos'] | |
91 | response = self.app.get(url( |
|
91 | response = self.app.get(url( | |
92 | controller='files', action='index', |
|
92 | controller='files', action='index', | |
93 | repo_name=repo.repo_name, revision='tip', f_path='/')) |
|
93 | repo_name=repo.repo_name, revision='tip', f_path='/')) | |
94 | assert_response = AssertResponse(response) |
|
94 | assert_response = AssertResponse(response) | |
95 | assert_response.contains_one_link( |
|
95 | assert_response.contains_one_link( | |
96 | 'subpaths-path @ 000000000000', |
|
96 | 'subpaths-path @ 000000000000', | |
97 | 'http://sub-base.example.com/subpaths-path') |
|
97 | 'http://sub-base.example.com/subpaths-path') | |
98 |
|
98 | |||
99 | @pytest.mark.xfail_backends("svn", reason="Depends on branch support") |
|
99 | @pytest.mark.xfail_backends("svn", reason="Depends on branch support") | |
100 | def test_files_menu(self, backend): |
|
100 | def test_files_menu(self, backend): | |
101 | new_branch = "temp_branch_name" |
|
101 | new_branch = "temp_branch_name" | |
102 | commits = [ |
|
102 | commits = [ | |
103 | {'message': 'a'}, |
|
103 | {'message': 'a'}, | |
104 | {'message': 'b', 'branch': new_branch} |
|
104 | {'message': 'b', 'branch': new_branch} | |
105 | ] |
|
105 | ] | |
106 | backend.create_repo(commits) |
|
106 | backend.create_repo(commits) | |
107 |
|
107 | |||
108 | backend.repo.landing_rev = "branch:%s" % new_branch |
|
108 | backend.repo.landing_rev = "branch:%s" % new_branch | |
109 |
|
109 | |||
110 | # get response based on tip and not new revision |
|
110 | # get response based on tip and not new revision | |
111 | response = self.app.get(url( |
|
111 | response = self.app.get(url( | |
112 | controller='files', action='index', |
|
112 | controller='files', action='index', | |
113 | repo_name=backend.repo_name, revision='tip', f_path='/'), |
|
113 | repo_name=backend.repo_name, revision='tip', f_path='/'), | |
114 | status=200) |
|
114 | status=200) | |
115 |
|
115 | |||
116 | # make sure Files menu url is not tip but new revision |
|
116 | # make sure Files menu url is not tip but new revision | |
117 | landing_rev = backend.repo.landing_rev[1] |
|
117 | landing_rev = backend.repo.landing_rev[1] | |
118 | files_url = url('files_home', repo_name=backend.repo_name, |
|
118 | files_url = url('files_home', repo_name=backend.repo_name, | |
119 | revision=landing_rev) |
|
119 | revision=landing_rev) | |
120 |
|
120 | |||
121 | assert landing_rev != 'tip' |
|
121 | assert landing_rev != 'tip' | |
122 | response.mustcontain('<li class="active"><a class="menulink" href="%s">' % files_url) |
|
122 | response.mustcontain('<li class="active"><a class="menulink" href="%s">' % files_url) | |
123 |
|
123 | |||
124 | def test_index_commit(self, backend): |
|
124 | def test_index_commit(self, backend): | |
125 | commit = backend.repo.get_commit(commit_idx=32) |
|
125 | commit = backend.repo.get_commit(commit_idx=32) | |
126 |
|
126 | |||
127 | response = self.app.get(url( |
|
127 | response = self.app.get(url( | |
128 | controller='files', action='index', |
|
128 | controller='files', action='index', | |
129 | repo_name=backend.repo_name, |
|
129 | repo_name=backend.repo_name, | |
130 | revision=commit.raw_id, |
|
130 | revision=commit.raw_id, | |
131 | f_path='/') |
|
131 | f_path='/') | |
132 | ) |
|
132 | ) | |
133 |
|
133 | |||
134 | dirs = ['docs', 'tests'] |
|
134 | dirs = ['docs', 'tests'] | |
135 | files = ['README.rst'] |
|
135 | files = ['README.rst'] | |
136 | params = { |
|
136 | params = { | |
137 | 'repo_name': backend.repo_name, |
|
137 | 'repo_name': backend.repo_name, | |
138 |
' |
|
138 | 'commit_id': commit.raw_id, | |
139 | } |
|
139 | } | |
140 | assert_dirs_in_response(response, dirs, params) |
|
140 | assert_dirs_in_response(response, dirs, params) | |
141 | assert_files_in_response(response, files, params) |
|
141 | assert_files_in_response(response, files, params) | |
142 |
|
142 | |||
143 | @pytest.mark.xfail_backends("git", reason="Missing branches in git repo") |
|
143 | @pytest.mark.xfail_backends("git", reason="Missing branches in git repo") | |
144 | @pytest.mark.xfail_backends("svn", reason="Depends on branch support") |
|
144 | @pytest.mark.xfail_backends("svn", reason="Depends on branch support") | |
145 | def test_index_different_branch(self, backend): |
|
145 | def test_index_different_branch(self, backend): | |
146 | # TODO: Git test repository does not contain branches |
|
146 | # TODO: Git test repository does not contain branches | |
147 | # TODO: Branch support in Subversion |
|
147 | # TODO: Branch support in Subversion | |
148 |
|
148 | |||
149 | commit = backend.repo.get_commit(commit_idx=150) |
|
149 | commit = backend.repo.get_commit(commit_idx=150) | |
150 | response = self.app.get(url( |
|
150 | response = self.app.get(url( | |
151 | controller='files', action='index', |
|
151 | controller='files', action='index', | |
152 | repo_name=backend.repo_name, |
|
152 | repo_name=backend.repo_name, | |
153 | revision=commit.raw_id, |
|
153 | revision=commit.raw_id, | |
154 | f_path='/')) |
|
154 | f_path='/')) | |
155 | assert_response = AssertResponse(response) |
|
155 | assert_response = AssertResponse(response) | |
156 | assert_response.element_contains( |
|
156 | assert_response.element_contains( | |
157 | '.tags .branchtag', 'git') |
|
157 | '.tags .branchtag', 'git') | |
158 |
|
158 | |||
159 | def test_index_paging(self, backend): |
|
159 | def test_index_paging(self, backend): | |
160 | repo = backend.repo |
|
160 | repo = backend.repo | |
161 | indexes = [73, 92, 109, 1, 0] |
|
161 | indexes = [73, 92, 109, 1, 0] | |
162 | idx_map = [(rev, repo.get_commit(commit_idx=rev).raw_id) |
|
162 | idx_map = [(rev, repo.get_commit(commit_idx=rev).raw_id) | |
163 | for rev in indexes] |
|
163 | for rev in indexes] | |
164 |
|
164 | |||
165 | for idx in idx_map: |
|
165 | for idx in idx_map: | |
166 | response = self.app.get(url( |
|
166 | response = self.app.get(url( | |
167 | controller='files', action='index', |
|
167 | controller='files', action='index', | |
168 | repo_name=backend.repo_name, |
|
168 | repo_name=backend.repo_name, | |
169 | revision=idx[1], |
|
169 | revision=idx[1], | |
170 | f_path='/')) |
|
170 | f_path='/')) | |
171 |
|
171 | |||
172 | response.mustcontain("""r%s:%s""" % (idx[0], idx[1][:8])) |
|
172 | response.mustcontain("""r%s:%s""" % (idx[0], idx[1][:8])) | |
173 |
|
173 | |||
174 | def test_file_source(self, backend): |
|
174 | def test_file_source(self, backend): | |
175 | commit = backend.repo.get_commit(commit_idx=167) |
|
175 | commit = backend.repo.get_commit(commit_idx=167) | |
176 | response = self.app.get(url( |
|
176 | response = self.app.get(url( | |
177 | controller='files', action='index', |
|
177 | controller='files', action='index', | |
178 | repo_name=backend.repo_name, |
|
178 | repo_name=backend.repo_name, | |
179 | revision=commit.raw_id, |
|
179 | revision=commit.raw_id, | |
180 | f_path='vcs/nodes.py')) |
|
180 | f_path='vcs/nodes.py')) | |
181 |
|
181 | |||
182 | msgbox = """<div class="commit right-content">%s</div>""" |
|
182 | msgbox = """<div class="commit right-content">%s</div>""" | |
183 | response.mustcontain(msgbox % (commit.message, )) |
|
183 | response.mustcontain(msgbox % (commit.message, )) | |
184 |
|
184 | |||
185 | assert_response = AssertResponse(response) |
|
185 | assert_response = AssertResponse(response) | |
186 | if commit.branch: |
|
186 | if commit.branch: | |
187 | assert_response.element_contains('.tags.tags-main .branchtag', commit.branch) |
|
187 | assert_response.element_contains('.tags.tags-main .branchtag', commit.branch) | |
188 | if commit.tags: |
|
188 | if commit.tags: | |
189 | for tag in commit.tags: |
|
189 | for tag in commit.tags: | |
190 | assert_response.element_contains('.tags.tags-main .tagtag', tag) |
|
190 | assert_response.element_contains('.tags.tags-main .tagtag', tag) | |
191 |
|
191 | |||
192 | def test_file_source_history(self, backend): |
|
192 | def test_file_source_history(self, backend): | |
193 | response = self.app.get( |
|
193 | response = self.app.get( | |
194 | url( |
|
194 | url( | |
195 | controller='files', action='history', |
|
195 | controller='files', action='history', | |
196 | repo_name=backend.repo_name, |
|
196 | repo_name=backend.repo_name, | |
197 | revision='tip', |
|
197 | revision='tip', | |
198 | f_path='vcs/nodes.py'), |
|
198 | f_path='vcs/nodes.py'), | |
199 | extra_environ={'HTTP_X_PARTIAL_XHR': '1'}) |
|
199 | extra_environ={'HTTP_X_PARTIAL_XHR': '1'}) | |
200 | assert NODE_HISTORY[backend.alias] == json.loads(response.body) |
|
200 | assert NODE_HISTORY[backend.alias] == json.loads(response.body) | |
201 |
|
201 | |||
202 | def test_file_source_history_svn(self, backend_svn): |
|
202 | def test_file_source_history_svn(self, backend_svn): | |
203 | simple_repo = backend_svn['svn-simple-layout'] |
|
203 | simple_repo = backend_svn['svn-simple-layout'] | |
204 | response = self.app.get( |
|
204 | response = self.app.get( | |
205 | url( |
|
205 | url( | |
206 | controller='files', action='history', |
|
206 | controller='files', action='history', | |
207 | repo_name=simple_repo.repo_name, |
|
207 | repo_name=simple_repo.repo_name, | |
208 | revision='tip', |
|
208 | revision='tip', | |
209 | f_path='trunk/example.py'), |
|
209 | f_path='trunk/example.py'), | |
210 | extra_environ={'HTTP_X_PARTIAL_XHR': '1'}) |
|
210 | extra_environ={'HTTP_X_PARTIAL_XHR': '1'}) | |
211 |
|
211 | |||
212 | expected_data = json.loads( |
|
212 | expected_data = json.loads( | |
213 | fixture.load_resource('svn_node_history_branches.json')) |
|
213 | fixture.load_resource('svn_node_history_branches.json')) | |
214 | assert expected_data == response.json |
|
214 | assert expected_data == response.json | |
215 |
|
215 | |||
216 | def test_file_annotation_history(self, backend): |
|
216 | def test_file_annotation_history(self, backend): | |
217 | response = self.app.get( |
|
217 | response = self.app.get( | |
218 | url( |
|
218 | url( | |
219 | controller='files', action='history', |
|
219 | controller='files', action='history', | |
220 | repo_name=backend.repo_name, |
|
220 | repo_name=backend.repo_name, | |
221 | revision='tip', |
|
221 | revision='tip', | |
222 | f_path='vcs/nodes.py', |
|
222 | f_path='vcs/nodes.py', | |
223 | annotate=True), |
|
223 | annotate=True), | |
224 | extra_environ={'HTTP_X_PARTIAL_XHR': '1'}) |
|
224 | extra_environ={'HTTP_X_PARTIAL_XHR': '1'}) | |
225 | assert NODE_HISTORY[backend.alias] == json.loads(response.body) |
|
225 | assert NODE_HISTORY[backend.alias] == json.loads(response.body) | |
226 |
|
226 | |||
227 | def test_file_annotation(self, backend): |
|
227 | def test_file_annotation(self, backend): | |
228 | response = self.app.get(url( |
|
228 | response = self.app.get(url( | |
229 | controller='files', action='index', |
|
229 | controller='files', action='index', | |
230 | repo_name=backend.repo_name, revision='tip', f_path='vcs/nodes.py', |
|
230 | repo_name=backend.repo_name, revision='tip', f_path='vcs/nodes.py', | |
231 | annotate=True)) |
|
231 | annotate=True)) | |
232 |
|
232 | |||
233 | expected_revisions = { |
|
233 | expected_revisions = { | |
234 | 'hg': 'r356:25213a5fbb04', |
|
234 | 'hg': 'r356:25213a5fbb04', | |
235 | 'git': 'r345:c994f0de03b2', |
|
235 | 'git': 'r345:c994f0de03b2', | |
236 | 'svn': 'r208:209', |
|
236 | 'svn': 'r208:209', | |
237 | } |
|
237 | } | |
238 | response.mustcontain(expected_revisions[backend.alias]) |
|
238 | response.mustcontain(expected_revisions[backend.alias]) | |
239 |
|
239 | |||
240 | def test_file_authors(self, backend): |
|
240 | def test_file_authors(self, backend): | |
241 | response = self.app.get(url( |
|
241 | response = self.app.get(url( | |
242 | controller='files', action='authors', |
|
242 | controller='files', action='authors', | |
243 | repo_name=backend.repo_name, |
|
243 | repo_name=backend.repo_name, | |
244 | revision='tip', |
|
244 | revision='tip', | |
245 | f_path='vcs/nodes.py', |
|
245 | f_path='vcs/nodes.py', | |
246 | annotate=True)) |
|
246 | annotate=True)) | |
247 |
|
247 | |||
248 | expected_authors = { |
|
248 | expected_authors = { | |
249 | 'hg': ('Marcin Kuzminski', 'Lukasz Balcerzak'), |
|
249 | 'hg': ('Marcin Kuzminski', 'Lukasz Balcerzak'), | |
250 | 'git': ('Marcin Kuzminski', 'Lukasz Balcerzak'), |
|
250 | 'git': ('Marcin Kuzminski', 'Lukasz Balcerzak'), | |
251 | 'svn': ('marcin', 'lukasz'), |
|
251 | 'svn': ('marcin', 'lukasz'), | |
252 | } |
|
252 | } | |
253 |
|
253 | |||
254 | for author in expected_authors[backend.alias]: |
|
254 | for author in expected_authors[backend.alias]: | |
255 | response.mustcontain(author) |
|
255 | response.mustcontain(author) | |
256 |
|
256 | |||
257 | def test_tree_search_top_level(self, backend, xhr_header): |
|
257 | def test_tree_search_top_level(self, backend, xhr_header): | |
258 | commit = backend.repo.get_commit(commit_idx=173) |
|
258 | commit = backend.repo.get_commit(commit_idx=173) | |
259 | response = self.app.get( |
|
259 | response = self.app.get( | |
260 | url('files_nodelist_home', repo_name=backend.repo_name, |
|
260 | url('files_nodelist_home', repo_name=backend.repo_name, | |
261 | revision=commit.raw_id, f_path='/'), |
|
261 | revision=commit.raw_id, f_path='/'), | |
262 | extra_environ=xhr_header) |
|
262 | extra_environ=xhr_header) | |
263 | assert 'nodes' in response.json |
|
263 | assert 'nodes' in response.json | |
264 | assert {'name': 'docs', 'type': 'dir'} in response.json['nodes'] |
|
264 | assert {'name': 'docs', 'type': 'dir'} in response.json['nodes'] | |
265 |
|
265 | |||
266 | def test_tree_search_at_path(self, backend, xhr_header): |
|
266 | def test_tree_search_at_path(self, backend, xhr_header): | |
267 | commit = backend.repo.get_commit(commit_idx=173) |
|
267 | commit = backend.repo.get_commit(commit_idx=173) | |
268 | response = self.app.get( |
|
268 | response = self.app.get( | |
269 | url('files_nodelist_home', repo_name=backend.repo_name, |
|
269 | url('files_nodelist_home', repo_name=backend.repo_name, | |
270 | revision=commit.raw_id, f_path='/docs'), |
|
270 | revision=commit.raw_id, f_path='/docs'), | |
271 | extra_environ=xhr_header) |
|
271 | extra_environ=xhr_header) | |
272 | assert 'nodes' in response.json |
|
272 | assert 'nodes' in response.json | |
273 | nodes = response.json['nodes'] |
|
273 | nodes = response.json['nodes'] | |
274 | assert {'name': 'docs/api', 'type': 'dir'} in nodes |
|
274 | assert {'name': 'docs/api', 'type': 'dir'} in nodes | |
275 | assert {'name': 'docs/index.rst', 'type': 'file'} in nodes |
|
275 | assert {'name': 'docs/index.rst', 'type': 'file'} in nodes | |
276 |
|
276 | |||
277 | def test_tree_search_at_path_missing_xhr(self, backend): |
|
277 | def test_tree_search_at_path_missing_xhr(self, backend): | |
278 | self.app.get( |
|
278 | self.app.get( | |
279 | url('files_nodelist_home', repo_name=backend.repo_name, |
|
279 | url('files_nodelist_home', repo_name=backend.repo_name, | |
280 | revision='tip', f_path=''), status=400) |
|
280 | revision='tip', f_path=''), status=400) | |
281 |
|
281 | |||
282 | def test_tree_view_list(self, backend, xhr_header): |
|
282 | def test_tree_view_list(self, backend, xhr_header): | |
283 | commit = backend.repo.get_commit(commit_idx=173) |
|
283 | commit = backend.repo.get_commit(commit_idx=173) | |
284 | response = self.app.get( |
|
284 | response = self.app.get( | |
285 | url('files_nodelist_home', repo_name=backend.repo_name, |
|
285 | url('files_nodelist_home', repo_name=backend.repo_name, | |
286 | f_path='/', revision=commit.raw_id), |
|
286 | f_path='/', revision=commit.raw_id), | |
287 | extra_environ=xhr_header, |
|
287 | extra_environ=xhr_header, | |
288 | ) |
|
288 | ) | |
289 | response.mustcontain("vcs/web/simplevcs/views/repository.py") |
|
289 | response.mustcontain("vcs/web/simplevcs/views/repository.py") | |
290 |
|
290 | |||
291 | def test_tree_view_list_at_path(self, backend, xhr_header): |
|
291 | def test_tree_view_list_at_path(self, backend, xhr_header): | |
292 | commit = backend.repo.get_commit(commit_idx=173) |
|
292 | commit = backend.repo.get_commit(commit_idx=173) | |
293 | response = self.app.get( |
|
293 | response = self.app.get( | |
294 | url('files_nodelist_home', repo_name=backend.repo_name, |
|
294 | url('files_nodelist_home', repo_name=backend.repo_name, | |
295 | f_path='/docs', revision=commit.raw_id), |
|
295 | f_path='/docs', revision=commit.raw_id), | |
296 | extra_environ=xhr_header, |
|
296 | extra_environ=xhr_header, | |
297 | ) |
|
297 | ) | |
298 | response.mustcontain("docs/index.rst") |
|
298 | response.mustcontain("docs/index.rst") | |
299 |
|
299 | |||
300 | def test_tree_view_list_missing_xhr(self, backend): |
|
300 | def test_tree_view_list_missing_xhr(self, backend): | |
301 | self.app.get( |
|
301 | self.app.get( | |
302 | url('files_nodelist_home', repo_name=backend.repo_name, |
|
302 | url('files_nodelist_home', repo_name=backend.repo_name, | |
303 | f_path='/', revision='tip'), status=400) |
|
303 | f_path='/', revision='tip'), status=400) | |
304 |
|
304 | |||
305 |
def test_ |
|
305 | def test_nodetree_full_success(self, backend, xhr_header): | |
306 | commit = backend.repo.get_commit(commit_idx=173) |
|
306 | commit = backend.repo.get_commit(commit_idx=173) | |
307 | response = self.app.get( |
|
307 | response = self.app.get( | |
308 |
url('files_ |
|
308 | url('files_nodetree_full', repo_name=backend.repo_name, | |
309 |
f_path='/', |
|
309 | f_path='/', commit_id=commit.raw_id), | |
310 | extra_environ=xhr_header) |
|
310 | extra_environ=xhr_header) | |
311 |
|
311 | |||
312 | expected_keys = ['author', 'message', 'modified_at', 'modified_ts', |
|
312 | assert_response = AssertResponse(response) | |
313 | 'name', 'revision', 'short_id', 'size'] |
|
|||
314 | for filename in response.json.get('metadata'): |
|
|||
315 | for key in expected_keys: |
|
|||
316 | assert key in filename |
|
|||
317 |
|
313 | |||
318 | def test_tree_metadata_list_if_file(self, backend, xhr_header): |
|
314 | for attr in ['data-commit-id', 'data-date', 'data-author']: | |
|
315 | elements = assert_response.get_elements('[{}]'.format(attr)) | |||
|
316 | assert len(elements) > 1 | |||
|
317 | ||||
|
318 | for element in elements: | |||
|
319 | assert element.get(attr) | |||
|
320 | ||||
|
321 | def test_nodetree_full_if_file(self, backend, xhr_header): | |||
319 | commit = backend.repo.get_commit(commit_idx=173) |
|
322 | commit = backend.repo.get_commit(commit_idx=173) | |
320 | response = self.app.get( |
|
323 | response = self.app.get( | |
321 |
url('files_ |
|
324 | url('files_nodetree_full', repo_name=backend.repo_name, | |
322 |
f_path='README.rst', |
|
325 | f_path='README.rst', commit_id=commit.raw_id), | |
323 | extra_environ=xhr_header) |
|
326 | extra_environ=xhr_header) | |
324 |
assert response. |
|
327 | assert response.body == '' | |
325 |
|
328 | |||
326 | def test_tree_metadata_list_missing_xhr(self, backend): |
|
329 | def test_tree_metadata_list_missing_xhr(self, backend): | |
327 | self.app.get( |
|
330 | self.app.get( | |
328 |
url('files_ |
|
331 | url('files_nodetree_full', repo_name=backend.repo_name, | |
329 |
f_path='/', |
|
332 | f_path='/', commit_id='tip'), status=400) | |
330 |
|
333 | |||
331 | def test_access_empty_repo_redirect_to_summary_with_alert_write_perms( |
|
334 | def test_access_empty_repo_redirect_to_summary_with_alert_write_perms( | |
332 | self, app, backend_stub, autologin_regular_user, user_regular, |
|
335 | self, app, backend_stub, autologin_regular_user, user_regular, | |
333 | user_util): |
|
336 | user_util): | |
334 | repo = backend_stub.create_repo() |
|
337 | repo = backend_stub.create_repo() | |
335 | user_util.grant_user_permission_to_repo( |
|
338 | user_util.grant_user_permission_to_repo( | |
336 | repo, user_regular, 'repository.write') |
|
339 | repo, user_regular, 'repository.write') | |
337 | response = self.app.get(url( |
|
340 | response = self.app.get(url( | |
338 | controller='files', action='index', |
|
341 | controller='files', action='index', | |
339 | repo_name=repo.repo_name, revision='tip', f_path='/')) |
|
342 | repo_name=repo.repo_name, revision='tip', f_path='/')) | |
340 | assert_session_flash( |
|
343 | assert_session_flash( | |
341 | response, |
|
344 | response, | |
342 | 'There are no files yet. <a class="alert-link" ' |
|
345 | 'There are no files yet. <a class="alert-link" ' | |
343 | 'href="/%s/add/0/#edit">Click here to add a new file.</a>' |
|
346 | 'href="/%s/add/0/#edit">Click here to add a new file.</a>' | |
344 | % (repo.repo_name)) |
|
347 | % (repo.repo_name)) | |
345 |
|
348 | |||
346 | def test_access_empty_repo_redirect_to_summary_with_alert_no_write_perms( |
|
349 | def test_access_empty_repo_redirect_to_summary_with_alert_no_write_perms( | |
347 | self, backend_stub, user_util): |
|
350 | self, backend_stub, user_util): | |
348 | repo = backend_stub.create_repo() |
|
351 | repo = backend_stub.create_repo() | |
349 | repo_file_url = url( |
|
352 | repo_file_url = url( | |
350 | 'files_add_home', |
|
353 | 'files_add_home', | |
351 | repo_name=repo.repo_name, |
|
354 | repo_name=repo.repo_name, | |
352 | revision=0, f_path='', anchor='edit') |
|
355 | revision=0, f_path='', anchor='edit') | |
353 | response = self.app.get(url( |
|
356 | response = self.app.get(url( | |
354 | controller='files', action='index', |
|
357 | controller='files', action='index', | |
355 | repo_name=repo.repo_name, revision='tip', f_path='/')) |
|
358 | repo_name=repo.repo_name, revision='tip', f_path='/')) | |
356 | assert_not_in_session_flash(response, repo_file_url) |
|
359 | assert_not_in_session_flash(response, repo_file_url) | |
357 |
|
360 | |||
358 |
|
361 | |||
359 | # TODO: johbo: Think about a better place for these tests. Either controller |
|
362 | # TODO: johbo: Think about a better place for these tests. Either controller | |
360 | # specific unit tests or we move down the whole logic further towards the vcs |
|
363 | # specific unit tests or we move down the whole logic further towards the vcs | |
361 | # layer |
|
364 | # layer | |
362 | class TestAdjustFilePathForSvn: |
|
365 | class TestAdjustFilePathForSvn: | |
363 | """SVN specific adjustments of node history in FileController.""" |
|
366 | """SVN specific adjustments of node history in FileController.""" | |
364 |
|
367 | |||
365 | def test_returns_path_relative_to_matched_reference(self): |
|
368 | def test_returns_path_relative_to_matched_reference(self): | |
366 | repo = self._repo(branches=['trunk']) |
|
369 | repo = self._repo(branches=['trunk']) | |
367 | self.assert_file_adjustment('trunk/file', 'file', repo) |
|
370 | self.assert_file_adjustment('trunk/file', 'file', repo) | |
368 |
|
371 | |||
369 | def test_does_not_modify_file_if_no_reference_matches(self): |
|
372 | def test_does_not_modify_file_if_no_reference_matches(self): | |
370 | repo = self._repo(branches=['trunk']) |
|
373 | repo = self._repo(branches=['trunk']) | |
371 | self.assert_file_adjustment('notes/file', 'notes/file', repo) |
|
374 | self.assert_file_adjustment('notes/file', 'notes/file', repo) | |
372 |
|
375 | |||
373 | def test_does_not_adjust_partial_directory_names(self): |
|
376 | def test_does_not_adjust_partial_directory_names(self): | |
374 | repo = self._repo(branches=['trun']) |
|
377 | repo = self._repo(branches=['trun']) | |
375 | self.assert_file_adjustment('trunk/file', 'trunk/file', repo) |
|
378 | self.assert_file_adjustment('trunk/file', 'trunk/file', repo) | |
376 |
|
379 | |||
377 | def test_is_robust_to_patterns_which_prefix_other_patterns(self): |
|
380 | def test_is_robust_to_patterns_which_prefix_other_patterns(self): | |
378 | repo = self._repo(branches=['trunk', 'trunk/new', 'trunk/old']) |
|
381 | repo = self._repo(branches=['trunk', 'trunk/new', 'trunk/old']) | |
379 | self.assert_file_adjustment('trunk/new/file', 'file', repo) |
|
382 | self.assert_file_adjustment('trunk/new/file', 'file', repo) | |
380 |
|
383 | |||
381 | def assert_file_adjustment(self, f_path, expected, repo): |
|
384 | def assert_file_adjustment(self, f_path, expected, repo): | |
382 | controller = FilesController() |
|
385 | controller = FilesController() | |
383 | result = controller._adjust_file_path_for_svn(f_path, repo) |
|
386 | result = controller._adjust_file_path_for_svn(f_path, repo) | |
384 | assert result == expected |
|
387 | assert result == expected | |
385 |
|
388 | |||
386 | def _repo(self, branches=None): |
|
389 | def _repo(self, branches=None): | |
387 | repo = mock.Mock() |
|
390 | repo = mock.Mock() | |
388 | repo.branches = OrderedDict((name, '0') for name in branches or []) |
|
391 | repo.branches = OrderedDict((name, '0') for name in branches or []) | |
389 | repo.tags = {} |
|
392 | repo.tags = {} | |
390 | return repo |
|
393 | return repo | |
391 |
|
394 | |||
392 |
|
395 | |||
393 | @pytest.mark.usefixtures("app") |
|
396 | @pytest.mark.usefixtures("app") | |
394 | class TestRepositoryArchival: |
|
397 | class TestRepositoryArchival: | |
395 |
|
398 | |||
396 | def test_archival(self, backend): |
|
399 | def test_archival(self, backend): | |
397 | backend.enable_downloads() |
|
400 | backend.enable_downloads() | |
398 | commit = backend.repo.get_commit(commit_idx=173) |
|
401 | commit = backend.repo.get_commit(commit_idx=173) | |
399 | for archive, info in settings.ARCHIVE_SPECS.items(): |
|
402 | for archive, info in settings.ARCHIVE_SPECS.items(): | |
400 | mime_type, arch_ext = info |
|
403 | mime_type, arch_ext = info | |
401 | short = commit.short_id + arch_ext |
|
404 | short = commit.short_id + arch_ext | |
402 | fname = commit.raw_id + arch_ext |
|
405 | fname = commit.raw_id + arch_ext | |
403 | filename = '%s-%s' % (backend.repo_name, short) |
|
406 | filename = '%s-%s' % (backend.repo_name, short) | |
404 | response = self.app.get(url(controller='files', |
|
407 | response = self.app.get(url(controller='files', | |
405 | action='archivefile', |
|
408 | action='archivefile', | |
406 | repo_name=backend.repo_name, |
|
409 | repo_name=backend.repo_name, | |
407 | fname=fname)) |
|
410 | fname=fname)) | |
408 |
|
411 | |||
409 | assert response.status == '200 OK' |
|
412 | assert response.status == '200 OK' | |
410 | headers = { |
|
413 | headers = { | |
411 | 'Pragma': 'no-cache', |
|
414 | 'Pragma': 'no-cache', | |
412 | 'Cache-Control': 'no-cache', |
|
415 | 'Cache-Control': 'no-cache', | |
413 | 'Content-Disposition': 'attachment; filename=%s' % filename, |
|
416 | 'Content-Disposition': 'attachment; filename=%s' % filename, | |
414 | 'Content-Type': '%s; charset=utf-8' % mime_type, |
|
417 | 'Content-Type': '%s; charset=utf-8' % mime_type, | |
415 | } |
|
418 | } | |
416 | if 'Set-Cookie' in response.response.headers: |
|
419 | if 'Set-Cookie' in response.response.headers: | |
417 | del response.response.headers['Set-Cookie'] |
|
420 | del response.response.headers['Set-Cookie'] | |
418 | assert response.response.headers == headers |
|
421 | assert response.response.headers == headers | |
419 |
|
422 | |||
420 | def test_archival_wrong_ext(self, backend): |
|
423 | def test_archival_wrong_ext(self, backend): | |
421 | backend.enable_downloads() |
|
424 | backend.enable_downloads() | |
422 | commit = backend.repo.get_commit(commit_idx=173) |
|
425 | commit = backend.repo.get_commit(commit_idx=173) | |
423 | for arch_ext in ['tar', 'rar', 'x', '..ax', '.zipz']: |
|
426 | for arch_ext in ['tar', 'rar', 'x', '..ax', '.zipz']: | |
424 | fname = commit.raw_id + arch_ext |
|
427 | fname = commit.raw_id + arch_ext | |
425 |
|
428 | |||
426 | response = self.app.get(url(controller='files', |
|
429 | response = self.app.get(url(controller='files', | |
427 | action='archivefile', |
|
430 | action='archivefile', | |
428 | repo_name=backend.repo_name, |
|
431 | repo_name=backend.repo_name, | |
429 | fname=fname)) |
|
432 | fname=fname)) | |
430 | response.mustcontain('Unknown archive type') |
|
433 | response.mustcontain('Unknown archive type') | |
431 |
|
434 | |||
432 | def test_archival_wrong_commit_id(self, backend): |
|
435 | def test_archival_wrong_commit_id(self, backend): | |
433 | backend.enable_downloads() |
|
436 | backend.enable_downloads() | |
434 | for commit_id in ['00x000000', 'tar', 'wrong', '@##$@$42413232', |
|
437 | for commit_id in ['00x000000', 'tar', 'wrong', '@##$@$42413232', | |
435 | '232dffcd']: |
|
438 | '232dffcd']: | |
436 | fname = '%s.zip' % commit_id |
|
439 | fname = '%s.zip' % commit_id | |
437 |
|
440 | |||
438 | response = self.app.get(url(controller='files', |
|
441 | response = self.app.get(url(controller='files', | |
439 | action='archivefile', |
|
442 | action='archivefile', | |
440 | repo_name=backend.repo_name, |
|
443 | repo_name=backend.repo_name, | |
441 | fname=fname)) |
|
444 | fname=fname)) | |
442 | response.mustcontain('Unknown revision') |
|
445 | response.mustcontain('Unknown revision') | |
443 |
|
446 | |||
444 |
|
447 | |||
445 | @pytest.mark.usefixtures("app", "autologin_user") |
|
448 | @pytest.mark.usefixtures("app", "autologin_user") | |
446 | class TestRawFileHandling: |
|
449 | class TestRawFileHandling: | |
447 |
|
450 | |||
448 | def test_raw_file_ok(self, backend): |
|
451 | def test_raw_file_ok(self, backend): | |
449 | commit = backend.repo.get_commit(commit_idx=173) |
|
452 | commit = backend.repo.get_commit(commit_idx=173) | |
450 | response = self.app.get(url(controller='files', action='rawfile', |
|
453 | response = self.app.get(url(controller='files', action='rawfile', | |
451 | repo_name=backend.repo_name, |
|
454 | repo_name=backend.repo_name, | |
452 | revision=commit.raw_id, |
|
455 | revision=commit.raw_id, | |
453 | f_path='vcs/nodes.py')) |
|
456 | f_path='vcs/nodes.py')) | |
454 |
|
457 | |||
455 | assert response.content_disposition == "attachment; filename=nodes.py" |
|
458 | assert response.content_disposition == "attachment; filename=nodes.py" | |
456 | assert response.content_type == "text/x-python" |
|
459 | assert response.content_type == "text/x-python" | |
457 |
|
460 | |||
458 | def test_raw_file_wrong_cs(self, backend): |
|
461 | def test_raw_file_wrong_cs(self, backend): | |
459 | commit_id = u'ERRORce30c96924232dffcd24178a07ffeb5dfc' |
|
462 | commit_id = u'ERRORce30c96924232dffcd24178a07ffeb5dfc' | |
460 | f_path = 'vcs/nodes.py' |
|
463 | f_path = 'vcs/nodes.py' | |
461 |
|
464 | |||
462 | response = self.app.get(url(controller='files', action='rawfile', |
|
465 | response = self.app.get(url(controller='files', action='rawfile', | |
463 | repo_name=backend.repo_name, |
|
466 | repo_name=backend.repo_name, | |
464 | revision=commit_id, |
|
467 | revision=commit_id, | |
465 | f_path=f_path), status=404) |
|
468 | f_path=f_path), status=404) | |
466 |
|
469 | |||
467 | msg = """No such commit exists for this repository""" |
|
470 | msg = """No such commit exists for this repository""" | |
468 | response.mustcontain(msg) |
|
471 | response.mustcontain(msg) | |
469 |
|
472 | |||
470 | def test_raw_file_wrong_f_path(self, backend): |
|
473 | def test_raw_file_wrong_f_path(self, backend): | |
471 | commit = backend.repo.get_commit(commit_idx=173) |
|
474 | commit = backend.repo.get_commit(commit_idx=173) | |
472 | f_path = 'vcs/ERRORnodes.py' |
|
475 | f_path = 'vcs/ERRORnodes.py' | |
473 | response = self.app.get(url(controller='files', action='rawfile', |
|
476 | response = self.app.get(url(controller='files', action='rawfile', | |
474 | repo_name=backend.repo_name, |
|
477 | repo_name=backend.repo_name, | |
475 | revision=commit.raw_id, |
|
478 | revision=commit.raw_id, | |
476 | f_path=f_path), status=404) |
|
479 | f_path=f_path), status=404) | |
477 |
|
480 | |||
478 | msg = ( |
|
481 | msg = ( | |
479 | "There is no file nor directory at the given path: " |
|
482 | "There is no file nor directory at the given path: " | |
480 | "'%s' at commit %s" % (f_path, commit.short_id)) |
|
483 | "'%s' at commit %s" % (f_path, commit.short_id)) | |
481 | response.mustcontain(msg) |
|
484 | response.mustcontain(msg) | |
482 |
|
485 | |||
483 | def test_raw_ok(self, backend): |
|
486 | def test_raw_ok(self, backend): | |
484 | commit = backend.repo.get_commit(commit_idx=173) |
|
487 | commit = backend.repo.get_commit(commit_idx=173) | |
485 | response = self.app.get(url(controller='files', action='raw', |
|
488 | response = self.app.get(url(controller='files', action='raw', | |
486 | repo_name=backend.repo_name, |
|
489 | repo_name=backend.repo_name, | |
487 | revision=commit.raw_id, |
|
490 | revision=commit.raw_id, | |
488 | f_path='vcs/nodes.py')) |
|
491 | f_path='vcs/nodes.py')) | |
489 |
|
492 | |||
490 | assert response.content_type == "text/plain" |
|
493 | assert response.content_type == "text/plain" | |
491 |
|
494 | |||
492 | def test_raw_wrong_cs(self, backend): |
|
495 | def test_raw_wrong_cs(self, backend): | |
493 | commit_id = u'ERRORcce30c96924232dffcd24178a07ffeb5dfc' |
|
496 | commit_id = u'ERRORcce30c96924232dffcd24178a07ffeb5dfc' | |
494 | f_path = 'vcs/nodes.py' |
|
497 | f_path = 'vcs/nodes.py' | |
495 |
|
498 | |||
496 | response = self.app.get(url(controller='files', action='raw', |
|
499 | response = self.app.get(url(controller='files', action='raw', | |
497 | repo_name=backend.repo_name, |
|
500 | repo_name=backend.repo_name, | |
498 | revision=commit_id, |
|
501 | revision=commit_id, | |
499 | f_path=f_path), status=404) |
|
502 | f_path=f_path), status=404) | |
500 |
|
503 | |||
501 | msg = """No such commit exists for this repository""" |
|
504 | msg = """No such commit exists for this repository""" | |
502 | response.mustcontain(msg) |
|
505 | response.mustcontain(msg) | |
503 |
|
506 | |||
504 | def test_raw_wrong_f_path(self, backend): |
|
507 | def test_raw_wrong_f_path(self, backend): | |
505 | commit = backend.repo.get_commit(commit_idx=173) |
|
508 | commit = backend.repo.get_commit(commit_idx=173) | |
506 | f_path = 'vcs/ERRORnodes.py' |
|
509 | f_path = 'vcs/ERRORnodes.py' | |
507 | response = self.app.get(url(controller='files', action='raw', |
|
510 | response = self.app.get(url(controller='files', action='raw', | |
508 | repo_name=backend.repo_name, |
|
511 | repo_name=backend.repo_name, | |
509 | revision=commit.raw_id, |
|
512 | revision=commit.raw_id, | |
510 | f_path=f_path), status=404) |
|
513 | f_path=f_path), status=404) | |
511 | msg = ( |
|
514 | msg = ( | |
512 | "There is no file nor directory at the given path: " |
|
515 | "There is no file nor directory at the given path: " | |
513 | "'%s' at commit %s" % (f_path, commit.short_id)) |
|
516 | "'%s' at commit %s" % (f_path, commit.short_id)) | |
514 | response.mustcontain(msg) |
|
517 | response.mustcontain(msg) | |
515 |
|
518 | |||
516 | def test_raw_svg_should_not_be_rendered(self, backend): |
|
519 | def test_raw_svg_should_not_be_rendered(self, backend): | |
517 | backend.create_repo() |
|
520 | backend.create_repo() | |
518 | backend.ensure_file("xss.svg") |
|
521 | backend.ensure_file("xss.svg") | |
519 | response = self.app.get(url(controller='files', action='raw', |
|
522 | response = self.app.get(url(controller='files', action='raw', | |
520 | repo_name=backend.repo_name, |
|
523 | repo_name=backend.repo_name, | |
521 | revision='tip', |
|
524 | revision='tip', | |
522 | f_path='xss.svg')) |
|
525 | f_path='xss.svg')) | |
523 |
|
526 | |||
524 | # If the content type is image/svg+xml then it allows to render HTML |
|
527 | # If the content type is image/svg+xml then it allows to render HTML | |
525 | # and malicious SVG. |
|
528 | # and malicious SVG. | |
526 | assert response.content_type == "text/plain" |
|
529 | assert response.content_type == "text/plain" | |
527 |
|
530 | |||
528 |
|
531 | |||
529 | @pytest.mark.usefixtures("app") |
|
532 | @pytest.mark.usefixtures("app") | |
530 | class TestFilesDiff: |
|
533 | class TestFilesDiff: | |
531 |
|
534 | |||
532 | @pytest.mark.parametrize("diff", ['diff', 'download', 'raw']) |
|
535 | @pytest.mark.parametrize("diff", ['diff', 'download', 'raw']) | |
533 | def test_file_full_diff(self, backend, diff): |
|
536 | def test_file_full_diff(self, backend, diff): | |
534 | commit1 = backend.repo.get_commit(commit_idx=-1) |
|
537 | commit1 = backend.repo.get_commit(commit_idx=-1) | |
535 | commit2 = backend.repo.get_commit(commit_idx=-2) |
|
538 | commit2 = backend.repo.get_commit(commit_idx=-2) | |
536 | response = self.app.get( |
|
539 | response = self.app.get( | |
537 | url( |
|
540 | url( | |
538 | controller='files', |
|
541 | controller='files', | |
539 | action='diff', |
|
542 | action='diff', | |
540 | repo_name=backend.repo_name, |
|
543 | repo_name=backend.repo_name, | |
541 | f_path='README'), |
|
544 | f_path='README'), | |
542 | params={ |
|
545 | params={ | |
543 | 'diff1': commit1.raw_id, |
|
546 | 'diff1': commit1.raw_id, | |
544 | 'diff2': commit2.raw_id, |
|
547 | 'diff2': commit2.raw_id, | |
545 | 'fulldiff': '1', |
|
548 | 'fulldiff': '1', | |
546 | 'diff': diff, |
|
549 | 'diff': diff, | |
547 | }) |
|
550 | }) | |
548 | response.mustcontain('README.rst') |
|
551 | response.mustcontain('README.rst') | |
549 | response.mustcontain('No newline at end of file') |
|
552 | response.mustcontain('No newline at end of file') | |
550 |
|
553 | |||
551 | def test_file_binary_diff(self, backend): |
|
554 | def test_file_binary_diff(self, backend): | |
552 | commits = [ |
|
555 | commits = [ | |
553 | {'message': 'First commit'}, |
|
556 | {'message': 'First commit'}, | |
554 | {'message': 'Commit with binary', |
|
557 | {'message': 'Commit with binary', | |
555 | 'added': [nodes.FileNode('file.bin', content='\0BINARY\0')]}, |
|
558 | 'added': [nodes.FileNode('file.bin', content='\0BINARY\0')]}, | |
556 | ] |
|
559 | ] | |
557 | repo = backend.create_repo(commits=commits) |
|
560 | repo = backend.create_repo(commits=commits) | |
558 |
|
561 | |||
559 | response = self.app.get( |
|
562 | response = self.app.get( | |
560 | url( |
|
563 | url( | |
561 | controller='files', |
|
564 | controller='files', | |
562 | action='diff', |
|
565 | action='diff', | |
563 | repo_name=backend.repo_name, |
|
566 | repo_name=backend.repo_name, | |
564 | f_path='file.bin'), |
|
567 | f_path='file.bin'), | |
565 | params={ |
|
568 | params={ | |
566 | 'diff1': repo.get_commit(commit_idx=0).raw_id, |
|
569 | 'diff1': repo.get_commit(commit_idx=0).raw_id, | |
567 | 'diff2': repo.get_commit(commit_idx=1).raw_id, |
|
570 | 'diff2': repo.get_commit(commit_idx=1).raw_id, | |
568 | 'fulldiff': '1', |
|
571 | 'fulldiff': '1', | |
569 | 'diff': 'diff', |
|
572 | 'diff': 'diff', | |
570 | }) |
|
573 | }) | |
571 | response.mustcontain('Cannot diff binary files') |
|
574 | response.mustcontain('Cannot diff binary files') | |
572 |
|
575 | |||
573 | def test_diff_2way(self, backend): |
|
576 | def test_diff_2way(self, backend): | |
574 | commit1 = backend.repo.get_commit(commit_idx=-1) |
|
577 | commit1 = backend.repo.get_commit(commit_idx=-1) | |
575 | commit2 = backend.repo.get_commit(commit_idx=-2) |
|
578 | commit2 = backend.repo.get_commit(commit_idx=-2) | |
576 | response = self.app.get( |
|
579 | response = self.app.get( | |
577 | url( |
|
580 | url( | |
578 | controller='files', |
|
581 | controller='files', | |
579 | action='diff_2way', |
|
582 | action='diff_2way', | |
580 | repo_name=backend.repo_name, |
|
583 | repo_name=backend.repo_name, | |
581 | f_path='README'), |
|
584 | f_path='README'), | |
582 | params={ |
|
585 | params={ | |
583 | 'diff1': commit1.raw_id, |
|
586 | 'diff1': commit1.raw_id, | |
584 | 'diff2': commit2.raw_id, |
|
587 | 'diff2': commit2.raw_id, | |
585 | }) |
|
588 | }) | |
586 |
|
589 | |||
587 | # Expecting links to both variants of the file. Links are used |
|
590 | # Expecting links to both variants of the file. Links are used | |
588 | # to load the content dynamically. |
|
591 | # to load the content dynamically. | |
589 | response.mustcontain('/%s/README' % commit1.raw_id) |
|
592 | response.mustcontain('/%s/README' % commit1.raw_id) | |
590 | response.mustcontain('/%s/README' % commit2.raw_id) |
|
593 | response.mustcontain('/%s/README' % commit2.raw_id) | |
591 |
|
594 | |||
592 | def test_requires_one_commit_id(self, backend, autologin_user): |
|
595 | def test_requires_one_commit_id(self, backend, autologin_user): | |
593 | response = self.app.get( |
|
596 | response = self.app.get( | |
594 | url( |
|
597 | url( | |
595 | controller='files', |
|
598 | controller='files', | |
596 | action='diff', |
|
599 | action='diff', | |
597 | repo_name=backend.repo_name, |
|
600 | repo_name=backend.repo_name, | |
598 | f_path='README.rst'), |
|
601 | f_path='README.rst'), | |
599 | status=400) |
|
602 | status=400) | |
600 | response.mustcontain( |
|
603 | response.mustcontain( | |
601 | 'Need query parameter', 'diff1', 'diff2', 'to generate a diff.') |
|
604 | 'Need query parameter', 'diff1', 'diff2', 'to generate a diff.') | |
602 |
|
605 | |||
603 | def test_returns_not_found_if_file_does_not_exist(self, vcsbackend): |
|
606 | def test_returns_not_found_if_file_does_not_exist(self, vcsbackend): | |
604 | repo = vcsbackend.repo |
|
607 | repo = vcsbackend.repo | |
605 | self.app.get( |
|
608 | self.app.get( | |
606 | url( |
|
609 | url( | |
607 | controller='files', |
|
610 | controller='files', | |
608 | action='diff', |
|
611 | action='diff', | |
609 | repo_name=repo.name, |
|
612 | repo_name=repo.name, | |
610 | f_path='does-not-exist-in-any-commit', |
|
613 | f_path='does-not-exist-in-any-commit', | |
611 | diff1=repo[0].raw_id, |
|
614 | diff1=repo[0].raw_id, | |
612 | diff2=repo[1].raw_id), |
|
615 | diff2=repo[1].raw_id), | |
613 | status=404) |
|
616 | status=404) | |
614 |
|
617 | |||
615 | def test_returns_redirect_if_file_not_changed(self, backend): |
|
618 | def test_returns_redirect_if_file_not_changed(self, backend): | |
616 | commit = backend.repo.get_commit(commit_idx=-1) |
|
619 | commit = backend.repo.get_commit(commit_idx=-1) | |
617 | f_path= 'README' |
|
620 | f_path= 'README' | |
618 | response = self.app.get( |
|
621 | response = self.app.get( | |
619 | url( |
|
622 | url( | |
620 | controller='files', |
|
623 | controller='files', | |
621 | action='diff_2way', |
|
624 | action='diff_2way', | |
622 | repo_name=backend.repo_name, |
|
625 | repo_name=backend.repo_name, | |
623 | f_path=f_path, |
|
626 | f_path=f_path, | |
624 | diff1=commit.raw_id, |
|
627 | diff1=commit.raw_id, | |
625 | diff2=commit.raw_id, |
|
628 | diff2=commit.raw_id, | |
626 | ), |
|
629 | ), | |
627 | status=302 |
|
630 | status=302 | |
628 | ) |
|
631 | ) | |
629 | assert response.headers['Location'].endswith(f_path) |
|
632 | assert response.headers['Location'].endswith(f_path) | |
630 | redirected = response.follow() |
|
633 | redirected = response.follow() | |
631 | redirected.mustcontain('has not changed between') |
|
634 | redirected.mustcontain('has not changed between') | |
632 |
|
635 | |||
633 | def test_supports_diff_to_different_path_svn(self, backend_svn): |
|
636 | def test_supports_diff_to_different_path_svn(self, backend_svn): | |
634 | repo = backend_svn['svn-simple-layout'].scm_instance() |
|
637 | repo = backend_svn['svn-simple-layout'].scm_instance() | |
635 | commit_id = repo[-1].raw_id |
|
638 | commit_id = repo[-1].raw_id | |
636 | response = self.app.get( |
|
639 | response = self.app.get( | |
637 | url( |
|
640 | url( | |
638 | controller='files', |
|
641 | controller='files', | |
639 | action='diff', |
|
642 | action='diff', | |
640 | repo_name=repo.name, |
|
643 | repo_name=repo.name, | |
641 | f_path='trunk/example.py', |
|
644 | f_path='trunk/example.py', | |
642 | diff1='tags/v0.2/example.py@' + commit_id, |
|
645 | diff1='tags/v0.2/example.py@' + commit_id, | |
643 | diff2=commit_id), |
|
646 | diff2=commit_id), | |
644 | status=200) |
|
647 | status=200) | |
645 | response.mustcontain( |
|
648 | response.mustcontain( | |
646 | "Will print out a useful message on invocation.") |
|
649 | "Will print out a useful message on invocation.") | |
647 |
|
650 | |||
648 | # Note: Expecting that we indicate the user what's being compared |
|
651 | # Note: Expecting that we indicate the user what's being compared | |
649 | response.mustcontain("trunk/example.py") |
|
652 | response.mustcontain("trunk/example.py") | |
650 | response.mustcontain("tags/v0.2/example.py") |
|
653 | response.mustcontain("tags/v0.2/example.py") | |
651 |
|
654 | |||
652 | def test_show_rev_redirects_to_svn_path(self, backend_svn): |
|
655 | def test_show_rev_redirects_to_svn_path(self, backend_svn): | |
653 | repo = backend_svn['svn-simple-layout'].scm_instance() |
|
656 | repo = backend_svn['svn-simple-layout'].scm_instance() | |
654 | commit_id = repo[-1].raw_id |
|
657 | commit_id = repo[-1].raw_id | |
655 | response = self.app.get( |
|
658 | response = self.app.get( | |
656 | url( |
|
659 | url( | |
657 | controller='files', |
|
660 | controller='files', | |
658 | action='diff', |
|
661 | action='diff', | |
659 | repo_name=repo.name, |
|
662 | repo_name=repo.name, | |
660 | f_path='trunk/example.py', |
|
663 | f_path='trunk/example.py', | |
661 | diff1='branches/argparse/example.py@' + commit_id, |
|
664 | diff1='branches/argparse/example.py@' + commit_id, | |
662 | diff2=commit_id), |
|
665 | diff2=commit_id), | |
663 | params={'show_rev': 'Show at Revision'}, |
|
666 | params={'show_rev': 'Show at Revision'}, | |
664 | status=302) |
|
667 | status=302) | |
665 | assert response.headers['Location'].endswith( |
|
668 | assert response.headers['Location'].endswith( | |
666 | 'svn-svn-simple-layout/files/26/branches/argparse/example.py') |
|
669 | 'svn-svn-simple-layout/files/26/branches/argparse/example.py') | |
667 |
|
670 | |||
668 | def test_show_rev_and_annotate_redirects_to_svn_path(self, backend_svn): |
|
671 | def test_show_rev_and_annotate_redirects_to_svn_path(self, backend_svn): | |
669 | repo = backend_svn['svn-simple-layout'].scm_instance() |
|
672 | repo = backend_svn['svn-simple-layout'].scm_instance() | |
670 | commit_id = repo[-1].raw_id |
|
673 | commit_id = repo[-1].raw_id | |
671 | response = self.app.get( |
|
674 | response = self.app.get( | |
672 | url( |
|
675 | url( | |
673 | controller='files', |
|
676 | controller='files', | |
674 | action='diff', |
|
677 | action='diff', | |
675 | repo_name=repo.name, |
|
678 | repo_name=repo.name, | |
676 | f_path='trunk/example.py', |
|
679 | f_path='trunk/example.py', | |
677 | diff1='branches/argparse/example.py@' + commit_id, |
|
680 | diff1='branches/argparse/example.py@' + commit_id, | |
678 | diff2=commit_id), |
|
681 | diff2=commit_id), | |
679 | params={ |
|
682 | params={ | |
680 | 'show_rev': 'Show at Revision', |
|
683 | 'show_rev': 'Show at Revision', | |
681 | 'annotate': 'true', |
|
684 | 'annotate': 'true', | |
682 | }, |
|
685 | }, | |
683 | status=302) |
|
686 | status=302) | |
684 | assert response.headers['Location'].endswith( |
|
687 | assert response.headers['Location'].endswith( | |
685 | 'svn-svn-simple-layout/annotate/26/branches/argparse/example.py') |
|
688 | 'svn-svn-simple-layout/annotate/26/branches/argparse/example.py') | |
686 |
|
689 | |||
687 |
|
690 | |||
688 | @pytest.mark.usefixtures("app", "autologin_user") |
|
691 | @pytest.mark.usefixtures("app", "autologin_user") | |
689 | class TestChangingFiles: |
|
692 | class TestChangingFiles: | |
690 |
|
693 | |||
691 | def test_add_file_view(self, backend): |
|
694 | def test_add_file_view(self, backend): | |
692 | self.app.get(url( |
|
695 | self.app.get(url( | |
693 | 'files_add_home', |
|
696 | 'files_add_home', | |
694 | repo_name=backend.repo_name, |
|
697 | repo_name=backend.repo_name, | |
695 | revision='tip', f_path='/')) |
|
698 | revision='tip', f_path='/')) | |
696 |
|
699 | |||
697 | @pytest.mark.xfail_backends("svn", reason="Depends on online editing") |
|
700 | @pytest.mark.xfail_backends("svn", reason="Depends on online editing") | |
698 | def test_add_file_into_repo_missing_content(self, backend, csrf_token): |
|
701 | def test_add_file_into_repo_missing_content(self, backend, csrf_token): | |
699 | repo = backend.create_repo() |
|
702 | repo = backend.create_repo() | |
700 | filename = 'init.py' |
|
703 | filename = 'init.py' | |
701 | response = self.app.post( |
|
704 | response = self.app.post( | |
702 | url( |
|
705 | url( | |
703 | 'files_add', |
|
706 | 'files_add', | |
704 | repo_name=repo.repo_name, |
|
707 | repo_name=repo.repo_name, | |
705 | revision='tip', f_path='/'), |
|
708 | revision='tip', f_path='/'), | |
706 | params={ |
|
709 | params={ | |
707 | 'content': "", |
|
710 | 'content': "", | |
708 | 'filename': filename, |
|
711 | 'filename': filename, | |
709 | 'location': "", |
|
712 | 'location': "", | |
710 | 'csrf_token': csrf_token, |
|
713 | 'csrf_token': csrf_token, | |
711 | }, |
|
714 | }, | |
712 | status=302) |
|
715 | status=302) | |
713 | assert_session_flash( |
|
716 | assert_session_flash( | |
714 | response, 'Successfully committed to %s' |
|
717 | response, 'Successfully committed to %s' | |
715 | % os.path.join(filename)) |
|
718 | % os.path.join(filename)) | |
716 |
|
719 | |||
717 | def test_add_file_into_repo_missing_filename(self, backend, csrf_token): |
|
720 | def test_add_file_into_repo_missing_filename(self, backend, csrf_token): | |
718 | response = self.app.post( |
|
721 | response = self.app.post( | |
719 | url( |
|
722 | url( | |
720 | 'files_add', |
|
723 | 'files_add', | |
721 | repo_name=backend.repo_name, |
|
724 | repo_name=backend.repo_name, | |
722 | revision='tip', f_path='/'), |
|
725 | revision='tip', f_path='/'), | |
723 | params={ |
|
726 | params={ | |
724 | 'content': "foo", |
|
727 | 'content': "foo", | |
725 | 'csrf_token': csrf_token, |
|
728 | 'csrf_token': csrf_token, | |
726 | }, |
|
729 | }, | |
727 | status=302) |
|
730 | status=302) | |
728 |
|
731 | |||
729 | assert_session_flash(response, 'No filename') |
|
732 | assert_session_flash(response, 'No filename') | |
730 |
|
733 | |||
731 | def test_add_file_into_repo_errors_and_no_commits( |
|
734 | def test_add_file_into_repo_errors_and_no_commits( | |
732 | self, backend, csrf_token): |
|
735 | self, backend, csrf_token): | |
733 | repo = backend.create_repo() |
|
736 | repo = backend.create_repo() | |
734 | # Create a file with no filename, it will display an error but |
|
737 | # Create a file with no filename, it will display an error but | |
735 | # the repo has no commits yet |
|
738 | # the repo has no commits yet | |
736 | response = self.app.post( |
|
739 | response = self.app.post( | |
737 | url( |
|
740 | url( | |
738 | 'files_add', |
|
741 | 'files_add', | |
739 | repo_name=repo.repo_name, |
|
742 | repo_name=repo.repo_name, | |
740 | revision='tip', f_path='/'), |
|
743 | revision='tip', f_path='/'), | |
741 | params={ |
|
744 | params={ | |
742 | 'content': "foo", |
|
745 | 'content': "foo", | |
743 | 'csrf_token': csrf_token, |
|
746 | 'csrf_token': csrf_token, | |
744 | }, |
|
747 | }, | |
745 | status=302) |
|
748 | status=302) | |
746 |
|
749 | |||
747 | assert_session_flash(response, 'No filename') |
|
750 | assert_session_flash(response, 'No filename') | |
748 |
|
751 | |||
749 | # Not allowed, redirect to the summary |
|
752 | # Not allowed, redirect to the summary | |
750 | redirected = response.follow() |
|
753 | redirected = response.follow() | |
751 | summary_url = url('summary_home', repo_name=repo.repo_name) |
|
754 | summary_url = url('summary_home', repo_name=repo.repo_name) | |
752 |
|
755 | |||
753 | # As there are no commits, displays the summary page with the error of |
|
756 | # As there are no commits, displays the summary page with the error of | |
754 | # creating a file with no filename |
|
757 | # creating a file with no filename | |
755 | assert redirected.req.path == summary_url |
|
758 | assert redirected.req.path == summary_url | |
756 |
|
759 | |||
757 | @pytest.mark.parametrize("location, filename", [ |
|
760 | @pytest.mark.parametrize("location, filename", [ | |
758 | ('/abs', 'foo'), |
|
761 | ('/abs', 'foo'), | |
759 | ('../rel', 'foo'), |
|
762 | ('../rel', 'foo'), | |
760 | ('file/../foo', 'foo'), |
|
763 | ('file/../foo', 'foo'), | |
761 | ]) |
|
764 | ]) | |
762 | def test_add_file_into_repo_bad_filenames( |
|
765 | def test_add_file_into_repo_bad_filenames( | |
763 | self, location, filename, backend, csrf_token): |
|
766 | self, location, filename, backend, csrf_token): | |
764 | response = self.app.post( |
|
767 | response = self.app.post( | |
765 | url( |
|
768 | url( | |
766 | 'files_add', |
|
769 | 'files_add', | |
767 | repo_name=backend.repo_name, |
|
770 | repo_name=backend.repo_name, | |
768 | revision='tip', f_path='/'), |
|
771 | revision='tip', f_path='/'), | |
769 | params={ |
|
772 | params={ | |
770 | 'content': "foo", |
|
773 | 'content': "foo", | |
771 | 'filename': filename, |
|
774 | 'filename': filename, | |
772 | 'location': location, |
|
775 | 'location': location, | |
773 | 'csrf_token': csrf_token, |
|
776 | 'csrf_token': csrf_token, | |
774 | }, |
|
777 | }, | |
775 | status=302) |
|
778 | status=302) | |
776 |
|
779 | |||
777 | assert_session_flash( |
|
780 | assert_session_flash( | |
778 | response, |
|
781 | response, | |
779 | 'The location specified must be a relative path and must not ' |
|
782 | 'The location specified must be a relative path and must not ' | |
780 | 'contain .. in the path') |
|
783 | 'contain .. in the path') | |
781 |
|
784 | |||
782 | @pytest.mark.parametrize("cnt, location, filename", [ |
|
785 | @pytest.mark.parametrize("cnt, location, filename", [ | |
783 | (1, '', 'foo.txt'), |
|
786 | (1, '', 'foo.txt'), | |
784 | (2, 'dir', 'foo.rst'), |
|
787 | (2, 'dir', 'foo.rst'), | |
785 | (3, 'rel/dir', 'foo.bar'), |
|
788 | (3, 'rel/dir', 'foo.bar'), | |
786 | ]) |
|
789 | ]) | |
787 | def test_add_file_into_repo(self, cnt, location, filename, backend, |
|
790 | def test_add_file_into_repo(self, cnt, location, filename, backend, | |
788 | csrf_token): |
|
791 | csrf_token): | |
789 | repo = backend.create_repo() |
|
792 | repo = backend.create_repo() | |
790 | response = self.app.post( |
|
793 | response = self.app.post( | |
791 | url( |
|
794 | url( | |
792 | 'files_add', |
|
795 | 'files_add', | |
793 | repo_name=repo.repo_name, |
|
796 | repo_name=repo.repo_name, | |
794 | revision='tip', f_path='/'), |
|
797 | revision='tip', f_path='/'), | |
795 | params={ |
|
798 | params={ | |
796 | 'content': "foo", |
|
799 | 'content': "foo", | |
797 | 'filename': filename, |
|
800 | 'filename': filename, | |
798 | 'location': location, |
|
801 | 'location': location, | |
799 | 'csrf_token': csrf_token, |
|
802 | 'csrf_token': csrf_token, | |
800 | }, |
|
803 | }, | |
801 | status=302) |
|
804 | status=302) | |
802 | assert_session_flash( |
|
805 | assert_session_flash( | |
803 | response, 'Successfully committed to %s' |
|
806 | response, 'Successfully committed to %s' | |
804 | % os.path.join(location, filename)) |
|
807 | % os.path.join(location, filename)) | |
805 |
|
808 | |||
806 | def test_edit_file_view(self, backend): |
|
809 | def test_edit_file_view(self, backend): | |
807 | response = self.app.get( |
|
810 | response = self.app.get( | |
808 | url( |
|
811 | url( | |
809 | 'files_edit_home', |
|
812 | 'files_edit_home', | |
810 | repo_name=backend.repo_name, |
|
813 | repo_name=backend.repo_name, | |
811 | revision=backend.default_head_id, |
|
814 | revision=backend.default_head_id, | |
812 | f_path='vcs/nodes.py'), |
|
815 | f_path='vcs/nodes.py'), | |
813 | status=200) |
|
816 | status=200) | |
814 | response.mustcontain("Module holding everything related to vcs nodes.") |
|
817 | response.mustcontain("Module holding everything related to vcs nodes.") | |
815 |
|
818 | |||
816 | def test_edit_file_view_not_on_branch(self, backend): |
|
819 | def test_edit_file_view_not_on_branch(self, backend): | |
817 | repo = backend.create_repo() |
|
820 | repo = backend.create_repo() | |
818 | backend.ensure_file("vcs/nodes.py") |
|
821 | backend.ensure_file("vcs/nodes.py") | |
819 |
|
822 | |||
820 | response = self.app.get( |
|
823 | response = self.app.get( | |
821 | url( |
|
824 | url( | |
822 | 'files_edit_home', |
|
825 | 'files_edit_home', | |
823 | repo_name=repo.repo_name, |
|
826 | repo_name=repo.repo_name, | |
824 | revision='tip', f_path='vcs/nodes.py'), |
|
827 | revision='tip', f_path='vcs/nodes.py'), | |
825 | status=302) |
|
828 | status=302) | |
826 | assert_session_flash( |
|
829 | assert_session_flash( | |
827 | response, |
|
830 | response, | |
828 | 'You can only edit files with revision being a valid branch') |
|
831 | 'You can only edit files with revision being a valid branch') | |
829 |
|
832 | |||
830 | def test_edit_file_view_commit_changes(self, backend, csrf_token): |
|
833 | def test_edit_file_view_commit_changes(self, backend, csrf_token): | |
831 | repo = backend.create_repo() |
|
834 | repo = backend.create_repo() | |
832 | backend.ensure_file("vcs/nodes.py", content="print 'hello'") |
|
835 | backend.ensure_file("vcs/nodes.py", content="print 'hello'") | |
833 |
|
836 | |||
834 | response = self.app.post( |
|
837 | response = self.app.post( | |
835 | url( |
|
838 | url( | |
836 | 'files_edit', |
|
839 | 'files_edit', | |
837 | repo_name=repo.repo_name, |
|
840 | repo_name=repo.repo_name, | |
838 | revision=backend.default_head_id, |
|
841 | revision=backend.default_head_id, | |
839 | f_path='vcs/nodes.py'), |
|
842 | f_path='vcs/nodes.py'), | |
840 | params={ |
|
843 | params={ | |
841 | 'content': "print 'hello world'", |
|
844 | 'content': "print 'hello world'", | |
842 | 'message': 'I committed', |
|
845 | 'message': 'I committed', | |
843 | 'filename': "vcs/nodes.py", |
|
846 | 'filename': "vcs/nodes.py", | |
844 | 'csrf_token': csrf_token, |
|
847 | 'csrf_token': csrf_token, | |
845 | }, |
|
848 | }, | |
846 | status=302) |
|
849 | status=302) | |
847 | assert_session_flash( |
|
850 | assert_session_flash( | |
848 | response, 'Successfully committed to vcs/nodes.py') |
|
851 | response, 'Successfully committed to vcs/nodes.py') | |
849 | tip = repo.get_commit(commit_idx=-1) |
|
852 | tip = repo.get_commit(commit_idx=-1) | |
850 | assert tip.message == 'I committed' |
|
853 | assert tip.message == 'I committed' | |
851 |
|
854 | |||
852 | def test_edit_file_view_commit_changes_default_message(self, backend, |
|
855 | def test_edit_file_view_commit_changes_default_message(self, backend, | |
853 | csrf_token): |
|
856 | csrf_token): | |
854 | repo = backend.create_repo() |
|
857 | repo = backend.create_repo() | |
855 | backend.ensure_file("vcs/nodes.py", content="print 'hello'") |
|
858 | backend.ensure_file("vcs/nodes.py", content="print 'hello'") | |
856 |
|
859 | |||
857 | commit_id = ( |
|
860 | commit_id = ( | |
858 | backend.default_branch_name or |
|
861 | backend.default_branch_name or | |
859 | backend.repo.scm_instance().commit_ids[-1]) |
|
862 | backend.repo.scm_instance().commit_ids[-1]) | |
860 |
|
863 | |||
861 | response = self.app.post( |
|
864 | response = self.app.post( | |
862 | url( |
|
865 | url( | |
863 | 'files_edit', |
|
866 | 'files_edit', | |
864 | repo_name=repo.repo_name, |
|
867 | repo_name=repo.repo_name, | |
865 | revision=commit_id, |
|
868 | revision=commit_id, | |
866 | f_path='vcs/nodes.py'), |
|
869 | f_path='vcs/nodes.py'), | |
867 | params={ |
|
870 | params={ | |
868 | 'content': "print 'hello world'", |
|
871 | 'content': "print 'hello world'", | |
869 | 'message': '', |
|
872 | 'message': '', | |
870 | 'filename': "vcs/nodes.py", |
|
873 | 'filename': "vcs/nodes.py", | |
871 | 'csrf_token': csrf_token, |
|
874 | 'csrf_token': csrf_token, | |
872 | }, |
|
875 | }, | |
873 | status=302) |
|
876 | status=302) | |
874 | assert_session_flash( |
|
877 | assert_session_flash( | |
875 | response, 'Successfully committed to vcs/nodes.py') |
|
878 | response, 'Successfully committed to vcs/nodes.py') | |
876 | tip = repo.get_commit(commit_idx=-1) |
|
879 | tip = repo.get_commit(commit_idx=-1) | |
877 | assert tip.message == 'Edited file vcs/nodes.py via RhodeCode Enterprise' |
|
880 | assert tip.message == 'Edited file vcs/nodes.py via RhodeCode Enterprise' | |
878 |
|
881 | |||
879 | def test_delete_file_view(self, backend): |
|
882 | def test_delete_file_view(self, backend): | |
880 | self.app.get(url( |
|
883 | self.app.get(url( | |
881 | 'files_delete_home', |
|
884 | 'files_delete_home', | |
882 | repo_name=backend.repo_name, |
|
885 | repo_name=backend.repo_name, | |
883 | revision='tip', f_path='vcs/nodes.py')) |
|
886 | revision='tip', f_path='vcs/nodes.py')) | |
884 |
|
887 | |||
885 | def test_delete_file_view_not_on_branch(self, backend): |
|
888 | def test_delete_file_view_not_on_branch(self, backend): | |
886 | repo = backend.create_repo() |
|
889 | repo = backend.create_repo() | |
887 | backend.ensure_file('vcs/nodes.py') |
|
890 | backend.ensure_file('vcs/nodes.py') | |
888 |
|
891 | |||
889 | response = self.app.get( |
|
892 | response = self.app.get( | |
890 | url( |
|
893 | url( | |
891 | 'files_delete_home', |
|
894 | 'files_delete_home', | |
892 | repo_name=repo.repo_name, |
|
895 | repo_name=repo.repo_name, | |
893 | revision='tip', f_path='vcs/nodes.py'), |
|
896 | revision='tip', f_path='vcs/nodes.py'), | |
894 | status=302) |
|
897 | status=302) | |
895 | assert_session_flash( |
|
898 | assert_session_flash( | |
896 | response, |
|
899 | response, | |
897 | 'You can only delete files with revision being a valid branch') |
|
900 | 'You can only delete files with revision being a valid branch') | |
898 |
|
901 | |||
899 | def test_delete_file_view_commit_changes(self, backend, csrf_token): |
|
902 | def test_delete_file_view_commit_changes(self, backend, csrf_token): | |
900 | repo = backend.create_repo() |
|
903 | repo = backend.create_repo() | |
901 | backend.ensure_file("vcs/nodes.py") |
|
904 | backend.ensure_file("vcs/nodes.py") | |
902 |
|
905 | |||
903 | response = self.app.post( |
|
906 | response = self.app.post( | |
904 | url( |
|
907 | url( | |
905 | 'files_delete_home', |
|
908 | 'files_delete_home', | |
906 | repo_name=repo.repo_name, |
|
909 | repo_name=repo.repo_name, | |
907 | revision=backend.default_head_id, |
|
910 | revision=backend.default_head_id, | |
908 | f_path='vcs/nodes.py'), |
|
911 | f_path='vcs/nodes.py'), | |
909 | params={ |
|
912 | params={ | |
910 | 'message': 'i commited', |
|
913 | 'message': 'i commited', | |
911 | 'csrf_token': csrf_token, |
|
914 | 'csrf_token': csrf_token, | |
912 | }, |
|
915 | }, | |
913 | status=302) |
|
916 | status=302) | |
914 | assert_session_flash( |
|
917 | assert_session_flash( | |
915 | response, 'Successfully deleted file vcs/nodes.py') |
|
918 | response, 'Successfully deleted file vcs/nodes.py') | |
916 |
|
919 | |||
917 |
|
920 | |||
918 | def assert_files_in_response(response, files, params): |
|
921 | def assert_files_in_response(response, files, params): | |
919 | template = ( |
|
922 | template = ( | |
920 |
" |
|
923 | 'href="/%(repo_name)s/files/%(commit_id)s/%(name)s"') | |
921 | _assert_items_in_response(response, files, template, params) |
|
924 | _assert_items_in_response(response, files, template, params) | |
922 |
|
925 | |||
923 |
|
926 | |||
924 | def assert_dirs_in_response(response, dirs, params): |
|
927 | def assert_dirs_in_response(response, dirs, params): | |
925 | template = ( |
|
928 | template = ( | |
926 |
" |
|
929 | 'href="/%(repo_name)s/files/%(commit_id)s/%(name)s"') | |
927 | _assert_items_in_response(response, dirs, template, params) |
|
930 | _assert_items_in_response(response, dirs, template, params) | |
928 |
|
931 | |||
929 |
|
932 | |||
930 | def _assert_items_in_response(response, items, template, params): |
|
933 | def _assert_items_in_response(response, items, template, params): | |
931 | for item in items: |
|
934 | for item in items: | |
932 | item_params = {'name': item} |
|
935 | item_params = {'name': item} | |
933 | item_params.update(params) |
|
936 | item_params.update(params) | |
934 | response.mustcontain(template % item_params) |
|
937 | response.mustcontain(template % item_params) | |
935 |
|
938 | |||
936 |
|
939 | |||
937 | def assert_timeago_in_response(response, items, params): |
|
940 | def assert_timeago_in_response(response, items, params): | |
938 | for item in items: |
|
941 | for item in items: | |
939 | response.mustcontain(h.age_component(params['date'])) |
|
942 | response.mustcontain(h.age_component(params['date'])) |
@@ -1,282 +1,285 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2016 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2016 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import threading |
|
21 | import threading | |
22 | import time |
|
22 | import time | |
23 | import logging |
|
23 | import logging | |
24 | import os.path |
|
24 | import os.path | |
25 | import subprocess |
|
25 | import subprocess | |
26 | import urllib2 |
|
26 | import urllib2 | |
27 | from urlparse import urlparse, parse_qsl |
|
27 | from urlparse import urlparse, parse_qsl | |
28 | from urllib import unquote_plus |
|
28 | from urllib import unquote_plus | |
29 |
|
29 | |||
30 | import pytest |
|
30 | import pytest | |
31 | import rc_testdata |
|
31 | import rc_testdata | |
32 | from lxml.html import fromstring, tostring |
|
32 | from lxml.html import fromstring, tostring | |
33 | from lxml.cssselect import CSSSelector |
|
33 | from lxml.cssselect import CSSSelector | |
34 |
|
34 | |||
35 | from rhodecode.model.db import User |
|
35 | from rhodecode.model.db import User | |
36 | from rhodecode.model.meta import Session |
|
36 | from rhodecode.model.meta import Session | |
37 | from rhodecode.model.scm import ScmModel |
|
37 | from rhodecode.model.scm import ScmModel | |
38 | from rhodecode.lib.vcs.backends.svn.repository import SubversionRepository |
|
38 | from rhodecode.lib.vcs.backends.svn.repository import SubversionRepository | |
39 |
|
39 | |||
40 |
|
40 | |||
41 | log = logging.getLogger(__name__) |
|
41 | log = logging.getLogger(__name__) | |
42 |
|
42 | |||
43 |
|
43 | |||
44 | def set_anonymous_access(enabled): |
|
44 | def set_anonymous_access(enabled): | |
45 | """(Dis)allows anonymous access depending on parameter `enabled`""" |
|
45 | """(Dis)allows anonymous access depending on parameter `enabled`""" | |
46 | user = User.get_default_user() |
|
46 | user = User.get_default_user() | |
47 | user.active = enabled |
|
47 | user.active = enabled | |
48 | Session().add(user) |
|
48 | Session().add(user) | |
49 | Session().commit() |
|
49 | Session().commit() | |
50 | log.info('anonymous access is now: %s', enabled) |
|
50 | log.info('anonymous access is now: %s', enabled) | |
51 | assert enabled == User.get_default_user().active, ( |
|
51 | assert enabled == User.get_default_user().active, ( | |
52 | 'Cannot set anonymous access') |
|
52 | 'Cannot set anonymous access') | |
53 |
|
53 | |||
54 |
|
54 | |||
55 | def check_xfail_backends(node, backend_alias): |
|
55 | def check_xfail_backends(node, backend_alias): | |
56 | # Using "xfail_backends" here intentionally, since this marks work |
|
56 | # Using "xfail_backends" here intentionally, since this marks work | |
57 | # which is "to be done" soon. |
|
57 | # which is "to be done" soon. | |
58 | skip_marker = node.get_marker('xfail_backends') |
|
58 | skip_marker = node.get_marker('xfail_backends') | |
59 | if skip_marker and backend_alias in skip_marker.args: |
|
59 | if skip_marker and backend_alias in skip_marker.args: | |
60 | msg = "Support for backend %s to be developed." % (backend_alias, ) |
|
60 | msg = "Support for backend %s to be developed." % (backend_alias, ) | |
61 | msg = skip_marker.kwargs.get('reason', msg) |
|
61 | msg = skip_marker.kwargs.get('reason', msg) | |
62 | pytest.xfail(msg) |
|
62 | pytest.xfail(msg) | |
63 |
|
63 | |||
64 |
|
64 | |||
65 | def check_skip_backends(node, backend_alias): |
|
65 | def check_skip_backends(node, backend_alias): | |
66 | # Using "skip_backends" here intentionally, since this marks work which is |
|
66 | # Using "skip_backends" here intentionally, since this marks work which is | |
67 | # not supported. |
|
67 | # not supported. | |
68 | skip_marker = node.get_marker('skip_backends') |
|
68 | skip_marker = node.get_marker('skip_backends') | |
69 | if skip_marker and backend_alias in skip_marker.args: |
|
69 | if skip_marker and backend_alias in skip_marker.args: | |
70 | msg = "Feature not supported for backend %s." % (backend_alias, ) |
|
70 | msg = "Feature not supported for backend %s." % (backend_alias, ) | |
71 | msg = skip_marker.kwargs.get('reason', msg) |
|
71 | msg = skip_marker.kwargs.get('reason', msg) | |
72 | pytest.skip(msg) |
|
72 | pytest.skip(msg) | |
73 |
|
73 | |||
74 |
|
74 | |||
75 | def extract_git_repo_from_dump(dump_name, repo_name): |
|
75 | def extract_git_repo_from_dump(dump_name, repo_name): | |
76 | """Create git repo `repo_name` from dump `dump_name`.""" |
|
76 | """Create git repo `repo_name` from dump `dump_name`.""" | |
77 | repos_path = ScmModel().repos_path |
|
77 | repos_path = ScmModel().repos_path | |
78 | target_path = os.path.join(repos_path, repo_name) |
|
78 | target_path = os.path.join(repos_path, repo_name) | |
79 | rc_testdata.extract_git_dump(dump_name, target_path) |
|
79 | rc_testdata.extract_git_dump(dump_name, target_path) | |
80 | return target_path |
|
80 | return target_path | |
81 |
|
81 | |||
82 |
|
82 | |||
83 | def extract_hg_repo_from_dump(dump_name, repo_name): |
|
83 | def extract_hg_repo_from_dump(dump_name, repo_name): | |
84 | """Create hg repo `repo_name` from dump `dump_name`.""" |
|
84 | """Create hg repo `repo_name` from dump `dump_name`.""" | |
85 | repos_path = ScmModel().repos_path |
|
85 | repos_path = ScmModel().repos_path | |
86 | target_path = os.path.join(repos_path, repo_name) |
|
86 | target_path = os.path.join(repos_path, repo_name) | |
87 | rc_testdata.extract_hg_dump(dump_name, target_path) |
|
87 | rc_testdata.extract_hg_dump(dump_name, target_path) | |
88 | return target_path |
|
88 | return target_path | |
89 |
|
89 | |||
90 |
|
90 | |||
91 | def extract_svn_repo_from_dump(dump_name, repo_name): |
|
91 | def extract_svn_repo_from_dump(dump_name, repo_name): | |
92 | """Create a svn repo `repo_name` from dump `dump_name`.""" |
|
92 | """Create a svn repo `repo_name` from dump `dump_name`.""" | |
93 | repos_path = ScmModel().repos_path |
|
93 | repos_path = ScmModel().repos_path | |
94 | target_path = os.path.join(repos_path, repo_name) |
|
94 | target_path = os.path.join(repos_path, repo_name) | |
95 | SubversionRepository(target_path, create=True) |
|
95 | SubversionRepository(target_path, create=True) | |
96 | _load_svn_dump_into_repo(dump_name, target_path) |
|
96 | _load_svn_dump_into_repo(dump_name, target_path) | |
97 | return target_path |
|
97 | return target_path | |
98 |
|
98 | |||
99 |
|
99 | |||
100 | def assert_message_in_log(log_records, message, levelno, module): |
|
100 | def assert_message_in_log(log_records, message, levelno, module): | |
101 | messages = [ |
|
101 | messages = [ | |
102 | r.message for r in log_records |
|
102 | r.message for r in log_records | |
103 | if r.module == module and r.levelno == levelno |
|
103 | if r.module == module and r.levelno == levelno | |
104 | ] |
|
104 | ] | |
105 | assert message in messages |
|
105 | assert message in messages | |
106 |
|
106 | |||
107 |
|
107 | |||
108 | def _load_svn_dump_into_repo(dump_name, repo_path): |
|
108 | def _load_svn_dump_into_repo(dump_name, repo_path): | |
109 | """ |
|
109 | """ | |
110 | Utility to populate a svn repository with a named dump |
|
110 | Utility to populate a svn repository with a named dump | |
111 |
|
111 | |||
112 | Currently the dumps are in rc_testdata. They might later on be |
|
112 | Currently the dumps are in rc_testdata. They might later on be | |
113 | integrated with the main repository once they stabilize more. |
|
113 | integrated with the main repository once they stabilize more. | |
114 | """ |
|
114 | """ | |
115 | dump = rc_testdata.load_svn_dump(dump_name) |
|
115 | dump = rc_testdata.load_svn_dump(dump_name) | |
116 | load_dump = subprocess.Popen( |
|
116 | load_dump = subprocess.Popen( | |
117 | ['svnadmin', 'load', repo_path], |
|
117 | ['svnadmin', 'load', repo_path], | |
118 | stdin=subprocess.PIPE, stdout=subprocess.PIPE, |
|
118 | stdin=subprocess.PIPE, stdout=subprocess.PIPE, | |
119 | stderr=subprocess.PIPE) |
|
119 | stderr=subprocess.PIPE) | |
120 | out, err = load_dump.communicate(dump) |
|
120 | out, err = load_dump.communicate(dump) | |
121 | if load_dump.returncode != 0: |
|
121 | if load_dump.returncode != 0: | |
122 | log.error("Output of load_dump command: %s", out) |
|
122 | log.error("Output of load_dump command: %s", out) | |
123 | log.error("Error output of load_dump command: %s", err) |
|
123 | log.error("Error output of load_dump command: %s", err) | |
124 | raise Exception( |
|
124 | raise Exception( | |
125 | 'Failed to load dump "%s" into repository at path "%s".' |
|
125 | 'Failed to load dump "%s" into repository at path "%s".' | |
126 | % (dump_name, repo_path)) |
|
126 | % (dump_name, repo_path)) | |
127 |
|
127 | |||
128 |
|
128 | |||
129 | class AssertResponse(object): |
|
129 | class AssertResponse(object): | |
130 | """ |
|
130 | """ | |
131 | Utility that helps to assert things about a given HTML response. |
|
131 | Utility that helps to assert things about a given HTML response. | |
132 | """ |
|
132 | """ | |
133 |
|
133 | |||
134 | def __init__(self, response): |
|
134 | def __init__(self, response): | |
135 | self.response = response |
|
135 | self.response = response | |
136 |
|
136 | |||
137 | def one_element_exists(self, css_selector): |
|
137 | def one_element_exists(self, css_selector): | |
138 | self.get_element(css_selector) |
|
138 | self.get_element(css_selector) | |
139 |
|
139 | |||
140 | def no_element_exists(self, css_selector): |
|
140 | def no_element_exists(self, css_selector): | |
141 | assert not self._get_elements(css_selector) |
|
141 | assert not self._get_elements(css_selector) | |
142 |
|
142 | |||
143 | def element_equals_to(self, css_selector, expected_content): |
|
143 | def element_equals_to(self, css_selector, expected_content): | |
144 | element = self.get_element(css_selector) |
|
144 | element = self.get_element(css_selector) | |
145 | element_text = self._element_to_string(element) |
|
145 | element_text = self._element_to_string(element) | |
146 | assert expected_content in element_text |
|
146 | assert expected_content in element_text | |
147 |
|
147 | |||
148 | def element_contains(self, css_selector, expected_content): |
|
148 | def element_contains(self, css_selector, expected_content): | |
149 | element = self.get_element(css_selector) |
|
149 | element = self.get_element(css_selector) | |
150 | assert expected_content in element.text_content() |
|
150 | assert expected_content in element.text_content() | |
151 |
|
151 | |||
152 | def contains_one_link(self, link_text, href): |
|
152 | def contains_one_link(self, link_text, href): | |
153 | doc = fromstring(self.response.body) |
|
153 | doc = fromstring(self.response.body) | |
154 | sel = CSSSelector('a[href]') |
|
154 | sel = CSSSelector('a[href]') | |
155 | elements = [ |
|
155 | elements = [ | |
156 | e for e in sel(doc) if e.text_content().strip() == link_text] |
|
156 | e for e in sel(doc) if e.text_content().strip() == link_text] | |
157 | assert len(elements) == 1, "Did not find link or found multiple links" |
|
157 | assert len(elements) == 1, "Did not find link or found multiple links" | |
158 | self._ensure_url_equal(elements[0].attrib.get('href'), href) |
|
158 | self._ensure_url_equal(elements[0].attrib.get('href'), href) | |
159 |
|
159 | |||
160 | def contains_one_anchor(self, anchor_id): |
|
160 | def contains_one_anchor(self, anchor_id): | |
161 | doc = fromstring(self.response.body) |
|
161 | doc = fromstring(self.response.body) | |
162 | sel = CSSSelector('#' + anchor_id) |
|
162 | sel = CSSSelector('#' + anchor_id) | |
163 | elements = sel(doc) |
|
163 | elements = sel(doc) | |
164 | assert len(elements) == 1 |
|
164 | assert len(elements) == 1 | |
165 |
|
165 | |||
166 | def _ensure_url_equal(self, found, expected): |
|
166 | def _ensure_url_equal(self, found, expected): | |
167 | assert _Url(found) == _Url(expected) |
|
167 | assert _Url(found) == _Url(expected) | |
168 |
|
168 | |||
169 | def get_element(self, css_selector): |
|
169 | def get_element(self, css_selector): | |
170 | elements = self._get_elements(css_selector) |
|
170 | elements = self._get_elements(css_selector) | |
171 | assert len(elements) == 1 |
|
171 | assert len(elements) == 1 | |
172 | return elements[0] |
|
172 | return elements[0] | |
173 |
|
173 | |||
|
174 | def get_elements(self, css_selector): | |||
|
175 | return self._get_elements(css_selector) | |||
|
176 | ||||
174 | def _get_elements(self, css_selector): |
|
177 | def _get_elements(self, css_selector): | |
175 | doc = fromstring(self.response.body) |
|
178 | doc = fromstring(self.response.body) | |
176 | sel = CSSSelector(css_selector) |
|
179 | sel = CSSSelector(css_selector) | |
177 | elements = sel(doc) |
|
180 | elements = sel(doc) | |
178 | return elements |
|
181 | return elements | |
179 |
|
182 | |||
180 | def _element_to_string(self, element): |
|
183 | def _element_to_string(self, element): | |
181 | return tostring(element) |
|
184 | return tostring(element) | |
182 |
|
185 | |||
183 |
|
186 | |||
184 | class _Url(object): |
|
187 | class _Url(object): | |
185 | """ |
|
188 | """ | |
186 | A url object that can be compared with other url orbjects |
|
189 | A url object that can be compared with other url orbjects | |
187 | without regard to the vagaries of encoding, escaping, and ordering |
|
190 | without regard to the vagaries of encoding, escaping, and ordering | |
188 | of parameters in query strings. |
|
191 | of parameters in query strings. | |
189 |
|
192 | |||
190 | Inspired by |
|
193 | Inspired by | |
191 | http://stackoverflow.com/questions/5371992/comparing-two-urls-in-python |
|
194 | http://stackoverflow.com/questions/5371992/comparing-two-urls-in-python | |
192 | """ |
|
195 | """ | |
193 |
|
196 | |||
194 | def __init__(self, url): |
|
197 | def __init__(self, url): | |
195 | parts = urlparse(url) |
|
198 | parts = urlparse(url) | |
196 | _query = frozenset(parse_qsl(parts.query)) |
|
199 | _query = frozenset(parse_qsl(parts.query)) | |
197 | _path = unquote_plus(parts.path) |
|
200 | _path = unquote_plus(parts.path) | |
198 | parts = parts._replace(query=_query, path=_path) |
|
201 | parts = parts._replace(query=_query, path=_path) | |
199 | self.parts = parts |
|
202 | self.parts = parts | |
200 |
|
203 | |||
201 | def __eq__(self, other): |
|
204 | def __eq__(self, other): | |
202 | return self.parts == other.parts |
|
205 | return self.parts == other.parts | |
203 |
|
206 | |||
204 | def __hash__(self): |
|
207 | def __hash__(self): | |
205 | return hash(self.parts) |
|
208 | return hash(self.parts) | |
206 |
|
209 | |||
207 |
|
210 | |||
def run_test_concurrently(times, raise_catched_exc=True):
    """
    Add this decorator to small pieces of code that you want to test
    concurrently

    ex:

    @run_test_concurrently(25)
    def my_test_function():
        ...

    :param times: number of threads to run the decorated callable in.
    :param raise_catched_exc: when True, a caught exception is re-raised
        inside its worker thread in addition to being collected.
    """
    def test_concurrently_decorator(test_func):
        def wrapper(*args, **kwargs):
            exceptions = []

            def call_test_func():
                try:
                    test_func(*args, **kwargs)
                # `except ... as ...` works on Python 2.6+ and Python 3,
                # unlike the old `except Exception, e:` form.
                except Exception as e:
                    exceptions.append(e)
                    if raise_catched_exc:
                        raise
            threads = [
                threading.Thread(target=call_test_func)
                for _ in range(times)]
            for t in threads:
                t.start()
            for t in threads:
                t.join()
            if exceptions:
                raise Exception(
                    'test_concurrently intercepted %s exceptions: %s' % (
                        len(exceptions), exceptions))
        return wrapper
    return test_concurrently_decorator
243 |
|
246 | |||
244 |
|
247 | |||
def wait_for_url(url, timeout=10):
    """
    Wait until URL becomes reachable.

    It polls the URL until the timeout is reached or it became reachable.
    It will call `pytest.fail` in case the URL is not reachable.

    :param url: the URL to poll.
    :param timeout: maximum number of seconds to keep polling.
    """
    deadline = time.time() + timeout
    wait = 0.1

    # BUG FIX: the previous implementation could fall out of its loop on
    # timeout without failing, and only called pytest.fail when a single
    # reachability check happened to take longer than `wait`.
    while time.time() < deadline:
        if is_url_reachable(url):
            return
        # Not reachable yet; back off a little before the next poll.
        time.sleep(wait)
    pytest.fail("Timeout while waiting for URL {}".format(url))
265 |
|
268 | |||
266 |
|
269 | |||
def is_url_reachable(url):
    """
    Return True if `url` can be opened, False on any URL error.
    """
    try:
        response = urllib2.urlopen(url)
    except urllib2.URLError:
        return False
    # Close the response instead of leaking the connection; we only
    # care about reachability, not the body.
    response.close()
    return True
273 |
|
276 | |||
274 |
|
277 | |||
def get_session_from_response(response):
    """
    Extract the beaker session from a response object.

    Pylons has some magic to make the session available as
    `response.session`, but pyramid doesn't expose it, so we pull it
    straight out of the WSGI environ of the originating request.
    """
    # TODO: Try to look up the session key also.
    environ = response.request.environ
    return environ['beaker.session']
General Comments 0
You need to be logged in to leave comments.
Login now