# -*- coding: utf-8 -*-

# Copyright (C) 2010-2016 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

"""
Routes configuration

The more specific and detailed routes should be defined first, so that they
take precedence over the more generic routes. For more information
refer to the routes manual at http://routes.groovie.org/docs/

IMPORTANT: if you change any routing here, make sure to take a look at
lib/base.py and the _route_name variable, which uses some of the route
names stored here to do redirects.
"""
import os
import re
from routes import Mapper

from rhodecode.config import routing_links

# prefix for non-repository related links; needs to start with `/`
ADMIN_PREFIX = '/_admin'

# Default requirements for URL parts
URL_NAME_REQUIREMENTS = {
    # group names can have slashes in them, but they must not end with a slash
    'group_name': r'.*?[^/]',
    # repo names can have slashes in them, but they must not end with a slash
    'repo_name': r'.*?[^/]',
    # file path eats up everything at the end
    'f_path': r'.*',
    # reference types
    'source_ref_type': r'(branch|book|tag|rev|\%\(source_ref_type\)s)',
    'target_ref_type': r'(branch|book|tag|rev|\%\(target_ref_type\)s)',
}


def add_route_requirements(route_path, requirements):
    """
    Adds regex requirements to pyramid routes using a mapping dict

    >>> add_route_requirements('/{action}/{id}', {'id': r'\d+'})
    '/{action}/{id:\d+}'

    """
    for key, regex in requirements.items():
        route_path = route_path.replace('{%s}' % key, '{%s:%s}' % (key, regex))
    return route_path
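
# Illustrative sketch (editorial comment, not executed at import time):
# combining the helper above with URL_NAME_REQUIREMENTS turns a plain
# pattern into one whose ``repo_name`` part may contain slashes but must
# not end with one, while ``f_path`` swallows the rest of the URL:
#
#   >>> add_route_requirements('/{repo_name}/files/{f_path}',
#   ...                        URL_NAME_REQUIREMENTS)
#   '/{repo_name:.*?[^/]}/files/{f_path:.*}'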


class JSRoutesMapper(Mapper):
    """
    Wrapper for routes.Mapper to make pyroutes-compatible url definitions
    """
    _named_route_regex = re.compile(r'^[a-z-_0-9A-Z]+$')
    _argument_prog = re.compile(r'\{(.*?)\}|:\((.*)\)')

    def __init__(self, *args, **kw):
        super(JSRoutesMapper, self).__init__(*args, **kw)
        self._jsroutes = []

    def connect(self, *args, **kw):
        """
        Wrapper for connect to take an extra argument jsroute=True

        :param jsroute: boolean, if True will add the route to the pyroutes list
        """
        if kw.pop('jsroute', False):
            if not self._named_route_regex.match(args[0]):
                raise Exception('only named routes can be added to pyroutes')
            self._jsroutes.append(args[0])

        super(JSRoutesMapper, self).connect(*args, **kw)

    def _extract_route_information(self, route):
        """
        Convert a route into tuple(name, path, args), e.g.:
        ('user_profile', '/profile/%(username)s', ['username'])
        """
        routepath = route.routepath

        def replace(matchobj):
            if matchobj.group(1):
                return "%%(%s)s" % matchobj.group(1).split(':')[0]
            else:
                return "%%(%s)s" % matchobj.group(2)

        routepath = self._argument_prog.sub(replace, routepath)
        return (
            route.name,
            routepath,
            [(arg[0].split(':')[0] if arg[0] != '' else arg[1])
             for arg in self._argument_prog.findall(route.routepath)]
        )

    def jsroutes(self):
        """
        Return a list of pyroutes.js compatible routes
        """
        for route_name in self._jsroutes:
            yield self._extract_route_information(self._routenames[route_name])
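

# Illustrative usage sketch (editorial comment, mirroring the docstring
# example above; assumed usage, not part of the original file): routes
# registered with ``jsroute=True`` are exposed through ``jsroutes()`` in a
# pyroutes.js-compatible form.
#
#   >>> m = JSRoutesMapper()
#   >>> m.connect('user_profile', '/_profiles/{username}',
#   ...           controller='users', action='user_profile', jsroute=True)
#   >>> list(m.jsroutes())
#   [('user_profile', '/_profiles/%(username)s', ['username'])]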


def make_map(config):
    """Create, configure and return the routes Mapper"""
    rmap = JSRoutesMapper(directory=config['pylons.paths']['controllers'],
                          always_scan=config['debug'])
    rmap.minimization = False
    rmap.explicit = False

    from rhodecode.lib.utils2 import str2bool
    from rhodecode.model import repo, repo_group

    def check_repo(environ, match_dict):
        """
        check for valid repository for proper 404 handling

        :param environ: WSGI environment
        :param match_dict: dict of matched URL parts
        """
        repo_name = match_dict.get('repo_name')

        if match_dict.get('f_path'):
            # fix for multiple initial slashes that causes errors
            match_dict['f_path'] = match_dict['f_path'].lstrip('/')
        repo_model = repo.RepoModel()
        by_name_match = repo_model.get_by_repo_name(repo_name)
        # if we match quickly from the database, short circuit the operation
        # and validate the repo based on the type.
        if by_name_match:
            return True

        by_id_match = repo_model.get_repo_by_id(repo_name)
        if by_id_match:
            repo_name = by_id_match.repo_name
            match_dict['repo_name'] = repo_name
            return True

        return False

    def check_group(environ, match_dict):
        """
        check for valid repository group path for proper 404 handling

        :param environ: WSGI environment
        :param match_dict: dict of matched URL parts
        """
        repo_group_name = match_dict.get('group_name')
        repo_group_model = repo_group.RepoGroupModel()
        by_name_match = repo_group_model.get_by_group_name(repo_group_name)
        if by_name_match:
            return True

        return False

    def check_user_group(environ, match_dict):
        """
        check for valid user group for proper 404 handling

        :param environ: WSGI environment
        :param match_dict: dict of matched URL parts
        """
        return True

    def check_int(environ, match_dict):
        return match_dict.get('id').isdigit()
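
    # Illustrative note (editorial comment, assumed Routes behaviour):
    # functions referenced below via ``conditions={'function': ...}`` are
    # called as ``fn(environ, match_dict)`` during matching; returning False
    # rejects the match, which is what turns an unknown repository or group
    # into a proper 404, e.g.:
    #
    #   >>> check_int({}, {'id': '42'})
    #   True
    #   >>> check_int({}, {'id': 'abc'})
    #   False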

    #==========================================================================
    # CUSTOM ROUTES HERE
    #==========================================================================

    # MAIN PAGE
    rmap.connect('home', '/', controller='home', action='index', jsroute=True)
    rmap.connect('goto_switcher_data', '/_goto_data', controller='home',
                 action='goto_switcher_data')
    rmap.connect('repo_list_data', '/_repos', controller='home',
                 action='repo_list_data')

    rmap.connect('user_autocomplete_data', '/_users', controller='home',
                 action='user_autocomplete_data', jsroute=True)
    rmap.connect('user_group_autocomplete_data', '/_user_groups', controller='home',
                 action='user_group_autocomplete_data')

    rmap.connect(
        'user_profile', '/_profiles/{username}', controller='users',
        action='user_profile')

    # TODO: johbo: Static links, to be replaced by our redirection mechanism
    rmap.connect('rst_help',
                 'http://docutils.sourceforge.net/docs/user/rst/quickref.html',
                 _static=True)
    rmap.connect('markdown_help',
                 'http://daringfireball.net/projects/markdown/syntax',
                 _static=True)
    rmap.connect('rhodecode_official', 'https://rhodecode.com', _static=True)
    rmap.connect('rhodecode_support', 'https://rhodecode.com/help/', _static=True)
    rmap.connect('rhodecode_translations', 'https://rhodecode.com/translate/enterprise', _static=True)
    # TODO: anderson - making this a static link since redirect won't play
    # nice with POST requests
    rmap.connect('enterprise_license_convert_from_old',
                 'https://rhodecode.com/u/license-upgrade',
                 _static=True)

    routing_links.connect_redirection_links(rmap)

    rmap.connect('ping', '%s/ping' % (ADMIN_PREFIX,), controller='home', action='ping')
    rmap.connect('error_test', '%s/error_test' % (ADMIN_PREFIX,), controller='home', action='error_test')

    # ADMIN REPOSITORY ROUTES
    with rmap.submapper(path_prefix=ADMIN_PREFIX,
                        controller='admin/repos') as m:
        m.connect('repos', '/repos',
                  action='create', conditions={'method': ['POST']})
        m.connect('repos', '/repos',
                  action='index', conditions={'method': ['GET']})
        m.connect('new_repo', '/create_repository', jsroute=True,
                  action='create_repository', conditions={'method': ['GET']})
        m.connect('/repos/{repo_name}',
                  action='update', conditions={'method': ['PUT'],
                                               'function': check_repo},
                  requirements=URL_NAME_REQUIREMENTS)
        m.connect('delete_repo', '/repos/{repo_name}',
                  action='delete', conditions={'method': ['DELETE']},
                  requirements=URL_NAME_REQUIREMENTS)
        m.connect('repo', '/repos/{repo_name}',
                  action='show', conditions={'method': ['GET'],
                                             'function': check_repo},
                  requirements=URL_NAME_REQUIREMENTS)
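
    # Note (editorial comment, assumed Routes semantics): inside a submapper
    # block every ``m.connect(...)`` inherits ``path_prefix`` and
    # ``controller``, so the pair of 'repos' entries above is REST-style
    # dispatch: one URL, with the HTTP method in ``conditions`` selecting
    # the action (POST -> create, GET -> index).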

    # ADMIN REPOSITORY GROUPS ROUTES
    with rmap.submapper(path_prefix=ADMIN_PREFIX,
                        controller='admin/repo_groups') as m:
        m.connect('repo_groups', '/repo_groups',
                  action='create', conditions={'method': ['POST']})
        m.connect('repo_groups', '/repo_groups',
                  action='index', conditions={'method': ['GET']})
        m.connect('new_repo_group', '/repo_groups/new',
                  action='new', conditions={'method': ['GET']})
        m.connect('update_repo_group', '/repo_groups/{group_name}',
                  action='update', conditions={'method': ['PUT'],
                                               'function': check_group},
                  requirements=URL_NAME_REQUIREMENTS)

        # EXTRAS REPO GROUP ROUTES
        m.connect('edit_repo_group', '/repo_groups/{group_name}/edit',
                  action='edit',
                  conditions={'method': ['GET'], 'function': check_group},
                  requirements=URL_NAME_REQUIREMENTS)
        m.connect('edit_repo_group', '/repo_groups/{group_name}/edit',
                  action='edit',
                  conditions={'method': ['PUT'], 'function': check_group},
                  requirements=URL_NAME_REQUIREMENTS)

        m.connect('edit_repo_group_advanced', '/repo_groups/{group_name}/edit/advanced',
                  action='edit_repo_group_advanced',
                  conditions={'method': ['GET'], 'function': check_group},
                  requirements=URL_NAME_REQUIREMENTS)
        m.connect('edit_repo_group_advanced', '/repo_groups/{group_name}/edit/advanced',
                  action='edit_repo_group_advanced',
                  conditions={'method': ['PUT'], 'function': check_group},
                  requirements=URL_NAME_REQUIREMENTS)

        m.connect('edit_repo_group_perms', '/repo_groups/{group_name}/edit/permissions',
                  action='edit_repo_group_perms',
                  conditions={'method': ['GET'], 'function': check_group},
                  requirements=URL_NAME_REQUIREMENTS)
        m.connect('edit_repo_group_perms', '/repo_groups/{group_name}/edit/permissions',
                  action='update_perms',
                  conditions={'method': ['PUT'], 'function': check_group},
                  requirements=URL_NAME_REQUIREMENTS)

        m.connect('delete_repo_group', '/repo_groups/{group_name}',
                  action='delete', conditions={'method': ['DELETE'],
                                               'function': check_group},
                  requirements=URL_NAME_REQUIREMENTS)

    # ADMIN USER ROUTES
    with rmap.submapper(path_prefix=ADMIN_PREFIX,
                        controller='admin/users') as m:
        m.connect('users', '/users',
                  action='create', conditions={'method': ['POST']})
        m.connect('users', '/users',
                  action='index', conditions={'method': ['GET']})
        m.connect('new_user', '/users/new',
                  action='new', conditions={'method': ['GET']})
        m.connect('update_user', '/users/{user_id}',
                  action='update', conditions={'method': ['PUT']})
        m.connect('delete_user', '/users/{user_id}',
                  action='delete', conditions={'method': ['DELETE']})
        m.connect('edit_user', '/users/{user_id}/edit',
                  action='edit', conditions={'method': ['GET']})
        m.connect('user', '/users/{user_id}',
                  action='show', conditions={'method': ['GET']})
        m.connect('force_password_reset_user', '/users/{user_id}/password_reset',
                  action='reset_password', conditions={'method': ['POST']})
        m.connect('create_personal_repo_group', '/users/{user_id}/create_repo_group',
                  action='create_personal_repo_group', conditions={'method': ['POST']})

        # EXTRAS USER ROUTES
        m.connect('edit_user_advanced', '/users/{user_id}/edit/advanced',
                  action='edit_advanced', conditions={'method': ['GET']})
        m.connect('edit_user_advanced', '/users/{user_id}/edit/advanced',
                  action='update_advanced', conditions={'method': ['PUT']})

        m.connect('edit_user_auth_tokens', '/users/{user_id}/edit/auth_tokens',
                  action='edit_auth_tokens', conditions={'method': ['GET']})
        m.connect('edit_user_auth_tokens', '/users/{user_id}/edit/auth_tokens',
                  action='add_auth_token', conditions={'method': ['PUT']})
        m.connect('edit_user_auth_tokens', '/users/{user_id}/edit/auth_tokens',
                  action='delete_auth_token', conditions={'method': ['DELETE']})

        m.connect('edit_user_global_perms', '/users/{user_id}/edit/global_permissions',
                  action='edit_global_perms', conditions={'method': ['GET']})
        m.connect('edit_user_global_perms', '/users/{user_id}/edit/global_permissions',
                  action='update_global_perms', conditions={'method': ['PUT']})

        m.connect('edit_user_perms_summary', '/users/{user_id}/edit/permissions_summary',
                  action='edit_perms_summary', conditions={'method': ['GET']})

        m.connect('edit_user_emails', '/users/{user_id}/edit/emails',
                  action='edit_emails', conditions={'method': ['GET']})
        m.connect('edit_user_emails', '/users/{user_id}/edit/emails',
                  action='add_email', conditions={'method': ['PUT']})
        m.connect('edit_user_emails', '/users/{user_id}/edit/emails',
                  action='delete_email', conditions={'method': ['DELETE']})

        m.connect('edit_user_ips', '/users/{user_id}/edit/ips',
                  action='edit_ips', conditions={'method': ['GET']})
        m.connect('edit_user_ips', '/users/{user_id}/edit/ips',
                  action='add_ip', conditions={'method': ['PUT']})
        m.connect('edit_user_ips', '/users/{user_id}/edit/ips',
                  action='delete_ip', conditions={'method': ['DELETE']})

    # ADMIN USER GROUPS REST ROUTES
    with rmap.submapper(path_prefix=ADMIN_PREFIX,
                        controller='admin/user_groups') as m:
        m.connect('users_groups', '/user_groups',
                  action='create', conditions={'method': ['POST']})
        m.connect('users_groups', '/user_groups',
                  action='index', conditions={'method': ['GET']})
        m.connect('new_users_group', '/user_groups/new',
                  action='new', conditions={'method': ['GET']})
        m.connect('update_users_group', '/user_groups/{user_group_id}',
                  action='update', conditions={'method': ['PUT']})
        m.connect('delete_users_group', '/user_groups/{user_group_id}',
                  action='delete', conditions={'method': ['DELETE']})
        m.connect('edit_users_group', '/user_groups/{user_group_id}/edit',
                  action='edit', conditions={'method': ['GET']},
                  function=check_user_group)

        # EXTRAS USER GROUP ROUTES
        m.connect('edit_user_group_global_perms',
                  '/user_groups/{user_group_id}/edit/global_permissions',
                  action='edit_global_perms', conditions={'method': ['GET']})
        m.connect('edit_user_group_global_perms',
                  '/user_groups/{user_group_id}/edit/global_permissions',
                  action='update_global_perms', conditions={'method': ['PUT']})
        m.connect('edit_user_group_perms_summary',
                  '/user_groups/{user_group_id}/edit/permissions_summary',
                  action='edit_perms_summary', conditions={'method': ['GET']})

        m.connect('edit_user_group_perms',
                  '/user_groups/{user_group_id}/edit/permissions',
                  action='edit_perms', conditions={'method': ['GET']})
        m.connect('edit_user_group_perms',
                  '/user_groups/{user_group_id}/edit/permissions',
                  action='update_perms', conditions={'method': ['PUT']})

        m.connect('edit_user_group_advanced',
                  '/user_groups/{user_group_id}/edit/advanced',
                  action='edit_advanced', conditions={'method': ['GET']})

        m.connect('edit_user_group_members',
                  '/user_groups/{user_group_id}/edit/members', jsroute=True,
                  action='edit_members', conditions={'method': ['GET']})

    # ADMIN PERMISSIONS ROUTES
    with rmap.submapper(path_prefix=ADMIN_PREFIX,
                        controller='admin/permissions') as m:
        m.connect('admin_permissions_application', '/permissions/application',
                  action='permission_application_update', conditions={'method': ['POST']})
        m.connect('admin_permissions_application', '/permissions/application',
                  action='permission_application', conditions={'method': ['GET']})

        m.connect('admin_permissions_global', '/permissions/global',
                  action='permission_global_update', conditions={'method': ['POST']})
        m.connect('admin_permissions_global', '/permissions/global',
                  action='permission_global', conditions={'method': ['GET']})

        m.connect('admin_permissions_object', '/permissions/object',
                  action='permission_objects_update', conditions={'method': ['POST']})
        m.connect('admin_permissions_object', '/permissions/object',
                  action='permission_objects', conditions={'method': ['GET']})

        m.connect('admin_permissions_ips', '/permissions/ips',
                  action='permission_ips', conditions={'method': ['POST']})
        m.connect('admin_permissions_ips', '/permissions/ips',
                  action='permission_ips', conditions={'method': ['GET']})

        m.connect('admin_permissions_overview', '/permissions/overview',
                  action='permission_perms', conditions={'method': ['GET']})

    # ADMIN DEFAULTS REST ROUTES
    with rmap.submapper(path_prefix=ADMIN_PREFIX,
                        controller='admin/defaults') as m:
        m.connect('admin_defaults_repositories', '/defaults/repositories',
                  action='update_repository_defaults', conditions={'method': ['POST']})
        m.connect('admin_defaults_repositories', '/defaults/repositories',
                  action='index', conditions={'method': ['GET']})

    # ADMIN DEBUG STYLE ROUTES
    if str2bool(config.get('debug_style')):
        with rmap.submapper(path_prefix=ADMIN_PREFIX + '/debug_style',
                            controller='debug_style') as m:
            m.connect('debug_style_home', '',
                      action='index', conditions={'method': ['GET']})
            m.connect('debug_style_template', '/t/{t_path}',
                      action='template', conditions={'method': ['GET']})

    # ADMIN SETTINGS ROUTES
    with rmap.submapper(path_prefix=ADMIN_PREFIX,
                        controller='admin/settings') as m:

        # default
        m.connect('admin_settings', '/settings',
                  action='settings_global_update',
                  conditions={'method': ['POST']})
        m.connect('admin_settings', '/settings',
                  action='settings_global', conditions={'method': ['GET']})

        m.connect('admin_settings_vcs', '/settings/vcs',
                  action='settings_vcs_update',
                  conditions={'method': ['POST']})
        m.connect('admin_settings_vcs', '/settings/vcs',
                  action='settings_vcs',
                  conditions={'method': ['GET']})
        m.connect('admin_settings_vcs', '/settings/vcs',
                  action='delete_svn_pattern',
                  conditions={'method': ['DELETE']})

        m.connect('admin_settings_mapping', '/settings/mapping',
                  action='settings_mapping_update',
                  conditions={'method': ['POST']})
        m.connect('admin_settings_mapping', '/settings/mapping',
                  action='settings_mapping', conditions={'method': ['GET']})

        m.connect('admin_settings_global', '/settings/global',
                  action='settings_global_update',
                  conditions={'method': ['POST']})
        m.connect('admin_settings_global', '/settings/global',
                  action='settings_global', conditions={'method': ['GET']})

        m.connect('admin_settings_visual', '/settings/visual',
                  action='settings_visual_update',
                  conditions={'method': ['POST']})
        m.connect('admin_settings_visual', '/settings/visual',
                  action='settings_visual', conditions={'method': ['GET']})

        m.connect('admin_settings_issuetracker',
                  '/settings/issue-tracker', action='settings_issuetracker',
                  conditions={'method': ['GET']})
        m.connect('admin_settings_issuetracker_save',
                  '/settings/issue-tracker/save',
                  action='settings_issuetracker_save',
                  conditions={'method': ['POST']})
        m.connect('admin_issuetracker_test', '/settings/issue-tracker/test',
                  action='settings_issuetracker_test',
                  conditions={'method': ['POST']})
        m.connect('admin_issuetracker_delete',
                  '/settings/issue-tracker/delete',
                  action='settings_issuetracker_delete',
                  conditions={'method': ['DELETE']})

        m.connect('admin_settings_email', '/settings/email',
                  action='settings_email_update',
                  conditions={'method': ['POST']})
        m.connect('admin_settings_email', '/settings/email',
                  action='settings_email', conditions={'method': ['GET']})

        m.connect('admin_settings_hooks', '/settings/hooks',
                  action='settings_hooks_update',
                  conditions={'method': ['POST', 'DELETE']})
        m.connect('admin_settings_hooks', '/settings/hooks',
                  action='settings_hooks', conditions={'method': ['GET']})

        m.connect('admin_settings_search', '/settings/search',
                  action='settings_search', conditions={'method': ['GET']})

        m.connect('admin_settings_system', '/settings/system',
                  action='settings_system', conditions={'method': ['GET']})

        m.connect('admin_settings_system_update', '/settings/system/updates',
                  action='settings_system_update', conditions={'method': ['GET']})

        m.connect('admin_settings_supervisor', '/settings/supervisor',
                  action='settings_supervisor', conditions={'method': ['GET']})
        m.connect('admin_settings_supervisor_log', '/settings/supervisor/{procid}/log',
                  action='settings_supervisor_log', conditions={'method': ['GET']})

        m.connect('admin_settings_labs', '/settings/labs',
                  action='settings_labs_update',
                  conditions={'method': ['POST']})
        m.connect('admin_settings_labs', '/settings/labs',
                  action='settings_labs', conditions={'method': ['GET']})

    # ADMIN MY ACCOUNT
    with rmap.submapper(path_prefix=ADMIN_PREFIX,
                        controller='admin/my_account') as m:

        m.connect('my_account', '/my_account',
                  action='my_account', conditions={'method': ['GET']})
        m.connect('my_account_edit', '/my_account/edit',
                  action='my_account_edit', conditions={'method': ['GET']})
        m.connect('my_account', '/my_account',
                  action='my_account_update', conditions={'method': ['POST']})

        m.connect('my_account_password', '/my_account/password',
                  action='my_account_password', conditions={'method': ['GET']})
        m.connect('my_account_password', '/my_account/password',
                  action='my_account_password_update', conditions={'method': ['POST']})

        m.connect('my_account_repos', '/my_account/repos',
                  action='my_account_repos', conditions={'method': ['GET']})

        m.connect('my_account_watched', '/my_account/watched',
                  action='my_account_watched', conditions={'method': ['GET']})

        m.connect('my_account_pullrequests', '/my_account/pull_requests',
                  action='my_account_pullrequests', conditions={'method': ['GET']})

        m.connect('my_account_perms', '/my_account/perms',
                  action='my_account_perms', conditions={'method': ['GET']})

        m.connect('my_account_emails', '/my_account/emails',
                  action='my_account_emails', conditions={'method': ['GET']})
        m.connect('my_account_emails', '/my_account/emails',
                  action='my_account_emails_add', conditions={'method': ['POST']})
        m.connect('my_account_emails', '/my_account/emails',
                  action='my_account_emails_delete', conditions={'method': ['DELETE']})

        m.connect('my_account_auth_tokens', '/my_account/auth_tokens',
                  action='my_account_auth_tokens', conditions={'method': ['GET']})
        m.connect('my_account_auth_tokens', '/my_account/auth_tokens',
                  action='my_account_auth_tokens_add', conditions={'method': ['POST']})
        m.connect('my_account_auth_tokens', '/my_account/auth_tokens',
                  action='my_account_auth_tokens_delete', conditions={'method': ['DELETE']})

    # NOTIFICATION REST ROUTES
    with rmap.submapper(path_prefix=ADMIN_PREFIX,
                        controller='admin/notifications') as m:
        m.connect('notifications', '/notifications',
                  action='index', conditions={'method': ['GET']})
        m.connect('notifications_mark_all_read', '/notifications/mark_all_read',
                  action='mark_all_read', conditions={'method': ['POST']})

        m.connect('/notifications/{notification_id}',
                  action='update', conditions={'method': ['PUT']})
        m.connect('/notifications/{notification_id}',
                  action='delete', conditions={'method': ['DELETE']})
        m.connect('notification', '/notifications/{notification_id}',
                  action='show', conditions={'method': ['GET']})

    # ADMIN GIST
    with rmap.submapper(path_prefix=ADMIN_PREFIX,
                        controller='admin/gists') as m:
        m.connect('gists', '/gists',
                  action='create', conditions={'method': ['POST']})
        m.connect('gists', '/gists', jsroute=True,
                  action='index', conditions={'method': ['GET']})
        m.connect('new_gist', '/gists/new', jsroute=True,
                  action='new', conditions={'method': ['GET']})

        m.connect('/gists/{gist_id}',
                  action='delete', conditions={'method': ['DELETE']})
        m.connect('edit_gist', '/gists/{gist_id}/edit',
                  action='edit_form', conditions={'method': ['GET']})
        m.connect('edit_gist', '/gists/{gist_id}/edit',
                  action='edit', conditions={'method': ['POST']})
        m.connect(
            'edit_gist_check_revision', '/gists/{gist_id}/edit/check_revision',
            action='check_revision', conditions={'method': ['GET']})

        m.connect('gist', '/gists/{gist_id}',
                  action='show', conditions={'method': ['GET']})
        m.connect('gist_rev', '/gists/{gist_id}/{revision}',
                  revision='tip',
                  action='show', conditions={'method': ['GET']})
        m.connect('formatted_gist', '/gists/{gist_id}/{revision}/{format}',
                  revision='tip',
                  action='show', conditions={'method': ['GET']})
        m.connect('formatted_gist_file', '/gists/{gist_id}/{revision}/{format}/{f_path}',
                  revision='tip',
                  action='show', conditions={'method': ['GET']},
                  requirements=URL_NAME_REQUIREMENTS)

    # ADMIN MAIN PAGES
    with rmap.submapper(path_prefix=ADMIN_PREFIX,
                        controller='admin/admin') as m:
        m.connect('admin_home', '', action='index')
        m.connect('admin_add_repo', '/add_repo/{new_repo:[a-z0-9\. _-]*}',
                  action='add_repo')
        m.connect(
            'pull_requests_global_0', '/pull_requests/{pull_request_id:[0-9]+}',
            action='pull_requests')
        m.connect(
            'pull_requests_global', '/pull-requests/{pull_request_id:[0-9]+}',
            action='pull_requests')


    # USER JOURNAL
    rmap.connect('journal', '%s/journal' % (ADMIN_PREFIX,),
                 controller='journal', action='index')
    rmap.connect('journal_rss', '%s/journal/rss' % (ADMIN_PREFIX,),
                 controller='journal', action='journal_rss')
    rmap.connect('journal_atom', '%s/journal/atom' % (ADMIN_PREFIX,),
                 controller='journal', action='journal_atom')

    rmap.connect('public_journal', '%s/public_journal' % (ADMIN_PREFIX,),
                 controller='journal', action='public_journal')

    rmap.connect('public_journal_rss', '%s/public_journal/rss' % (ADMIN_PREFIX,),
                 controller='journal', action='public_journal_rss')

    rmap.connect('public_journal_rss_old', '%s/public_journal_rss' % (ADMIN_PREFIX,),
                 controller='journal', action='public_journal_rss')

    rmap.connect('public_journal_atom',
                 '%s/public_journal/atom' % (ADMIN_PREFIX,), controller='journal',
                 action='public_journal_atom')

    rmap.connect('public_journal_atom_old',
                 '%s/public_journal_atom' % (ADMIN_PREFIX,), controller='journal',
                 action='public_journal_atom')

    rmap.connect('toggle_following', '%s/toggle_following' % (ADMIN_PREFIX,),
                 controller='journal', action='toggle_following', jsroute=True,
                 conditions={'method': ['POST']})

    # FULL TEXT SEARCH
    rmap.connect('search', '%s/search' % (ADMIN_PREFIX,),
                 controller='search')
    rmap.connect('search_repo_home', '/{repo_name}/search',
                 controller='search',
                 action='index',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    # FEEDS
    rmap.connect('rss_feed_home', '/{repo_name}/feed/rss',
                 controller='feed', action='rss',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('atom_feed_home', '/{repo_name}/feed/atom',
                 controller='feed', action='atom',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    #==========================================================================
    # REPOSITORY ROUTES
    #==========================================================================

    rmap.connect('repo_creating_home', '/{repo_name}/repo_creating',
                 controller='admin/repos', action='repo_creating',
                 requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('repo_check_home', '/{repo_name}/crepo_check',
                 controller='admin/repos', action='repo_check',
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('repo_stats', '/{repo_name}/repo_stats/{commit_id}',
                 controller='summary', action='repo_stats',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('repo_refs_data', '/{repo_name}/refs-data',
                 controller='summary', action='repo_refs_data', jsroute=True,
                 requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('repo_refs_changelog_data', '/{repo_name}/refs-data-changelog',
                 controller='summary', action='repo_refs_changelog_data',
                 requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('changeset_home', '/{repo_name}/changeset/{revision}',
                 controller='changeset', revision='tip', jsroute=True,
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('changeset_children', '/{repo_name}/changeset_children/{revision}',
                 controller='changeset', revision='tip', action='changeset_children',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('changeset_parents', '/{repo_name}/changeset_parents/{revision}',
                 controller='changeset', revision='tip', action='changeset_parents',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    # repo edit options
    rmap.connect('edit_repo', '/{repo_name}/settings', jsroute=True,
                 controller='admin/repos', action='edit',
                 conditions={'method': ['GET'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('edit_repo_perms', '/{repo_name}/settings/permissions',
                 jsroute=True,
                 controller='admin/repos', action='edit_permissions',
                 conditions={'method': ['GET'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('edit_repo_perms_update', '/{repo_name}/settings/permissions',
                 controller='admin/repos', action='edit_permissions_update',
                 conditions={'method': ['PUT'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('edit_repo_fields', '/{repo_name}/settings/fields',
                 controller='admin/repos', action='edit_fields',
                 conditions={'method': ['GET'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('create_repo_fields', '/{repo_name}/settings/fields/new',
                 controller='admin/repos', action='create_repo_field',
                 conditions={'method': ['PUT'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('delete_repo_fields', '/{repo_name}/settings/fields/{field_id}',
                 controller='admin/repos', action='delete_repo_field',
                 conditions={'method': ['DELETE'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('edit_repo_advanced', '/{repo_name}/settings/advanced',
                 controller='admin/repos', action='edit_advanced',
                 conditions={'method': ['GET'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('edit_repo_advanced_locking', '/{repo_name}/settings/advanced/locking',
                 controller='admin/repos', action='edit_advanced_locking',
                 conditions={'method': ['PUT'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('toggle_locking', '/{repo_name}/settings/advanced/locking_toggle',
                 controller='admin/repos', action='toggle_locking',
                 conditions={'method': ['GET'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('edit_repo_advanced_journal', '/{repo_name}/settings/advanced/journal',
                 controller='admin/repos', action='edit_advanced_journal',
                 conditions={'method': ['PUT'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('edit_repo_advanced_fork', '/{repo_name}/settings/advanced/fork',
                 controller='admin/repos', action='edit_advanced_fork',
                 conditions={'method': ['PUT'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('edit_repo_caches', '/{repo_name}/settings/caches',
                 controller='admin/repos', action='edit_caches_form',
                 conditions={'method': ['GET'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('edit_repo_caches', '/{repo_name}/settings/caches',
                 controller='admin/repos', action='edit_caches',
                 conditions={'method': ['PUT'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('edit_repo_remote', '/{repo_name}/settings/remote',
                 controller='admin/repos', action='edit_remote_form',
                 conditions={'method': ['GET'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('edit_repo_remote', '/{repo_name}/settings/remote',
                 controller='admin/repos', action='edit_remote',
                 conditions={'method': ['PUT'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('edit_repo_statistics', '/{repo_name}/settings/statistics',
                 controller='admin/repos', action='edit_statistics_form',
                 conditions={'method': ['GET'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('edit_repo_statistics', '/{repo_name}/settings/statistics',
                 controller='admin/repos', action='edit_statistics',
                 conditions={'method': ['PUT'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('repo_settings_issuetracker',
                 '/{repo_name}/settings/issue-tracker',
                 controller='admin/repos', action='repo_issuetracker',
                 conditions={'method': ['GET'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('repo_issuetracker_test',
                 '/{repo_name}/settings/issue-tracker/test',
                 controller='admin/repos', action='repo_issuetracker_test',
                 conditions={'method': ['POST'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('repo_issuetracker_delete',
                 '/{repo_name}/settings/issue-tracker/delete',
                 controller='admin/repos', action='repo_issuetracker_delete',
                 conditions={'method': ['DELETE'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('repo_issuetracker_save',
                 '/{repo_name}/settings/issue-tracker/save',
                 controller='admin/repos', action='repo_issuetracker_save',
                 conditions={'method': ['POST'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs',
                 controller='admin/repos', action='repo_settings_vcs_update',
                 conditions={'method': ['POST'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs',
                 controller='admin/repos', action='repo_settings_vcs',
                 conditions={'method': ['GET'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs',
                 controller='admin/repos', action='repo_delete_svn_pattern',
                 conditions={'method': ['DELETE'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    # still-working URL kept for backward compatibility
    rmap.connect('raw_changeset_home_depraced',
                 '/{repo_name}/raw-changeset/{revision}',
                 controller='changeset', action='changeset_raw',
                 revision='tip', conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    # new URLs
    rmap.connect('changeset_raw_home',
                 '/{repo_name}/changeset-diff/{revision}',
                 controller='changeset', action='changeset_raw',
                 revision='tip', conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('changeset_patch_home',
                 '/{repo_name}/changeset-patch/{revision}',
                 controller='changeset', action='changeset_patch',
                 revision='tip', conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('changeset_download_home',
                 '/{repo_name}/changeset-download/{revision}',
                 controller='changeset', action='changeset_download',
                 revision='tip', conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('changeset_comment',
                 '/{repo_name}/changeset/{revision}/comment', jsroute=True,
                 controller='changeset', revision='tip', action='comment',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('changeset_comment_preview',
                 '/{repo_name}/changeset/comment/preview', jsroute=True,
                 controller='changeset', action='preview_comment',
                 conditions={'function': check_repo, 'method': ['POST']},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('changeset_comment_delete',
                 '/{repo_name}/changeset/comment/{comment_id}/delete',
                 controller='changeset', action='delete_comment',
                 conditions={'function': check_repo, 'method': ['DELETE']},
                 requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('changeset_info', '/changeset_info/{repo_name}/{revision}',
                 controller='changeset', action='changeset_info',
                 requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('compare_home',
                 '/{repo_name}/compare',
                 controller='compare', action='index',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('compare_url',
                 '/{repo_name}/compare/{source_ref_type}@{source_ref:.*?}...{target_ref_type}@{target_ref:.*?}',
                 controller='compare', action='compare',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
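
    # Illustrative example (editorial comment, assumed values): with the
    # pattern above, a URL such as
    # ``/myrepo/compare/branch@default...branch@stable`` dispatches to
    # controller='compare', action='compare' with source_ref_type='branch',
    # source_ref='default', target_ref_type='branch', target_ref='stable';
    # the ref types are constrained by URL_NAME_REQUIREMENTS.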

    rmap.connect('pullrequest_home',
                 '/{repo_name}/pull-request/new', controller='pullrequests',
                 action='index', conditions={'function': check_repo,
                                             'method': ['GET']},
                 requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('pullrequest',
                 '/{repo_name}/pull-request/new', controller='pullrequests',
                 action='create', conditions={'function': check_repo,
                                              'method': ['POST']},
                 requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('pullrequest_repo_refs',
                 '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}',
                 controller='pullrequests',
                 action='get_repo_refs',
                 conditions={'function': check_repo, 'method': ['GET']},
                 requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('pullrequest_repo_destinations',
                 '/{repo_name}/pull-request/repo-destinations',
                 controller='pullrequests',
                 action='get_repo_destinations',
                 conditions={'function': check_repo, 'method': ['GET']},
                 requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('pullrequest_show',
                 '/{repo_name}/pull-request/{pull_request_id}',
                 controller='pullrequests',
                 action='show', conditions={'function': check_repo,
                                            'method': ['GET']},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('pullrequest_update',
                 '/{repo_name}/pull-request/{pull_request_id}',
                 controller='pullrequests',
                 action='update', conditions={'function': check_repo,
                                              'method': ['PUT']},
                 requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('pullrequest_merge',
                 '/{repo_name}/pull-request/{pull_request_id}',
                 controller='pullrequests',
                 action='merge', conditions={'function': check_repo,
                                             'method': ['POST']},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('pullrequest_delete',
                 '/{repo_name}/pull-request/{pull_request_id}',
                 controller='pullrequests',
                 action='delete', conditions={'function': check_repo,
                                              'method': ['DELETE']},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('pullrequest_show_all',
                 '/{repo_name}/pull-request',
                 controller='pullrequests',
                 action='show_all', conditions={'function': check_repo,
                                                'method': ['GET']},
                 requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('pullrequest_comment',
                 '/{repo_name}/pull-request-comment/{pull_request_id}',
                 controller='pullrequests',
                 action='comment', conditions={'function': check_repo,
                                               'method': ['POST']},
                 requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('pullrequest_comment_delete',
                 '/{repo_name}/pull-request-comment/{comment_id}/delete',
                 controller='pullrequests', action='delete_comment',
                 conditions={'function': check_repo, 'method': ['DELETE']},
                 requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('summary_home_explicit', '/{repo_name}/summary',
                 controller='summary', conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('branches_home', '/{repo_name}/branches',
                 controller='branches', conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('tags_home', '/{repo_name}/tags',
                 controller='tags', conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('bookmarks_home', '/{repo_name}/bookmarks',
                 controller='bookmarks', conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('changelog_home', '/{repo_name}/changelog', jsroute=True,
                 controller='changelog', conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('changelog_summary_home', '/{repo_name}/changelog_summary',
                 controller='changelog', action='changelog_summary',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('changelog_file_home',
                 '/{repo_name}/changelog/{revision}/{f_path}',
                 controller='changelog', f_path=None,
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('changelog_details', '/{repo_name}/changelog_details/{cs}',
                 controller='changelog', action='changelog_details',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('files_home', '/{repo_name}/files/{revision}/{f_path}',
                 controller='files', revision='tip', f_path='',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
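
    # Illustrative example (editorial comment, assumed values): because
    # ``f_path`` matches ``.*`` (see URL_NAME_REQUIREMENTS), a URL like
    # ``/myrepo/files/tip/docs/readme.rst`` resolves here with
    # revision='tip' and f_path='docs/readme.rst'.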

    rmap.connect('files_home_simple_catchrev',
                 '/{repo_name}/files/{revision}',
                 controller='files', revision='tip', f_path='',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('files_home_simple_catchall',
                 '/{repo_name}/files',
                 controller='files', revision='tip', f_path='',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('files_history_home',
                 '/{repo_name}/history/{revision}/{f_path}',
                 controller='files', action='history', revision='tip', f_path='',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('files_authors_home',
                 '/{repo_name}/authors/{revision}/{f_path}',
                 controller='files', action='authors', revision='tip', f_path='',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('files_diff_home', '/{repo_name}/diff/{f_path}',
                 controller='files', action='diff', f_path='',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('files_diff_2way_home',
                 '/{repo_name}/diff-2way/{f_path}',
                 controller='files', action='diff_2way', f_path='',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('files_rawfile_home',
                 '/{repo_name}/rawfile/{revision}/{f_path}',
                 controller='files', action='rawfile', revision='tip',
                 f_path='', conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('files_raw_home',
                 '/{repo_name}/raw/{revision}/{f_path}',
                 controller='files', action='raw', revision='tip', f_path='',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('files_render_home',
                 '/{repo_name}/render/{revision}/{f_path}',
                 controller='files', action='index', revision='tip', f_path='',
                 rendered=True, conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('files_annotate_home',
                 '/{repo_name}/annotate/{revision}/{f_path}',
                 controller='files', action='index', revision='tip',
                 f_path='', annotate=True, conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('files_edit',
                 '/{repo_name}/edit/{revision}/{f_path}',
                 controller='files', action='edit', revision='tip',
                 f_path='',
                 conditions={'function': check_repo, 'method': ['POST']},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('files_edit_home',
                 '/{repo_name}/edit/{revision}/{f_path}',
                 controller='files', action='edit_home', revision='tip',
                 f_path='', conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('files_add',
                 '/{repo_name}/add/{revision}/{f_path}',
                 controller='files', action='add', revision='tip',
                 f_path='',
                 conditions={'function': check_repo, 'method': ['POST']},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('files_add_home',
                 '/{repo_name}/add/{revision}/{f_path}',
                 controller='files', action='add_home', revision='tip',
                 f_path='', conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('files_delete',
                 '/{repo_name}/delete/{revision}/{f_path}',
                 controller='files', action='delete', revision='tip',
                 f_path='',
                 conditions={'function': check_repo, 'method': ['POST']},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('files_delete_home',
                 '/{repo_name}/delete/{revision}/{f_path}',
                 controller='files', action='delete_home', revision='tip',
                 f_path='', conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('files_archive_home', '/{repo_name}/archive/{fname}',
                 controller='files', action='archivefile',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('files_nodelist_home',
                 '/{repo_name}/nodelist/{revision}/{f_path}',
                 controller='files', action='nodelist',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
1106 | rmap.connect('files_metadata_list', | |

1107 |              '/{repo_name}/metadata_list/{revision}/{f_path}', | |

1108 |              controller='files', action='metadata_list', | |

1106 | rmap.connect('files_nodetree_full', | |
|
1107 | '/{repo_name}/nodetree_full/{commit_id}/{f_path}', | |
|
1108 | controller='files', action='nodetree_full', | |
|
1109 | 1109 | conditions={'function': check_repo}, |
|
1110 | 1110 | requirements=URL_NAME_REQUIREMENTS, jsroute=True) |
|
1111 | 1111 | |
|
1112 | 1112 | rmap.connect('repo_fork_create_home', '/{repo_name}/fork', |
|
1113 | 1113 | controller='forks', action='fork_create', |
|
1114 | 1114 | conditions={'function': check_repo, 'method': ['POST']}, |
|
1115 | 1115 | requirements=URL_NAME_REQUIREMENTS) |
|
1116 | 1116 | |
|
1117 | 1117 | rmap.connect('repo_fork_home', '/{repo_name}/fork', |
|
1118 | 1118 | controller='forks', action='fork', |
|
1119 | 1119 | conditions={'function': check_repo}, |
|
1120 | 1120 | requirements=URL_NAME_REQUIREMENTS) |
|
1121 | 1121 | |
|
1122 | 1122 | rmap.connect('repo_forks_home', '/{repo_name}/forks', |
|
1123 | 1123 | controller='forks', action='forks', |
|
1124 | 1124 | conditions={'function': check_repo}, |
|
1125 | 1125 | requirements=URL_NAME_REQUIREMENTS) |
|
1126 | 1126 | |
|
1127 | 1127 | rmap.connect('repo_followers_home', '/{repo_name}/followers', |
|
1128 | 1128 | controller='followers', action='followers', |
|
1129 | 1129 | conditions={'function': check_repo}, |
|
1130 | 1130 | requirements=URL_NAME_REQUIREMENTS) |
|
1131 | 1131 | |
|
1132 | 1132 | # must be here for proper group/repo catching pattern |
|
1133 | 1133 | _connect_with_slash( |
|
1134 | 1134 | rmap, 'repo_group_home', '/{group_name}', |
|
1135 | 1135 | controller='home', action='index_repo_group', |
|
1136 | 1136 | conditions={'function': check_group}, |
|
1137 | 1137 | requirements=URL_NAME_REQUIREMENTS) |
|
1138 | 1138 | |
|
1139 | 1139 | # catch all, at the end |
|
1140 | 1140 | _connect_with_slash( |
|
1141 | 1141 | rmap, 'summary_home', '/{repo_name}', jsroute=True, |
|
1142 | 1142 | controller='summary', action='index', |
|
1143 | 1143 | conditions={'function': check_repo}, |
|
1144 | 1144 | requirements=URL_NAME_REQUIREMENTS) |
|
1145 | 1145 | |
|
1146 | 1146 | return rmap |
|
1147 | 1147 | |
|
1148 | 1148 | |
|
1149 | 1149 | def _connect_with_slash(mapper, name, path, *args, **kwargs): |
|
1150 | 1150 | """ |
|
1151 | 1151 | Connect a route with an optional trailing slash in `path`. |
|
1152 | 1152 | """ |
|
1153 | 1153 | mapper.connect(name + '_slash', path + '/', *args, **kwargs) |
|
1154 | 1154 | mapper.connect(name, path, *args, **kwargs) |
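
A quick illustration of the trailing-slash helper above: Routes treats `/myrepo` and `/myrepo/` as distinct patterns, so `_connect_with_slash` registers a `<name>_slash` twin alongside each route. A minimal standalone sketch (assuming only the `routes` package; `summary_home` and `/myrepo` below are illustrative):

```python
# Sketch of the trailing-slash helper: register both the plain path
# and a '/'-suffixed twin so either form of the URL matches.
from routes import Mapper

def connect_with_slash(mapper, name, path, *args, **kwargs):
    mapper.connect(name + '_slash', path + '/', *args, **kwargs)
    mapper.connect(name, path, *args, **kwargs)

rmap = Mapper()
connect_with_slash(rmap, 'summary_home', '/{repo_name}',
                   controller='summary', action='index')

print(rmap.match('/myrepo'))   # matched by 'summary_home'
print(rmap.match('/myrepo/'))  # matched by the 'summary_home_slash' twin
```

Registering the slash variant under its own name also keeps URL generation unambiguous: generating by the name `summary_home` always produces the canonical, slash-free form.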
@@ -1,1114 +1,1114 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2016 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | Files controller for RhodeCode Enterprise |
|
23 | 23 | """ |
|
24 | 24 | |
|
25 | 25 | import itertools |
|
26 | 26 | import logging |
|
27 | 27 | import os |
|
28 | 28 | import shutil |
|
29 | 29 | import tempfile |
|
30 | 30 | |
|
31 | 31 | from pylons import request, response, tmpl_context as c, url |
|
32 | 32 | from pylons.i18n.translation import _ |
|
33 | 33 | from pylons.controllers.util import redirect |
|
34 | 34 | from webob.exc import HTTPNotFound, HTTPBadRequest |
|
35 | 35 | |
|
36 | 36 | from rhodecode.controllers.utils import parse_path_ref |
|
37 | 37 | from rhodecode.lib import diffs, helpers as h, caches |
|
38 | 38 | from rhodecode.lib.compat import OrderedDict |
|
39 | 39 | from rhodecode.lib.utils import jsonify, action_logger |
|
40 | 40 | from rhodecode.lib.utils2 import ( |
|
41 | 41 | convert_line_endings, detect_mode, safe_str, str2bool) |
|
42 | 42 | from rhodecode.lib.auth import ( |
|
43 | 43 | LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired, XHRRequired) |
|
44 | 44 | from rhodecode.lib.base import BaseRepoController, render |
|
45 | 45 | from rhodecode.lib.vcs import path as vcspath |
|
46 | 46 | from rhodecode.lib.vcs.backends.base import EmptyCommit |
|
47 | 47 | from rhodecode.lib.vcs.conf import settings |
|
48 | 48 | from rhodecode.lib.vcs.exceptions import ( |
|
49 | 49 | RepositoryError, CommitDoesNotExistError, EmptyRepositoryError, |
|
50 | 50 | ImproperArchiveTypeError, VCSError, NodeAlreadyExistsError, |
|
51 | 51 | NodeDoesNotExistError, CommitError, NodeError) |
|
52 | 52 | from rhodecode.lib.vcs.nodes import FileNode |
|
53 | 53 | |
|
54 | 54 | from rhodecode.model.repo import RepoModel |
|
55 | 55 | from rhodecode.model.scm import ScmModel |
|
56 | 56 | from rhodecode.model.db import Repository |
|
57 | 57 | |
|
58 | 58 | from rhodecode.controllers.changeset import ( |
|
59 | 59 | _ignorews_url, _context_url, get_line_ctx, get_ignore_ws) |
|
60 | 60 | from rhodecode.lib.exceptions import NonRelativePathError |
|
61 | 61 | |
|
62 | 62 | log = logging.getLogger(__name__) |
|
63 | 63 | |
|
64 | 64 | |
|
65 | 65 | class FilesController(BaseRepoController): |
|
66 | 66 | |
|
67 | 67 | def __before__(self): |
|
68 | 68 | super(FilesController, self).__before__() |
|
69 | 69 | c.cut_off_limit = self.cut_off_limit_file |
|
70 | 70 | |
|
71 | 71 | def _get_default_encoding(self): |
|
72 | 72 | enc_list = getattr(c, 'default_encodings', []) |
|
73 | 73 | return enc_list[0] if enc_list else 'UTF-8' |
|
74 | 74 | |
|
75 | 75 | def __get_commit_or_redirect(self, commit_id, repo_name, |
|
76 | 76 | redirect_after=True): |
|
77 | 77 | """ |
|
78 | 78 | This is a safe way to get commit. If an error occurs it redirects to |
|
79 | 79 | tip with proper message |
|
80 | 80 | |
|
81 | 81 | :param commit_id: id of commit to fetch |
|
82 | 82 | :param repo_name: repo name to redirect after |
|
83 | 83 | :param redirect_after: toggle redirection |
|
84 | 84 | """ |
|
85 | 85 | try: |
|
86 | 86 | return c.rhodecode_repo.get_commit(commit_id) |
|
87 | 87 | except EmptyRepositoryError: |
|
88 | 88 | if not redirect_after: |
|
89 | 89 | return None |
|
90 | 90 | url_ = url('files_add_home', |
|
91 | 91 | repo_name=c.repo_name, |
|
92 | 92 | revision=0, f_path='', anchor='edit') |
|
93 | 93 | if h.HasRepoPermissionAny( |
|
94 | 94 | 'repository.write', 'repository.admin')(c.repo_name): |
|
95 | 95 | add_new = h.link_to( |
|
96 | 96 | _('Click here to add a new file.'), |
|
97 | 97 | url_, class_="alert-link") |
|
98 | 98 | else: |
|
99 | 99 | add_new = "" |
|
100 | 100 | h.flash(h.literal( |
|
101 | 101 | _('There are no files yet. %s') % add_new), category='warning') |
|
102 | 102 | redirect(h.url('summary_home', repo_name=repo_name)) |
|
103 | 103 | except (CommitDoesNotExistError, LookupError): |
|
104 | 104 | msg = _('No such commit exists for this repository') |
|
105 | 105 | h.flash(msg, category='error') |
|
106 | 106 | raise HTTPNotFound() |
|
107 | 107 | except RepositoryError as e: |
|
108 | 108 | h.flash(safe_str(e), category='error') |
|
109 | 109 | raise HTTPNotFound() |
|
110 | 110 | |
|
111 | 111 | def __get_filenode_or_redirect(self, repo_name, commit, path): |
|
112 | 112 | """ |
|
113 | 113 | Returns file_node, if error occurs or given path is directory, |
|
114 | 114 | it'll redirect to top level path |
|
115 | 115 | |
|
116 | 116 | :param repo_name: repo_name |
|
117 | 117 | :param commit: given commit |
|
118 | 118 | :param path: path to lookup |
|
119 | 119 | """ |
|
120 | 120 | try: |
|
121 | 121 | file_node = commit.get_node(path) |
|
122 | 122 | if file_node.is_dir(): |
|
123 | 123 | raise RepositoryError('The given path is a directory') |
|
124 | 124 | except CommitDoesNotExistError: |
|
125 | 125 | msg = _('No such commit exists for this repository') |
|
126 | 126 | log.exception(msg) |
|
127 | 127 | h.flash(msg, category='error') |
|
128 | 128 | raise HTTPNotFound() |
|
129 | 129 | except RepositoryError as e: |
|
130 | 130 | h.flash(safe_str(e), category='error') |
|
131 | 131 | raise HTTPNotFound() |
|
132 | 132 | |
|
133 | 133 | return file_node |
|
134 | 134 | |
|
135 | 135 | def __get_tree_cache_manager(self, repo_name, namespace_type): |
|
136 | 136 | _namespace = caches.get_repo_namespace_key(namespace_type, repo_name) |
|
137 | 137 | return caches.get_cache_manager('repo_cache_long', _namespace) |
|
138 | 138 | |
|
139 | def _get_tree_at_commit(self, repo_name, commit_id, f_path): | |

139 | def _get_tree_at_commit(self, repo_name, commit_id, f_path, | |
|
140 | full_load=False, force=False): | |
|
140 | 141 | def _cached_tree(): |
|
141 | 142 | log.debug('Generating cached file tree for %s, %s, %s', |
|
142 | 143 | repo_name, commit_id, f_path) |
|
144 | c.full_load = full_load | |
|
143 | 145 | return render('files/files_browser_tree.html') |
|
144 | 146 | |
|
145 | 147 | cache_manager = self.__get_tree_cache_manager( |
|
146 | 148 | repo_name, caches.FILE_TREE) |
|
147 | 149 | |
|
148 | 150 | cache_key = caches.compute_key_from_params( |
|
149 | 151 | repo_name, commit_id, f_path) |
|
150 | 152 | |
|
153 | if force: | |
|
154 | # we want to force recompute of caches | |
|
155 | cache_manager.remove_value(cache_key) | |
|
156 | ||
|
151 | 157 | return cache_manager.get(cache_key, createfunc=_cached_tree) |
|
152 | 158 | |
|
153 | 159 | def _get_nodelist_at_commit(self, repo_name, commit_id, f_path): |
|
154 | 160 | def _cached_nodes(): |
|
155 | 161 | log.debug('Generating cached nodelist for %s, %s, %s', |
|
156 | 162 | repo_name, commit_id, f_path) |
|
157 | 163 | _d, _f = ScmModel().get_nodes( |
|
158 | 164 | repo_name, commit_id, f_path, flat=False) |
|
159 | 165 | return _d + _f |
|
160 | 166 | |
|
161 | 167 | cache_manager = self.__get_tree_cache_manager( |
|
162 | 168 | repo_name, caches.FILE_SEARCH_TREE_META) |
|
163 | 169 | |
|
164 | 170 | cache_key = caches.compute_key_from_params( |
|
165 | 171 | repo_name, commit_id, f_path) |
|
166 | 172 | return cache_manager.get(cache_key, createfunc=_cached_nodes) |
|
167 | 173 | |
|
168 | def _get_metadata_at_commit(self, repo_name, commit, dir_node): | |
|
169 | def _cached_metadata(): | |
|
170 | log.debug('Generating cached metadata for %s, %s, %s', | |
|
171 | repo_name, commit.raw_id, safe_str(dir_node.path)) | |
|
172 | ||
|
173 | data = ScmModel().get_dirnode_metadata(commit, dir_node) | |
|
174 | return data | |
|
175 | ||
|
176 | cache_manager = self.__get_tree_cache_manager( | |
|
177 | repo_name, caches.FILE_TREE_META) | |
|
178 | ||
|
179 | cache_key = caches.compute_key_from_params( | |
|
180 | repo_name, commit.raw_id, safe_str(dir_node.path)) | |
|
181 | ||
|
182 | return cache_manager.get(cache_key, createfunc=_cached_metadata) | |
|
183 | ||
|
184 | 174 | @LoginRequired() |
|
185 | 175 | @HasRepoPermissionAnyDecorator( |
|
186 | 176 | 'repository.read', 'repository.write', 'repository.admin') |
|
187 | 177 | def index( |
|
188 | 178 | self, repo_name, revision, f_path, annotate=False, rendered=False): |
|
189 | 179 | commit_id = revision |
|
190 | 180 | |
|
191 | 181 | # redirect to given commit_id from form if given |
|
192 | 182 | get_commit_id = request.GET.get('at_rev', None) |
|
193 | 183 | if get_commit_id: |
|
194 | 184 | self.__get_commit_or_redirect(get_commit_id, repo_name) |
|
195 | 185 | |
|
196 | 186 | c.commit = self.__get_commit_or_redirect(commit_id, repo_name) |
|
197 | 187 | c.branch = request.GET.get('branch', None) |
|
198 | 188 | c.f_path = f_path |
|
199 | 189 | c.annotate = annotate |
|
200 | 190 | # default is false, but .rst/.md files later are autorendered, we can |
|
201 | 191 | # overwrite autorendering by setting this GET flag |
|
202 | 192 | c.renderer = rendered or not request.GET.get('no-render', False) |
|
203 | 193 | |
|
204 | 194 | # prev link |
|
205 | 195 | try: |
|
206 | 196 | prev_commit = c.commit.prev(c.branch) |
|
207 | 197 | c.prev_commit = prev_commit |
|
208 | 198 | c.url_prev = url('files_home', repo_name=c.repo_name, |
|
209 | 199 | revision=prev_commit.raw_id, f_path=f_path) |
|
210 | 200 | if c.branch: |
|
211 | 201 | c.url_prev += '?branch=%s' % c.branch |
|
212 | 202 | except (CommitDoesNotExistError, VCSError): |
|
213 | 203 | c.url_prev = '#' |
|
214 | 204 | c.prev_commit = EmptyCommit() |
|
215 | 205 | |
|
216 | 206 | # next link |
|
217 | 207 | try: |
|
218 | 208 | next_commit = c.commit.next(c.branch) |
|
219 | 209 | c.next_commit = next_commit |
|
220 | 210 | c.url_next = url('files_home', repo_name=c.repo_name, |
|
221 | 211 | revision=next_commit.raw_id, f_path=f_path) |
|
222 | 212 | if c.branch: |
|
223 | 213 | c.url_next += '?branch=%s' % c.branch |
|
224 | 214 | except (CommitDoesNotExistError, VCSError): |
|
225 | 215 | c.url_next = '#' |
|
226 | 216 | c.next_commit = EmptyCommit() |
|
227 | 217 | |
|
228 | 218 | # files or dirs |
|
229 | 219 | try: |
|
230 | 220 | c.file = c.commit.get_node(f_path) |
|
231 | 221 | c.file_author = True |
|
232 | 222 | c.file_tree = '' |
|
233 | 223 | if c.file.is_file(): |
|
234 | 224 | c.renderer = ( |
|
235 | 225 | c.renderer and h.renderer_from_filename(c.file.path)) |
|
236 | 226 | c.file_last_commit = c.file.last_commit |
|
237 | 227 | |
|
238 | 228 | c.on_branch_head = self._is_valid_head( |
|
239 | 229 | commit_id, c.rhodecode_repo) |
|
240 | 230 | c.branch_or_raw_id = c.commit.branch or c.commit.raw_id |
|
241 | 231 | |
|
242 | 232 | author = c.file_last_commit.author |
|
243 | 233 | c.authors = [(h.email(author), |
|
244 | 234 | h.person(author, 'username_or_name_or_email'))] |
|
245 | 235 | else: |
|
246 | 236 | c.authors = [] |
|
247 | 237 | c.file_tree = self._get_tree_at_commit( |
|
248 | 238 | repo_name, c.commit.raw_id, f_path) |
|
239 | ||
|
249 | 240 | except RepositoryError as e: |
|
250 | 241 | h.flash(safe_str(e), category='error') |
|
251 | 242 | raise HTTPNotFound() |
|
252 | 243 | |
|
253 | 244 | if request.environ.get('HTTP_X_PJAX'): |
|
254 | 245 | return render('files/files_pjax.html') |
|
255 | 246 | |
|
256 | 247 | return render('files/files.html') |
|
257 | 248 | |
|
258 | 249 | @LoginRequired() |
|
259 | 250 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
260 | 251 | 'repository.admin') |
|
261 | 252 | @jsonify |
|
262 | 253 | def history(self, repo_name, revision, f_path): |
|
263 | 254 | commit = self.__get_commit_or_redirect(revision, repo_name) |
|
264 | 255 | f_path = f_path |
|
265 | 256 | _file = commit.get_node(f_path) |
|
266 | 257 | if _file.is_file(): |
|
267 | 258 | file_history, _hist = self._get_node_history(commit, f_path) |
|
268 | 259 | |
|
269 | 260 | res = [] |
|
270 | 261 | for obj in file_history: |
|
271 | 262 | res.append({ |
|
272 | 263 | 'text': obj[1], |
|
273 | 264 | 'children': [{'id': o[0], 'text': o[1]} for o in obj[0]] |
|
274 | 265 | }) |
|
275 | 266 | |
|
276 | 267 | data = { |
|
277 | 268 | 'more': False, |
|
278 | 269 | 'results': res |
|
279 | 270 | } |
|
280 | 271 | return data |
|
281 | 272 | |
|
282 | 273 | @LoginRequired() |
|
283 | 274 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
284 | 275 | 'repository.admin') |
|
285 | 276 | def authors(self, repo_name, revision, f_path): |
|
286 | 277 | commit = self.__get_commit_or_redirect(revision, repo_name) |
|
287 | 278 | file_node = commit.get_node(f_path) |
|
288 | 279 | if file_node.is_file(): |
|
289 | 280 | c.file_last_commit = file_node.last_commit |
|
290 | 281 | if request.GET.get('annotate') == '1': |
|
291 | 282 | # use _hist from annotation if annotation mode is on |
|
292 | 283 | commit_ids = set(x[1] for x in file_node.annotate) |
|
293 | 284 | _hist = ( |
|
294 | 285 | c.rhodecode_repo.get_commit(commit_id) |
|
295 | 286 | for commit_id in commit_ids) |
|
296 | 287 | else: |
|
297 | 288 | _f_history, _hist = self._get_node_history(commit, f_path) |
|
298 | 289 | c.file_author = False |
|
299 | 290 | c.authors = [] |
|
300 | 291 | for author in set(commit.author for commit in _hist): |
|
301 | 292 | c.authors.append(( |
|
302 | 293 | h.email(author), |
|
303 | 294 | h.person(author, 'username_or_name_or_email'))) |
|
304 | 295 | return render('files/file_authors_box.html') |
|
305 | 296 | |
|
306 | 297 | @LoginRequired() |
|
307 | 298 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
308 | 299 | 'repository.admin') |
|
309 | 300 | def rawfile(self, repo_name, revision, f_path): |
|
310 | 301 | """ |
|
311 | 302 | Action for download as raw |
|
312 | 303 | """ |
|
313 | 304 | commit = self.__get_commit_or_redirect(revision, repo_name) |
|
314 | 305 | file_node = self.__get_filenode_or_redirect(repo_name, commit, f_path) |
|
315 | 306 | |
|
316 | 307 | response.content_disposition = 'attachment; filename=%s' % \ |
|
317 | 308 | safe_str(f_path.split(Repository.NAME_SEP)[-1]) |
|
318 | 309 | |
|
319 | 310 | response.content_type = file_node.mimetype |
|
320 | 311 | charset = self._get_default_encoding() |
|
321 | 312 | if charset: |
|
322 | 313 | response.charset = charset |
|
323 | 314 | |
|
324 | 315 | return file_node.content |
|
325 | 316 | |
|
326 | 317 | @LoginRequired() |
|
327 | 318 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
328 | 319 | 'repository.admin') |
|
329 | 320 | def raw(self, repo_name, revision, f_path): |
|
330 | 321 | """ |
|
331 | 322 | Action for show as raw, some mimetypes are "rendered", |
|
332 | 323 | those include images, icons. |
|
333 | 324 | """ |
|
334 | 325 | commit = self.__get_commit_or_redirect(revision, repo_name) |
|
335 | 326 | file_node = self.__get_filenode_or_redirect(repo_name, commit, f_path) |
|
336 | 327 | |
|
337 | 328 | raw_mimetype_mapping = { |
|
338 | 329 | # map original mimetype to a mimetype used for "show as raw" |
|
339 | 330 | # you can also provide a content-disposition to override the |
|
340 | 331 | # default "attachment" disposition. |
|
341 | 332 | # orig_type: (new_type, new_dispo) |
|
342 | 333 | |
|
343 | 334 | # show images inline: |
|
344 | 335 | # Do not re-add SVG: it is unsafe and permits XSS attacks. One can |
|
345 | 336 | # for example render an SVG with javascript inside or even render |
|
346 | 337 | # HTML. |
|
347 | 338 | 'image/x-icon': ('image/x-icon', 'inline'), |
|
348 | 339 | 'image/png': ('image/png', 'inline'), |
|
349 | 340 | 'image/gif': ('image/gif', 'inline'), |
|
350 | 341 | 'image/jpeg': ('image/jpeg', 'inline'), |
|
351 | 342 | } |
|
352 | 343 | |
|
353 | 344 | mimetype = file_node.mimetype |
|
354 | 345 | try: |
|
355 | 346 | mimetype, dispo = raw_mimetype_mapping[mimetype] |
|
356 | 347 | except KeyError: |
|
357 | 348 | # we don't know anything special about this, handle it safely |
|
358 | 349 | if file_node.is_binary: |
|
359 | 350 | # do same as download raw for binary files |
|
360 | 351 | mimetype, dispo = 'application/octet-stream', 'attachment' |
|
361 | 352 | else: |
|
362 | 353 | # do not just use the original mimetype, but force text/plain, |
|
363 | 354 | # otherwise it would serve text/html and that might be unsafe. |
|
364 | 355 | # Note: underlying vcs library fakes text/plain mimetype if the |
|
365 | 356 | # mimetype can not be determined and it thinks it is not |
|
366 | 357 | # binary. This might lead to erroneous text display in some 
|
367 | 358 | # cases, but helps in other cases, like with text files |
|
368 | 359 | # without extension. |
|
369 | 360 | mimetype, dispo = 'text/plain', 'inline' |
|
370 | 361 | |
|
371 | 362 | if dispo == 'attachment': |
|
372 | 363 | dispo = 'attachment; filename=%s' % safe_str( |
|
373 | 364 | f_path.split(os.sep)[-1]) |
|
374 | 365 | |
|
375 | 366 | response.content_disposition = dispo |
|
376 | 367 | response.content_type = mimetype |
|
377 | 368 | charset = self._get_default_encoding() |
|
378 | 369 | if charset: |
|
379 | 370 | response.charset = charset |
|
380 | 371 | return file_node.content |
|
381 | 372 | |
|
382 | 373 | @CSRFRequired() |
|
383 | 374 | @LoginRequired() |
|
384 | 375 | @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin') |
|
385 | 376 | def delete(self, repo_name, revision, f_path): |
|
386 | 377 | commit_id = revision |
|
387 | 378 | |
|
388 | 379 | repo = c.rhodecode_db_repo |
|
389 | 380 | if repo.enable_locking and repo.locked[0]: |
|
390 | 381 | h.flash(_('This repository has been locked by %s on %s') |
|
391 | 382 | % (h.person_by_id(repo.locked[0]), |
|
392 | 383 | h.format_date(h.time_to_datetime(repo.locked[1]))), |
|
393 | 384 | 'warning') |
|
394 | 385 | return redirect(h.url('files_home', |
|
395 | 386 | repo_name=repo_name, revision='tip')) |
|
396 | 387 | |
|
397 | 388 | if not self._is_valid_head(commit_id, repo.scm_instance()): |
|
398 | 389 | h.flash(_('You can only delete files with revision ' |
|
399 | 390 | 'being a valid branch '), category='warning') |
|
400 | 391 | return redirect(h.url('files_home', |
|
401 | 392 | repo_name=repo_name, revision='tip', |
|
402 | 393 | f_path=f_path)) |
|
403 | 394 | |
|
404 | 395 | c.commit = self.__get_commit_or_redirect(commit_id, repo_name) |
|
405 | 396 | c.file = self.__get_filenode_or_redirect(repo_name, c.commit, f_path) |
|
406 | 397 | |
|
407 | 398 | c.default_message = _( |
|
408 | 399 | 'Deleted file %s via RhodeCode Enterprise') % (f_path) |
|
409 | 400 | c.f_path = f_path |
|
410 | 401 | node_path = f_path |
|
411 | 402 | author = c.rhodecode_user.full_contact |
|
412 | 403 | message = request.POST.get('message') or c.default_message |
|
413 | 404 | try: |
|
414 | 405 | nodes = { |
|
415 | 406 | node_path: { |
|
416 | 407 | 'content': '' |
|
417 | 408 | } |
|
418 | 409 | } |
|
419 | 410 | self.scm_model.delete_nodes( |
|
420 | 411 | user=c.rhodecode_user.user_id, repo=c.rhodecode_db_repo, |
|
421 | 412 | message=message, |
|
422 | 413 | nodes=nodes, |
|
423 | 414 | parent_commit=c.commit, |
|
424 | 415 | author=author, |
|
425 | 416 | ) |
|
426 | 417 | |
|
427 | 418 | h.flash(_('Successfully deleted file %s') % f_path, |
|
428 | 419 | category='success') |
|
429 | 420 | except Exception: |
|
430 | 421 | msg = _('Error occurred during commit') |
|
431 | 422 | log.exception(msg) |
|
432 | 423 | h.flash(msg, category='error') |
|
433 | 424 | return redirect(url('changeset_home', |
|
434 | 425 | repo_name=c.repo_name, revision='tip')) |
|
435 | 426 | |
|
436 | 427 | @LoginRequired() |
|
437 | 428 | @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin') |
|
438 | 429 | def delete_home(self, repo_name, revision, f_path): |
|
439 | 430 | commit_id = revision |
|
440 | 431 | |
|
441 | 432 | repo = c.rhodecode_db_repo |
|
442 | 433 | if repo.enable_locking and repo.locked[0]: |
|
443 | 434 | h.flash(_('This repository has been locked by %s on %s') |
|
444 | 435 | % (h.person_by_id(repo.locked[0]), |
|
445 | 436 | h.format_date(h.time_to_datetime(repo.locked[1]))), |
|
446 | 437 | 'warning') |
|
447 | 438 | return redirect(h.url('files_home', |
|
448 | 439 | repo_name=repo_name, revision='tip')) |
|
449 | 440 | |
|
450 | 441 | if not self._is_valid_head(commit_id, repo.scm_instance()): |
|
451 | 442 | h.flash(_('You can only delete files with revision ' |
|
452 | 443 | 'being a valid branch '), category='warning') |
|
453 | 444 | return redirect(h.url('files_home', |
|
454 | 445 | repo_name=repo_name, revision='tip', |
|
455 | 446 | f_path=f_path)) |
|
456 | 447 | |
|
457 | 448 | c.commit = self.__get_commit_or_redirect(commit_id, repo_name) |
|
458 | 449 | c.file = self.__get_filenode_or_redirect(repo_name, c.commit, f_path) |
|
459 | 450 | |
|
460 | 451 | c.default_message = _( |
|
461 | 452 | 'Deleted file %s via RhodeCode Enterprise') % (f_path) |
|
462 | 453 | c.f_path = f_path |
|
463 | 454 | |
|
464 | 455 | return render('files/files_delete.html') |
|
465 | 456 | |
|
466 | 457 | @CSRFRequired() |
|
467 | 458 | @LoginRequired() |
|
468 | 459 | @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin') |
|
469 | 460 | def edit(self, repo_name, revision, f_path): |
|
470 | 461 | commit_id = revision |
|
471 | 462 | |
|
472 | 463 | repo = c.rhodecode_db_repo |
|
473 | 464 | if repo.enable_locking and repo.locked[0]: |
|
474 | 465 | h.flash(_('This repository has been locked by %s on %s') |
|
475 | 466 | % (h.person_by_id(repo.locked[0]), |
|
476 | 467 | h.format_date(h.time_to_datetime(repo.locked[1]))), |
|
477 | 468 | 'warning') |
|
478 | 469 | return redirect(h.url('files_home', |
|
479 | 470 | repo_name=repo_name, revision='tip')) |
|
480 | 471 | |
|
481 | 472 | if not self._is_valid_head(commit_id, repo.scm_instance()): |
|
482 | 473 | h.flash(_('You can only edit files with revision ' |
|
483 | 474 | 'being a valid branch '), category='warning') |
|
484 | 475 | return redirect(h.url('files_home', |
|
485 | 476 | repo_name=repo_name, revision='tip', |
|
486 | 477 | f_path=f_path)) |
|
487 | 478 | |
|
488 | 479 | c.commit = self.__get_commit_or_redirect(commit_id, repo_name) |
|
489 | 480 | c.file = self.__get_filenode_or_redirect(repo_name, c.commit, f_path) |
|
490 | 481 | |
|
491 | 482 | if c.file.is_binary: |
|
492 | 483 | return redirect(url('files_home', repo_name=c.repo_name, |
|
493 | 484 | revision=c.commit.raw_id, f_path=f_path)) |
|
494 | 485 | c.default_message = _( |
|
495 | 486 | 'Edited file %s via RhodeCode Enterprise') % (f_path) |
|
496 | 487 | c.f_path = f_path |
|
497 | 488 | old_content = c.file.content |
|
498 | 489 | sl = old_content.splitlines(1) |
|
499 | 490 | first_line = sl[0] if sl else '' |
|
500 | 491 | |
|
501 | 492 | # modes: 0 - Unix, 1 - Mac, 2 - DOS |
|
502 | 493 | mode = detect_mode(first_line, 0) |
|
503 | 494 | content = convert_line_endings(request.POST.get('content', ''), mode) |
|
504 | 495 | |
|
505 | 496 | message = request.POST.get('message') or c.default_message |
|
506 | 497 | org_f_path = c.file.unicode_path |
|
507 | 498 | filename = request.POST['filename'] |
|
508 | 499 | org_filename = c.file.name |
|
509 | 500 | |
|
510 | 501 | if content == old_content and filename == org_filename: |
|
511 | 502 | h.flash(_('No changes'), category='warning') |
|
512 | 503 | return redirect(url('changeset_home', repo_name=c.repo_name, |
|
513 | 504 | revision='tip')) |
|
514 | 505 | try: |
|
515 | 506 | mapping = { |
|
516 | 507 | org_f_path: { |
|
517 | 508 | 'org_filename': org_f_path, |
|
518 | 509 | 'filename': os.path.join(c.file.dir_path, filename), |
|
519 | 510 | 'content': content, |
|
520 | 511 | 'lexer': '', |
|
521 | 512 | 'op': 'mod', |
|
522 | 513 | } |
|
523 | 514 | } |
|
524 | 515 | |
|
525 | 516 | ScmModel().update_nodes( |
|
526 | 517 | user=c.rhodecode_user.user_id, |
|
527 | 518 | repo=c.rhodecode_db_repo, |
|
528 | 519 | message=message, |
|
529 | 520 | nodes=mapping, |
|
530 | 521 | parent_commit=c.commit, |
|
531 | 522 | ) |
|
532 | 523 | |
|
533 | 524 | h.flash(_('Successfully committed to %s') % f_path, |
|
534 | 525 | category='success') |
|
535 | 526 | except Exception: |
|
536 | 527 | msg = _('Error occurred during commit') |
|
537 | 528 | log.exception(msg) |
|
538 | 529 | h.flash(msg, category='error') |
|
539 | 530 | return redirect(url('changeset_home', |
|
540 | 531 | repo_name=c.repo_name, revision='tip')) |
|
541 | 532 | |
|
542 | 533 | @LoginRequired() |
|
543 | 534 | @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin') |
|
544 | 535 | def edit_home(self, repo_name, revision, f_path): |
|
545 | 536 | commit_id = revision |
|
546 | 537 | |
|
547 | 538 | repo = c.rhodecode_db_repo |
|
548 | 539 | if repo.enable_locking and repo.locked[0]: |
|
549 | 540 | h.flash(_('This repository has been locked by %s on %s') |
|
550 | 541 | % (h.person_by_id(repo.locked[0]), |
|
551 | 542 | h.format_date(h.time_to_datetime(repo.locked[1]))), |
|
552 | 543 | 'warning') |
|
553 | 544 | return redirect(h.url('files_home', |
|
554 | 545 | repo_name=repo_name, revision='tip')) |
|
555 | 546 | |
|
556 | 547 | if not self._is_valid_head(commit_id, repo.scm_instance()): |
|
557 | 548 | h.flash(_('You can only edit files with revision ' |
|
558 | 549 | 'being a valid branch '), category='warning') |
|
559 | 550 | return redirect(h.url('files_home', |
|
560 | 551 | repo_name=repo_name, revision='tip', |
|
561 | 552 | f_path=f_path)) |
|
562 | 553 | |
|
563 | 554 | c.commit = self.__get_commit_or_redirect(commit_id, repo_name) |
|
564 | 555 | c.file = self.__get_filenode_or_redirect(repo_name, c.commit, f_path) |
|
565 | 556 | |
|
566 | 557 | if c.file.is_binary: |
|
567 | 558 | return redirect(url('files_home', repo_name=c.repo_name, |
|
568 | 559 | revision=c.commit.raw_id, f_path=f_path)) |
|
569 | 560 | c.default_message = _( |
|
570 | 561 | 'Edited file %s via RhodeCode Enterprise') % (f_path) |
|
571 | 562 | c.f_path = f_path |
|
572 | 563 | |
|
573 | 564 | return render('files/files_edit.html') |
|
574 | 565 | |
|
575 | 566 | def _is_valid_head(self, commit_id, repo): |
|
576 | 567 | # check if commit is a branch identifier - basically we cannot 
|
577 | 568 | # create multiple heads via file editing |
|
578 | 569 | valid_heads = repo.branches.keys() + repo.branches.values() |
|
579 | 570 | |
|
580 | 571 | if h.is_svn(repo) and not repo.is_empty(): |
|
581 | 572 | # Note: Subversion only has one head, we add it here in case there |
|
582 | 573 | # is no branch matched. |
|
583 | 574 | valid_heads.append(repo.get_commit(commit_idx=-1).raw_id) |
|
584 | 575 | |
|
585 | 576 | # check if commit is a branch name or branch hash |
|
586 | 577 | return commit_id in valid_heads |
|
587 | 578 | |
|
588 | 579 | @CSRFRequired() |
|
589 | 580 | @LoginRequired() |
|
590 | 581 | @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin') |
|
591 | 582 | def add(self, repo_name, revision, f_path): |
|
592 | 583 | repo = Repository.get_by_repo_name(repo_name) |
|
593 | 584 | if repo.enable_locking and repo.locked[0]: |
|
594 | 585 | h.flash(_('This repository has been locked by %s on %s') |
|
595 | 586 | % (h.person_by_id(repo.locked[0]), |
|
596 | 587 | h.format_date(h.time_to_datetime(repo.locked[1]))), |
|
597 | 588 | 'warning') |
|
598 | 589 | return redirect(h.url('files_home', |
|
599 | 590 | repo_name=repo_name, revision='tip')) |
|
600 | 591 | |
|
601 | 592 | r_post = request.POST |
|
602 | 593 | |
|
603 | 594 | c.commit = self.__get_commit_or_redirect( |
|
604 | 595 | revision, repo_name, redirect_after=False) |
|
605 | 596 | if c.commit is None: |
|
606 | 597 | c.commit = EmptyCommit(alias=c.rhodecode_repo.alias) |
|
607 | 598 | c.default_message = (_('Added file via RhodeCode Enterprise')) |
|
608 | 599 | c.f_path = f_path |
|
609 | 600 | unix_mode = 0 |
|
610 | 601 | content = convert_line_endings(r_post.get('content', ''), unix_mode) |
|
611 | 602 | |
|
612 | 603 | message = r_post.get('message') or c.default_message |
|
613 | 604 | filename = r_post.get('filename') |
|
614 | 605 | location = r_post.get('location', '') # dir location |
|
615 | 606 | file_obj = r_post.get('upload_file', None) |
|
616 | 607 | |
|
617 | 608 | if file_obj is not None and hasattr(file_obj, 'filename'): |
|
618 | 609 | filename = file_obj.filename |
|
619 | 610 | content = file_obj.file |
|
620 | 611 | |
|
621 | 612 | if hasattr(content, 'file'): |
|
622 | 613 | # non posix systems store real file under file attr |
|
623 | 614 | content = content.file |
|
624 | 615 | |
|
625 | 616 | # If there's no commit, redirect to repo summary |
|
626 | 617 | if type(c.commit) is EmptyCommit: |
|
627 | 618 | redirect_url = "summary_home" |
|
628 | 619 | else: |
|
629 | 620 | redirect_url = "changeset_home" |
|
630 | 621 | |
|
631 | 622 | if not filename: |
|
632 | 623 | h.flash(_('No filename'), category='warning') |
|
633 | 624 | return redirect(url(redirect_url, repo_name=c.repo_name, |
|
634 | 625 | revision='tip')) |
|
635 | 626 | |
|
636 | 627 | # extract the location from filename, |
|
637 | 628 | # allows using foo/bar.txt syntax to create subdirectories |
|
638 | 629 | subdir_loc = filename.rsplit('/', 1) |
|
639 | 630 | if len(subdir_loc) == 2: |
|
640 | 631 | location = os.path.join(location, subdir_loc[0]) |
|
641 | 632 | |
|
642 | 633 | # strip all crap out of file, just leave the basename |
|
643 | 634 | filename = os.path.basename(filename) |
|
644 | 635 | node_path = os.path.join(location, filename) |
|
645 | 636 | author = c.rhodecode_user.full_contact |
|
646 | 637 | |
|
647 | 638 | try: |
|
648 | 639 | nodes = { |
|
649 | 640 | node_path: { |
|
650 | 641 | 'content': content |
|
651 | 642 | } |
|
652 | 643 | } |
|
653 | 644 | self.scm_model.create_nodes( |
|
654 | 645 | user=c.rhodecode_user.user_id, |
|
655 | 646 | repo=c.rhodecode_db_repo, |
|
656 | 647 | message=message, |
|
657 | 648 | nodes=nodes, |
|
658 | 649 | parent_commit=c.commit, |
|
659 | 650 | author=author, |
|
660 | 651 | ) |
|
661 | 652 | |
|
662 | 653 | h.flash(_('Successfully committed to %s') % node_path, |
|
663 | 654 | category='success') |
|
664 | 655 | except NonRelativePathError as e: |
|
665 | 656 | h.flash(_( |
|
666 | 657 | 'The location specified must be a relative path and must not ' |
|
667 | 658 | 'contain .. in the path'), category='warning') |
|
668 | 659 | return redirect(url('changeset_home', repo_name=c.repo_name, |
|
669 | 660 | revision='tip')) |
|
670 | 661 | except (NodeError, NodeAlreadyExistsError) as e: |
|
671 | 662 | h.flash(_(e), category='error') |
|
672 | 663 | except Exception: |
|
673 | 664 | msg = _('Error occurred during commit') |
|
674 | 665 | log.exception(msg) |
|
675 | 666 | h.flash(msg, category='error') |
|
676 | 667 | return redirect(url('changeset_home', |
|
677 | 668 | repo_name=c.repo_name, revision='tip')) |
|
678 | 669 | |
|
679 | 670 | @LoginRequired() |
|
680 | 671 | @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin') |
|
681 | 672 | def add_home(self, repo_name, revision, f_path): |
|
682 | 673 | |
|
683 | 674 | repo = Repository.get_by_repo_name(repo_name) |
|
684 | 675 | if repo.enable_locking and repo.locked[0]: |
|
685 | 676 | h.flash(_('This repository has been locked by %s on %s') |
|
686 | 677 | % (h.person_by_id(repo.locked[0]), |
|
687 | 678 | h.format_date(h.time_to_datetime(repo.locked[1]))), |
|
688 | 679 | 'warning') |
|
689 | 680 | return redirect(h.url('files_home', |
|
690 | 681 | repo_name=repo_name, revision='tip')) |
|
691 | 682 | |
|
692 | 683 | c.commit = self.__get_commit_or_redirect( |
|
693 | 684 | revision, repo_name, redirect_after=False) |
|
694 | 685 | if c.commit is None: |
|
695 | 686 | c.commit = EmptyCommit(alias=c.rhodecode_repo.alias) |
|
696 | 687 | c.default_message = (_('Added file via RhodeCode Enterprise')) |
|
697 | 688 | c.f_path = f_path |
|
698 | 689 | |
|
699 | 690 | return render('files/files_add.html') |
|
700 | 691 | |
|
701 | 692 | @LoginRequired() |
|
702 | 693 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
703 | 694 | 'repository.admin') |
|
704 | 695 | def archivefile(self, repo_name, fname): |
|
705 | 696 | fileformat = None |
|
706 | 697 | commit_id = None |
|
707 | 698 | ext = None |
|
708 | 699 | subrepos = request.GET.get('subrepos') == 'true' |
|
709 | 700 | |
|
710 | 701 | for a_type, ext_data in settings.ARCHIVE_SPECS.items(): |
|
711 | 702 | archive_spec = fname.split(ext_data[1]) |
|
712 | 703 | if len(archive_spec) == 2 and archive_spec[1] == '': |
|
713 | 704 | fileformat = a_type or ext_data[1] |
|
714 | 705 | commit_id = archive_spec[0] |
|
715 | 706 | ext = ext_data[1] |
|
716 | 707 | |
|
717 | 708 | dbrepo = RepoModel().get_by_repo_name(repo_name) |
|
718 | 709 | if not dbrepo.enable_downloads: |
|
719 | 710 | return _('Downloads disabled') |
|
720 | 711 | |
|
721 | 712 | try: |
|
722 | 713 | commit = c.rhodecode_repo.get_commit(commit_id) |
|
723 | 714 | content_type = settings.ARCHIVE_SPECS[fileformat][0] |
|
724 | 715 | except CommitDoesNotExistError: |
|
725 | 716 | return _('Unknown revision %s') % commit_id |
|
726 | 717 | except EmptyRepositoryError: |
|
727 | 718 | return _('Empty repository') |
|
728 | 719 | except KeyError: |
|
729 | 720 | return _('Unknown archive type') |
|
730 | 721 | |
|
731 | 722 | # archive cache |
|
732 | 723 | from rhodecode import CONFIG |
|
733 | 724 | |
|
734 | 725 | archive_name = '%s-%s%s%s' % ( |
|
735 | 726 | safe_str(repo_name.replace('/', '_')), |
|
736 | 727 | '-sub' if subrepos else '', |
|
737 | 728 | safe_str(commit.short_id), ext) |
|
738 | 729 | |
|
739 | 730 | use_cached_archive = False |
|
740 | 731 | archive_cache_enabled = CONFIG.get( |
|
741 | 732 | 'archive_cache_dir') and not request.GET.get('no_cache') |
|
742 | 733 | |
|
743 | 734 | if archive_cache_enabled: |
|
744 | 735 | # check if it's ok to write 
|
745 | 736 | if not os.path.isdir(CONFIG['archive_cache_dir']): |
|
746 | 737 | os.makedirs(CONFIG['archive_cache_dir']) |
|
747 | 738 | cached_archive_path = os.path.join( |
|
748 | 739 | CONFIG['archive_cache_dir'], archive_name) |
|
749 | 740 | if os.path.isfile(cached_archive_path): |
|
750 | 741 | log.debug('Found cached archive in %s', cached_archive_path) |
|
751 | 742 | fd, archive = None, cached_archive_path |
|
752 | 743 | use_cached_archive = True |
|
753 | 744 | else: |
|
754 | 745 | log.debug('Archive %s is not yet cached', archive_name) |
|
755 | 746 | |
|
756 | 747 | if not use_cached_archive: |
|
757 | 748 | # generate new archive |
|
758 | 749 | fd, archive = tempfile.mkstemp() |
|
759 | 750 | log.debug('Creating new temp archive in %s' % (archive,)) |
|
760 | 751 | try: |
|
761 | 752 | commit.archive_repo(archive, kind=fileformat, subrepos=subrepos) |
|
762 | 753 | except ImproperArchiveTypeError: |
|
763 | 754 | return _('Unknown archive type') |
|
764 | 755 | if archive_cache_enabled: |
|
765 | 756 | # if we generated the archive and we have cache enabled |
|
766 | 757 | # let's use this for future |
|
767 | 758 | log.debug('Storing new archive in %s' % (cached_archive_path,)) |
|
768 | 759 | shutil.move(archive, cached_archive_path) |
|
769 | 760 | archive = cached_archive_path |
|
770 | 761 | |
|
771 | 762 | def get_chunked_archive(archive): |
|
772 | 763 | with open(archive, 'rb') as stream: |
|
773 | 764 | while True: |
|
774 | 765 | data = stream.read(16 * 1024) |
|
775 | 766 | if not data: |
|
776 | 767 | if fd: # fd means we used temporary file |
|
777 | 768 | os.close(fd) |
|
778 | 769 | if not archive_cache_enabled: |
|
779 | 770 | log.debug('Destroying temp archive %s', archive) |
|
780 | 771 | os.remove(archive) |
|
781 | 772 | break |
|
782 | 773 | yield data |
|
783 | 774 | |
|
784 | 775 | # store download action |
|
785 | 776 | action_logger(user=c.rhodecode_user, |
|
786 | 777 | action='user_downloaded_archive:%s' % archive_name, |
|
787 | 778 | repo=repo_name, ipaddr=self.ip_addr, commit=True) |
|
788 | 779 | response.content_disposition = str( |
|
789 | 780 | 'attachment; filename=%s' % archive_name) |
|
790 | 781 | response.content_type = str(content_type) |
|
791 | 782 | |
|
792 | 783 | return get_chunked_archive(archive) |
|
793 | 784 | |
|
794 | 785 | @LoginRequired() |
|
795 | 786 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
796 | 787 | 'repository.admin') |
|
797 | 788 | def diff(self, repo_name, f_path): |
|
798 | 789 | ignore_whitespace = request.GET.get('ignorews') == '1' |
|
799 | 790 | line_context = request.GET.get('context', 3) |
|
800 | 791 | diff1 = request.GET.get('diff1', '') |
|
801 | 792 | |
|
802 | 793 | path1, diff1 = parse_path_ref(diff1, default_path=f_path) |
|
803 | 794 | |
|
804 | 795 | diff2 = request.GET.get('diff2', '') |
|
805 | 796 | c.action = request.GET.get('diff') |
|
806 | 797 | c.no_changes = diff1 == diff2 |
|
807 | 798 | c.f_path = f_path |
|
808 | 799 | c.big_diff = False |
|
809 | 800 | c.ignorews_url = _ignorews_url |
|
810 | 801 | c.context_url = _context_url |
|
811 | 802 | c.changes = OrderedDict() |
|
812 | 803 | c.changes[diff2] = [] |
|
813 | 804 | |
|
814 | 805 | if not any((diff1, diff2)): |
|
815 | 806 | h.flash( |
|
816 | 807 | 'Need query parameter "diff1" or "diff2" to generate a diff.', |
|
817 | 808 | category='error') |
|
818 | 809 | raise HTTPBadRequest() |
|
819 | 810 | |
|
820 | 811 | # special case if we want a show commit_id only, it's impl here |
|
821 | 812 | # to reduce JS and callbacks |
|
822 | 813 | |
|
823 | 814 | if request.GET.get('show_rev') and diff1: |
|
824 | 815 | if str2bool(request.GET.get('annotate', 'False')): |
|
825 | 816 | _url = url('files_annotate_home', repo_name=c.repo_name, |
|
826 | 817 | revision=diff1, f_path=path1) |
|
827 | 818 | else: |
|
828 | 819 | _url = url('files_home', repo_name=c.repo_name, |
|
829 | 820 | revision=diff1, f_path=path1) |
|
830 | 821 | |
|
831 | 822 | return redirect(_url) |
|
832 | 823 | |
|
833 | 824 | try: |
|
834 | 825 | node1 = self._get_file_node(diff1, path1) |
|
835 | 826 | node2 = self._get_file_node(diff2, f_path) |
|
836 | 827 | except (RepositoryError, NodeError): |
|
837 | 828 | log.exception("Exception while trying to get node from repository") |
|
838 | 829 | return redirect(url( |
|
839 | 830 | 'files_home', repo_name=c.repo_name, f_path=f_path)) |
|
840 | 831 | |
|
841 | 832 | if all(isinstance(node.commit, EmptyCommit) |
|
842 | 833 | for node in (node1, node2)): |
|
843 | 834 | raise HTTPNotFound |
|
844 | 835 | |
|
845 | 836 | c.commit_1 = node1.commit |
|
846 | 837 | c.commit_2 = node2.commit |
|
847 | 838 | |
|
848 | 839 | if c.action == 'download': |
|
849 | 840 | _diff = diffs.get_gitdiff(node1, node2, |
|
850 | 841 | ignore_whitespace=ignore_whitespace, |
|
851 | 842 | context=line_context) |
|
852 | 843 | diff = diffs.DiffProcessor(_diff, format='gitdiff') |
|
853 | 844 | |
|
854 | 845 | diff_name = '%s_vs_%s.diff' % (diff1, diff2) |
|
855 | 846 | response.content_type = 'text/plain' |
|
856 | 847 | response.content_disposition = ( |
|
857 | 848 | 'attachment; filename=%s' % (diff_name,) |
|
858 | 849 | ) |
|
859 | 850 | charset = self._get_default_encoding() |
|
860 | 851 | if charset: |
|
861 | 852 | response.charset = charset |
|
862 | 853 | return diff.as_raw() |
|
863 | 854 | |
|
864 | 855 | elif c.action == 'raw': |
|
865 | 856 | _diff = diffs.get_gitdiff(node1, node2, |
|
866 | 857 | ignore_whitespace=ignore_whitespace, |
|
867 | 858 | context=line_context) |
|
868 | 859 | diff = diffs.DiffProcessor(_diff, format='gitdiff') |
|
869 | 860 | response.content_type = 'text/plain' |
|
870 | 861 | charset = self._get_default_encoding() |
|
871 | 862 | if charset: |
|
872 | 863 | response.charset = charset |
|
873 | 864 | return diff.as_raw() |
|
874 | 865 | |
|
875 | 866 | else: |
|
876 | 867 | fid = h.FID(diff2, node2.path) |
|
877 | 868 | line_context_lcl = get_line_ctx(fid, request.GET) |
|
878 | 869 | ign_whitespace_lcl = get_ignore_ws(fid, request.GET) |
|
879 | 870 | |
|
880 | 871 | __, commit1, commit2, diff, st, data = diffs.wrapped_diff( |
|
881 | 872 | filenode_old=node1, |
|
882 | 873 | filenode_new=node2, |
|
883 | 874 | diff_limit=self.cut_off_limit_diff, |
|
884 | 875 | file_limit=self.cut_off_limit_file, |
|
885 | 876 | show_full_diff=request.GET.get('fulldiff'), |
|
886 | 877 | ignore_whitespace=ign_whitespace_lcl, |
|
887 | 878 | line_context=line_context_lcl,) |
|
888 | 879 | |
|
889 | 880 | c.lines_added = data['stats']['added'] if data else 0 |
|
890 | 881 | c.lines_deleted = data['stats']['deleted'] if data else 0 |
|
891 | 882 | c.files = [data] |
|
892 | 883 | c.commit_ranges = [c.commit_1, c.commit_2] |
|
893 | 884 | c.ancestor = None |
|
894 | 885 | c.statuses = [] |
|
895 | 886 | c.target_repo = c.rhodecode_db_repo |
|
896 | 887 | c.filename1 = node1.path |
|
897 | 888 | c.filename = node2.path |
|
898 | 889 | c.binary_file = node1.is_binary or node2.is_binary |
|
899 | 890 | operation = data['operation'] if data else '' |
|
900 | 891 | |
|
901 | 892 | commit_changes = { |
|
902 | 893 | # TODO: it's passing the old file to the diff to keep the |
|
903 | 894 | # standard but this is not being used for this template, |
|
904 | 895 | # but might need both files in the future or a more standard |
|
905 | 896 | # way to work with that |
|
906 | 897 | 'fid': [commit1, commit2, operation, |
|
907 | 898 | c.filename, diff, st, data] |
|
908 | 899 | } |
|
909 | 900 | |
|
910 | 901 | c.changes = commit_changes |
|
911 | 902 | |
|
912 | 903 | return render('files/file_diff.html') |
|
913 | 904 | |
|
914 | 905 | @LoginRequired() |
|
915 | 906 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
916 | 907 | 'repository.admin') |
|
917 | 908 | def diff_2way(self, repo_name, f_path): |
|
918 | 909 | diff1 = request.GET.get('diff1', '') |
|
919 | 910 | diff2 = request.GET.get('diff2', '') |
|
920 | 911 | |
|
921 | 912 | nodes = [] |
|
922 | 913 | unknown_commits = [] |
|
923 | 914 | for commit in [diff1, diff2]: |
|
924 | 915 | try: |
|
925 | 916 | nodes.append(self._get_file_node(commit, f_path)) |
|
926 | 917 | except (RepositoryError, NodeError): |
|
927 | 918 | log.exception('%(commit)s does not exist' % {'commit': commit}) |
|
928 | 919 | unknown_commits.append(commit) |
|
929 | 920 | h.flash(h.literal( |
|
930 | 921 | _('Commit %(commit)s does not exist.') % {'commit': commit} |
|
931 | 922 | ), category='error') |
|
932 | 923 | |
|
933 | 924 | if unknown_commits: |
|
934 | 925 | return redirect(url('files_home', repo_name=c.repo_name, |
|
935 | 926 | f_path=f_path)) |
|
936 | 927 | |
|
937 | 928 | if all(isinstance(node.commit, EmptyCommit) for node in nodes): |
|
938 | 929 | raise HTTPNotFound |
|
939 | 930 | |
|
940 | 931 | node1, node2 = nodes |
|
941 | 932 | |
|
942 | 933 | f_gitdiff = diffs.get_gitdiff(node1, node2, ignore_whitespace=False) |
|
943 | 934 | diff_processor = diffs.DiffProcessor(f_gitdiff, format='gitdiff') |
|
944 | 935 | diff_data = diff_processor.prepare() |
|
945 | 936 | |
|
946 | 937 | if not diff_data or diff_data[0]['raw_diff'] == '': |
|
947 | 938 | h.flash(h.literal(_('%(file_path)s has not changed ' |
|
948 | 939 | 'between %(commit_1)s and %(commit_2)s.') % { |
|
949 | 940 | 'file_path': f_path, |
|
950 | 941 | 'commit_1': node1.commit.id, |
|
951 | 942 | 'commit_2': node2.commit.id |
|
952 | 943 | }), category='error') |
|
953 | 944 | return redirect(url('files_home', repo_name=c.repo_name, |
|
954 | 945 | f_path=f_path)) |
|
955 | 946 | |
|
956 | 947 | c.diff_data = diff_data[0] |
|
957 | 948 | c.FID = h.FID(diff2, node2.path) |
|
958 | 949 | # cleanup some unneeded data |
|
959 | 950 | del c.diff_data['raw_diff'] |
|
960 | 951 | del c.diff_data['chunks'] |
|
961 | 952 | |
|
962 | 953 | c.node1 = node1 |
|
963 | 954 | c.commit_1 = node1.commit |
|
964 | 955 | c.node2 = node2 |
|
965 | 956 | c.commit_2 = node2.commit |
|
966 | 957 | |
|
967 | 958 | return render('files/diff_2way.html') |
|
968 | 959 | |
|
969 | 960 | def _get_file_node(self, commit_id, f_path): |
|
970 | 961 | if commit_id not in ['', None, 'None', '0' * 12, '0' * 40]: |
|
971 | 962 | commit = c.rhodecode_repo.get_commit(commit_id=commit_id) |
|
972 | 963 | try: |
|
973 | 964 | node = commit.get_node(f_path) |
|
974 | 965 | if node.is_dir(): |
|
975 | 966 | raise NodeError('%s path is a %s not a file' |
|
976 | 967 | % (node, type(node))) |
|
977 | 968 | except NodeDoesNotExistError: |
|
978 | 969 | commit = EmptyCommit( |
|
979 | 970 | commit_id=commit_id, |
|
980 | 971 | idx=commit.idx, |
|
981 | 972 | repo=commit.repository, |
|
982 | 973 | alias=commit.repository.alias, |
|
983 | 974 | message=commit.message, |
|
984 | 975 | author=commit.author, |
|
985 | 976 | date=commit.date) |
|
986 | 977 | node = FileNode(f_path, '', commit=commit) |
|
987 | 978 | else: |
|
988 | 979 | commit = EmptyCommit( |
|
989 | 980 | repo=c.rhodecode_repo, |
|
990 | 981 | alias=c.rhodecode_repo.alias) |
|
991 | 982 | node = FileNode(f_path, '', commit=commit) |
|
992 | 983 | return node |
|
993 | 984 | |
|
994 | 985 | def _get_node_history(self, commit, f_path, commits=None): |
|
995 | 986 | """ |
|
996 | 987 | get commit history for given node |
|
997 | 988 | |
|
998 | 989 | :param commit: commit to calculate history |
|
999 | 990 | :param f_path: path for node to calculate history for |
|
1000 | 991 | :param commits: if passed don't calculate history and take |
|
1001 | 992 | commits defined in this list |
|
1002 | 993 | """ |
|
1003 | 994 | # calculate history based on tip |
|
1004 | 995 | tip = c.rhodecode_repo.get_commit() |
|
1005 | 996 | if commits is None: |
|
1006 | 997 | pre_load = ["author", "branch"] |
|
1007 | 998 | try: |
|
1008 | 999 | commits = tip.get_file_history(f_path, pre_load=pre_load) |
|
1009 | 1000 | except (NodeDoesNotExistError, CommitError): |
|
1010 | 1001 | # this node is not present at tip! |
|
1011 | 1002 | commits = commit.get_file_history(f_path, pre_load=pre_load) |
|
1012 | 1003 | |
|
1013 | 1004 | history = [] |
|
1014 | 1005 | commits_group = ([], _("Changesets")) |
|
1015 | 1006 | for commit in commits: |
|
1016 | 1007 | branch = ' (%s)' % commit.branch if commit.branch else '' |
|
1017 | 1008 | n_desc = 'r%s:%s%s' % (commit.idx, commit.short_id, branch) |
|
1018 | 1009 | commits_group[0].append((commit.raw_id, n_desc,)) |
|
1019 | 1010 | history.append(commits_group) |
|
1020 | 1011 | |
|
1021 | 1012 | symbolic_reference = self._symbolic_reference |
|
1022 | 1013 | |
|
1023 | 1014 | if c.rhodecode_repo.alias == 'svn': |
|
1024 | 1015 | adjusted_f_path = self._adjust_file_path_for_svn( |
|
1025 | 1016 | f_path, c.rhodecode_repo) |
|
1026 | 1017 | if adjusted_f_path != f_path: |
|
1027 | 1018 | log.debug( |
|
1028 | 1019 | 'Recognized svn tag or branch in file "%s", using svn ' |
|
1029 | 1020 | 'specific symbolic references', f_path) |
|
1030 | 1021 | f_path = adjusted_f_path |
|
1031 | 1022 | symbolic_reference = self._symbolic_reference_svn |
|
1032 | 1023 | |
|
1033 | 1024 | branches = self._create_references( |
|
1034 | 1025 | c.rhodecode_repo.branches, symbolic_reference, f_path) |
|
1035 | 1026 | branches_group = (branches, _("Branches")) |
|
1036 | 1027 | |
|
1037 | 1028 | tags = self._create_references( |
|
1038 | 1029 | c.rhodecode_repo.tags, symbolic_reference, f_path) |
|
1039 | 1030 | tags_group = (tags, _("Tags")) |
|
1040 | 1031 | |
|
1041 | 1032 | history.append(branches_group) |
|
1042 | 1033 | history.append(tags_group) |
|
1043 | 1034 | |
|
1044 | 1035 | return history, commits |
|
1045 | 1036 | |
|
1046 | 1037 | def _adjust_file_path_for_svn(self, f_path, repo): |
|
1047 | 1038 | """ |
|
1048 | 1039 | Computes the relative path of `f_path`. |
|
1049 | 1040 | |
|
1050 | 1041 | This is mainly based on prefix matching of the recognized tags and |
|
1051 | 1042 | branches in the underlying repository. |
|
1052 | 1043 | """ |
|
1053 | 1044 | tags_and_branches = itertools.chain( |
|
1054 | 1045 | repo.branches.iterkeys(), |
|
1055 | 1046 | repo.tags.iterkeys()) |
|
1056 | 1047 | tags_and_branches = sorted(tags_and_branches, key=len, reverse=True) |
|
1057 | 1048 | |
|
1058 | 1049 | for name in tags_and_branches: |
|
1059 | 1050 | if f_path.startswith(name + '/'): |
|
1060 | 1051 | f_path = vcspath.relpath(f_path, name) |
|
1061 | 1052 | break |
|
1062 | 1053 | return f_path |
|
1063 | 1054 | |
|
1064 | 1055 | def _create_references( |
|
1065 | 1056 | self, branches_or_tags, symbolic_reference, f_path): |
|
1066 | 1057 | items = [] |
|
1067 | 1058 | for name, commit_id in branches_or_tags.items(): |
|
1068 | 1059 | sym_ref = symbolic_reference(commit_id, name, f_path) |
|
1069 | 1060 | items.append((sym_ref, name)) |
|
1070 | 1061 | return items |
|
1071 | 1062 | |
|
1072 | 1063 | def _symbolic_reference(self, commit_id, name, f_path): |
|
1073 | 1064 | return commit_id |
|
1074 | 1065 | |
|
1075 | 1066 | def _symbolic_reference_svn(self, commit_id, name, f_path): |
|
1076 | 1067 | new_f_path = vcspath.join(name, f_path) |
|
1077 | 1068 | return u'%s@%s' % (new_f_path, commit_id) |
|
1078 | 1069 | |
|
1079 | 1070 | @LoginRequired() |
|
1080 | 1071 | @XHRRequired() |
|
1081 | 1072 | @HasRepoPermissionAnyDecorator( |
|
1082 | 1073 | 'repository.read', 'repository.write', 'repository.admin') |
|
1083 | 1074 | @jsonify |
|
1084 | 1075 | def nodelist(self, repo_name, revision, f_path): |
|
1085 | 1076 | commit = self.__get_commit_or_redirect(revision, repo_name) |
|
1086 | 1077 | |
|
1087 | 1078 | metadata = self._get_nodelist_at_commit( |
|
1088 | 1079 | repo_name, commit.raw_id, f_path) |
|
1089 | 1080 | return {'nodes': metadata} |
|
1090 | 1081 | |
|
1091 | 1082 | @LoginRequired() |
|
1092 | 1083 | @XHRRequired() |
|
1093 | 1084 | @HasRepoPermissionAnyDecorator( |
|
1094 | 1085 | 'repository.read', 'repository.write', 'repository.admin') |
|
1095 | @jsonify | |
|
1096 | def metadata_list(self, repo_name, revision, f_path): | |
|
1086 | def nodetree_full(self, repo_name, commit_id, f_path): | |
|
1097 | 1087 | """ |
|
1098 | Returns | |

1099 | a | |

1088 | Returns rendered html of the file tree that contains commit date, | |

1089 | author and revision for the specified combination of | |

1090 | repo, commit_id and file path | |
|
1100 | 1091 | |
|
1101 | 1092 | :param repo_name: name of the repository |
|
1102 | :param revision: revision of file tree | |

1093 | :param commit_id: commit_id of file tree | |
|
1103 | 1094 | :param f_path: file path of the requested directory |
|
1104 | 1095 | """ |
|
1105 | 1096 | |
|
1106 | commit = self.__get_commit_or_redirect(revision, repo_name) | |

1097 | commit = self.__get_commit_or_redirect(commit_id, repo_name) | |
|
1107 | 1098 | try: |
|
1108 | file_node = commit.get_node(f_path) | |

1099 | dir_node = commit.get_node(f_path) | |
|
1109 | 1100 | except RepositoryError as e: |
|
1110 | return | |

1101 | return 'error {}'.format(safe_str(e)) | |
|
1102 | ||
|
1103 | if dir_node.is_file(): | |
|
1104 | return '' | |
|
1111 | 1105 | |
|
1112 | metadata = self._get_metadata_at_commit( | |
|
1113 | repo_name, commit, file_node) | |
|
1114 | return {'metadata': metadata} | |
|
1106 | c.file = dir_node | |
|
1107 | c.commit = commit | |
|
1108 | ||
|
1109 | # using force=True here is a little trick: we flush the cache and | |

1110 | # recompute it under the same key as without full_load, so the fully | |

1111 | # loaded cached tree is returned from now on instead of the partial one | |
|
1112 | return self._get_tree_at_commit( | |
|
1113 | repo_name, commit.raw_id, dir_node.path, full_load=True, | |
|
1114 | force=True) |
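
In plain beaker terms the force=True trick amounts to the following sketch; `render_full_tree` is a hypothetical stand-in for the actual tree computation, and the cache helpers are the ones defined in the caches module below:

    _namespace = caches.get_repo_namespace_key(caches.FILE_TREE, repo_name)
    manager = caches.get_cache_manager('repo_cache_long', _namespace)
    _cache_key = caches.compute_key_from_params(repo_name, commit_id, f_path)

    manager.remove_value(_cache_key)  # 'force': drop the partially loaded entry
    # recompute under the *same* key, so plain lookups now return the full tree
    full_tree = manager.get(_cache_key, createfunc=render_full_tree)
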
@@ -1,226 +1,226 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2015-2016 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | |
|
22 | 22 | import beaker |
|
23 | 23 | import logging |
|
24 | 24 | |
|
25 | 25 | from beaker.cache import _cache_decorate, cache_regions, region_invalidate |
|
26 | 26 | |
|
27 | 27 | from rhodecode.lib.utils import safe_str, md5 |
|
28 | 28 | from rhodecode.model.db import Session, CacheKey, IntegrityError |
|
29 | 29 | |
|
30 | 30 | log = logging.getLogger(__name__) |
|
31 | 31 | |
|
32 | 32 | FILE_TREE = 'cache_file_tree' |
|
33 | 33 | FILE_TREE_META = 'cache_file_tree_metadata' |
|
34 | 34 | FILE_SEARCH_TREE_META = 'cache_file_search_metadata' |
|
35 | 35 | SUMMARY_STATS = 'cache_summary_stats' |
|
36 | 36 | |
|
37 | 37 | # This list of caches gets purged when invalidation happens |
|
38 |
USED_REPO_CACHES = (FILE_TREE, FILE_ |
|
|
38 | USED_REPO_CACHES = (FILE_TREE, FILE_SEARCH_TREE_META) | |
|
39 | 39 | |
|
40 | 40 | DEFAULT_CACHE_MANAGER_CONFIG = { |
|
41 | 41 | 'type': 'memorylru_base', |
|
42 | 42 | 'max_items': 10240, |
|
43 | 43 | 'key_length': 256, |
|
44 | 44 | 'enabled': True |
|
45 | 45 | } |
|
46 | 46 | |
|
47 | 47 | |
|
48 | 48 | def configure_cache_region( |
|
49 | 49 | region_name, region_kw, default_cache_kw, default_expire=60): |
|
50 | 50 | default_type = default_cache_kw.get('type', 'memory') |
|
51 | 51 | default_lock_dir = default_cache_kw.get('lock_dir') |
|
52 | 52 | default_data_dir = default_cache_kw.get('data_dir') |
|
53 | 53 | |
|
54 | 54 | region_kw['lock_dir'] = region_kw.get('lock_dir', default_lock_dir) |
|
55 | 55 | region_kw['data_dir'] = region_kw.get('data_dir', default_data_dir) |
|
56 | 56 | region_kw['type'] = region_kw.get('type', default_type) |
|
57 | 57 | region_kw['expire'] = int(region_kw.get('expire', default_expire)) |
|
58 | 58 | |
|
59 | 59 | beaker.cache.cache_regions[region_name] = region_kw |
|
60 | 60 | |
|
61 | 61 | |
|
62 | 62 | def get_cache_manager(region_name, cache_name, custom_ttl=None): |
|
63 | 63 | """ |
|
64 | 64 | Creates a Beaker cache manager. Such an instance can be used like this::
|
65 | 65 | |
|
66 | 66 | _namespace = caches.get_repo_namespace_key(caches.XXX, repo_name) |
|
67 | 67 | cache_manager = caches.get_cache_manager('repo_cache_long', _namespace) |
|
68 | 68 | _cache_key = caches.compute_key_from_params(repo_name, commit.raw_id) |
|
69 | 69 | def heavy_compute(): |
|
70 | 70 | ... |
|
71 | 71 | result = cache_manager.get(_cache_key, createfunc=heavy_compute) |
|
72 | 72 | |
|
73 | 73 | :param region_name: region from ini file |
|
74 | 74 | :param cache_name: custom cache name, usually prefix+repo_name. eg |
|
75 | 75 | file_switcher_repo1 |
|
76 | 76 | :param custom_ttl: override .ini file timeout on this cache |
|
77 | 77 | :return: instance of cache manager |
|
78 | 78 | """ |
|
79 | 79 | |
|
80 | 80 | cache_config = cache_regions.get(region_name, DEFAULT_CACHE_MANAGER_CONFIG) |
|
81 | 81 | if custom_ttl: |
|
82 | 82 | log.debug('Updating region %s with custom ttl: %s', |
|
83 | 83 | region_name, custom_ttl) |
|
84 | 84 | cache_config.update({'expire': custom_ttl}) |
|
85 | 85 | |
|
86 | 86 | return beaker.cache.Cache._get_cache(cache_name, cache_config) |
|
87 | 87 | |
|
88 | 88 | |
|
89 | 89 | def clear_cache_manager(cache_manager): |
|
90 | 90 | """ |
|
91 | 91 | namespace = 'foobar' |
|
92 | 92 | cache_manager = get_cache_manager('repo_cache_long', namespace) |
|
93 | 93 | clear_cache_manager(cache_manager) |
|
94 | 94 | """ |
|
95 | 95 | |
|
96 | 96 | log.debug('Clearing all values for cache manager %s', cache_manager) |
|
97 | 97 | cache_manager.clear() |
|
98 | 98 | |
|
99 | 99 | |
|
100 | 100 | def clear_repo_caches(repo_name): |
|
101 | 101 | # invalidate cache manager for this repo |
|
102 | 102 | for prefix in USED_REPO_CACHES: |
|
103 | 103 | namespace = get_repo_namespace_key(prefix, repo_name) |
|
104 | 104 | cache_manager = get_cache_manager('repo_cache_long', namespace) |
|
105 | 105 | clear_cache_manager(cache_manager) |
|
106 | 106 | |
|
107 | 107 | |
|
108 | 108 | def compute_key_from_params(*args): |
|
109 | 109 | """ |
|
110 | 110 | Helper to compute key from given params to be used in cache manager |
|
111 | 111 | """ |
|
112 | 112 | return md5("_".join(map(safe_str, args))) |
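
A worked example of the key derivation, assuming `safe_str` reduces to `str` for ASCII input and `md5` returns the hex digest:

    import hashlib

    args = ('repo1', 'deadbeef')
    key = hashlib.md5('_'.join(map(str, args)).encode('utf-8')).hexdigest()
    # key is the 32-character hex digest of 'repo1_deadbeef'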
|
113 | 113 | |
|
114 | 114 | |
|
115 | 115 | def get_repo_namespace_key(prefix, repo_name): |
|
116 | 116 | return '{0}_{1}'.format(prefix, compute_key_from_params(repo_name)) |
|
117 | 117 | |
|
118 | 118 | |
|
119 | 119 | def conditional_cache(region, prefix, condition, func): |
|
120 | 120 | """ |
|
121 | 121 | Conditional caching function use like:: |
|
122 | 122 | def _c(arg): |
|
123 | 123 | # heavy computation function |
|
124 | 124 | return data |
|
125 | 125 | |
|
126 | 126 | # depending on the condition the compute is wrapped in cache or not |
|
127 | 127 | compute = conditional_cache('short_term', 'cache_desc', |
|
128 | 128 | condition=True, func=func) |
|
129 | 129 | return compute(arg) |
|
130 | 130 | |
|
131 | 131 | :param region: name of cache region |
|
132 | 132 | :param prefix: cache region prefix |
|
133 | 133 | :param condition: condition for cache to be triggered, and |
|
134 | 134 | return data cached |
|
135 | 135 | :param func: wrapped heavy function to compute |
|
136 | 136 | |
|
137 | 137 | """ |
|
138 | 138 | wrapped = func |
|
139 | 139 | if condition: |
|
140 | 140 | log.debug('conditional_cache: True, wrapping call of ' |
|
141 | 141 | 'func: %s into %s region cache', region, func) |
|
142 | 142 | cached_region = _cache_decorate((prefix,), None, None, region) |
|
143 | 143 | wrapped = cached_region(func) |
|
144 | 144 | return wrapped |
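
Roughly the same effect can be sketched with beaker's public decorator, under the assumption that the private `_cache_decorate` call above behaves like `beaker.cache.cache_region`; `expensive` is a hypothetical heavy function:

    from beaker.cache import cache_region

    def conditional_cache_sketch(region, prefix, condition, expensive):
        if condition:
            # wrap the heavy function in the named region cache
            return cache_region(region, prefix)(expensive)
        return expensive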
|
145 | 145 | |
|
146 | 146 | |
|
147 | 147 | class ActiveRegionCache(object): |
|
148 | 148 | def __init__(self, context): |
|
149 | 149 | self.context = context |
|
150 | 150 | |
|
151 | 151 | def invalidate(self, *args, **kwargs): |
|
152 | 152 | return False |
|
153 | 153 | |
|
154 | 154 | def compute(self): |
|
155 | 155 | log.debug('Context cache: getting obj %s from cache', self.context) |
|
156 | 156 | return self.context.compute_func(self.context.cache_key) |
|
157 | 157 | |
|
158 | 158 | |
|
159 | 159 | class FreshRegionCache(ActiveRegionCache): |
|
160 | 160 | def invalidate(self): |
|
161 | 161 | log.debug('Context cache: invalidating cache for %s', self.context) |
|
162 | 162 | region_invalidate( |
|
163 | 163 | self.context.compute_func, None, self.context.cache_key) |
|
164 | 164 | return True |
|
165 | 165 | |
|
166 | 166 | |
|
167 | 167 | class InvalidationContext(object): |
|
168 | 168 | def __repr__(self): |
|
169 | 169 | return '<InvalidationContext:{}[{}]>'.format( |
|
170 | 170 | safe_str(self.repo_name), safe_str(self.cache_type)) |
|
171 | 171 | |
|
172 | 172 | def __init__(self, compute_func, repo_name, cache_type, |
|
173 | 173 | raise_exception=False): |
|
174 | 174 | self.compute_func = compute_func |
|
175 | 175 | self.repo_name = repo_name |
|
176 | 176 | self.cache_type = cache_type |
|
177 | 177 | self.cache_key = compute_key_from_params( |
|
178 | 178 | repo_name, cache_type) |
|
179 | 179 | self.raise_exception = raise_exception |
|
180 | 180 | |
|
181 | 181 | def get_cache_obj(self): |
|
182 | 182 | cache_key = CacheKey.get_cache_key( |
|
183 | 183 | self.repo_name, self.cache_type) |
|
184 | 184 | cache_obj = CacheKey.get_active_cache(cache_key) |
|
185 | 185 | if not cache_obj: |
|
186 | 186 | cache_obj = CacheKey(cache_key, self.repo_name) |
|
187 | 187 | return cache_obj |
|
188 | 188 | |
|
189 | 189 | def __enter__(self): |
|
190 | 190 | """ |
|
191 | 191 | Test if current object is valid, and return CacheRegion function |
|
192 | 192 | that does invalidation and calculation |
|
193 | 193 | """ |
|
194 | 194 | |
|
195 | 195 | self.cache_obj = self.get_cache_obj() |
|
196 | 196 | if self.cache_obj.cache_active: |
|
197 | 197 | # means our cache obj is existing and marked as it's |
|
198 | 198 | # cache is not outdated, we return BaseInvalidator |
|
199 | 199 | self.skip_cache_active_change = True |
|
200 | 200 | return ActiveRegionCache(self) |
|
201 | 201 | |
|
202 | 202 | # the key either does not exist or is set to False; we return

203 | 203 | # the real invalidator which re-computes the value. We additionally

204 | 204 | # set the flag to actually update the database objects
|
205 | 205 | self.skip_cache_active_change = False |
|
206 | 206 | return FreshRegionCache(self) |
|
207 | 207 | |
|
208 | 208 | def __exit__(self, exc_type, exc_val, exc_tb): |
|
209 | 209 | |
|
210 | 210 | if self.skip_cache_active_change: |
|
211 | 211 | return |
|
212 | 212 | |
|
213 | 213 | try: |
|
214 | 214 | self.cache_obj.cache_active = True |
|
215 | 215 | Session().add(self.cache_obj) |
|
216 | 216 | Session().commit() |
|
217 | 217 | except IntegrityError: |
|
218 | 218 | # if we catch integrity error, it means we inserted this object |
|
219 | 219 | # assumption is that's really an edge race-condition case and |
|
220 | 220 | # it's safe is to skip it |
|
221 | 221 | Session().rollback() |
|
222 | 222 | except Exception: |
|
223 | 223 | log.exception('Failed to commit on cache key update') |
|
224 | 224 | Session().rollback() |
|
225 | 225 | if self.raise_exception: |
|
226 | 226 | raise |
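
A hypothetical usage sketch, pieced together from the `__enter__`/`__exit__` protocol above; `compute_file_tree` stands in for a real beaker-cached computation:

    def compute_file_tree(cache_key):
        ...  # heavy computation, wrapped in a beaker region by the caller

    with InvalidationContext(compute_file_tree, 'repo1', FILE_TREE) as region:
        region.invalidate()      # no-op for ActiveRegionCache, real for Fresh
        tree = region.compute()  # cached value, or recomputed after invalidation
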
@@ -1,50 +1,51 b'' | |||
|
1 | 1 | |
|
2 | 2 | /****************************************************************************** |
|
3 | 3 | * * |
|
4 | 4 | * DO NOT CHANGE THIS FILE MANUALLY * |
|
5 | 5 | * * |
|
6 | 6 | * * |
|
7 | * This file is automatically generated when the app starts up | |

7 | * This file is automatically generated when the app starts up with * | |
|
8 | * generate_js_files = true * | |
|
8 | 9 | * * |
|
9 | 10 | * To add a route here pass jsroute=True to the route definition in the app * |
|
10 | 11 | * * |
|
11 | 12 | ******************************************************************************/ |
|
12 | 13 | function registerRCRoutes() { |
|
13 | 14 | // routes registration |
|
14 | 15 | pyroutes.register('home', '/', []); |
|
15 | 16 | pyroutes.register('user_autocomplete_data', '/_users', []); |
|
16 | 17 | pyroutes.register('new_repo', '/_admin/create_repository', []); |
|
17 | 18 | pyroutes.register('edit_user_group_members', '/_admin/user_groups/%(user_group_id)s/edit/members', ['user_group_id']); |
|
18 | 19 | pyroutes.register('gists', '/_admin/gists', []); |
|
19 | 20 | pyroutes.register('new_gist', '/_admin/gists/new', []); |
|
20 | 21 | pyroutes.register('toggle_following', '/_admin/toggle_following', []); |
|
21 | 22 | pyroutes.register('repo_stats', '/%(repo_name)s/repo_stats/%(commit_id)s', ['repo_name', 'commit_id']); |
|
22 | 23 | pyroutes.register('repo_refs_data', '/%(repo_name)s/refs-data', ['repo_name']); |
|
23 | 24 | pyroutes.register('repo_refs_changelog_data', '/%(repo_name)s/refs-data-changelog', ['repo_name']); |
|
24 | 25 | pyroutes.register('changeset_home', '/%(repo_name)s/changeset/%(revision)s', ['repo_name', 'revision']); |
|
25 | 26 | pyroutes.register('edit_repo', '/%(repo_name)s/settings', ['repo_name']); |
|
26 | 27 | pyroutes.register('edit_repo_perms', '/%(repo_name)s/settings/permissions', ['repo_name']); |
|
27 | 28 | pyroutes.register('changeset_comment', '/%(repo_name)s/changeset/%(revision)s/comment', ['repo_name', 'revision']); |
|
28 | 29 | pyroutes.register('changeset_comment_preview', '/%(repo_name)s/changeset/comment/preview', ['repo_name']); |
|
29 | 30 | pyroutes.register('changeset_comment_delete', '/%(repo_name)s/changeset/comment/%(comment_id)s/delete', ['repo_name', 'comment_id']); |
|
30 | 31 | pyroutes.register('changeset_info', '/changeset_info/%(repo_name)s/%(revision)s', ['repo_name', 'revision']); |
|
31 | 32 | pyroutes.register('compare_url', '/%(repo_name)s/compare/%(source_ref_type)s@%(source_ref)s...%(target_ref_type)s@%(target_ref)s', ['repo_name', 'source_ref_type', 'source_ref', 'target_ref_type', 'target_ref']); |
|
32 | 33 | pyroutes.register('pullrequest_home', '/%(repo_name)s/pull-request/new', ['repo_name']); |
|
33 | 34 | pyroutes.register('pullrequest', '/%(repo_name)s/pull-request/new', ['repo_name']); |
|
34 | 35 | pyroutes.register('pullrequest_repo_refs', '/%(repo_name)s/pull-request/refs/%(target_repo_name)s', ['repo_name', 'target_repo_name']); |
|
35 | 36 | pyroutes.register('pullrequest_repo_destinations', '/%(repo_name)s/pull-request/repo-destinations', ['repo_name']); |
|
36 | 37 | pyroutes.register('pullrequest_update', '/%(repo_name)s/pull-request/%(pull_request_id)s', ['repo_name', 'pull_request_id']); |
|
37 | 38 | pyroutes.register('pullrequest_show_all', '/%(repo_name)s/pull-request', ['repo_name']); |
|
38 | 39 | pyroutes.register('pullrequest_comment', '/%(repo_name)s/pull-request-comment/%(pull_request_id)s', ['repo_name', 'pull_request_id']); |
|
39 | 40 | pyroutes.register('pullrequest_comment_delete', '/%(repo_name)s/pull-request-comment/%(comment_id)s/delete', ['repo_name', 'comment_id']); |
|
40 | 41 | pyroutes.register('changelog_home', '/%(repo_name)s/changelog', ['repo_name']); |
|
41 | 42 | pyroutes.register('changelog_file_home', '/%(repo_name)s/changelog/%(revision)s/%(f_path)s', ['repo_name', 'revision', 'f_path']); |
|
42 | 43 | pyroutes.register('files_home', '/%(repo_name)s/files/%(revision)s/%(f_path)s', ['repo_name', 'revision', 'f_path']); |
|
43 | 44 | pyroutes.register('files_history_home', '/%(repo_name)s/history/%(revision)s/%(f_path)s', ['repo_name', 'revision', 'f_path']); |
|
44 | 45 | pyroutes.register('files_authors_home', '/%(repo_name)s/authors/%(revision)s/%(f_path)s', ['repo_name', 'revision', 'f_path']); |
|
45 | 46 | pyroutes.register('files_archive_home', '/%(repo_name)s/archive/%(fname)s', ['repo_name', 'fname']); |
|
46 | 47 | pyroutes.register('files_nodelist_home', '/%(repo_name)s/nodelist/%(revision)s/%(f_path)s', ['repo_name', 'revision', 'f_path']); |
|
47 |
pyroutes.register('files_ |
|
|
48 | pyroutes.register('files_nodetree_full', '/%(repo_name)s/nodetree_full/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); | |
|
48 | 49 | pyroutes.register('summary_home_slash', '/%(repo_name)s/', ['repo_name']); |
|
49 | 50 | pyroutes.register('summary_home', '/%(repo_name)s', ['repo_name']); |
|
50 | 51 | } |
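
On the Python side a route lands in this generated file by being connected with `jsroute=True`; a hypothetical definition for the new entry might look like the sketch below (everything except the route name and path pattern is an assumption, not taken from this changeset):

    rmap.connect(
        'files_nodetree_full',
        '/{repo_name}/nodetree_full/{commit_id}/{f_path}',
        controller='files', action='nodetree_full',
        requirements=URL_NAME_REQUIREMENTS, jsroute=True)
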
@@ -1,333 +1,322 b'' | |||
|
1 | 1 | <%inherit file="/base/base.html"/> |
|
2 | 2 | |
|
3 | 3 | <%def name="title(*args)"> |
|
4 | 4 | ${_('%s Files') % c.repo_name} |
|
5 | 5 | %if hasattr(c,'file'): |
|
6 | 6 | · ${h.safe_unicode(c.file.path) or '\\'} |
|
7 | 7 | %endif |
|
8 | 8 | |
|
9 | 9 | %if c.rhodecode_name: |
|
10 | 10 | · ${h.branding(c.rhodecode_name)} |
|
11 | 11 | %endif |
|
12 | 12 | </%def> |
|
13 | 13 | |
|
14 | 14 | <%def name="breadcrumbs_links()"> |
|
15 | 15 | ${_('Files')} |
|
16 | 16 | %if c.file: |
|
17 | 17 | @ ${h.show_id(c.commit)} |
|
18 | 18 | %endif |
|
19 | 19 | </%def> |
|
20 | 20 | |
|
21 | 21 | <%def name="menu_bar_nav()"> |
|
22 | 22 | ${self.menu_items(active='repositories')} |
|
23 | 23 | </%def> |
|
24 | 24 | |
|
25 | 25 | <%def name="menu_bar_subnav()"> |
|
26 | 26 | ${self.repo_menu(active='files')} |
|
27 | 27 | </%def> |
|
28 | 28 | |
|
29 | 29 | <%def name="main()"> |
|
30 | 30 | <div class="title"> |
|
31 | 31 | ${self.repo_page_title(c.rhodecode_db_repo)} |
|
32 | 32 | </div> |
|
33 | 33 | |
|
34 | 34 | <div id="pjax-container" class="summary"> |
|
35 | 35 | <div id="files_data"> |
|
36 | 36 | <%include file='files_pjax.html'/> |
|
37 | 37 | </div> |
|
38 | 38 | </div> |
|
39 | 39 | <script> |
|
40 | 40 | var curState = { |
|
41 | 41 | commit_id: "${c.commit.raw_id}" |
|
42 | 42 | }; |
|
43 | 43 | |
|
44 | 44 | var getState = function(context) { |
|
45 | 45 | var url = $(location).attr('href'); |
|
46 | 46 | var _base_url = '${h.url("files_home",repo_name=c.repo_name,revision='',f_path='')}'; |
|
47 | 47 | var _annotate_url = '${h.url("files_annotate_home",repo_name=c.repo_name,revision='',f_path='')}'; |
|
48 | 48 | _base_url = _base_url.replace('//', '/'); |
|
49 | 49 | _annotate_url = _annotate_url.replace('//', '/'); |
|
50 | 50 | |
|
51 | 51 | //extract f_path from url. |
|
52 | 52 | var parts = url.split(_base_url); |
|
53 | 53 | if (parts.length != 2) { |
|
54 | 54 | parts = url.split(_annotate_url); |
|
55 | 55 | if (parts.length != 2) { |
|
56 | 56 | var rev = "tip"; |
|
57 | 57 | var f_path = ""; |
|
58 | 58 | } else { |
|
59 | 59 | var parts2 = parts[1].split('/'); |
|
60 | 60 | var rev = parts2.shift(); // pop the first element which is the revision |
|
61 | 61 | var f_path = parts2.join('/'); |
|
62 | 62 | } |
|
63 | 63 | |
|
64 | 64 | } else { |
|
65 | 65 | var parts2 = parts[1].split('/'); |
|
66 | 66 | var rev = parts2.shift(); // pop the first element which is the revision |
|
67 | 67 | var f_path = parts2.join('/'); |
|
68 | 68 | } |
|
69 | 69 | |
|
70 | 70 | var _node_list_url = pyroutes.url('files_nodelist_home', |
|
71 | 71 | {repo_name: templateContext.repo_name, |
|
72 | 72 | revision: rev, f_path: f_path}); |
|
73 | 73 | var _url_base = pyroutes.url('files_home', |
|
74 | 74 | {repo_name: templateContext.repo_name, |
|
75 | 75 | revision: rev, f_path:'__FPATH__'}); |
|
76 | 76 | return { |
|
77 | 77 | url: url, |
|
78 | 78 | f_path: f_path, |
|
79 | 79 | rev: rev, |
|
80 | 80 | commit_id: curState.commit_id, |
|
81 | 81 | node_list_url: _node_list_url, |
|
82 | 82 | url_base: _url_base |
|
83 | 83 | }; |
|
84 | 84 | }; |
|
85 | 85 | |
|
86 | 86 | var metadataRequest = null; |
|
87 | 87 | var getFilesMetadata = function() { |
|
88 | 88 | if (metadataRequest && metadataRequest.readyState != 4) { |
|
89 | 89 | metadataRequest.abort(); |
|
90 | 90 | } |
|
91 | 91 | if (source_page) { |
|
92 | 92 | return false; |
|
93 | 93 | } |
|
94 | ||
|
95 | if ($('#file-tree-wrapper').hasClass('full-load')) { | |
|
96 | // in case our HTML wrapper has full-load class we don't | |
|
97 | // trigger the async load of metadata | |
|
98 | return false; | |
|
99 | } | |
|
100 | ||
|
94 | 101 | var state = getState('metadata'); |
|
95 | 102 | var url_data = { |
|
96 | 103 | 'repo_name': templateContext.repo_name, |
|
97 |
' |
|
|
104 | 'commit_id': state.commit_id, | |
|
98 | 105 | 'f_path': state.f_path |
|
99 | 106 | }; |
|
100 | 107 | |
|
101 |
var url = pyroutes.url('files_ |
|
|
108 | var url = pyroutes.url('files_nodetree_full', url_data); | |
|
102 | 109 | |
|
103 | 110 | metadataRequest = $.ajax({url: url}); |
|
104 | 111 | |
|
105 | 112 | metadataRequest.done(function(data) { |
|
106 | var data = data.metadata; | |
|
107 | var dataLength = data.length; | |
|
108 | for (var i = 0; i < dataLength; i++) { | |
|
109 | var rowData = data[i]; | |
|
110 | var name = rowData.name.replace('\\', '\\\\'); | |
|
111 | ||
|
112 | $('td[title="size-' + name + '"]').html(rowData.size); | |
|
113 | var timeComponent = AgeModule.createTimeComponent( | |
|
114 | rowData.modified_ts, rowData.modified_at); | |
|
115 | $('td[title="modified_at-' + name + '"]').html(timeComponent); | |
|
116 | ||
|
117 | $('td[title="revision-' + name + '"]').html( | |
|
118 | '<div class="tooltip" title="{0}"><pre>r{1}:{2}</pre></div>'.format( | |
|
119 | data[i].message, data[i].revision, data[i].short_id)); | |
|
120 | $('td[title="author-' + name + '"]').html( | |
|
121 | '<span title="{0}">{1}</span>'.format( | |
|
122 | data[i].author, data[i].user_profile)); | |
|
123 | } | |
|
113 | $('#file-tree').html(data); | |
|
124 | 114 | timeagoActivate(); |
|
125 | 115 | }); |
|
126 | 116 | metadataRequest.fail(function (data, textStatus, errorThrown) { |
|
127 | 117 | console.log(data); |
|
128 | 118 | if (data.status != 0) { |
|
129 | 119 | alert("Error while fetching metadata.\nError code {0} ({1}).Please consider reloading the page".format(data.status,data.statusText)); |
|
130 | 120 | } |
|
131 | 121 | }); |
|
132 | 122 | }; |
|
133 | 123 | |
|
134 | 124 | var callbacks = function() { |
|
135 | 125 | var state = getState('callbacks'); |
|
136 | 126 | timeagoActivate(); |
|
137 | 127 | |
|
138 | 128 | // used for history, and switch to |
|
139 | 129 | var initialCommitData = { |
|
140 | 130 | id: null, |
|
141 |
text: |
|
|
131 | text: '${_("Switch To Commit")}', | |
|
142 | 132 | type: 'sha', |
|
143 | 133 | raw_id: null, |
|
144 | 134 | files_url: null |
|
145 | 135 | }; |
|
146 | 136 | |
|
147 | 137 | if ($('#trimmed_message_box').height() < 50) { |
|
148 | 138 | $('#message_expand').hide(); |
|
149 | 139 | } |
|
150 | 140 | |
|
151 | 141 | $('#message_expand').on('click', function(e) { |
|
152 | 142 | $('#trimmed_message_box').css('max-height', 'none'); |
|
153 | 143 | $(this).hide(); |
|
154 | 144 | }); |
|
155 | 145 | |
|
156 | 146 | |
|
157 | 147 | if (source_page) { |
|
158 | 148 | // variants for with source code, not tree view |
|
159 | 149 | |
|
160 | 150 | if (location.href.indexOf('#') != -1) { |
|
161 | 151 | page_highlights = location.href.substring(location.href.indexOf('#') + 1).split('L'); |
|
162 | 152 | if (page_highlights.length == 2) { |
|
163 | 153 | highlight_ranges = page_highlights[1].split(","); |
|
164 | 154 | |
|
165 | 155 | var h_lines = []; |
|
166 | 156 | for (pos in highlight_ranges) { |
|
167 | 157 | var _range = highlight_ranges[pos].split('-'); |
|
168 | 158 | if (_range.length == 2) { |
|
169 | 159 | var start = parseInt(_range[0]); |
|
170 | 160 | var end = parseInt(_range[1]); |
|
171 | 161 | if (start < end) { |
|
172 | 162 | for (var i = start; i <= end; i++) { |
|
173 | 163 | h_lines.push(i); |
|
174 | 164 | } |
|
175 | 165 | } |
|
176 | 166 | } |
|
177 | 167 | else { |
|
178 | 168 | h_lines.push(parseInt(highlight_ranges[pos])); |
|
179 | 169 | } |
|
180 | 170 | } |
|
181 | 171 | |
|
182 | 172 | for (pos in h_lines) { |
|
183 | 173 | // @comment-highlight-color |
|
184 | 174 | $('#L' + h_lines[pos]).css('background-color', '#ffd887'); |
|
185 | 175 | } |
|
186 | 176 | |
|
187 | 177 | var _first_line = $('#L' + h_lines[0]).get(0); |
|
188 | 178 | if (_first_line) { |
|
189 | 179 | var line = $('#L' + h_lines[0]); |
|
190 | 180 | offsetScroll(line, 70); |
|
191 | 181 | } |
|
192 | 182 | } |
|
193 | 183 | } |
|
194 | 184 | |
|
195 | 185 | // select code link event |
|
196 | 186 | $("#hlcode").mouseup(getSelectionLink); |
|
197 | 187 | |
|
198 | 188 | // file history select2 |
|
199 | 189 | select2FileHistorySwitcher('#diff1', initialCommitData, state); |
|
200 | 190 | $('#diff1').on('change', function(e) { |
|
201 | 191 | $('#diff').removeClass('disabled').removeAttr("disabled"); |
|
202 | 192 | $('#show_rev').removeClass('disabled').removeAttr("disabled"); |
|
203 | 193 | }); |
|
204 | 194 | |
|
205 | 195 | // show more authors |
|
206 | 196 | $('#show_authors').on('click', function(e) { |
|
207 | 197 | e.preventDefault(); |
|
208 | 198 | var url = pyroutes.url('files_authors_home', |
|
209 | 199 | {'repo_name': templateContext.repo_name, |
|
210 | 200 | 'revision': state.rev, 'f_path': state.f_path}); |
|
211 | 201 | |
|
212 | 202 | $.pjax({ |
|
213 | 203 | url: url, |
|
214 | 204 | data: 'annotate=${"1" if c.annotate else "0"}', |
|
215 | 205 | container: '#file_authors', |
|
216 | 206 | push: false, |
|
217 | 207 | timeout: pjaxTimeout |
|
218 | 208 | }).complete(function(){ |
|
219 | 209 | $('#show_authors').hide(); |
|
220 | 210 | }) |
|
221 | 211 | }); |
|
222 | 212 | |
|
223 | 213 | // load file short history |
|
224 | 214 | $('#file_history_overview').on('click', function(e) { |
|
225 | 215 | e.preventDefault(); |
|
226 | 216 | path = state.f_path; |
|
227 | 217 | if (path.indexOf("#") >= 0) { |
|
228 | 218 | path = path.slice(0, path.indexOf("#")); |
|
229 | 219 | } |
|
230 | 220 | var url = pyroutes.url('changelog_file_home', |
|
231 | 221 | {'repo_name': templateContext.repo_name, |
|
232 | 222 | 'revision': state.rev, 'f_path': path, 'limit': 6}); |
|
233 | 223 | $('#file_history_container').show(); |
|
234 | 224 | $('#file_history_container').html('<div class="file-history-inner">{0}</div>'.format(_gettext('Loading ...'))); |
|
235 | 225 | |
|
236 | 226 | $.pjax({ |
|
237 | 227 | url: url, |
|
238 | 228 | container: '#file_history_container', |
|
239 | 229 | push: false, |
|
240 | 230 | timeout: pjaxTimeout |
|
241 | 231 | }) |
|
242 | 232 | }); |
|
243 | 233 | |
|
244 | 234 | } |
|
245 | 235 | else { |
|
246 | 236 | getFilesMetadata(); |
|
247 | 237 | |
|
248 | 238 | // fuzzy file filter |
|
249 | 239 | fileBrowserListeners(state.node_list_url, state.url_base); |
|
250 | 240 | |
|
251 | 241 | // switch to widget |
|
252 | 242 | select2RefSwitcher('#refs_filter', initialCommitData); |
|
253 | 243 | $('#refs_filter').on('change', function(e) { |
|
254 | 244 | var data = $('#refs_filter').select2('data'); |
|
255 | 245 | curState.commit_id = data.raw_id; |
|
256 | 246 | $.pjax({url: data.files_url, container: '#pjax-container', timeout: pjaxTimeout}); |
|
257 | 247 | }); |
|
258 | 248 | |
|
259 | 249 | $("#prev_commit_link").on('click', function(e) { |
|
260 | 250 | var data = $(this).data(); |
|
261 | 251 | curState.commit_id = data.commitId; |
|
262 | 252 | }); |
|
263 | 253 | |
|
264 | 254 | $("#next_commit_link").on('click', function(e) { |
|
265 | 255 | var data = $(this).data(); |
|
266 | 256 | curState.commit_id = data.commitId; |
|
267 | 257 | }); |
|
268 | 258 | |
|
269 | 259 | $('#at_rev').on("keypress", function(e) { |
|
270 | 260 | /* ENTER PRESSED */ |
|
271 | 261 | if (e.keyCode === 13) { |
|
272 | 262 | var rev = $('#at_rev').val(); |
|
273 | 263 | // explicit reload page here. with pjax entering bad input |
|
274 | 264 | // produces not so nice results |
|
275 | 265 | window.location = pyroutes.url('files_home', |
|
276 | 266 | {'repo_name': templateContext.repo_name, |
|
277 | 267 | 'revision': rev, 'f_path': state.f_path}); |
|
278 | 268 | } |
|
279 | 269 | }); |
|
280 | 270 | } |
|
281 | 271 | }; |
|
282 | 272 | |
|
283 | 273 | var pjaxTimeout = 5000; |
|
284 | 274 | |
|
285 | 275 | $(document).pjax(".pjax-link", "#pjax-container", { |
|
286 | 276 | "fragment": "#pjax-content", |
|
287 | 277 | "maxCacheLength": 1000, |
|
288 | 278 | "timeout": pjaxTimeout |
|
289 | 279 | }); |
|
290 | 280 | |
|
291 | 281 | // define global back/forward states |
|
292 | 282 | var isPjaxPopState = false; |
|
293 | 283 | $(document).on('pjax:popstate', function() { |
|
294 | 284 | isPjaxPopState = true; |
|
295 | 285 | }); |
|
296 | 286 | |
|
297 | 287 | $(document).on('pjax:end', function(xhr, options) { |
|
298 | 288 | if (isPjaxPopState) { |
|
299 | 289 | isPjaxPopState = false; |
|
300 | 290 | callbacks(); |
|
301 | 291 | _NODEFILTER.resetFilter(); |
|
302 | 292 | } |
|
303 | 293 | |
|
304 | 294 | // run callback for tracking if defined for google analytics etc. |
|
305 | 295 | // this is used to trigger tracking on pjax |
|
306 | 296 | if (typeof window.rhodecode_statechange_callback !== 'undefined') { |
|
307 | 297 | var state = getState('statechange'); |
|
308 | 298 | rhodecode_statechange_callback(state.url, null) |
|
309 | 299 | } |
|
310 | 300 | }); |
|
311 | 301 | |
|
312 | 302 | $(document).on('pjax:success', function(event, xhr, options) { |
|
313 | 303 | if (event.target.id == "file_history_container") { |
|
314 | 304 | $('#file_history_overview').hide(); |
|
315 | 305 | $('#file_history_overview_full').show(); |
|
316 | 306 | timeagoActivate(); |
|
317 | 307 | } else { |
|
318 | 308 | callbacks(); |
|
319 | 309 | } |
|
320 | 310 | }); |
|
321 | 311 | |
|
322 | 312 | $(document).ready(function() { |
|
323 | 313 | callbacks(); |
|
324 | 314 | var search_GET = "${request.GET.get('search','')}"; |
|
325 | 315 | if (search_GET == "1") { |
|
326 | 316 | _NODEFILTER.initFilter(); |
|
327 | 317 | } |
|
328 | 318 | }); |
|
329 | 319 | |
|
330 | 320 | </script> |
|
331 | 321 | |
|
332 | ||
|
333 | 322 | </%def> |
@@ -1,51 +1,53 b'' | |||
|
1 | 1 | |
|
2 | 2 | <div id="codeblock" class="browserblock"> |
|
3 | 3 | <div class="browser-header"> |
|
4 | 4 | <div class="browser-nav"> |
|
5 | 5 | ${h.form(h.url.current(), method='GET', id='at_rev_form')} |
|
6 | 6 | <div class="info_box"> |
|
7 | 7 | ${h.hidden('refs_filter')} |
|
8 | 8 | <div class="info_box_elem previous"> |
|
9 | 9 | <a id="prev_commit_link" data-commit-id="${c.prev_commit.raw_id}" class="pjax-link ${'disabled' if c.url_prev == '#' else ''}" href="${c.url_prev}" title="${_('Previous commit')}"><i class="icon-chevron-left"></i></a> |
|
10 | 10 | </div> |
|
11 | 11 | <div class="info_box_elem">${h.text('at_rev',value=c.commit.revision)}</div> |
|
12 | 12 | <div class="info_box_elem next"> |
|
13 | 13 | <a id="next_commit_link" data-commit-id="${c.next_commit.raw_id}" class="pjax-link ${'disabled' if c.url_next == '#' else ''}" href="${c.url_next}" title="${_('Next commit')}"><i class="icon-chevron-right"></i></a> |
|
14 | 14 | </div> |
|
15 | 15 | </div> |
|
16 | 16 | ${h.end_form()} |
|
17 | 17 | |
|
18 | 18 | <div id="search_activate_id" class="search_activate"> |
|
19 | 19 | <a class="btn btn-default" id="filter_activate" href="javascript:void(0)">${_('Search File List')}</a> |
|
20 | 20 | </div> |
|
21 | 21 | <div id="search_deactivate_id" class="search_activate hidden"> |
|
22 | 22 | <a class="btn btn-default" id="filter_deactivate" href="javascript:void(0)">${_('Close File List')}</a> |
|
23 | 23 | </div> |
|
24 | 24 | % if h.HasRepoPermissionAny('repository.write','repository.admin')(c.repo_name): |
|
25 | 25 | <div title="${_('Add New File')}" class="btn btn-primary new-file"> |
|
26 | 26 | <a href="${h.url('files_add_home',repo_name=c.repo_name,revision=c.commit.raw_id,f_path=c.f_path, anchor='edit')}"> |
|
27 | 27 | ${_('Add File')}</a> |
|
28 | 28 | </div> |
|
29 | 29 | % endif |
|
30 | 30 | </div> |
|
31 | 31 | |
|
32 | 32 | <div class="browser-search"> |
|
33 | 33 | <div class="node-filter"> |
|
34 | 34 | <div class="node_filter_box hidden" id="node_filter_box_loading" >${_('Loading file list...')}</div> |
|
35 | 35 | <div class="node_filter_box hidden" id="node_filter_box" > |
|
36 | 36 | <div class="node-filter-path">${h.get_last_path_part(c.file)}/</div> |
|
37 | 37 | <div class="node-filter-input"> |
|
38 | 38 | <input class="init" type="text" name="filter" size="25" id="node_filter" autocomplete="off"> |
|
39 | 39 | </div> |
|
40 | 40 | </div> |
|
41 | 41 | </div> |
|
42 | 42 | </div> |
|
43 | 43 | </div> |
|
44 | 44 | ## the file tree is computed from the caches and filled in here
|
45 | <div id="file-tree"> | |
|
45 | 46 | ${c.file_tree} |
|
47 | </div> | |
|
46 | 48 | |
|
47 | 49 | </div> |
|
48 | 50 | |
|
49 | 51 | <script> |
|
50 | 52 | var source_page = false; |
|
51 | 53 | </script> |
@@ -1,60 +1,78 b'' | |||
|
1 | <div class="browser-body"> | |
|
1 | <div id="file-tree-wrapper" class="browser-body ${'full-load' if c.full_load else ''}"> | |
|
2 | 2 | <table class="code-browser rctable"> |
|
3 | 3 | <thead> |
|
4 | 4 | <tr> |
|
5 | 5 | <th>${_('Name')}</th> |
|
6 | 6 | <th>${_('Size')}</th> |
|
7 | 7 | <th>${_('Modified')}</th> |
|
8 | 8 | <th>${_('Last Commit')}</th> |
|
9 | 9 | <th>${_('Author')}</th> |
|
10 | 10 | </tr> |
|
11 | 11 | </thead> |
|
12 | 12 | |
|
13 | 13 | <tbody id="tbody"> |
|
14 | 14 | %if c.file.parent: |
|
15 | 15 | <tr class="parity0"> |
|
16 | 16 | <td class="td-componentname"> |
|
17 | 17 | <a href="${h.url('files_home',repo_name=c.repo_name,revision=c.commit.raw_id,f_path=c.file.parent.path)}" class="pjax-link"> |
|
18 | 18 | <i class="icon-folder"></i>.. |
|
19 | 19 | </a> |
|
20 | 20 | </td> |
|
21 | 21 | <td></td> |
|
22 | 22 | <td></td> |
|
23 | 23 | <td></td> |
|
24 | 24 | <td></td> |
|
25 | 25 | </tr> |
|
26 | 26 | %endif |
|
27 | 27 | %for cnt,node in enumerate(c.file): |
|
28 | 28 | <tr class="parity${cnt%2}"> |
|
29 | 29 | <td class="td-componentname"> |
|
30 | 30 | %if node.is_submodule(): |
|
31 | 31 | <span class="submodule-dir"> |
|
32 | 32 | ${h.link_to_if( |
|
33 | 33 | node.url.startswith('http://') or node.url.startswith('https://'), |
|
34 | 34 | node.name,node.url)} |
|
35 | 35 | </span> |
|
36 | 36 | %else: |
|
37 | 37 | <a href="${h.url('files_home',repo_name=c.repo_name,revision=c.commit.raw_id,f_path=h.safe_unicode(node.path))}" class="pjax-link"> |
|
38 | 38 | <i class="${'icon-file browser-file' if node.is_file() else 'icon-folder browser-dir'}"></i>${node.name} |
|
39 | 39 | </a> |
|
40 | 40 | %endif |
|
41 | 41 | </td> |
|
42 | 42 | %if node.is_file(): |
|
43 |
<td class="td-size" |
|
|
44 | <td class="td-time" title="${'modified_at-%s' % node.name}"> | |
|
45 | <span class="browser-loading">${_('Loading...')}</span> | |
|
43 | <td class="td-size" data-attr-name="size"> | |
|
44 | % if c.full_load: | |
|
45 | <span data-size="${node.size}">${h.format_byte_size_binary(node.size)}</span> | |
|
46 | % else: | |
|
47 | ${_('Loading ...')} | |
|
48 | % endif | |
|
49 | </td> | |
|
50 | <td class="td-time" data-attr-name="modified_at"> | |
|
51 | % if c.full_load: | |
|
52 | <span data-date="${node.last_commit.date}">${h.age_component(node.last_commit.date)}</span> | |
|
53 | % endif | |
|
46 | 54 | </td> |
|
47 |
<td class="td-hash" |
|
|
48 | <td class="td-user" title="${'author-%s' % node.name}"></td> | |
|
55 | <td class="td-hash" data-attr-name="commit_id"> | |
|
56 | % if c.full_load: | |
|
57 | <div class="tooltip" title="${node.last_commit.message}"> | |
|
58 | <pre data-commit-id="${node.last_commit.raw_id}">r${node.last_commit.revision}:${node.last_commit.short_id}</pre> | |
|
59 | </div> | |
|
60 | % endif | |
|
61 | </td> | |
|
62 | <td class="td-user" data-attr-name="author"> | |
|
63 | % if c.full_load: | |
|
64 | <span data-author="${node.last_commit.author}" title="${node.last_commit.author}">${h.gravatar_with_user(node.last_commit.author)|n}</span> | |
|
65 | % endif | |
|
66 | </td> | |
|
49 | 67 | %else: |
|
50 | 68 | <td></td> |
|
51 | 69 | <td></td> |
|
52 | 70 | <td></td> |
|
53 | 71 | <td></td> |
|
54 | 72 | %endif |
|
55 | 73 | </tr> |
|
56 | 74 | %endfor |
|
57 | 75 | </tbody> |
|
58 | 76 | <tbody id="tbody_filtered"></tbody> |
|
59 | 77 | </table> |
|
60 | </div> No newline at end of file | |
|
78 | </div> |
@@ -1,939 +1,942 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2016 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import os |
|
22 | 22 | |
|
23 | 23 | import mock |
|
24 | 24 | import pytest |
|
25 | 25 | |
|
26 | 26 | from rhodecode.controllers.files import FilesController |
|
27 | 27 | from rhodecode.lib import helpers as h |
|
28 | 28 | from rhodecode.lib.compat import OrderedDict |
|
29 | 29 | from rhodecode.lib.ext_json import json |
|
30 | 30 | from rhodecode.lib.vcs import nodes |
|
31 | 31 | from rhodecode.lib.vcs.conf import settings |
|
32 | 32 | from rhodecode.tests import ( |
|
33 | 33 | url, assert_session_flash, assert_not_in_session_flash) |
|
34 | 34 | from rhodecode.tests.fixture import Fixture |
|
35 | 35 | from rhodecode.tests.utils import AssertResponse |
|
36 | 36 | |
|
37 | 37 | fixture = Fixture() |
|
38 | 38 | |
|
39 | 39 | NODE_HISTORY = { |
|
40 | 40 | 'hg': json.loads(fixture.load_resource('hg_node_history_response.json')), |
|
41 | 41 | 'git': json.loads(fixture.load_resource('git_node_history_response.json')), |
|
42 | 42 | 'svn': json.loads(fixture.load_resource('svn_node_history_response.json')), |
|
43 | 43 | } |
|
44 | 44 | |
|
45 | 45 | |
|
46 | 46 | @pytest.mark.usefixtures("app") |
|
47 | 47 | class TestFilesController: |
|
48 | 48 | |
|
49 | 49 | def test_index(self, backend): |
|
50 | 50 | response = self.app.get(url( |
|
51 | 51 | controller='files', action='index', |
|
52 | 52 | repo_name=backend.repo_name, revision='tip', f_path='/')) |
|
53 | 53 | commit = backend.repo.get_commit() |
|
54 | 54 | |
|
55 | 55 | params = { |
|
56 | 56 | 'repo_name': backend.repo_name, |
|
57 |
' |
|
|
57 | 'commit_id': commit.raw_id, | |
|
58 | 58 | 'date': commit.date |
|
59 | 59 | } |
|
60 | 60 | assert_dirs_in_response(response, ['docs', 'vcs'], params) |
|
61 | 61 | files = [ |
|
62 | 62 | '.gitignore', |
|
63 | 63 | '.hgignore', |
|
64 | 64 | '.hgtags', |
|
65 | 65 | # TODO: missing in Git |
|
66 | 66 | # '.travis.yml', |
|
67 | 67 | 'MANIFEST.in', |
|
68 | 68 | 'README.rst', |
|
69 | 69 | # TODO: File is missing in svn repository |
|
70 | 70 | # 'run_test_and_report.sh', |
|
71 | 71 | 'setup.cfg', |
|
72 | 72 | 'setup.py', |
|
73 | 73 | 'test_and_report.sh', |
|
74 | 74 | 'tox.ini', |
|
75 | 75 | ] |
|
76 | 76 | assert_files_in_response(response, files, params) |
|
77 | 77 | assert_timeago_in_response(response, files, params) |
|
78 | 78 | |
|
79 | 79 | def test_index_links_submodules_with_absolute_url(self, backend_hg): |
|
80 | 80 | repo = backend_hg['subrepos'] |
|
81 | 81 | response = self.app.get(url( |
|
82 | 82 | controller='files', action='index', |
|
83 | 83 | repo_name=repo.repo_name, revision='tip', f_path='/')) |
|
84 | 84 | assert_response = AssertResponse(response) |
|
85 | 85 | assert_response.contains_one_link( |
|
86 | 86 | 'absolute-path @ 000000000000', 'http://example.com/absolute-path') |
|
87 | 87 | |
|
88 | 88 | def test_index_links_submodules_with_absolute_url_subpaths( |
|
89 | 89 | self, backend_hg): |
|
90 | 90 | repo = backend_hg['subrepos'] |
|
91 | 91 | response = self.app.get(url( |
|
92 | 92 | controller='files', action='index', |
|
93 | 93 | repo_name=repo.repo_name, revision='tip', f_path='/')) |
|
94 | 94 | assert_response = AssertResponse(response) |
|
95 | 95 | assert_response.contains_one_link( |
|
96 | 96 | 'subpaths-path @ 000000000000', |
|
97 | 97 | 'http://sub-base.example.com/subpaths-path') |
|
98 | 98 | |
|
99 | 99 | @pytest.mark.xfail_backends("svn", reason="Depends on branch support") |
|
100 | 100 | def test_files_menu(self, backend): |
|
101 | 101 | new_branch = "temp_branch_name" |
|
102 | 102 | commits = [ |
|
103 | 103 | {'message': 'a'}, |
|
104 | 104 | {'message': 'b', 'branch': new_branch} |
|
105 | 105 | ] |
|
106 | 106 | backend.create_repo(commits) |
|
107 | 107 | |
|
108 | 108 | backend.repo.landing_rev = "branch:%s" % new_branch |
|
109 | 109 | |
|
110 | 110 | # get response based on tip and not new revision |
|
111 | 111 | response = self.app.get(url( |
|
112 | 112 | controller='files', action='index', |
|
113 | 113 | repo_name=backend.repo_name, revision='tip', f_path='/'), |
|
114 | 114 | status=200) |
|
115 | 115 | |
|
116 | 116 | # make sure Files menu url is not tip but new revision |
|
117 | 117 | landing_rev = backend.repo.landing_rev[1] |
|
118 | 118 | files_url = url('files_home', repo_name=backend.repo_name, |
|
119 | 119 | revision=landing_rev) |
|
120 | 120 | |
|
121 | 121 | assert landing_rev != 'tip' |
|
122 | 122 | response.mustcontain('<li class="active"><a class="menulink" href="%s">' % files_url) |
|
123 | 123 | |
|
124 | 124 | def test_index_commit(self, backend): |
|
125 | 125 | commit = backend.repo.get_commit(commit_idx=32) |
|
126 | 126 | |
|
127 | 127 | response = self.app.get(url( |
|
128 | 128 | controller='files', action='index', |
|
129 | 129 | repo_name=backend.repo_name, |
|
130 | 130 | revision=commit.raw_id, |
|
131 | 131 | f_path='/') |
|
132 | 132 | ) |
|
133 | 133 | |
|
134 | 134 | dirs = ['docs', 'tests'] |
|
135 | 135 | files = ['README.rst'] |
|
136 | 136 | params = { |
|
137 | 137 | 'repo_name': backend.repo_name, |
|
138 |
' |
|
|
138 | 'commit_id': commit.raw_id, | |
|
139 | 139 | } |
|
140 | 140 | assert_dirs_in_response(response, dirs, params) |
|
141 | 141 | assert_files_in_response(response, files, params) |
|
142 | 142 | |
|
143 | 143 | @pytest.mark.xfail_backends("git", reason="Missing branches in git repo") |
|
144 | 144 | @pytest.mark.xfail_backends("svn", reason="Depends on branch support") |
|
145 | 145 | def test_index_different_branch(self, backend): |
|
146 | 146 | # TODO: Git test repository does not contain branches |
|
147 | 147 | # TODO: Branch support in Subversion |
|
148 | 148 | |
|
149 | 149 | commit = backend.repo.get_commit(commit_idx=150) |
|
150 | 150 | response = self.app.get(url( |
|
151 | 151 | controller='files', action='index', |
|
152 | 152 | repo_name=backend.repo_name, |
|
153 | 153 | revision=commit.raw_id, |
|
154 | 154 | f_path='/')) |
|
155 | 155 | assert_response = AssertResponse(response) |
|
156 | 156 | assert_response.element_contains( |
|
157 | 157 | '.tags .branchtag', 'git') |
|
158 | 158 | |
|
159 | 159 | def test_index_paging(self, backend): |
|
160 | 160 | repo = backend.repo |
|
161 | 161 | indexes = [73, 92, 109, 1, 0] |
|
162 | 162 | idx_map = [(rev, repo.get_commit(commit_idx=rev).raw_id) |
|
163 | 163 | for rev in indexes] |
|
164 | 164 | |
|
165 | 165 | for idx in idx_map: |
|
166 | 166 | response = self.app.get(url( |
|
167 | 167 | controller='files', action='index', |
|
168 | 168 | repo_name=backend.repo_name, |
|
169 | 169 | revision=idx[1], |
|
170 | 170 | f_path='/')) |
|
171 | 171 | |
|
172 | 172 | response.mustcontain("""r%s:%s""" % (idx[0], idx[1][:8])) |
|
173 | 173 | |
|
174 | 174 | def test_file_source(self, backend): |
|
175 | 175 | commit = backend.repo.get_commit(commit_idx=167) |
|
176 | 176 | response = self.app.get(url( |
|
177 | 177 | controller='files', action='index', |
|
178 | 178 | repo_name=backend.repo_name, |
|
179 | 179 | revision=commit.raw_id, |
|
180 | 180 | f_path='vcs/nodes.py')) |
|
181 | 181 | |
|
182 | 182 | msgbox = """<div class="commit right-content">%s</div>""" |
|
183 | 183 | response.mustcontain(msgbox % (commit.message, )) |
|
184 | 184 | |
|
185 | 185 | assert_response = AssertResponse(response) |
|
186 | 186 | if commit.branch: |
|
187 | 187 | assert_response.element_contains('.tags.tags-main .branchtag', commit.branch) |
|
188 | 188 | if commit.tags: |
|
189 | 189 | for tag in commit.tags: |
|
190 | 190 | assert_response.element_contains('.tags.tags-main .tagtag', tag) |
|
191 | 191 | |
|
192 | 192 | def test_file_source_history(self, backend): |
|
193 | 193 | response = self.app.get( |
|
194 | 194 | url( |
|
195 | 195 | controller='files', action='history', |
|
196 | 196 | repo_name=backend.repo_name, |
|
197 | 197 | revision='tip', |
|
198 | 198 | f_path='vcs/nodes.py'), |
|
199 | 199 | extra_environ={'HTTP_X_PARTIAL_XHR': '1'}) |
|
200 | 200 | assert NODE_HISTORY[backend.alias] == json.loads(response.body) |
|
201 | 201 | |
|
202 | 202 | def test_file_source_history_svn(self, backend_svn): |
|
203 | 203 | simple_repo = backend_svn['svn-simple-layout'] |
|
204 | 204 | response = self.app.get( |
|
205 | 205 | url( |
|
206 | 206 | controller='files', action='history', |
|
207 | 207 | repo_name=simple_repo.repo_name, |
|
208 | 208 | revision='tip', |
|
209 | 209 | f_path='trunk/example.py'), |
|
210 | 210 | extra_environ={'HTTP_X_PARTIAL_XHR': '1'}) |
|
211 | 211 | |
|
212 | 212 | expected_data = json.loads( |
|
213 | 213 | fixture.load_resource('svn_node_history_branches.json')) |
|
214 | 214 | assert expected_data == response.json |
|
215 | 215 | |
|
216 | 216 | def test_file_annotation_history(self, backend): |
|
217 | 217 | response = self.app.get( |
|
218 | 218 | url( |
|
219 | 219 | controller='files', action='history', |
|
220 | 220 | repo_name=backend.repo_name, |
|
221 | 221 | revision='tip', |
|
222 | 222 | f_path='vcs/nodes.py', |
|
223 | 223 | annotate=True), |
|
224 | 224 | extra_environ={'HTTP_X_PARTIAL_XHR': '1'}) |
|
225 | 225 | assert NODE_HISTORY[backend.alias] == json.loads(response.body) |
|
226 | 226 | |
|
227 | 227 | def test_file_annotation(self, backend): |
|
228 | 228 | response = self.app.get(url( |
|
229 | 229 | controller='files', action='index', |
|
230 | 230 | repo_name=backend.repo_name, revision='tip', f_path='vcs/nodes.py', |
|
231 | 231 | annotate=True)) |
|
232 | 232 | |
|
233 | 233 | expected_revisions = { |
|
234 | 234 | 'hg': 'r356:25213a5fbb04', |
|
235 | 235 | 'git': 'r345:c994f0de03b2', |
|
236 | 236 | 'svn': 'r208:209', |
|
237 | 237 | } |
|
238 | 238 | response.mustcontain(expected_revisions[backend.alias]) |
|
239 | 239 | |
|
240 | 240 | def test_file_authors(self, backend): |
|
241 | 241 | response = self.app.get(url( |
|
242 | 242 | controller='files', action='authors', |
|
243 | 243 | repo_name=backend.repo_name, |
|
244 | 244 | revision='tip', |
|
245 | 245 | f_path='vcs/nodes.py', |
|
246 | 246 | annotate=True)) |
|
247 | 247 | |
|
248 | 248 | expected_authors = { |
|
249 | 249 | 'hg': ('Marcin Kuzminski', 'Lukasz Balcerzak'), |
|
250 | 250 | 'git': ('Marcin Kuzminski', 'Lukasz Balcerzak'), |
|
251 | 251 | 'svn': ('marcin', 'lukasz'), |
|
252 | 252 | } |
|
253 | 253 | |
|
254 | 254 | for author in expected_authors[backend.alias]: |
|
255 | 255 | response.mustcontain(author) |
|
256 | 256 | |
|
257 | 257 | def test_tree_search_top_level(self, backend, xhr_header): |
|
258 | 258 | commit = backend.repo.get_commit(commit_idx=173) |
|
259 | 259 | response = self.app.get( |
|
260 | 260 | url('files_nodelist_home', repo_name=backend.repo_name, |
|
261 | 261 | revision=commit.raw_id, f_path='/'), |
|
262 | 262 | extra_environ=xhr_header) |
|
263 | 263 | assert 'nodes' in response.json |
|
264 | 264 | assert {'name': 'docs', 'type': 'dir'} in response.json['nodes'] |
|
265 | 265 | |
|
266 | 266 | def test_tree_search_at_path(self, backend, xhr_header): |
|
267 | 267 | commit = backend.repo.get_commit(commit_idx=173) |
|
268 | 268 | response = self.app.get( |
|
269 | 269 | url('files_nodelist_home', repo_name=backend.repo_name, |
|
270 | 270 | revision=commit.raw_id, f_path='/docs'), |
|
271 | 271 | extra_environ=xhr_header) |
|
272 | 272 | assert 'nodes' in response.json |
|
273 | 273 | nodes = response.json['nodes'] |
|
274 | 274 | assert {'name': 'docs/api', 'type': 'dir'} in nodes |
|
275 | 275 | assert {'name': 'docs/index.rst', 'type': 'file'} in nodes |
|
276 | 276 | |
|
277 | 277 | def test_tree_search_at_path_missing_xhr(self, backend): |
|
278 | 278 | self.app.get( |
|
279 | 279 | url('files_nodelist_home', repo_name=backend.repo_name, |
|
280 | 280 | revision='tip', f_path=''), status=400) |
|
281 | 281 | |
|
282 | 282 | def test_tree_view_list(self, backend, xhr_header): |
|
283 | 283 | commit = backend.repo.get_commit(commit_idx=173) |
|
284 | 284 | response = self.app.get( |
|
285 | 285 | url('files_nodelist_home', repo_name=backend.repo_name, |
|
286 | 286 | f_path='/', revision=commit.raw_id), |
|
287 | 287 | extra_environ=xhr_header, |
|
288 | 288 | ) |
|
289 | 289 | response.mustcontain("vcs/web/simplevcs/views/repository.py") |
|
290 | 290 | |
|
291 | 291 | def test_tree_view_list_at_path(self, backend, xhr_header): |
|
292 | 292 | commit = backend.repo.get_commit(commit_idx=173) |
|
293 | 293 | response = self.app.get( |
|
294 | 294 | url('files_nodelist_home', repo_name=backend.repo_name, |
|
295 | 295 | f_path='/docs', revision=commit.raw_id), |
|
296 | 296 | extra_environ=xhr_header, |
|
297 | 297 | ) |
|
298 | 298 | response.mustcontain("docs/index.rst") |
|
299 | 299 | |
|
300 | 300 | def test_tree_view_list_missing_xhr(self, backend): |
|
301 | 301 | self.app.get( |
|
302 | 302 | url('files_nodelist_home', repo_name=backend.repo_name, |
|
303 | 303 | f_path='/', revision='tip'), status=400) |
|
304 | 304 | |
|
305 |
def test_ |
|
|
305 | def test_nodetree_full_success(self, backend, xhr_header): | |
|
306 | 306 | commit = backend.repo.get_commit(commit_idx=173) |
|
307 | 307 | response = self.app.get( |
|
308 |
url('files_ |
|
|
309 |
f_path='/', |
|
|
308 | url('files_nodetree_full', repo_name=backend.repo_name, | |
|
309 | f_path='/', commit_id=commit.raw_id), | |
|
310 | 310 | extra_environ=xhr_header) |
|
311 | 311 | |
|
312 | expected_keys = ['author', 'message', 'modified_at', 'modified_ts', | |
|
313 | 'name', 'revision', 'short_id', 'size'] | |
|
314 | for filename in response.json.get('metadata'): | |
|
315 | for key in expected_keys: | |
|
316 | assert key in filename | |
|
312 | assert_response = AssertResponse(response) | |
|
317 | 313 | |
|
318 | def test_tree_metadata_list_if_file(self, backend, xhr_header): | |
|
314 | for attr in ['data-commit-id', 'data-date', 'data-author']: | |
|
315 | elements = assert_response.get_elements('[{}]'.format(attr)) | |
|
316 | assert len(elements) > 1 | |
|
317 | ||
|
318 | for element in elements: | |
|
319 | assert element.get(attr) | |
|
320 | ||
|
321 | def test_nodetree_full_if_file(self, backend, xhr_header): | |
|
319 | 322 | commit = backend.repo.get_commit(commit_idx=173) |
|
320 | 323 | response = self.app.get( |
|
321 |
url('files_ |
|
|
322 |
f_path='README.rst', |
|
|
324 | url('files_nodetree_full', repo_name=backend.repo_name, | |
|
325 | f_path='README.rst', commit_id=commit.raw_id), | |
|
323 | 326 | extra_environ=xhr_header) |
|
324 |
assert response. |
|
|
327 | assert response.body == '' | |
|
325 | 328 | |
|
326 | 329 | def test_tree_metadata_list_missing_xhr(self, backend): |
|
327 | 330 | self.app.get( |
|
328 | url('files_ | |

329 | f_path='/', revision='tip'), status=400) | |

331 | url('files_nodetree_full', repo_name=backend.repo_name, | |
|
332 | f_path='/', commit_id='tip'), status=400) | |
|
330 | 333 | |
|
331 | 334 | def test_access_empty_repo_redirect_to_summary_with_alert_write_perms( |
|
332 | 335 | self, app, backend_stub, autologin_regular_user, user_regular, |
|
333 | 336 | user_util): |
|
334 | 337 | repo = backend_stub.create_repo() |
|
335 | 338 | user_util.grant_user_permission_to_repo( |
|
336 | 339 | repo, user_regular, 'repository.write') |
|
337 | 340 | response = self.app.get(url( |
|
338 | 341 | controller='files', action='index', |
|
339 | 342 | repo_name=repo.repo_name, revision='tip', f_path='/')) |
|
340 | 343 | assert_session_flash( |
|
341 | 344 | response, |
|
342 | 345 | 'There are no files yet. <a class="alert-link" ' |
|
343 | 346 | 'href="/%s/add/0/#edit">Click here to add a new file.</a>' |
|
344 | 347 | % (repo.repo_name)) |
|
345 | 348 | |
|
346 | 349 | def test_access_empty_repo_redirect_to_summary_with_alert_no_write_perms( |
|
347 | 350 | self, backend_stub, user_util): |
|
348 | 351 | repo = backend_stub.create_repo() |
|
349 | 352 | repo_file_url = url( |
|
350 | 353 | 'files_add_home', |
|
351 | 354 | repo_name=repo.repo_name, |
|
352 | 355 | revision=0, f_path='', anchor='edit') |
|
353 | 356 | response = self.app.get(url( |
|
354 | 357 | controller='files', action='index', |
|
355 | 358 | repo_name=repo.repo_name, revision='tip', f_path='/')) |
|
356 | 359 | assert_not_in_session_flash(response, repo_file_url) |
|
357 | 360 | |
|
358 | 361 | |
|
359 | 362 | # TODO: johbo: Think about a better place for these tests. Either controller |
|
360 | 363 | # specific unit tests or we move down the whole logic further towards the vcs |
|
361 | 364 | # layer |
|
362 | 365 | class TestAdjustFilePathForSvn: |
|
363 | 366 | """SVN-specific adjustments of node history in FilesController."""
|
364 | 367 | |
|
365 | 368 | def test_returns_path_relative_to_matched_reference(self): |
|
366 | 369 | repo = self._repo(branches=['trunk']) |
|
367 | 370 | self.assert_file_adjustment('trunk/file', 'file', repo) |
|
368 | 371 | |
|
369 | 372 | def test_does_not_modify_file_if_no_reference_matches(self): |
|
370 | 373 | repo = self._repo(branches=['trunk']) |
|
371 | 374 | self.assert_file_adjustment('notes/file', 'notes/file', repo) |
|
372 | 375 | |
|
373 | 376 | def test_does_not_adjust_partial_directory_names(self): |
|
374 | 377 | repo = self._repo(branches=['trun']) |
|
375 | 378 | self.assert_file_adjustment('trunk/file', 'trunk/file', repo) |
|
376 | 379 | |
|
377 | 380 | def test_is_robust_to_patterns_which_prefix_other_patterns(self): |
|
378 | 381 | repo = self._repo(branches=['trunk', 'trunk/new', 'trunk/old']) |
|
379 | 382 | self.assert_file_adjustment('trunk/new/file', 'file', repo) |
|
380 | 383 | |
|
381 | 384 | def assert_file_adjustment(self, f_path, expected, repo): |
|
382 | 385 | controller = FilesController() |
|
383 | 386 | result = controller._adjust_file_path_for_svn(f_path, repo) |
|
384 | 387 | assert result == expected |
|
385 | 388 | |
|
386 | 389 | def _repo(self, branches=None): |
|
387 | 390 | repo = mock.Mock() |
|
388 | 391 | repo.branches = OrderedDict((name, '0') for name in branches or []) |
|
389 | 392 | repo.tags = {} |
|
390 | 393 | return repo |
|
391 | 394 | |
|
392 | 395 | |
|
393 | 396 | @pytest.mark.usefixtures("app") |
|
394 | 397 | class TestRepositoryArchival: |
|
395 | 398 | |
|
396 | 399 | def test_archival(self, backend): |
|
397 | 400 | backend.enable_downloads() |
|
398 | 401 | commit = backend.repo.get_commit(commit_idx=173) |
|
399 | 402 | for archive, info in settings.ARCHIVE_SPECS.items(): |
|
400 | 403 | mime_type, arch_ext = info |
|
401 | 404 | short = commit.short_id + arch_ext |
|
402 | 405 | fname = commit.raw_id + arch_ext |
|
403 | 406 | filename = '%s-%s' % (backend.repo_name, short) |
|
404 | 407 | response = self.app.get(url(controller='files', |
|
405 | 408 | action='archivefile', |
|
406 | 409 | repo_name=backend.repo_name, |
|
407 | 410 | fname=fname)) |
|
408 | 411 | |
|
409 | 412 | assert response.status == '200 OK' |
|
410 | 413 | headers = { |
|
411 | 414 | 'Pragma': 'no-cache', |
|
412 | 415 | 'Cache-Control': 'no-cache', |
|
413 | 416 | 'Content-Disposition': 'attachment; filename=%s' % filename, |
|
414 | 417 | 'Content-Type': '%s; charset=utf-8' % mime_type, |
|
415 | 418 | } |
|
416 | 419 | if 'Set-Cookie' in response.response.headers: |
|
417 | 420 | del response.response.headers['Set-Cookie'] |
|
418 | 421 | assert response.response.headers == headers |
|
419 | 422 | |
|
420 | 423 | def test_archival_wrong_ext(self, backend): |
|
421 | 424 | backend.enable_downloads() |
|
422 | 425 | commit = backend.repo.get_commit(commit_idx=173) |
|
423 | 426 | for arch_ext in ['tar', 'rar', 'x', '..ax', '.zipz']: |
|
424 | 427 | fname = commit.raw_id + arch_ext |
|
425 | 428 | |
|
426 | 429 | response = self.app.get(url(controller='files', |
|
427 | 430 | action='archivefile', |
|
428 | 431 | repo_name=backend.repo_name, |
|
429 | 432 | fname=fname)) |
|
430 | 433 | response.mustcontain('Unknown archive type') |
|
431 | 434 | |
|
432 | 435 | def test_archival_wrong_commit_id(self, backend): |
|
433 | 436 | backend.enable_downloads() |
|
434 | 437 | for commit_id in ['00x000000', 'tar', 'wrong', '@##$@$42413232', |
|
435 | 438 | '232dffcd']: |
|
436 | 439 | fname = '%s.zip' % commit_id |
|
437 | 440 | |
|
438 | 441 | response = self.app.get(url(controller='files', |
|
439 | 442 | action='archivefile', |
|
440 | 443 | repo_name=backend.repo_name, |
|
441 | 444 | fname=fname)) |
|
442 | 445 | response.mustcontain('Unknown revision') |
|
443 | 446 | |
|
444 | 447 | |
|
445 | 448 | @pytest.mark.usefixtures("app", "autologin_user") |
|
446 | 449 | class TestRawFileHandling: |
|
447 | 450 | |
|
448 | 451 | def test_raw_file_ok(self, backend): |
|
449 | 452 | commit = backend.repo.get_commit(commit_idx=173) |
|
450 | 453 | response = self.app.get(url(controller='files', action='rawfile', |
|
451 | 454 | repo_name=backend.repo_name, |
|
452 | 455 | revision=commit.raw_id, |
|
453 | 456 | f_path='vcs/nodes.py')) |
|
454 | 457 | |
|
455 | 458 | assert response.content_disposition == "attachment; filename=nodes.py" |
|
456 | 459 | assert response.content_type == "text/x-python" |
|
457 | 460 | |
|
458 | 461 | def test_raw_file_wrong_cs(self, backend): |
|
459 | 462 | commit_id = u'ERRORce30c96924232dffcd24178a07ffeb5dfc' |
|
460 | 463 | f_path = 'vcs/nodes.py' |
|
461 | 464 | |
|
462 | 465 | response = self.app.get(url(controller='files', action='rawfile', |
|
463 | 466 | repo_name=backend.repo_name, |
|
464 | 467 | revision=commit_id, |
|
465 | 468 | f_path=f_path), status=404) |
|
466 | 469 | |
|
467 | 470 | msg = """No such commit exists for this repository""" |
|
468 | 471 | response.mustcontain(msg) |
|
469 | 472 | |
|
470 | 473 | def test_raw_file_wrong_f_path(self, backend): |
|
471 | 474 | commit = backend.repo.get_commit(commit_idx=173) |
|
472 | 475 | f_path = 'vcs/ERRORnodes.py' |
|
473 | 476 | response = self.app.get(url(controller='files', action='rawfile', |
|
474 | 477 | repo_name=backend.repo_name, |
|
475 | 478 | revision=commit.raw_id, |
|
476 | 479 | f_path=f_path), status=404) |
|
477 | 480 | |
|
478 | 481 | msg = ( |
|
479 | 482 | "There is no file nor directory at the given path: " |
|
480 | 483 | "'%s' at commit %s" % (f_path, commit.short_id)) |
|
481 | 484 | response.mustcontain(msg) |
|
482 | 485 | |
|
483 | 486 | def test_raw_ok(self, backend): |
|
484 | 487 | commit = backend.repo.get_commit(commit_idx=173) |
|
485 | 488 | response = self.app.get(url(controller='files', action='raw', |
|
486 | 489 | repo_name=backend.repo_name, |
|
487 | 490 | revision=commit.raw_id, |
|
488 | 491 | f_path='vcs/nodes.py')) |
|
489 | 492 | |
|
490 | 493 | assert response.content_type == "text/plain" |
|
491 | 494 | |
|
492 | 495 | def test_raw_wrong_cs(self, backend): |
|
493 | 496 | commit_id = u'ERRORcce30c96924232dffcd24178a07ffeb5dfc' |
|
494 | 497 | f_path = 'vcs/nodes.py' |
|
495 | 498 | |
|
496 | 499 | response = self.app.get(url(controller='files', action='raw', |
|
497 | 500 | repo_name=backend.repo_name, |
|
498 | 501 | revision=commit_id, |
|
499 | 502 | f_path=f_path), status=404) |
|
500 | 503 | |
|
501 | 504 | msg = """No such commit exists for this repository""" |
|
502 | 505 | response.mustcontain(msg) |
|
503 | 506 | |
|
504 | 507 | def test_raw_wrong_f_path(self, backend): |
|
505 | 508 | commit = backend.repo.get_commit(commit_idx=173) |
|
506 | 509 | f_path = 'vcs/ERRORnodes.py' |
|
507 | 510 | response = self.app.get(url(controller='files', action='raw', |
|
508 | 511 | repo_name=backend.repo_name, |
|
509 | 512 | revision=commit.raw_id, |
|
510 | 513 | f_path=f_path), status=404) |
|
511 | 514 | msg = ( |
|
512 | 515 | "There is no file nor directory at the given path: " |
|
513 | 516 | "'%s' at commit %s" % (f_path, commit.short_id)) |
|
514 | 517 | response.mustcontain(msg) |
|
515 | 518 | |
|
516 | 519 | def test_raw_svg_should_not_be_rendered(self, backend): |
|
517 | 520 | backend.create_repo() |
|
518 | 521 | backend.ensure_file("xss.svg") |
|
519 | 522 | response = self.app.get(url(controller='files', action='raw', |
|
520 | 523 | repo_name=backend.repo_name, |
|
521 | 524 | revision='tip', |
|
522 | 525 | f_path='xss.svg')) |
|
523 | 526 | |
|
524 | 527 | # If the content type were image/svg+xml, the browser would render |

525 | 528 | # the file, allowing malicious SVG (and embedded HTML) to execute. |
|
526 | 529 | assert response.content_type == "text/plain" |
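
The assertion encodes a real XSS mitigation: SVG served with its native image/svg+xml type may execute embedded scripts, so the raw view must downgrade it. A hedged sketch of the idea, not RhodeCode's actual implementation:

    # MIME types that browsers will actively render or execute.
    UNSAFE_RAW_TYPES = {'image/svg+xml', 'text/html'}

    def safe_raw_content_type(detected_mimetype):
        # Serve risky types as plain text so the browser never renders them.
        if detected_mimetype in UNSAFE_RAW_TYPES:
            return 'text/plain'
        return detected_mimetype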
|
527 | 530 | |
|
528 | 531 | |
|
529 | 532 | @pytest.mark.usefixtures("app") |
|
530 | 533 | class TestFilesDiff: |
|
531 | 534 | |
|
532 | 535 | @pytest.mark.parametrize("diff", ['diff', 'download', 'raw']) |
|
533 | 536 | def test_file_full_diff(self, backend, diff): |
|
534 | 537 | commit1 = backend.repo.get_commit(commit_idx=-1) |
|
535 | 538 | commit2 = backend.repo.get_commit(commit_idx=-2) |
|
536 | 539 | response = self.app.get( |
|
537 | 540 | url( |
|
538 | 541 | controller='files', |
|
539 | 542 | action='diff', |
|
540 | 543 | repo_name=backend.repo_name, |
|
541 | 544 | f_path='README'), |
|
542 | 545 | params={ |
|
543 | 546 | 'diff1': commit1.raw_id, |
|
544 | 547 | 'diff2': commit2.raw_id, |
|
545 | 548 | 'fulldiff': '1', |
|
546 | 549 | 'diff': diff, |
|
547 | 550 | }) |
|
548 | 551 | response.mustcontain('README.rst') |
|
549 | 552 | response.mustcontain('No newline at end of file') |
|
550 | 553 | |
|
551 | 554 | def test_file_binary_diff(self, backend): |
|
552 | 555 | commits = [ |
|
553 | 556 | {'message': 'First commit'}, |
|
554 | 557 | {'message': 'Commit with binary', |
|
555 | 558 | 'added': [nodes.FileNode('file.bin', content='\0BINARY\0')]}, |
|
556 | 559 | ] |
|
557 | 560 | repo = backend.create_repo(commits=commits) |
|
558 | 561 | |
|
559 | 562 | response = self.app.get( |
|
560 | 563 | url( |
|
561 | 564 | controller='files', |
|
562 | 565 | action='diff', |
|
563 | 566 | repo_name=backend.repo_name, |
|
564 | 567 | f_path='file.bin'), |
|
565 | 568 | params={ |
|
566 | 569 | 'diff1': repo.get_commit(commit_idx=0).raw_id, |
|
567 | 570 | 'diff2': repo.get_commit(commit_idx=1).raw_id, |
|
568 | 571 | 'fulldiff': '1', |
|
569 | 572 | 'diff': 'diff', |
|
570 | 573 | }) |
|
571 | 574 | response.mustcontain('Cannot diff binary files') |
|
572 | 575 | |
|
573 | 576 | def test_diff_2way(self, backend): |
|
574 | 577 | commit1 = backend.repo.get_commit(commit_idx=-1) |
|
575 | 578 | commit2 = backend.repo.get_commit(commit_idx=-2) |
|
576 | 579 | response = self.app.get( |
|
577 | 580 | url( |
|
578 | 581 | controller='files', |
|
579 | 582 | action='diff_2way', |
|
580 | 583 | repo_name=backend.repo_name, |
|
581 | 584 | f_path='README'), |
|
582 | 585 | params={ |
|
583 | 586 | 'diff1': commit1.raw_id, |
|
584 | 587 | 'diff2': commit2.raw_id, |
|
585 | 588 | }) |
|
586 | 589 | |
|
587 | 590 | # Expecting links to both variants of the file. Links are used |
|
588 | 591 | # to load the content dynamically. |
|
589 | 592 | response.mustcontain('/%s/README' % commit1.raw_id) |
|
590 | 593 | response.mustcontain('/%s/README' % commit2.raw_id) |
|
591 | 594 | |
|
592 | 595 | def test_requires_one_commit_id(self, backend, autologin_user): |
|
593 | 596 | response = self.app.get( |
|
594 | 597 | url( |
|
595 | 598 | controller='files', |
|
596 | 599 | action='diff', |
|
597 | 600 | repo_name=backend.repo_name, |
|
598 | 601 | f_path='README.rst'), |
|
599 | 602 | status=400) |
|
600 | 603 | response.mustcontain( |
|
601 | 604 | 'Need query parameter', 'diff1', 'diff2', 'to generate a diff.') |
|
602 | 605 | |
|
603 | 606 | def test_returns_not_found_if_file_does_not_exist(self, vcsbackend): |
|
604 | 607 | repo = vcsbackend.repo |
|
605 | 608 | self.app.get( |
|
606 | 609 | url( |
|
607 | 610 | controller='files', |
|
608 | 611 | action='diff', |
|
609 | 612 | repo_name=repo.name, |
|
610 | 613 | f_path='does-not-exist-in-any-commit', |
|
611 | 614 | diff1=repo[0].raw_id, |
|
612 | 615 | diff2=repo[1].raw_id), |
|
613 | 616 | status=404) |
|
614 | 617 | |
|
615 | 618 | def test_returns_redirect_if_file_not_changed(self, backend): |
|
616 | 619 | commit = backend.repo.get_commit(commit_idx=-1) |
|
617 | 620 | f_path = 'README' |
|
618 | 621 | response = self.app.get( |
|
619 | 622 | url( |
|
620 | 623 | controller='files', |
|
621 | 624 | action='diff_2way', |
|
622 | 625 | repo_name=backend.repo_name, |
|
623 | 626 | f_path=f_path, |
|
624 | 627 | diff1=commit.raw_id, |
|
625 | 628 | diff2=commit.raw_id, |
|
626 | 629 | ), |
|
627 | 630 | status=302 |
|
628 | 631 | ) |
|
629 | 632 | assert response.headers['Location'].endswith(f_path) |
|
630 | 633 | redirected = response.follow() |
|
631 | 634 | redirected.mustcontain('has not changed between') |
|
632 | 635 | |
|
633 | 636 | def test_supports_diff_to_different_path_svn(self, backend_svn): |
|
634 | 637 | repo = backend_svn['svn-simple-layout'].scm_instance() |
|
635 | 638 | commit_id = repo[-1].raw_id |
|
636 | 639 | response = self.app.get( |
|
637 | 640 | url( |
|
638 | 641 | controller='files', |
|
639 | 642 | action='diff', |
|
640 | 643 | repo_name=repo.name, |
|
641 | 644 | f_path='trunk/example.py', |
|
642 | 645 | diff1='tags/v0.2/example.py@' + commit_id, |
|
643 | 646 | diff2=commit_id), |
|
644 | 647 | status=200) |
|
645 | 648 | response.mustcontain( |
|
646 | 649 | "Will print out a useful message on invocation.") |
|
647 | 650 | |
|
648 | 651 | # Note: Expecting that we indicate to the user what's being compared |
|
649 | 652 | response.mustcontain("trunk/example.py") |
|
650 | 653 | response.mustcontain("tags/v0.2/example.py") |
|
651 | 654 | |
|
652 | 655 | def test_show_rev_redirects_to_svn_path(self, backend_svn): |
|
653 | 656 | repo = backend_svn['svn-simple-layout'].scm_instance() |
|
654 | 657 | commit_id = repo[-1].raw_id |
|
655 | 658 | response = self.app.get( |
|
656 | 659 | url( |
|
657 | 660 | controller='files', |
|
658 | 661 | action='diff', |
|
659 | 662 | repo_name=repo.name, |
|
660 | 663 | f_path='trunk/example.py', |
|
661 | 664 | diff1='branches/argparse/example.py@' + commit_id, |
|
662 | 665 | diff2=commit_id), |
|
663 | 666 | params={'show_rev': 'Show at Revision'}, |
|
664 | 667 | status=302) |
|
665 | 668 | assert response.headers['Location'].endswith( |
|
666 | 669 | 'svn-svn-simple-layout/files/26/branches/argparse/example.py') |
|
667 | 670 | |
|
668 | 671 | def test_show_rev_and_annotate_redirects_to_svn_path(self, backend_svn): |
|
669 | 672 | repo = backend_svn['svn-simple-layout'].scm_instance() |
|
670 | 673 | commit_id = repo[-1].raw_id |
|
671 | 674 | response = self.app.get( |
|
672 | 675 | url( |
|
673 | 676 | controller='files', |
|
674 | 677 | action='diff', |
|
675 | 678 | repo_name=repo.name, |
|
676 | 679 | f_path='trunk/example.py', |
|
677 | 680 | diff1='branches/argparse/example.py@' + commit_id, |
|
678 | 681 | diff2=commit_id), |
|
679 | 682 | params={ |
|
680 | 683 | 'show_rev': 'Show at Revision', |
|
681 | 684 | 'annotate': 'true', |
|
682 | 685 | }, |
|
683 | 686 | status=302) |
|
684 | 687 | assert response.headers['Location'].endswith( |
|
685 | 688 | 'svn-svn-simple-layout/annotate/26/branches/argparse/example.py') |
|
686 | 689 | |
|
687 | 690 | |
|
688 | 691 | @pytest.mark.usefixtures("app", "autologin_user") |
|
689 | 692 | class TestChangingFiles: |
|
690 | 693 | |
|
691 | 694 | def test_add_file_view(self, backend): |
|
692 | 695 | self.app.get(url( |
|
693 | 696 | 'files_add_home', |
|
694 | 697 | repo_name=backend.repo_name, |
|
695 | 698 | revision='tip', f_path='/')) |
|
696 | 699 | |
|
697 | 700 | @pytest.mark.xfail_backends("svn", reason="Depends on online editing") |
|
698 | 701 | def test_add_file_into_repo_missing_content(self, backend, csrf_token): |
|
699 | 702 | repo = backend.create_repo() |
|
700 | 703 | filename = 'init.py' |
|
701 | 704 | response = self.app.post( |
|
702 | 705 | url( |
|
703 | 706 | 'files_add', |
|
704 | 707 | repo_name=repo.repo_name, |
|
705 | 708 | revision='tip', f_path='/'), |
|
706 | 709 | params={ |
|
707 | 710 | 'content': "", |
|
708 | 711 | 'filename': filename, |
|
709 | 712 | 'location': "", |
|
710 | 713 | 'csrf_token': csrf_token, |
|
711 | 714 | }, |
|
712 | 715 | status=302) |
|
713 | 716 | assert_session_flash( |
|
714 | 717 | response, 'Successfully committed to %s' |
|
715 | 718 | % os.path.join(filename)) |
|
716 | 719 | |
|
717 | 720 | def test_add_file_into_repo_missing_filename(self, backend, csrf_token): |
|
718 | 721 | response = self.app.post( |
|
719 | 722 | url( |
|
720 | 723 | 'files_add', |
|
721 | 724 | repo_name=backend.repo_name, |
|
722 | 725 | revision='tip', f_path='/'), |
|
723 | 726 | params={ |
|
724 | 727 | 'content': "foo", |
|
725 | 728 | 'csrf_token': csrf_token, |
|
726 | 729 | }, |
|
727 | 730 | status=302) |
|
728 | 731 | |
|
729 | 732 | assert_session_flash(response, 'No filename') |
|
730 | 733 | |
|
731 | 734 | def test_add_file_into_repo_errors_and_no_commits( |
|
732 | 735 | self, backend, csrf_token): |
|
733 | 736 | repo = backend.create_repo() |
|
734 | 737 | # Create a file with no filename; it will display an error, and |

735 | 738 | # since the repo has no commits yet, redirect to the summary page |
|
736 | 739 | response = self.app.post( |
|
737 | 740 | url( |
|
738 | 741 | 'files_add', |
|
739 | 742 | repo_name=repo.repo_name, |
|
740 | 743 | revision='tip', f_path='/'), |
|
741 | 744 | params={ |
|
742 | 745 | 'content': "foo", |
|
743 | 746 | 'csrf_token': csrf_token, |
|
744 | 747 | }, |
|
745 | 748 | status=302) |
|
746 | 749 | |
|
747 | 750 | assert_session_flash(response, 'No filename') |
|
748 | 751 | |
|
749 | 752 | # Not allowed, redirect to the summary |
|
750 | 753 | redirected = response.follow() |
|
751 | 754 | summary_url = url('summary_home', repo_name=repo.repo_name) |
|
752 | 755 | |
|
753 | 756 | # As there are no commits, displays the summary page with the error of |
|
754 | 757 | # creating a file with no filename |
|
755 | 758 | assert redirected.req.path == summary_url |
|
756 | 759 | |
|
757 | 760 | @pytest.mark.parametrize("location, filename", [ |
|
758 | 761 | ('/abs', 'foo'), |
|
759 | 762 | ('../rel', 'foo'), |
|
760 | 763 | ('file/../foo', 'foo'), |
|
761 | 764 | ]) |
|
762 | 765 | def test_add_file_into_repo_bad_filenames( |
|
763 | 766 | self, location, filename, backend, csrf_token): |
|
764 | 767 | response = self.app.post( |
|
765 | 768 | url( |
|
766 | 769 | 'files_add', |
|
767 | 770 | repo_name=backend.repo_name, |
|
768 | 771 | revision='tip', f_path='/'), |
|
769 | 772 | params={ |
|
770 | 773 | 'content': "foo", |
|
771 | 774 | 'filename': filename, |
|
772 | 775 | 'location': location, |
|
773 | 776 | 'csrf_token': csrf_token, |
|
774 | 777 | }, |
|
775 | 778 | status=302) |
|
776 | 779 | |
|
777 | 780 | assert_session_flash( |
|
778 | 781 | response, |
|
779 | 782 | 'The location specified must be a relative path and must not ' |
|
780 | 783 | 'contain .. in the path') |
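
The three parametrized cases all violate the rule stated in the flash message: the location must be relative and free of '..' components. A minimal check that satisfies exactly these cases; the controller's real validation may be stricter:

    def is_valid_location(location):
        # Reject absolute paths and any '..' path component.
        return (not location.startswith('/')
                and '..' not in location.split('/'))

    assert not is_valid_location('/abs')
    assert not is_valid_location('../rel')
    assert not is_valid_location('file/../foo')
    assert is_valid_location('rel/dir')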
|
781 | 784 | |
|
782 | 785 | @pytest.mark.parametrize("cnt, location, filename", [ |
|
783 | 786 | (1, '', 'foo.txt'), |
|
784 | 787 | (2, 'dir', 'foo.rst'), |
|
785 | 788 | (3, 'rel/dir', 'foo.bar'), |
|
786 | 789 | ]) |
|
787 | 790 | def test_add_file_into_repo(self, cnt, location, filename, backend, |
|
788 | 791 | csrf_token): |
|
789 | 792 | repo = backend.create_repo() |
|
790 | 793 | response = self.app.post( |
|
791 | 794 | url( |
|
792 | 795 | 'files_add', |
|
793 | 796 | repo_name=repo.repo_name, |
|
794 | 797 | revision='tip', f_path='/'), |
|
795 | 798 | params={ |
|
796 | 799 | 'content': "foo", |
|
797 | 800 | 'filename': filename, |
|
798 | 801 | 'location': location, |
|
799 | 802 | 'csrf_token': csrf_token, |
|
800 | 803 | }, |
|
801 | 804 | status=302) |
|
802 | 805 | assert_session_flash( |
|
803 | 806 | response, 'Successfully committed to %s' |
|
804 | 807 | % os.path.join(location, filename)) |
|
805 | 808 | |
|
806 | 809 | def test_edit_file_view(self, backend): |
|
807 | 810 | response = self.app.get( |
|
808 | 811 | url( |
|
809 | 812 | 'files_edit_home', |
|
810 | 813 | repo_name=backend.repo_name, |
|
811 | 814 | revision=backend.default_head_id, |
|
812 | 815 | f_path='vcs/nodes.py'), |
|
813 | 816 | status=200) |
|
814 | 817 | response.mustcontain("Module holding everything related to vcs nodes.") |
|
815 | 818 | |
|
816 | 819 | def test_edit_file_view_not_on_branch(self, backend): |
|
817 | 820 | repo = backend.create_repo() |
|
818 | 821 | backend.ensure_file("vcs/nodes.py") |
|
819 | 822 | |
|
820 | 823 | response = self.app.get( |
|
821 | 824 | url( |
|
822 | 825 | 'files_edit_home', |
|
823 | 826 | repo_name=repo.repo_name, |
|
824 | 827 | revision='tip', f_path='vcs/nodes.py'), |
|
825 | 828 | status=302) |
|
826 | 829 | assert_session_flash( |
|
827 | 830 | response, |
|
828 | 831 | 'You can only edit files with revision being a valid branch') |
|
829 | 832 | |
|
830 | 833 | def test_edit_file_view_commit_changes(self, backend, csrf_token): |
|
831 | 834 | repo = backend.create_repo() |
|
832 | 835 | backend.ensure_file("vcs/nodes.py", content="print 'hello'") |
|
833 | 836 | |
|
834 | 837 | response = self.app.post( |
|
835 | 838 | url( |
|
836 | 839 | 'files_edit', |
|
837 | 840 | repo_name=repo.repo_name, |
|
838 | 841 | revision=backend.default_head_id, |
|
839 | 842 | f_path='vcs/nodes.py'), |
|
840 | 843 | params={ |
|
841 | 844 | 'content': "print 'hello world'", |
|
842 | 845 | 'message': 'I committed', |
|
843 | 846 | 'filename': "vcs/nodes.py", |
|
844 | 847 | 'csrf_token': csrf_token, |
|
845 | 848 | }, |
|
846 | 849 | status=302) |
|
847 | 850 | assert_session_flash( |
|
848 | 851 | response, 'Successfully committed to vcs/nodes.py') |
|
849 | 852 | tip = repo.get_commit(commit_idx=-1) |
|
850 | 853 | assert tip.message == 'I committed' |
|
851 | 854 | |
|
852 | 855 | def test_edit_file_view_commit_changes_default_message(self, backend, |
|
853 | 856 | csrf_token): |
|
854 | 857 | repo = backend.create_repo() |
|
855 | 858 | backend.ensure_file("vcs/nodes.py", content="print 'hello'") |
|
856 | 859 | |
|
857 | 860 | commit_id = ( |
|
858 | 861 | backend.default_branch_name or |
|
859 | 862 | backend.repo.scm_instance().commit_ids[-1]) |
|
860 | 863 | |
|
861 | 864 | response = self.app.post( |
|
862 | 865 | url( |
|
863 | 866 | 'files_edit', |
|
864 | 867 | repo_name=repo.repo_name, |
|
865 | 868 | revision=commit_id, |
|
866 | 869 | f_path='vcs/nodes.py'), |
|
867 | 870 | params={ |
|
868 | 871 | 'content': "print 'hello world'", |
|
869 | 872 | 'message': '', |
|
870 | 873 | 'filename': "vcs/nodes.py", |
|
871 | 874 | 'csrf_token': csrf_token, |
|
872 | 875 | }, |
|
873 | 876 | status=302) |
|
874 | 877 | assert_session_flash( |
|
875 | 878 | response, 'Successfully committed to vcs/nodes.py') |
|
876 | 879 | tip = repo.get_commit(commit_idx=-1) |
|
877 | 880 | assert tip.message == 'Edited file vcs/nodes.py via RhodeCode Enterprise' |
|
878 | 881 | |
|
879 | 882 | def test_delete_file_view(self, backend): |
|
880 | 883 | self.app.get(url( |
|
881 | 884 | 'files_delete_home', |
|
882 | 885 | repo_name=backend.repo_name, |
|
883 | 886 | revision='tip', f_path='vcs/nodes.py')) |
|
884 | 887 | |
|
885 | 888 | def test_delete_file_view_not_on_branch(self, backend): |
|
886 | 889 | repo = backend.create_repo() |
|
887 | 890 | backend.ensure_file('vcs/nodes.py') |
|
888 | 891 | |
|
889 | 892 | response = self.app.get( |
|
890 | 893 | url( |
|
891 | 894 | 'files_delete_home', |
|
892 | 895 | repo_name=repo.repo_name, |
|
893 | 896 | revision='tip', f_path='vcs/nodes.py'), |
|
894 | 897 | status=302) |
|
895 | 898 | assert_session_flash( |
|
896 | 899 | response, |
|
897 | 900 | 'You can only delete files with revision being a valid branch') |
|
898 | 901 | |
|
899 | 902 | def test_delete_file_view_commit_changes(self, backend, csrf_token): |
|
900 | 903 | repo = backend.create_repo() |
|
901 | 904 | backend.ensure_file("vcs/nodes.py") |
|
902 | 905 | |
|
903 | 906 | response = self.app.post( |
|
904 | 907 | url( |
|
905 | 908 | 'files_delete_home', |
|
906 | 909 | repo_name=repo.repo_name, |
|
907 | 910 | revision=backend.default_head_id, |
|
908 | 911 | f_path='vcs/nodes.py'), |
|
909 | 912 | params={ |
|
910 | 913 | 'message': 'I committed', |
|
911 | 914 | 'csrf_token': csrf_token, |
|
912 | 915 | }, |
|
913 | 916 | status=302) |
|
914 | 917 | assert_session_flash( |
|
915 | 918 | response, 'Successfully deleted file vcs/nodes.py') |
|
916 | 919 | |
|
917 | 920 | |
|
918 | 921 | def assert_files_in_response(response, files, params): |
|
919 | 922 | template = ( |
|
920 | "

923 | 'href="/%(repo_name)s/files/%(commit_id)s/%(name)s"') |
|
921 | 924 | _assert_items_in_response(response, files, template, params) |
|
922 | 925 | |
|
923 | 926 | |
|
924 | 927 | def assert_dirs_in_response(response, dirs, params): |
|
925 | 928 | template = ( |
|
926 | "

929 | 'href="/%(repo_name)s/files/%(commit_id)s/%(name)s"') |
|
927 | 930 | _assert_items_in_response(response, dirs, template, params) |
|
928 | 931 | |
|
929 | 932 | |
|
930 | 933 | def _assert_items_in_response(response, items, template, params): |
|
931 | 934 | for item in items: |
|
932 | 935 | item_params = {'name': item} |
|
933 | 936 | item_params.update(params) |
|
934 | 937 | response.mustcontain(template % item_params) |
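
A typical call, with illustrative repo and file names, asserts that each listed entry appears as a link in the rendered files page:

    assert_files_in_response(
        response, ['setup.py', 'README.rst'],  # entries expected as links
        {'repo_name': 'vcs-test', 'commit_id': commit.raw_id})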
|
935 | 938 | |
|
936 | 939 | |
|
937 | 940 | def assert_timeago_in_response(response, items, params): |
|
938 | 941 | for item in items: |
|
939 | 942 | response.mustcontain(h.age_component(params['date'])) |
@@ -1,282 +1,285 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2016 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import threading |
|
22 | 22 | import time |
|
23 | 23 | import logging |
|
24 | 24 | import os.path |
|
25 | 25 | import subprocess |
|
26 | 26 | import urllib2 |
|
27 | 27 | from urlparse import urlparse, parse_qsl |
|
28 | 28 | from urllib import unquote_plus |
|
29 | 29 | |
|
30 | 30 | import pytest |
|
31 | 31 | import rc_testdata |
|
32 | 32 | from lxml.html import fromstring, tostring |
|
33 | 33 | from lxml.cssselect import CSSSelector |
|
34 | 34 | |
|
35 | 35 | from rhodecode.model.db import User |
|
36 | 36 | from rhodecode.model.meta import Session |
|
37 | 37 | from rhodecode.model.scm import ScmModel |
|
38 | 38 | from rhodecode.lib.vcs.backends.svn.repository import SubversionRepository |
|
39 | 39 | |
|
40 | 40 | |
|
41 | 41 | log = logging.getLogger(__name__) |
|
42 | 42 | |
|
43 | 43 | |
|
44 | 44 | def set_anonymous_access(enabled): |
|
45 | 45 | """(Dis)allows anonymous access depending on parameter `enabled`""" |
|
46 | 46 | user = User.get_default_user() |
|
47 | 47 | user.active = enabled |
|
48 | 48 | Session().add(user) |
|
49 | 49 | Session().commit() |
|
50 | 50 | log.info('anonymous access is now: %s', enabled) |
|
51 | 51 | assert enabled == User.get_default_user().active, ( |
|
52 | 52 | 'Cannot set anonymous access') |
|
53 | 53 | |
|
54 | 54 | |
|
55 | 55 | def check_xfail_backends(node, backend_alias): |
|
56 | 56 | # Using "xfail_backends" here intentionally, since this marks work |
|
57 | 57 | # which is "to be done" soon. |
|
58 | 58 | skip_marker = node.get_marker('xfail_backends') |
|
59 | 59 | if skip_marker and backend_alias in skip_marker.args: |
|
60 | 60 | msg = "Support for backend %s to be developed." % (backend_alias, ) |
|
61 | 61 | msg = skip_marker.kwargs.get('reason', msg) |
|
62 | 62 | pytest.xfail(msg) |
|
63 | 63 | |
|
64 | 64 | |
|
65 | 65 | def check_skip_backends(node, backend_alias): |
|
66 | 66 | # Using "skip_backends" here intentionally, since this marks work which is |
|
67 | 67 | # not supported. |
|
68 | 68 | skip_marker = node.get_marker('skip_backends') |
|
69 | 69 | if skip_marker and backend_alias in skip_marker.args: |
|
70 | 70 | msg = "Feature not supported for backend %s." % (backend_alias, ) |
|
71 | 71 | msg = skip_marker.kwargs.get('reason', msg) |
|
72 | 72 | pytest.skip(msg) |
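
Both helpers read pytest markers off the test node, so tests opt in declaratively. The xfail_backends usage appears verbatim in the tests above; the skip_backends example here is illustrative:

    @pytest.mark.xfail_backends('svn', reason='Depends on online editing')
    def test_add_file_into_repo_missing_content(self, backend, csrf_token):
        ...

    @pytest.mark.skip_backends('git', reason='Feature not supported')
    def test_hg_only_behaviour(self, backend):
        ...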
|
73 | 73 | |
|
74 | 74 | |
|
75 | 75 | def extract_git_repo_from_dump(dump_name, repo_name): |
|
76 | 76 | """Create git repo `repo_name` from dump `dump_name`.""" |
|
77 | 77 | repos_path = ScmModel().repos_path |
|
78 | 78 | target_path = os.path.join(repos_path, repo_name) |
|
79 | 79 | rc_testdata.extract_git_dump(dump_name, target_path) |
|
80 | 80 | return target_path |
|
81 | 81 | |
|
82 | 82 | |
|
83 | 83 | def extract_hg_repo_from_dump(dump_name, repo_name): |
|
84 | 84 | """Create hg repo `repo_name` from dump `dump_name`.""" |
|
85 | 85 | repos_path = ScmModel().repos_path |
|
86 | 86 | target_path = os.path.join(repos_path, repo_name) |
|
87 | 87 | rc_testdata.extract_hg_dump(dump_name, target_path) |
|
88 | 88 | return target_path |
|
89 | 89 | |
|
90 | 90 | |
|
91 | 91 | def extract_svn_repo_from_dump(dump_name, repo_name): |
|
92 | 92 | """Create a svn repo `repo_name` from dump `dump_name`.""" |
|
93 | 93 | repos_path = ScmModel().repos_path |
|
94 | 94 | target_path = os.path.join(repos_path, repo_name) |
|
95 | 95 | SubversionRepository(target_path, create=True) |
|
96 | 96 | _load_svn_dump_into_repo(dump_name, target_path) |
|
97 | 97 | return target_path |
|
98 | 98 | |
|
99 | 99 | |
|
100 | 100 | def assert_message_in_log(log_records, message, levelno, module): |
|
101 | 101 | messages = [ |
|
102 | 102 | r.message for r in log_records |
|
103 | 103 | if r.module == module and r.levelno == levelno |
|
104 | 104 | ] |
|
105 | 105 | assert message in messages |
|
106 | 106 | |
|
107 | 107 | |
|
108 | 108 | def _load_svn_dump_into_repo(dump_name, repo_path): |
|
109 | 109 | """ |
|
110 | 110 | Utility to populate a svn repository with a named dump |
|
111 | 111 | |
|
112 | 112 | Currently the dumps are in rc_testdata. They might later on be |
|
113 | 113 | integrated with the main repository once they stabilize more. |
|
114 | 114 | """ |
|
115 | 115 | dump = rc_testdata.load_svn_dump(dump_name) |
|
116 | 116 | load_dump = subprocess.Popen( |
|
117 | 117 | ['svnadmin', 'load', repo_path], |
|
118 | 118 | stdin=subprocess.PIPE, stdout=subprocess.PIPE, |
|
119 | 119 | stderr=subprocess.PIPE) |
|
120 | 120 | out, err = load_dump.communicate(dump) |
|
121 | 121 | if load_dump.returncode != 0: |
|
122 | 122 | log.error("Output of load_dump command: %s", out) |
|
123 | 123 | log.error("Error output of load_dump command: %s", err) |
|
124 | 124 | raise Exception( |
|
125 | 125 | 'Failed to load dump "%s" into repository at path "%s".' |
|
126 | 126 | % (dump_name, repo_path)) |
|
127 | 127 | |
|
128 | 128 | |
|
129 | 129 | class AssertResponse(object): |
|
130 | 130 | """ |
|
131 | 131 | Utility that helps to assert things about a given HTML response. |
|
132 | 132 | """ |
|
133 | 133 | |
|
134 | 134 | def __init__(self, response): |
|
135 | 135 | self.response = response |
|
136 | 136 | |
|
137 | 137 | def one_element_exists(self, css_selector): |
|
138 | 138 | self.get_element(css_selector) |
|
139 | 139 | |
|
140 | 140 | def no_element_exists(self, css_selector): |
|
141 | 141 | assert not self._get_elements(css_selector) |
|
142 | 142 | |
|
143 | 143 | def element_equals_to(self, css_selector, expected_content): |
|
144 | 144 | element = self.get_element(css_selector) |
|
145 | 145 | element_text = self._element_to_string(element) |
|
146 | 146 | assert expected_content in element_text |
|
147 | 147 | |
|
148 | 148 | def element_contains(self, css_selector, expected_content): |
|
149 | 149 | element = self.get_element(css_selector) |
|
150 | 150 | assert expected_content in element.text_content() |
|
151 | 151 | |
|
152 | 152 | def contains_one_link(self, link_text, href): |
|
153 | 153 | doc = fromstring(self.response.body) |
|
154 | 154 | sel = CSSSelector('a[href]') |
|
155 | 155 | elements = [ |
|
156 | 156 | e for e in sel(doc) if e.text_content().strip() == link_text] |
|
157 | 157 | assert len(elements) == 1, "Did not find link or found multiple links" |
|
158 | 158 | self._ensure_url_equal(elements[0].attrib.get('href'), href) |
|
159 | 159 | |
|
160 | 160 | def contains_one_anchor(self, anchor_id): |
|
161 | 161 | doc = fromstring(self.response.body) |
|
162 | 162 | sel = CSSSelector('#' + anchor_id) |
|
163 | 163 | elements = sel(doc) |
|
164 | 164 | assert len(elements) == 1 |
|
165 | 165 | |
|
166 | 166 | def _ensure_url_equal(self, found, expected): |
|
167 | 167 | assert _Url(found) == _Url(expected) |
|
168 | 168 | |
|
169 | 169 | def get_element(self, css_selector): |
|
170 | 170 | elements = self._get_elements(css_selector) |
|
171 | 171 | assert len(elements) == 1 |
|
172 | 172 | return elements[0] |
|
173 | 173 | |
|
174 | def get_elements(self, css_selector): | |
|
175 | return self._get_elements(css_selector) | |
|
176 | ||
|
174 | 177 | def _get_elements(self, css_selector): |
|
175 | 178 | doc = fromstring(self.response.body) |
|
176 | 179 | sel = CSSSelector(css_selector) |
|
177 | 180 | elements = sel(doc) |
|
178 | 181 | return elements |
|
179 | 182 | |
|
180 | 183 | def _element_to_string(self, element): |
|
181 | 184 | return tostring(element) |
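
Typical usage wraps a WebTest response and asserts against CSS selectors; the selectors and texts here are illustrative:

    assert_response = AssertResponse(response)
    assert_response.one_element_exists('#content')
    assert_response.element_contains('.title', 'Repositories')
    assert_response.contains_one_link('rhodecode.com', 'https://rhodecode.com')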
|
182 | 185 | |
|
183 | 186 | |
|
184 | 187 | class _Url(object): |
|
185 | 188 | """ |
|
186 | 189 | A url object that can be compared with other url objects |
|
187 | 190 | without regard to the vagaries of encoding, escaping, and ordering |
|
188 | 191 | of parameters in query strings. |
|
189 | 192 | |
|
190 | 193 | Inspired by |
|
191 | 194 | http://stackoverflow.com/questions/5371992/comparing-two-urls-in-python |
|
192 | 195 | """ |
|
193 | 196 | |
|
194 | 197 | def __init__(self, url): |
|
195 | 198 | parts = urlparse(url) |
|
196 | 199 | _query = frozenset(parse_qsl(parts.query)) |
|
197 | 200 | _path = unquote_plus(parts.path) |
|
198 | 201 | parts = parts._replace(query=_query, path=_path) |
|
199 | 202 | self.parts = parts |
|
200 | 203 | |
|
201 | 204 | def __eq__(self, other): |
|
202 | 205 | return self.parts == other.parts |
|
203 | 206 | |
|
204 | 207 | def __hash__(self): |
|
205 | 208 | return hash(self.parts) |
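
Because the query string is parsed into a frozenset and the path is unquoted, parameter order and percent/plus escaping do not affect equality:

    assert _Url('/files?diff1=a&diff2=b') == _Url('/files?diff2=b&diff1=a')
    assert _Url('/repo/some%20file') == _Url('/repo/some+file')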
|
206 | 209 | |
|
207 | 210 | |
|
208 | 211 | def run_test_concurrently(times, raise_catched_exc=True): |
|
209 | 212 | """ |
|
210 | 213 | Add this decorator to small pieces of code that you want to test |
|
211 | 214 | concurrently |
|
212 | 215 | |
|
213 | 216 | ex: |
|
214 | 217 | |
|
215 | 218 | @test_concurrently(25) |
|
216 | 219 | def my_test_function(): |
|
217 | 220 | ... |
|
218 | 221 | """ |
|
219 | 222 | def test_concurrently_decorator(test_func): |
|
220 | 223 | def wrapper(*args, **kwargs): |
|
221 | 224 | exceptions = [] |
|
222 | 225 | |
|
223 | 226 | def call_test_func(): |
|
224 | 227 | try: |
|
225 | 228 | test_func(*args, **kwargs) |
|
226 | 229 | except Exception, e: |
|
227 | 230 | exceptions.append(e) |
|
228 | 231 | if raise_catched_exc: |
|
229 | 232 | raise |
|
230 | 233 | threads = [] |
|
231 | 234 | for i in range(times): |
|
232 | 235 | threads.append(threading.Thread(target=call_test_func)) |
|
233 | 236 | for t in threads: |
|
234 | 237 | t.start() |
|
235 | 238 | for t in threads: |
|
236 | 239 | t.join() |
|
237 | 240 | if exceptions: |
|
238 | 241 | raise Exception( |
|
239 | 242 | 'test_concurrently intercepted %s exceptions: %s' % ( |
|
240 | 243 | len(exceptions), exceptions)) |
|
241 | 244 | return wrapper |
|
242 | 245 | return test_concurrently_decorator |
|
243 | 246 | |
|
244 | 247 | |
|
245 | 248 | def wait_for_url(url, timeout=10): |
|
246 | 249 | """ |
|
247 | 250 | Wait until URL becomes reachable. |
|
248 | 251 | |
|
249 | 252 | It polls the URL until the timeout is reached or it became reachable. |
|
250 | 253 | If will call to `py.test.fail` in case the URL is not reachable. |
|
251 | 254 | """ |
|
252 | 255 | timeout = time.time() + timeout |
|
253 | 256 | last = 0 |
|
254 | 257 | wait = 0.1 |
|
255 | 258 | |
|
256 | 259 | while (timeout > last): |
|
257 | 260 | last = time.time() |
|
258 | 261 | if is_url_reachable(url): |
|
259 | 262 | break |
|
260 | 263 | elif ((last + wait) > time.time()): |
|
261 | 264 | # Go to sleep because not enough time has passed since last check. |
|
262 | 265 | time.sleep(wait) |
|
263 | 266 | else: |
|
264 | 267 | pytest.fail("Timeout while waiting for URL {}".format(url)) |
|
265 | 268 | |
|
266 | 269 | |
|
267 | 270 | def is_url_reachable(url): |
|
268 | 271 | try: |
|
269 | 272 | urllib2.urlopen(url) |
|
270 | 273 | except urllib2.URLError: |
|
271 | 274 | return False |
|
272 | 275 | return True |
|
273 | 276 | |
|
274 | 277 | |
|
275 | 278 | def get_session_from_response(response): |
|
276 | 279 | """ |
|
277 | 280 | This returns the session from a response object. Pylons has some magic |
|
278 | 281 | to make the session available as `response.session`. But pyramid |
|
279 | 282 | doesn't expose it. |
|
280 | 283 | """ |
|
281 | 284 | # TODO: Try to look up the session key also. |
|
282 | 285 | return response.request.environ['beaker.session'] |