user-profile: migrated to pyramid views.
marcink - r1502:f2363971 default
@@ -0,0 +1,53 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2016-2017 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
21 import logging
22 from pylons import tmpl_context as c
23
24 from rhodecode.lib.utils2 import StrictAttributeDict
25
26 log = logging.getLogger(__name__)
27
28
29 class TemplateArgs(StrictAttributeDict):
30 pass
31
32
33 class BaseAppView(object):
34
35 def __init__(self, context, request):
36 self.request = request
37 self.context = context
38 self.session = request.session
39 self._rhodecode_user = request.user
40
41 def _get_local_tmpl_context(self):
42 return TemplateArgs()
43
44 def _get_template_context(self, tmpl_args):
45
46 for k, v in tmpl_args.items():
47 setattr(c, k, v)
48
49 return {
50 'defaults': {},
51 'errors': {},
52 }
53
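For orientation, the two helpers above are meant to be used together from concrete views; a minimal sketch follows (the view name, route name and template path are hypothetical, mirroring the pattern UserProfileView uses later in this commit):

    from pyramid.view import view_config

    class HelloWorldView(BaseAppView):

        @view_config(
            route_name='hello_world', request_method='GET',
            renderer='rhodecode:templates/hello_world.mako')
        def hello_world(self):
            c = self._get_local_tmpl_context()  # StrictAttributeDict, so typos raise
            c.active = 'hello_world'
            # copies the collected attributes onto pylons' tmpl_context and
            # returns the dict handed to the mako renderer
            return self._get_template_context(c)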
@@ -0,0 +1,28 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2016-2017 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
21
22 def includeme(config):
23 config.add_route(
24 name='user_profile',
25 pattern='/_profiles/{username}')
26
27 # Scan module for configuration decorators.
28 config.scan()
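Note that config.scan() walks this package so that views decorated with @view_config (added later in this commit) are registered against the route declared here. A roughly equivalent imperative registration, shown only for illustration (the views module path and method name are inferred from the rest of this commit, not from this file), would be:

    from rhodecode.apps.user_profile.views import UserProfileView

    def includeme(config):
        config.add_route(
            name='user_profile',
            pattern='/_profiles/{username}')
        # what config.scan() effectively derives from the @view_config decorator:
        config.add_view(
            UserProfileView,
            attr='user_profile',
            route_name='user_profile',
            request_method='GET',
            renderer='rhodecode:templates/users/user.mako')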
1 NO CONTENT: new file 100644
@@ -0,0 +1,75 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
21 import pytest
22
23 from rhodecode.model.db import User
24 from rhodecode.tests import (
25 TestController, TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS,
26 TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
27 from rhodecode.tests.fixture import Fixture
28 from rhodecode.tests.utils import AssertResponse
29
30 fixture = Fixture()
31
32
33 def route_path(name, **kwargs):
34 return '/_profiles/{username}'.format(**kwargs)
35
36
37 class TestUsersController(TestController):
38
39 def test_user_profile(self, user_util):
40 edit_link_css = '.user-profile .panel-edit'
41 self.log_user(TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
42 user = user_util.create_user(
43 'test-my-user', password='qweqwe', email='testme@rhodecode.org')
44 username = user.username
45
46 response = self.app.get(route_path('user_profile', username=username))
47 response.mustcontain('testme')
48 response.mustcontain('testme@rhodecode.org')
49 assert_response = AssertResponse(response)
50 assert_response.no_element_exists(edit_link_css)
51
52 # edit should be available to superadmin users
53 self.logout_user()
54 self.log_user(TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
55 response = self.app.get(route_path('user_profile', username=username))
56 assert_response = AssertResponse(response)
57 assert_response.element_contains(edit_link_css, 'Edit')
58
59 def test_user_profile_not_available(self, user_util):
60 user = user_util.create_user()
61 username = user.username
62
63 # not logged in, redirect
64 self.app.get(route_path('user_profile', username=username), status=302)
65
66 self.log_user()
67 # after log-in show
68 self.app.get(route_path('user_profile', username=username), status=200)
69
70 # default user, not allowed to show it
71 self.app.get(
72 route_path('user_profile', username=User.DEFAULT_USER), status=404)
73
74 # actual 404
75 self.app.get(route_path('user_profile', username='unknown'), status=404)
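One caveat in these tests: the local route_path helper ignores its name argument and hard-codes the single pattern this commit introduces. A slightly more general stand-in (purely illustrative) would keep a small name-to-pattern map so further pyramid routes can be added without changing call sites:

    def route_path(name, **kwargs):
        # only the route added in this commit; extend as more pyramid routes land
        base_url_map = {
            'user_profile': '/_profiles/{username}',
        }
        return base_url_map[name].format(**kwargs)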
@@ -0,0 +1,53 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2016-2017 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
21 import logging
22
23 from pyramid.httpexceptions import HTTPNotFound
24 from pyramid.view import view_config
25
26 from rhodecode.apps._base import BaseAppView
27 from rhodecode.lib.auth import LoginRequired, NotAnonymous
28
29 from rhodecode.model.db import User
30 from rhodecode.model.user import UserModel
31
32 log = logging.getLogger(__name__)
33
34
35 class UserProfileView(BaseAppView):
36
37 @LoginRequired()
38 @NotAnonymous()
39 @view_config(
40 route_name='user_profile', request_method='GET',
41 renderer='rhodecode:templates/users/user.mako')
42 def user_profile(self):
43 # register local template context
44 c = self._get_local_tmpl_context()
45 c.active = 'user_profile'
46
47 username = self.request.matchdict.get('username')
48
49 c.user = UserModel().get_by_username(username)
50 if not c.user or c.user.username == User.DEFAULT_USER:
51 raise HTTPNotFound()
52
53 return self._get_template_context(c)
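With the route and view registered, URLs for profiles can be generated through the standard pyramid request helpers; a minimal sketch (the username value is illustrative):

    # inside any pyramid view or subscriber that has access to `request`:
    profile_path = request.route_path('user_profile', username='marcink')
    # -> '/_profiles/marcink'
    profile_url = request.route_url('user_profile', username='marcink')
    # -> absolute URL, e.g. 'http://localhost:5000/_profiles/marcink'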
@@ -1,495 +1,499 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Pylons middleware initialization
23 23 """
24 24 import logging
25 25 from collections import OrderedDict
26 26
27 27 from paste.registry import RegistryManager
28 28 from paste.gzipper import make_gzip_middleware
29 29 from pylons.wsgiapp import PylonsApp
30 30 from pyramid.authorization import ACLAuthorizationPolicy
31 31 from pyramid.config import Configurator
32 32 from pyramid.settings import asbool, aslist
33 33 from pyramid.wsgi import wsgiapp
34 34 from pyramid.httpexceptions import (
35 35 HTTPException, HTTPError, HTTPInternalServerError, HTTPFound)
36 36 from pyramid.events import ApplicationCreated
37 37 from pyramid.renderers import render_to_response
38 38 from routes.middleware import RoutesMiddleware
39 39 import routes.util
40 40
41 41 import rhodecode
42 42 from rhodecode.model import meta
43 43 from rhodecode.config import patches
44 44 from rhodecode.config.routing import STATIC_FILE_PREFIX
45 45 from rhodecode.config.environment import (
46 46 load_environment, load_pyramid_environment)
47 47 from rhodecode.lib.middleware import csrf
48 48 from rhodecode.lib.middleware.appenlight import wrap_in_appenlight_if_enabled
49 49 from rhodecode.lib.middleware.error_handling import (
50 50 PylonsErrorHandlingMiddleware)
51 51 from rhodecode.lib.middleware.https_fixup import HttpsFixup
52 52 from rhodecode.lib.middleware.vcs import VCSMiddleware
53 53 from rhodecode.lib.plugins.utils import register_rhodecode_plugin
54 54 from rhodecode.lib.utils2 import aslist as rhodecode_aslist
55 55 from rhodecode.subscribers import (
56 56 scan_repositories_if_enabled, write_metadata_if_needed)
57 57
58 58
59 59 log = logging.getLogger(__name__)
60 60
61 61
62 62 # this is used to avoid the route lookup overhead in RoutesMiddleware
63 63 # for certain routes which won't go to pylons - eg. static files, debugger
64 64 # it is only needed for the pylons migration and can be removed once complete
65 65 class SkippableRoutesMiddleware(RoutesMiddleware):
66 66 """ Routes middleware that allows you to skip prefixes """
67 67
68 68 def __init__(self, *args, **kw):
69 69 self.skip_prefixes = kw.pop('skip_prefixes', [])
70 70 super(SkippableRoutesMiddleware, self).__init__(*args, **kw)
71 71
72 72 def __call__(self, environ, start_response):
73 73 for prefix in self.skip_prefixes:
74 74 if environ['PATH_INFO'].startswith(prefix):
75 75 # added to avoid the case when a missing /_static route falls
76 76 # through to pylons and causes an exception as pylons is
77 77 # expecting wsgiorg.routing_args to be set in the environ
78 78 # by RoutesMiddleware.
79 79 if 'wsgiorg.routing_args' not in environ:
80 80 environ['wsgiorg.routing_args'] = (None, {})
81 81 return self.app(environ, start_response)
82 82
83 83 return super(SkippableRoutesMiddleware, self).__call__(
84 84 environ, start_response)
85 85
86 86
87 87 def make_app(global_conf, static_files=True, **app_conf):
88 88 """Create a Pylons WSGI application and return it
89 89
90 90 ``global_conf``
91 91 The inherited configuration for this application. Normally from
92 92 the [DEFAULT] section of the Paste ini file.
93 93
94 94 ``app_conf``
95 95 The application's local configuration. Normally specified in
96 96 the [app:<name>] section of the Paste ini file (where <name>
97 97 defaults to main).
98 98
99 99 """
100 100 # Apply compatibility patches
101 101 patches.kombu_1_5_1_python_2_7_11()
102 102 patches.inspect_getargspec()
103 103
104 104 # Configure the Pylons environment
105 105 config = load_environment(global_conf, app_conf)
106 106
107 107 # The Pylons WSGI app
108 108 app = PylonsApp(config=config)
109 109 if rhodecode.is_test:
110 110 app = csrf.CSRFDetector(app)
111 111
112 112 expected_origin = config.get('expected_origin')
113 113 if expected_origin:
114 114 # The API can be accessed from other Origins.
115 115 app = csrf.OriginChecker(app, expected_origin,
116 116 skip_urls=[routes.util.url_for('api')])
117 117
118 118 # Establish the Registry for this application
119 119 app = RegistryManager(app)
120 120
121 121 app.config = config
122 122
123 123 return app
124 124
125 125
126 126 def make_pyramid_app(global_config, **settings):
127 127 """
128 128 Constructs the WSGI application based on Pyramid and wraps the Pylons based
129 129 application.
130 130
131 131 Specials:
132 132
133 133 * We migrate from Pylons to Pyramid. While doing this, we keep both
134 134 frameworks functional. This involves moving some WSGI middlewares around
135 135 and providing access to some data internals, so that the old code is
136 136 still functional.
137 137
138 138 * The application can also be integrated like a plugin via the call to
139 139 `includeme`. This is accompanied with the other utility functions which
140 140 are called. Changing this should be done with great care to not break
141 141 cases when these fragments are assembled from another place.
142 142
143 143 """
144 144 # The edition string should be available in pylons too, so we add it here
145 145 # before copying the settings.
146 146 settings.setdefault('rhodecode.edition', 'Community Edition')
147 147
148 148 # As long as our Pylons application does expect "unprepared" settings, make
149 149 # sure that we keep an unmodified copy. This avoids unintentional change of
150 150 # behavior in the old application.
151 151 settings_pylons = settings.copy()
152 152
153 153 sanitize_settings_and_apply_defaults(settings)
154 154 config = Configurator(settings=settings)
155 155 add_pylons_compat_data(config.registry, global_config, settings_pylons)
156 156
157 157 load_pyramid_environment(global_config, settings)
158 158
159 159 includeme_first(config)
160 160 includeme(config)
161 161 pyramid_app = config.make_wsgi_app()
162 162 pyramid_app = wrap_app_in_wsgi_middlewares(pyramid_app, config)
163 163 pyramid_app.config = config
164 164
165 165 # creating the app uses a connection - return it after we are done
166 166 meta.Session.remove()
167 167
168 168 return pyramid_app
169 169
170 170
171 171 def make_not_found_view(config):
172 172 """
173 173 This creates the view which should be registered as not-found-view to
174 174 pyramid. Basically it consists of the old pylons app, converted to a view.
175 175 Additionally it is wrapped by some other middlewares.
176 176 """
177 177 settings = config.registry.settings
178 178 vcs_server_enabled = settings['vcs.server.enable']
179 179
180 180 # Make pylons app from unprepared settings.
181 181 pylons_app = make_app(
182 182 config.registry._pylons_compat_global_config,
183 183 **config.registry._pylons_compat_settings)
184 184 config.registry._pylons_compat_config = pylons_app.config
185 185
186 186 # Appenlight monitoring.
187 187 pylons_app, appenlight_client = wrap_in_appenlight_if_enabled(
188 188 pylons_app, settings)
189 189
190 190 # The pylons app is executed inside of the pyramid 404 exception handler.
191 191 # Exceptions which are raised inside of it are not handled by pyramid
192 192 # again. Therefore we add a middleware that invokes the error handler in
193 193 # case of an exception or error response. This way we return proper error
194 194 # HTML pages in case of an error.
195 195 reraise = (settings.get('debugtoolbar.enabled', False) or
196 196 rhodecode.disable_error_handler)
197 197 pylons_app = PylonsErrorHandlingMiddleware(
198 198 pylons_app, error_handler, reraise)
199 199
200 200 # The VCSMiddleware shall operate like a fallback if pyramid doesn't find a
201 201 # view to handle the request. Therefore it is wrapped around the pylons
202 202 # app. It has to be outside of the error handling otherwise error responses
203 203 # from the vcsserver are converted to HTML error pages. This confuses the
204 204 # command line tools and the user won't get a meaningful error message.
205 205 if vcs_server_enabled:
206 206 pylons_app = VCSMiddleware(
207 207 pylons_app, settings, appenlight_client, registry=config.registry)
208 208
209 209 # Convert WSGI app to pyramid view and return it.
210 210 return wsgiapp(pylons_app)
211 211
212 212
213 213 def add_pylons_compat_data(registry, global_config, settings):
214 214 """
215 215 Attach data to the registry to support the Pylons integration.
216 216 """
217 217 registry._pylons_compat_global_config = global_config
218 218 registry._pylons_compat_settings = settings
219 219
220 220
221 221 def error_handler(exception, request):
222 222 import rhodecode
223 223 from rhodecode.lib.utils2 import AttributeDict
224 224
225 225 rhodecode_title = rhodecode.CONFIG.get('rhodecode_title') or 'RhodeCode'
226 226
227 227 base_response = HTTPInternalServerError()
228 228 # prefer original exception for the response since it may have headers set
229 229 if isinstance(exception, HTTPException):
230 230 base_response = exception
231 231
232 232 def is_http_error(response):
233 233 # error which should have traceback
234 234 return response.status_code > 499
235 235
236 236 if is_http_error(base_response):
237 237 log.exception(
238 238 'error occurred handling this request for path: %s', request.path)
239 239
240 240 c = AttributeDict()
241 241 c.error_message = base_response.status
242 242 c.error_explanation = base_response.explanation or str(base_response)
243 243 c.visual = AttributeDict()
244 244
245 245 c.visual.rhodecode_support_url = (
246 246 request.registry.settings.get('rhodecode_support_url') or
247 247 request.route_url('rhodecode_support')
248 248 )
249 249 c.redirect_time = 0
250 250 c.rhodecode_name = rhodecode_title
251 251 if not c.rhodecode_name:
252 252 c.rhodecode_name = 'Rhodecode'
253 253
254 254 c.causes = []
255 255 if hasattr(base_response, 'causes'):
256 256 c.causes = base_response.causes
257 257
258 258 response = render_to_response(
259 259 '/errors/error_document.mako', {'c': c}, request=request,
260 260 response=base_response)
261 261
262 262 return response
263 263
264 264
265 265 def includeme(config):
266 266 settings = config.registry.settings
267 267
268 268 # plugin information
269 269 config.registry.rhodecode_plugins = OrderedDict()
270 270
271 271 config.add_directive(
272 272 'register_rhodecode_plugin', register_rhodecode_plugin)
273 273
274 274 if asbool(settings.get('appenlight', 'false')):
275 275 config.include('appenlight_client.ext.pyramid_tween')
276 276
277 277 # Includes which are required. The application would fail without them.
278 278 config.include('pyramid_mako')
279 279 config.include('pyramid_beaker')
280 280 config.include('rhodecode.channelstream')
281 281 config.include('rhodecode.admin')
282 282 config.include('rhodecode.authentication')
283 283 config.include('rhodecode.integrations')
284
285 # apps
284 286 config.include('rhodecode.apps.login')
287 config.include('rhodecode.apps.user_profile')
288
285 289 config.include('rhodecode.tweens')
286 290 config.include('rhodecode.api')
287 291 config.include('rhodecode.svn_support')
288 292 config.add_route(
289 293 'rhodecode_support', 'https://rhodecode.com/help/', static=True)
290 294
291 295 config.add_translation_dirs('rhodecode:i18n/')
292 296 settings['default_locale_name'] = settings.get('lang', 'en')
293 297
294 298 # Add subscribers.
295 299 config.add_subscriber(scan_repositories_if_enabled, ApplicationCreated)
296 300 config.add_subscriber(write_metadata_if_needed, ApplicationCreated)
297 301
298 302 # Set the authorization policy.
299 303 authz_policy = ACLAuthorizationPolicy()
300 304 config.set_authorization_policy(authz_policy)
301 305
302 306 # Set the default renderer for HTML templates to mako.
303 307 config.add_mako_renderer('.html')
304 308
305 309 # include RhodeCode plugins
306 310 includes = aslist(settings.get('rhodecode.includes', []))
307 311 for inc in includes:
308 312 config.include(inc)
309 313
310 314 # This is the glue which allows us to migrate in chunks. By registering the
311 315 # pylons based application as the "Not Found" view in Pyramid, we will
312 316 # fallback to the old application each time the new one does not yet know
313 317 # how to handle a request.
314 318 config.add_notfound_view(make_not_found_view(config))
315 319
316 320 if not settings.get('debugtoolbar.enabled', False):
317 321 # if no toolbar, then any exception gets caught and rendered
318 322 config.add_view(error_handler, context=Exception)
319 323
320 324 config.add_view(error_handler, context=HTTPError)
321 325
322 326
323 327 def includeme_first(config):
324 328 # redirect automatic browser favicon.ico requests to correct place
325 329 def favicon_redirect(context, request):
326 330 return HTTPFound(
327 331 request.static_path('rhodecode:public/images/favicon.ico'))
328 332
329 333 config.add_view(favicon_redirect, route_name='favicon')
330 334 config.add_route('favicon', '/favicon.ico')
331 335
332 336 def robots_redirect(context, request):
333 337 return HTTPFound(
334 338 request.static_path('rhodecode:public/robots.txt'))
335 339
336 340 config.add_view(robots_redirect, route_name='robots')
337 341 config.add_route('robots', '/robots.txt')
338 342
339 343 config.add_static_view(
340 344 '_static/deform', 'deform:static')
341 345 config.add_static_view(
342 346 '_static/rhodecode', path='rhodecode:public', cache_max_age=3600 * 24)
343 347
344 348
345 349 def wrap_app_in_wsgi_middlewares(pyramid_app, config):
346 350 """
347 351 Apply outer WSGI middlewares around the application.
348 352
349 353 Part of this has been moved up from the Pylons layer, so that the
350 354 data is also available if old Pylons code is hit through an already ported
351 355 view.
352 356 """
353 357 settings = config.registry.settings
354 358
355 359 # enable https redirects based on HTTP_X_URL_SCHEME set by proxy
356 360 pyramid_app = HttpsFixup(pyramid_app, settings)
357 361
358 362 # Add RoutesMiddleware to support the pylons compatibility tween during
359 363 # migration to pyramid.
360 364 pyramid_app = SkippableRoutesMiddleware(
361 365 pyramid_app, config.registry._pylons_compat_config['routes.map'],
362 366 skip_prefixes=(STATIC_FILE_PREFIX, '/_debug_toolbar'))
363 367
364 368 pyramid_app, _ = wrap_in_appenlight_if_enabled(pyramid_app, settings)
365 369
366 370 if settings['gzip_responses']:
367 371 pyramid_app = make_gzip_middleware(
368 372 pyramid_app, settings, compress_level=1)
369 373
370 374 # this should be the outer most middleware in the wsgi stack since
371 375 # middleware like Routes make database calls
372 376 def pyramid_app_with_cleanup(environ, start_response):
373 377 try:
374 378 return pyramid_app(environ, start_response)
375 379 finally:
376 380 # Dispose current database session and rollback uncommitted
377 381 # transactions.
378 382 meta.Session.remove()
379 383
380 384 # In a single-threaded server, on a non-sqlite db we should have
381 385 # '0 Current Checked out connections' at the end of a request,
382 386 # if not, then something, somewhere is leaving a connection open
383 387 pool = meta.Base.metadata.bind.engine.pool
384 388 log.debug('sa pool status: %s', pool.status())
385 389
386 390
387 391 return pyramid_app_with_cleanup
388 392
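To make the wrapping order above easier to follow, the resulting WSGI stack from outermost to innermost looks like this (a summary of the code above, with the optional layers noted; nothing beyond it is implied):

    # pyramid_app_with_cleanup        - always outermost, disposes the DB session
    #   gzip middleware               - only if settings['gzip_responses']
    #     appenlight middleware       - only if appenlight monitoring is enabled
    #       SkippableRoutesMiddleware - pylons-compat routing, skips /_static etc.
    #         HttpsFixup              - honours HTTP_X_URL_SCHEME from the proxy
    #           pyramid_app           - the Configurator-built application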
389 393
390 394 def sanitize_settings_and_apply_defaults(settings):
391 395 """
392 396 Applies settings defaults and does all type conversion.
393 397
394 398 We would move all settings parsing and preparation into this place, so that
395 399 we have only one place left which deals with this part. The remaining parts
396 400 of the application would start to rely fully on well prepared settings.
397 401
398 402 This piece would later be split up per topic to avoid a big fat monster
399 403 function.
400 404 """
401 405
402 406 # Pyramid's mako renderer has to search in the templates folder so that the
403 407 # old templates still work. Ported and new templates are expected to use
404 408 # real asset specifications for the includes.
405 409 mako_directories = settings.setdefault('mako.directories', [
406 410 # Base templates of the original Pylons application
407 411 'rhodecode:templates',
408 412 ])
409 413 log.debug(
410 414 "Using the following Mako template directories: %s",
411 415 mako_directories)
412 416
413 417 # Default includes, possible to change as a user
414 418 pyramid_includes = settings.setdefault('pyramid.includes', [
415 419 'rhodecode.lib.middleware.request_wrapper',
416 420 ])
417 421 log.debug(
418 422 "Using the following pyramid.includes: %s",
419 423 pyramid_includes)
420 424
421 425 # TODO: johbo: Re-think this, usually the call to config.include
422 426 # should allow to pass in a prefix.
423 427 settings.setdefault('rhodecode.api.url', '/_admin/api')
424 428
425 429 # Sanitize generic settings.
426 430 _list_setting(settings, 'default_encoding', 'UTF-8')
427 431 _bool_setting(settings, 'is_test', 'false')
428 432 _bool_setting(settings, 'gzip_responses', 'false')
429 433
430 434 # Call split out functions that sanitize settings for each topic.
431 435 _sanitize_appenlight_settings(settings)
432 436 _sanitize_vcs_settings(settings)
433 437
434 438 return settings
435 439
436 440
437 441 def _sanitize_appenlight_settings(settings):
438 442 _bool_setting(settings, 'appenlight', 'false')
439 443
440 444
441 445 def _sanitize_vcs_settings(settings):
442 446 """
443 447 Applies settings defaults and does type conversion for all VCS related
444 448 settings.
445 449 """
446 450 _string_setting(settings, 'vcs.svn.compatible_version', '')
447 451 _string_setting(settings, 'git_rev_filter', '--all')
448 452 _string_setting(settings, 'vcs.hooks.protocol', 'http')
449 453 _string_setting(settings, 'vcs.scm_app_implementation', 'http')
450 454 _string_setting(settings, 'vcs.server', '')
451 455 _string_setting(settings, 'vcs.server.log_level', 'debug')
452 456 _string_setting(settings, 'vcs.server.protocol', 'http')
453 457 _bool_setting(settings, 'startup.import_repos', 'false')
454 458 _bool_setting(settings, 'vcs.hooks.direct_calls', 'false')
455 459 _bool_setting(settings, 'vcs.server.enable', 'true')
456 460 _bool_setting(settings, 'vcs.start_server', 'false')
457 461 _list_setting(settings, 'vcs.backends', 'hg, git, svn')
458 462 _int_setting(settings, 'vcs.connection_timeout', 3600)
459 463
460 464 # Support legacy values of vcs.scm_app_implementation. Legacy
461 465 # configurations may use 'rhodecode.lib.middleware.utils.scm_app_http'
462 466 # which is now mapped to 'http'.
463 467 scm_app_impl = settings['vcs.scm_app_implementation']
464 468 if scm_app_impl == 'rhodecode.lib.middleware.utils.scm_app_http':
465 469 settings['vcs.scm_app_implementation'] = 'http'
466 470
467 471
468 472 def _int_setting(settings, name, default):
469 473 settings[name] = int(settings.get(name, default))
470 474
471 475
472 476 def _bool_setting(settings, name, default):
473 477 input = settings.get(name, default)
474 478 if isinstance(input, unicode):
475 479 input = input.encode('utf8')
476 480 settings[name] = asbool(input)
477 481
478 482
479 483 def _list_setting(settings, name, default):
480 484 raw_value = settings.get(name, default)
481 485
482 486 old_separator = ','
483 487 if old_separator in raw_value:
484 488 # If we get a comma separated list, pass it to our own function.
485 489 settings[name] = rhodecode_aslist(raw_value, sep=old_separator)
486 490 else:
487 491 # Otherwise we assume it uses pyramid's space/newline separation.
488 492 settings[name] = aslist(raw_value)
489 493
490 494
491 495 def _string_setting(settings, name, default, lower=True):
492 496 value = settings.get(name, default)
493 497 if lower:
494 498 value = value.lower()
495 499 settings[name] = value
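A short behaviour sketch of the setting helpers above (the input values are illustrative):

    settings = {'vcs.backends': 'hg, git', 'gzip_responses': 'true'}

    _list_setting(settings, 'vcs.backends', 'hg, git, svn')
    # comma separated input -> ['hg', 'git'] via rhodecode_aslist

    _bool_setting(settings, 'gzip_responses', 'false')
    # 'true' -> True via pyramid.settings.asbool

    _int_setting(settings, 'vcs.connection_timeout', 3600)
    # key missing -> default 3600 stored as an int

    _string_setting(settings, 'vcs.server.protocol', 'HTTP')
    # defaults are lower-cased -> 'http'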
@@ -1,1173 +1,1169 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Routes configuration
23 23
24 24 The more specific and detailed routes should be defined first so they
25 25 may take precedence over the more generic routes. For more information
26 26 refer to the routes manual at http://routes.groovie.org/docs/
27 27
28 28 IMPORTANT: if you change any routing here, make sure to take a look at lib/base.py
29 29 and _route_name variable which uses some of stored naming here to do redirects.
30 30 """
31 31 import os
32 32 import re
33 33 from routes import Mapper
34 34
35 35 from rhodecode.config import routing_links
36 36
37 37 # non repository related links need to be prefixed with `/`
38 38 ADMIN_PREFIX = '/_admin'
39 39 STATIC_FILE_PREFIX = '/_static'
40 40
41 41 # Default requirements for URL parts
42 42 URL_NAME_REQUIREMENTS = {
43 43 # group names can have a slash in them, but they must not end with a slash
44 44 'group_name': r'.*?[^/]',
45 45 'repo_group_name': r'.*?[^/]',
46 46 # repo names can have a slash in them, but they must not end with a slash
47 47 'repo_name': r'.*?[^/]',
48 48 # file path eats up everything at the end
49 49 'f_path': r'.*',
50 50 # reference types
51 51 'source_ref_type': '(branch|book|tag|rev|\%\(source_ref_type\)s)',
52 52 'target_ref_type': '(branch|book|tag|rev|\%\(target_ref_type\)s)',
53 53 }
54 54
55 55
56 56 def add_route_requirements(route_path, requirements):
57 57 """
58 58 Adds regex requirements to pyramid routes using a mapping dict
59 59
60 60 >>> add_route_requirements('/{action}/{id}', {'id': r'\d+'})
61 61 '/{action}/{id:\d+}'
62 62
63 63 """
64 64 for key, regex in requirements.items():
65 65 route_path = route_path.replace('{%s}' % key, '{%s:%s}' % (key, regex))
66 66 return route_path
67 67
68 68
69 69 class JSRoutesMapper(Mapper):
70 70 """
71 71 Wrapper for routes.Mapper to make pyroutes compatible url definitions
72 72 """
73 73 _named_route_regex = re.compile(r'^[a-z-_0-9A-Z]+$')
74 74 _argument_prog = re.compile('\{(.*?)\}|:\((.*)\)')
75 75 def __init__(self, *args, **kw):
76 76 super(JSRoutesMapper, self).__init__(*args, **kw)
77 77 self._jsroutes = []
78 78
79 79 def connect(self, *args, **kw):
80 80 """
81 81 Wrapper for connect to take an extra argument jsroute=True
82 82
83 83 :param jsroute: boolean, if True will add the route to the pyroutes list
84 84 """
85 85 if kw.pop('jsroute', False):
86 86 if not self._named_route_regex.match(args[0]):
87 87 raise Exception('only named routes can be added to pyroutes')
88 88 self._jsroutes.append(args[0])
89 89
90 90 super(JSRoutesMapper, self).connect(*args, **kw)
91 91
92 92 def _extract_route_information(self, route):
93 93 """
94 94 Convert a route into tuple(name, path, args), eg:
95 ('user_profile', '/profile/%(username)s', ['username'])
95 ('show_user', '/profile/%(username)s', ['username'])
96 96 """
97 97 routepath = route.routepath
98 98 def replace(matchobj):
99 99 if matchobj.group(1):
100 100 return "%%(%s)s" % matchobj.group(1).split(':')[0]
101 101 else:
102 102 return "%%(%s)s" % matchobj.group(2)
103 103
104 104 routepath = self._argument_prog.sub(replace, routepath)
105 105 return (
106 106 route.name,
107 107 routepath,
108 108 [(arg[0].split(':')[0] if arg[0] != '' else arg[1])
109 109 for arg in self._argument_prog.findall(route.routepath)]
110 110 )
111 111
112 112 def jsroutes(self):
113 113 """
114 114 Return a list of pyroutes.js compatible routes
115 115 """
116 116 for route_name in self._jsroutes:
117 117 yield self._extract_route_information(self._routenames[route_name])
118 118
119 119
120 120 def make_map(config):
121 121 """Create, configure and return the routes Mapper"""
122 122 rmap = JSRoutesMapper(directory=config['pylons.paths']['controllers'],
123 123 always_scan=config['debug'])
124 124 rmap.minimization = False
125 125 rmap.explicit = False
126 126
127 127 from rhodecode.lib.utils2 import str2bool
128 128 from rhodecode.model import repo, repo_group
129 129
130 130 def check_repo(environ, match_dict):
131 131 """
132 132 check for valid repository for proper 404 handling
133 133
134 134 :param environ:
135 135 :param match_dict:
136 136 """
137 137 repo_name = match_dict.get('repo_name')
138 138
139 139 if match_dict.get('f_path'):
140 140 # fix for multiple initial slashes that causes errors
141 141 match_dict['f_path'] = match_dict['f_path'].lstrip('/')
142 142 repo_model = repo.RepoModel()
143 143 by_name_match = repo_model.get_by_repo_name(repo_name)
144 144 # if we match quickly from database, short circuit the operation,
145 145 # and validate repo based on the type.
146 146 if by_name_match:
147 147 return True
148 148
149 149 by_id_match = repo_model.get_repo_by_id(repo_name)
150 150 if by_id_match:
151 151 repo_name = by_id_match.repo_name
152 152 match_dict['repo_name'] = repo_name
153 153 return True
154 154
155 155 return False
156 156
157 157 def check_group(environ, match_dict):
158 158 """
159 159 check for valid repository group path for proper 404 handling
160 160
161 161 :param environ:
162 162 :param match_dict:
163 163 """
164 164 repo_group_name = match_dict.get('group_name')
165 165 repo_group_model = repo_group.RepoGroupModel()
166 166 by_name_match = repo_group_model.get_by_group_name(repo_group_name)
167 167 if by_name_match:
168 168 return True
169 169
170 170 return False
171 171
172 172 def check_user_group(environ, match_dict):
173 173 """
174 174 check for valid user group for proper 404 handling
175 175
176 176 :param environ:
177 177 :param match_dict:
178 178 """
179 179 return True
180 180
181 181 def check_int(environ, match_dict):
182 182 return match_dict.get('id').isdigit()
183 183
184 184
185 185 #==========================================================================
186 186 # CUSTOM ROUTES HERE
187 187 #==========================================================================
188 188
189 189 # MAIN PAGE
190 190 rmap.connect('home', '/', controller='home', action='index', jsroute=True)
191 191 rmap.connect('goto_switcher_data', '/_goto_data', controller='home',
192 192 action='goto_switcher_data')
193 193 rmap.connect('repo_list_data', '/_repos', controller='home',
194 194 action='repo_list_data')
195 195
196 196 rmap.connect('user_autocomplete_data', '/_users', controller='home',
197 197 action='user_autocomplete_data', jsroute=True)
198 198 rmap.connect('user_group_autocomplete_data', '/_user_groups', controller='home',
199 199 action='user_group_autocomplete_data', jsroute=True)
200 200
201 rmap.connect(
202 'user_profile', '/_profiles/{username}', controller='users',
203 action='user_profile')
204
205 201 # TODO: johbo: Static links, to be replaced by our redirection mechanism
206 202 rmap.connect('rst_help',
207 203 'http://docutils.sourceforge.net/docs/user/rst/quickref.html',
208 204 _static=True)
209 205 rmap.connect('markdown_help',
210 206 'http://daringfireball.net/projects/markdown/syntax',
211 207 _static=True)
212 208 rmap.connect('rhodecode_official', 'https://rhodecode.com', _static=True)
213 209 rmap.connect('rhodecode_support', 'https://rhodecode.com/help/', _static=True)
214 210 rmap.connect('rhodecode_translations', 'https://rhodecode.com/translate/enterprise', _static=True)
215 211 # TODO: anderson - making this a static link since redirect won't play
216 212 # nice with POST requests
217 213 rmap.connect('enterprise_license_convert_from_old',
218 214 'https://rhodecode.com/u/license-upgrade',
219 215 _static=True)
220 216
221 217 routing_links.connect_redirection_links(rmap)
222 218
223 219 rmap.connect('ping', '%s/ping' % (ADMIN_PREFIX,), controller='home', action='ping')
224 220 rmap.connect('error_test', '%s/error_test' % (ADMIN_PREFIX,), controller='home', action='error_test')
225 221
226 222 # ADMIN REPOSITORY ROUTES
227 223 with rmap.submapper(path_prefix=ADMIN_PREFIX,
228 224 controller='admin/repos') as m:
229 225 m.connect('repos', '/repos',
230 226 action='create', conditions={'method': ['POST']})
231 227 m.connect('repos', '/repos',
232 228 action='index', conditions={'method': ['GET']})
233 229 m.connect('new_repo', '/create_repository', jsroute=True,
234 230 action='create_repository', conditions={'method': ['GET']})
235 231 m.connect('/repos/{repo_name}',
236 232 action='update', conditions={'method': ['PUT'],
237 233 'function': check_repo},
238 234 requirements=URL_NAME_REQUIREMENTS)
239 235 m.connect('delete_repo', '/repos/{repo_name}',
240 236 action='delete', conditions={'method': ['DELETE']},
241 237 requirements=URL_NAME_REQUIREMENTS)
242 238 m.connect('repo', '/repos/{repo_name}',
243 239 action='show', conditions={'method': ['GET'],
244 240 'function': check_repo},
245 241 requirements=URL_NAME_REQUIREMENTS)
246 242
247 243 # ADMIN REPOSITORY GROUPS ROUTES
248 244 with rmap.submapper(path_prefix=ADMIN_PREFIX,
249 245 controller='admin/repo_groups') as m:
250 246 m.connect('repo_groups', '/repo_groups',
251 247 action='create', conditions={'method': ['POST']})
252 248 m.connect('repo_groups', '/repo_groups',
253 249 action='index', conditions={'method': ['GET']})
254 250 m.connect('new_repo_group', '/repo_groups/new',
255 251 action='new', conditions={'method': ['GET']})
256 252 m.connect('update_repo_group', '/repo_groups/{group_name}',
257 253 action='update', conditions={'method': ['PUT'],
258 254 'function': check_group},
259 255 requirements=URL_NAME_REQUIREMENTS)
260 256
261 257 # EXTRAS REPO GROUP ROUTES
262 258 m.connect('edit_repo_group', '/repo_groups/{group_name}/edit',
263 259 action='edit',
264 260 conditions={'method': ['GET'], 'function': check_group},
265 261 requirements=URL_NAME_REQUIREMENTS)
266 262 m.connect('edit_repo_group', '/repo_groups/{group_name}/edit',
267 263 action='edit',
268 264 conditions={'method': ['PUT'], 'function': check_group},
269 265 requirements=URL_NAME_REQUIREMENTS)
270 266
271 267 m.connect('edit_repo_group_advanced', '/repo_groups/{group_name}/edit/advanced',
272 268 action='edit_repo_group_advanced',
273 269 conditions={'method': ['GET'], 'function': check_group},
274 270 requirements=URL_NAME_REQUIREMENTS)
275 271 m.connect('edit_repo_group_advanced', '/repo_groups/{group_name}/edit/advanced',
276 272 action='edit_repo_group_advanced',
277 273 conditions={'method': ['PUT'], 'function': check_group},
278 274 requirements=URL_NAME_REQUIREMENTS)
279 275
280 276 m.connect('edit_repo_group_perms', '/repo_groups/{group_name}/edit/permissions',
281 277 action='edit_repo_group_perms',
282 278 conditions={'method': ['GET'], 'function': check_group},
283 279 requirements=URL_NAME_REQUIREMENTS)
284 280 m.connect('edit_repo_group_perms', '/repo_groups/{group_name}/edit/permissions',
285 281 action='update_perms',
286 282 conditions={'method': ['PUT'], 'function': check_group},
287 283 requirements=URL_NAME_REQUIREMENTS)
288 284
289 285 m.connect('delete_repo_group', '/repo_groups/{group_name}',
290 286 action='delete', conditions={'method': ['DELETE'],
291 287 'function': check_group},
292 288 requirements=URL_NAME_REQUIREMENTS)
293 289
294 290 # ADMIN USER ROUTES
295 291 with rmap.submapper(path_prefix=ADMIN_PREFIX,
296 292 controller='admin/users') as m:
297 293 m.connect('users', '/users',
298 294 action='create', conditions={'method': ['POST']})
299 295 m.connect('users', '/users',
300 296 action='index', conditions={'method': ['GET']})
301 297 m.connect('new_user', '/users/new',
302 298 action='new', conditions={'method': ['GET']})
303 299 m.connect('update_user', '/users/{user_id}',
304 300 action='update', conditions={'method': ['PUT']})
305 301 m.connect('delete_user', '/users/{user_id}',
306 302 action='delete', conditions={'method': ['DELETE']})
307 303 m.connect('edit_user', '/users/{user_id}/edit',
308 304 action='edit', conditions={'method': ['GET']}, jsroute=True)
309 305 m.connect('user', '/users/{user_id}',
310 306 action='show', conditions={'method': ['GET']})
311 307 m.connect('force_password_reset_user', '/users/{user_id}/password_reset',
312 308 action='reset_password', conditions={'method': ['POST']})
313 309 m.connect('create_personal_repo_group', '/users/{user_id}/create_repo_group',
314 310 action='create_personal_repo_group', conditions={'method': ['POST']})
315 311
316 312 # EXTRAS USER ROUTES
317 313 m.connect('edit_user_advanced', '/users/{user_id}/edit/advanced',
318 314 action='edit_advanced', conditions={'method': ['GET']})
319 315 m.connect('edit_user_advanced', '/users/{user_id}/edit/advanced',
320 316 action='update_advanced', conditions={'method': ['PUT']})
321 317
322 318 m.connect('edit_user_auth_tokens', '/users/{user_id}/edit/auth_tokens',
323 319 action='edit_auth_tokens', conditions={'method': ['GET']})
324 320 m.connect('edit_user_auth_tokens', '/users/{user_id}/edit/auth_tokens',
325 321 action='add_auth_token', conditions={'method': ['PUT']})
326 322 m.connect('edit_user_auth_tokens', '/users/{user_id}/edit/auth_tokens',
327 323 action='delete_auth_token', conditions={'method': ['DELETE']})
328 324
329 325 m.connect('edit_user_global_perms', '/users/{user_id}/edit/global_permissions',
330 326 action='edit_global_perms', conditions={'method': ['GET']})
331 327 m.connect('edit_user_global_perms', '/users/{user_id}/edit/global_permissions',
332 328 action='update_global_perms', conditions={'method': ['PUT']})
333 329
334 330 m.connect('edit_user_perms_summary', '/users/{user_id}/edit/permissions_summary',
335 331 action='edit_perms_summary', conditions={'method': ['GET']})
336 332
337 333 m.connect('edit_user_emails', '/users/{user_id}/edit/emails',
338 334 action='edit_emails', conditions={'method': ['GET']})
339 335 m.connect('edit_user_emails', '/users/{user_id}/edit/emails',
340 336 action='add_email', conditions={'method': ['PUT']})
341 337 m.connect('edit_user_emails', '/users/{user_id}/edit/emails',
342 338 action='delete_email', conditions={'method': ['DELETE']})
343 339
344 340 m.connect('edit_user_ips', '/users/{user_id}/edit/ips',
345 341 action='edit_ips', conditions={'method': ['GET']})
346 342 m.connect('edit_user_ips', '/users/{user_id}/edit/ips',
347 343 action='add_ip', conditions={'method': ['PUT']})
348 344 m.connect('edit_user_ips', '/users/{user_id}/edit/ips',
349 345 action='delete_ip', conditions={'method': ['DELETE']})
350 346
351 347 # ADMIN USER GROUPS REST ROUTES
352 348 with rmap.submapper(path_prefix=ADMIN_PREFIX,
353 349 controller='admin/user_groups') as m:
354 350 m.connect('users_groups', '/user_groups',
355 351 action='create', conditions={'method': ['POST']})
356 352 m.connect('users_groups', '/user_groups',
357 353 action='index', conditions={'method': ['GET']})
358 354 m.connect('new_users_group', '/user_groups/new',
359 355 action='new', conditions={'method': ['GET']})
360 356 m.connect('update_users_group', '/user_groups/{user_group_id}',
361 357 action='update', conditions={'method': ['PUT']})
362 358 m.connect('delete_users_group', '/user_groups/{user_group_id}',
363 359 action='delete', conditions={'method': ['DELETE']})
364 360 m.connect('edit_users_group', '/user_groups/{user_group_id}/edit',
365 361 action='edit', conditions={'method': ['GET']},
366 362 function=check_user_group)
367 363
368 364 # EXTRAS USER GROUP ROUTES
369 365 m.connect('edit_user_group_global_perms',
370 366 '/user_groups/{user_group_id}/edit/global_permissions',
371 367 action='edit_global_perms', conditions={'method': ['GET']})
372 368 m.connect('edit_user_group_global_perms',
373 369 '/user_groups/{user_group_id}/edit/global_permissions',
374 370 action='update_global_perms', conditions={'method': ['PUT']})
375 371 m.connect('edit_user_group_perms_summary',
376 372 '/user_groups/{user_group_id}/edit/permissions_summary',
377 373 action='edit_perms_summary', conditions={'method': ['GET']})
378 374
379 375 m.connect('edit_user_group_perms',
380 376 '/user_groups/{user_group_id}/edit/permissions',
381 377 action='edit_perms', conditions={'method': ['GET']})
382 378 m.connect('edit_user_group_perms',
383 379 '/user_groups/{user_group_id}/edit/permissions',
384 380 action='update_perms', conditions={'method': ['PUT']})
385 381
386 382 m.connect('edit_user_group_advanced',
387 383 '/user_groups/{user_group_id}/edit/advanced',
388 384 action='edit_advanced', conditions={'method': ['GET']})
389 385
390 386 m.connect('edit_user_group_members',
391 387 '/user_groups/{user_group_id}/edit/members', jsroute=True,
392 388 action='user_group_members', conditions={'method': ['GET']})
393 389
394 390 # ADMIN PERMISSIONS ROUTES
395 391 with rmap.submapper(path_prefix=ADMIN_PREFIX,
396 392 controller='admin/permissions') as m:
397 393 m.connect('admin_permissions_application', '/permissions/application',
398 394 action='permission_application_update', conditions={'method': ['POST']})
399 395 m.connect('admin_permissions_application', '/permissions/application',
400 396 action='permission_application', conditions={'method': ['GET']})
401 397
402 398 m.connect('admin_permissions_global', '/permissions/global',
403 399 action='permission_global_update', conditions={'method': ['POST']})
404 400 m.connect('admin_permissions_global', '/permissions/global',
405 401 action='permission_global', conditions={'method': ['GET']})
406 402
407 403 m.connect('admin_permissions_object', '/permissions/object',
408 404 action='permission_objects_update', conditions={'method': ['POST']})
409 405 m.connect('admin_permissions_object', '/permissions/object',
410 406 action='permission_objects', conditions={'method': ['GET']})
411 407
412 408 m.connect('admin_permissions_ips', '/permissions/ips',
413 409 action='permission_ips', conditions={'method': ['POST']})
414 410 m.connect('admin_permissions_ips', '/permissions/ips',
415 411 action='permission_ips', conditions={'method': ['GET']})
416 412
417 413 m.connect('admin_permissions_overview', '/permissions/overview',
418 414 action='permission_perms', conditions={'method': ['GET']})
419 415
420 416 # ADMIN DEFAULTS REST ROUTES
421 417 with rmap.submapper(path_prefix=ADMIN_PREFIX,
422 418 controller='admin/defaults') as m:
423 419 m.connect('admin_defaults_repositories', '/defaults/repositories',
424 420 action='update_repository_defaults', conditions={'method': ['POST']})
425 421 m.connect('admin_defaults_repositories', '/defaults/repositories',
426 422 action='index', conditions={'method': ['GET']})
427 423
428 424 # ADMIN DEBUG STYLE ROUTES
429 425 if str2bool(config.get('debug_style')):
430 426 with rmap.submapper(path_prefix=ADMIN_PREFIX + '/debug_style',
431 427 controller='debug_style') as m:
432 428 m.connect('debug_style_home', '',
433 429 action='index', conditions={'method': ['GET']})
434 430 m.connect('debug_style_template', '/t/{t_path}',
435 431 action='template', conditions={'method': ['GET']})
436 432
437 433 # ADMIN SETTINGS ROUTES
438 434 with rmap.submapper(path_prefix=ADMIN_PREFIX,
439 435 controller='admin/settings') as m:
440 436
441 437 # default
442 438 m.connect('admin_settings', '/settings',
443 439 action='settings_global_update',
444 440 conditions={'method': ['POST']})
445 441 m.connect('admin_settings', '/settings',
446 442 action='settings_global', conditions={'method': ['GET']})
447 443
448 444 m.connect('admin_settings_vcs', '/settings/vcs',
449 445 action='settings_vcs_update',
450 446 conditions={'method': ['POST']})
451 447 m.connect('admin_settings_vcs', '/settings/vcs',
452 448 action='settings_vcs',
453 449 conditions={'method': ['GET']})
454 450 m.connect('admin_settings_vcs', '/settings/vcs',
455 451 action='delete_svn_pattern',
456 452 conditions={'method': ['DELETE']})
457 453
458 454 m.connect('admin_settings_mapping', '/settings/mapping',
459 455 action='settings_mapping_update',
460 456 conditions={'method': ['POST']})
461 457 m.connect('admin_settings_mapping', '/settings/mapping',
462 458 action='settings_mapping', conditions={'method': ['GET']})
463 459
464 460 m.connect('admin_settings_global', '/settings/global',
465 461 action='settings_global_update',
466 462 conditions={'method': ['POST']})
467 463 m.connect('admin_settings_global', '/settings/global',
468 464 action='settings_global', conditions={'method': ['GET']})
469 465
470 466 m.connect('admin_settings_visual', '/settings/visual',
471 467 action='settings_visual_update',
472 468 conditions={'method': ['POST']})
473 469 m.connect('admin_settings_visual', '/settings/visual',
474 470 action='settings_visual', conditions={'method': ['GET']})
475 471
476 472 m.connect('admin_settings_issuetracker',
477 473 '/settings/issue-tracker', action='settings_issuetracker',
478 474 conditions={'method': ['GET']})
479 475 m.connect('admin_settings_issuetracker_save',
480 476 '/settings/issue-tracker/save',
481 477 action='settings_issuetracker_save',
482 478 conditions={'method': ['POST']})
483 479 m.connect('admin_issuetracker_test', '/settings/issue-tracker/test',
484 480 action='settings_issuetracker_test',
485 481 conditions={'method': ['POST']})
486 482 m.connect('admin_issuetracker_delete',
487 483 '/settings/issue-tracker/delete',
488 484 action='settings_issuetracker_delete',
489 485 conditions={'method': ['DELETE']})
490 486
491 487 m.connect('admin_settings_email', '/settings/email',
492 488 action='settings_email_update',
493 489 conditions={'method': ['POST']})
494 490 m.connect('admin_settings_email', '/settings/email',
495 491 action='settings_email', conditions={'method': ['GET']})
496 492
497 493 m.connect('admin_settings_hooks', '/settings/hooks',
498 494 action='settings_hooks_update',
499 495 conditions={'method': ['POST', 'DELETE']})
500 496 m.connect('admin_settings_hooks', '/settings/hooks',
501 497 action='settings_hooks', conditions={'method': ['GET']})
502 498
503 499 m.connect('admin_settings_search', '/settings/search',
504 500 action='settings_search', conditions={'method': ['GET']})
505 501
506 502 m.connect('admin_settings_supervisor', '/settings/supervisor',
507 503 action='settings_supervisor', conditions={'method': ['GET']})
508 504 m.connect('admin_settings_supervisor_log', '/settings/supervisor/{procid}/log',
509 505 action='settings_supervisor_log', conditions={'method': ['GET']})
510 506
511 507 m.connect('admin_settings_labs', '/settings/labs',
512 508 action='settings_labs_update',
513 509 conditions={'method': ['POST']})
514 510 m.connect('admin_settings_labs', '/settings/labs',
515 511 action='settings_labs', conditions={'method': ['GET']})
516 512
517 513 # ADMIN MY ACCOUNT
518 514 with rmap.submapper(path_prefix=ADMIN_PREFIX,
519 515 controller='admin/my_account') as m:
520 516
521 517 m.connect('my_account', '/my_account',
522 518 action='my_account', conditions={'method': ['GET']})
523 519 m.connect('my_account_edit', '/my_account/edit',
524 520 action='my_account_edit', conditions={'method': ['GET']})
525 521 m.connect('my_account', '/my_account',
526 522 action='my_account_update', conditions={'method': ['POST']})
527 523
528 524 m.connect('my_account_password', '/my_account/password',
529 525 action='my_account_password', conditions={'method': ['GET', 'POST']})
530 526
531 527 m.connect('my_account_repos', '/my_account/repos',
532 528 action='my_account_repos', conditions={'method': ['GET']})
533 529
534 530 m.connect('my_account_watched', '/my_account/watched',
535 531 action='my_account_watched', conditions={'method': ['GET']})
536 532
537 533 m.connect('my_account_pullrequests', '/my_account/pull_requests',
538 534 action='my_account_pullrequests', conditions={'method': ['GET']})
539 535
540 536 m.connect('my_account_perms', '/my_account/perms',
541 537 action='my_account_perms', conditions={'method': ['GET']})
542 538
543 539 m.connect('my_account_emails', '/my_account/emails',
544 540 action='my_account_emails', conditions={'method': ['GET']})
545 541 m.connect('my_account_emails', '/my_account/emails',
546 542 action='my_account_emails_add', conditions={'method': ['POST']})
547 543 m.connect('my_account_emails', '/my_account/emails',
548 544 action='my_account_emails_delete', conditions={'method': ['DELETE']})
549 545
550 546 m.connect('my_account_auth_tokens', '/my_account/auth_tokens',
551 547 action='my_account_auth_tokens', conditions={'method': ['GET']})
552 548 m.connect('my_account_auth_tokens', '/my_account/auth_tokens',
553 549 action='my_account_auth_tokens_add', conditions={'method': ['POST']})
554 550 m.connect('my_account_auth_tokens', '/my_account/auth_tokens',
555 551 action='my_account_auth_tokens_delete', conditions={'method': ['DELETE']})
556 552 m.connect('my_account_notifications', '/my_account/notifications',
557 553 action='my_notifications',
558 554 conditions={'method': ['GET']})
559 555 m.connect('my_account_notifications_toggle_visibility',
560 556 '/my_account/toggle_visibility',
561 557 action='my_notifications_toggle_visibility',
562 558 conditions={'method': ['POST']})
563 559 m.connect('my_account_notifications_test_channelstream',
564 560 '/my_account/test_channelstream',
565 561 action='my_account_notifications_test_channelstream',
566 562 conditions={'method': ['POST']})
567 563
568 564 # NOTIFICATION REST ROUTES
569 565 with rmap.submapper(path_prefix=ADMIN_PREFIX,
570 566 controller='admin/notifications') as m:
571 567 m.connect('notifications', '/notifications',
572 568 action='index', conditions={'method': ['GET']})
573 569 m.connect('notifications_mark_all_read', '/notifications/mark_all_read',
574 570 action='mark_all_read', conditions={'method': ['POST']})
575 571 m.connect('/notifications/{notification_id}',
576 572 action='update', conditions={'method': ['PUT']})
577 573 m.connect('/notifications/{notification_id}',
578 574 action='delete', conditions={'method': ['DELETE']})
579 575 m.connect('notification', '/notifications/{notification_id}',
580 576 action='show', conditions={'method': ['GET']})
581 577
582 578 # ADMIN GIST
583 579 with rmap.submapper(path_prefix=ADMIN_PREFIX,
584 580 controller='admin/gists') as m:
585 581 m.connect('gists', '/gists',
586 582 action='create', conditions={'method': ['POST']})
587 583 m.connect('gists', '/gists', jsroute=True,
588 584 action='index', conditions={'method': ['GET']})
589 585 m.connect('new_gist', '/gists/new', jsroute=True,
590 586 action='new', conditions={'method': ['GET']})
591 587
592 588 m.connect('/gists/{gist_id}',
593 589 action='delete', conditions={'method': ['DELETE']})
594 590 m.connect('edit_gist', '/gists/{gist_id}/edit',
595 591 action='edit_form', conditions={'method': ['GET']})
596 592 m.connect('edit_gist', '/gists/{gist_id}/edit',
597 593 action='edit', conditions={'method': ['POST']})
598 594 m.connect(
599 595 'edit_gist_check_revision', '/gists/{gist_id}/edit/check_revision',
600 596 action='check_revision', conditions={'method': ['GET']})
601 597
602 598 m.connect('gist', '/gists/{gist_id}',
603 599 action='show', conditions={'method': ['GET']})
604 600 m.connect('gist_rev', '/gists/{gist_id}/{revision}',
605 601 revision='tip',
606 602 action='show', conditions={'method': ['GET']})
607 603 m.connect('formatted_gist', '/gists/{gist_id}/{revision}/{format}',
608 604 revision='tip',
609 605 action='show', conditions={'method': ['GET']})
610 606 m.connect('formatted_gist_file', '/gists/{gist_id}/{revision}/{format}/{f_path}',
611 607 revision='tip',
612 608 action='show', conditions={'method': ['GET']},
613 609 requirements=URL_NAME_REQUIREMENTS)
614 610
615 611 # ADMIN MAIN PAGES
616 612 with rmap.submapper(path_prefix=ADMIN_PREFIX,
617 613 controller='admin/admin') as m:
618 614 m.connect('admin_home', '', action='index')
619 615 m.connect('admin_add_repo', '/add_repo/{new_repo:[a-z0-9\. _-]*}',
620 616 action='add_repo')
621 617 m.connect(
622 618 'pull_requests_global_0', '/pull_requests/{pull_request_id:[0-9]+}',
623 619 action='pull_requests')
624 620 m.connect(
625 621 'pull_requests_global_1', '/pull-requests/{pull_request_id:[0-9]+}',
626 622 action='pull_requests')
627 623 m.connect(
628 624 'pull_requests_global', '/pull-request/{pull_request_id:[0-9]+}',
629 625 action='pull_requests')
630 626
631 627 # USER JOURNAL
632 628 rmap.connect('journal', '%s/journal' % (ADMIN_PREFIX,),
633 629 controller='journal', action='index')
634 630 rmap.connect('journal_rss', '%s/journal/rss' % (ADMIN_PREFIX,),
635 631 controller='journal', action='journal_rss')
636 632 rmap.connect('journal_atom', '%s/journal/atom' % (ADMIN_PREFIX,),
637 633 controller='journal', action='journal_atom')
638 634
639 635 rmap.connect('public_journal', '%s/public_journal' % (ADMIN_PREFIX,),
640 636 controller='journal', action='public_journal')
641 637
642 638 rmap.connect('public_journal_rss', '%s/public_journal/rss' % (ADMIN_PREFIX,),
643 639 controller='journal', action='public_journal_rss')
644 640
645 641 rmap.connect('public_journal_rss_old', '%s/public_journal_rss' % (ADMIN_PREFIX,),
646 642 controller='journal', action='public_journal_rss')
647 643
648 644 rmap.connect('public_journal_atom',
649 645 '%s/public_journal/atom' % (ADMIN_PREFIX,), controller='journal',
650 646 action='public_journal_atom')
651 647
652 648 rmap.connect('public_journal_atom_old',
653 649 '%s/public_journal_atom' % (ADMIN_PREFIX,), controller='journal',
654 650 action='public_journal_atom')
655 651
656 652 rmap.connect('toggle_following', '%s/toggle_following' % (ADMIN_PREFIX,),
657 653 controller='journal', action='toggle_following', jsroute=True,
658 654 conditions={'method': ['POST']})
659 655
660 656 # FULL TEXT SEARCH
661 657 rmap.connect('search', '%s/search' % (ADMIN_PREFIX,),
662 658 controller='search')
663 659 rmap.connect('search_repo_home', '/{repo_name}/search',
664 660 controller='search',
665 661 action='index',
666 662 conditions={'function': check_repo},
667 663 requirements=URL_NAME_REQUIREMENTS)
668 664
669 665 # FEEDS
670 666 rmap.connect('rss_feed_home', '/{repo_name}/feed/rss',
671 667 controller='feed', action='rss',
672 668 conditions={'function': check_repo},
673 669 requirements=URL_NAME_REQUIREMENTS)
674 670
675 671 rmap.connect('atom_feed_home', '/{repo_name}/feed/atom',
676 672 controller='feed', action='atom',
677 673 conditions={'function': check_repo},
678 674 requirements=URL_NAME_REQUIREMENTS)
679 675
680 676 #==========================================================================
681 677 # REPOSITORY ROUTES
682 678 #==========================================================================
683 679
684 680 rmap.connect('repo_creating_home', '/{repo_name}/repo_creating',
685 681 controller='admin/repos', action='repo_creating',
686 682 requirements=URL_NAME_REQUIREMENTS)
687 683 rmap.connect('repo_check_home', '/{repo_name}/crepo_check',
688 684 controller='admin/repos', action='repo_check',
689 685 requirements=URL_NAME_REQUIREMENTS)
690 686
691 687 rmap.connect('repo_stats', '/{repo_name}/repo_stats/{commit_id}',
692 688 controller='summary', action='repo_stats',
693 689 conditions={'function': check_repo},
694 690 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
695 691
696 692 rmap.connect('repo_refs_data', '/{repo_name}/refs-data',
697 693 controller='summary', action='repo_refs_data',
698 694 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
699 695 rmap.connect('repo_refs_changelog_data', '/{repo_name}/refs-data-changelog',
700 696 controller='summary', action='repo_refs_changelog_data',
701 697 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
702 698 rmap.connect('repo_default_reviewers_data', '/{repo_name}/default-reviewers',
703 699 controller='summary', action='repo_default_reviewers_data',
704 700 jsroute=True, requirements=URL_NAME_REQUIREMENTS)
705 701
706 702 rmap.connect('changeset_home', '/{repo_name}/changeset/{revision}',
707 703 controller='changeset', revision='tip',
708 704 conditions={'function': check_repo},
709 705 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
710 706 rmap.connect('changeset_children', '/{repo_name}/changeset_children/{revision}',
711 707 controller='changeset', revision='tip', action='changeset_children',
712 708 conditions={'function': check_repo},
713 709 requirements=URL_NAME_REQUIREMENTS)
714 710 rmap.connect('changeset_parents', '/{repo_name}/changeset_parents/{revision}',
715 711 controller='changeset', revision='tip', action='changeset_parents',
716 712 conditions={'function': check_repo},
717 713 requirements=URL_NAME_REQUIREMENTS)
718 714
719 715 # repo edit options
720 716 rmap.connect('edit_repo', '/{repo_name}/settings', jsroute=True,
721 717 controller='admin/repos', action='edit',
722 718 conditions={'method': ['GET'], 'function': check_repo},
723 719 requirements=URL_NAME_REQUIREMENTS)
724 720
725 721 rmap.connect('edit_repo_perms', '/{repo_name}/settings/permissions',
726 722 jsroute=True,
727 723 controller='admin/repos', action='edit_permissions',
728 724 conditions={'method': ['GET'], 'function': check_repo},
729 725 requirements=URL_NAME_REQUIREMENTS)
730 726 rmap.connect('edit_repo_perms_update', '/{repo_name}/settings/permissions',
731 727 controller='admin/repos', action='edit_permissions_update',
732 728 conditions={'method': ['PUT'], 'function': check_repo},
733 729 requirements=URL_NAME_REQUIREMENTS)
734 730
735 731 rmap.connect('edit_repo_fields', '/{repo_name}/settings/fields',
736 732 controller='admin/repos', action='edit_fields',
737 733 conditions={'method': ['GET'], 'function': check_repo},
738 734 requirements=URL_NAME_REQUIREMENTS)
739 735 rmap.connect('create_repo_fields', '/{repo_name}/settings/fields/new',
740 736 controller='admin/repos', action='create_repo_field',
741 737 conditions={'method': ['PUT'], 'function': check_repo},
742 738 requirements=URL_NAME_REQUIREMENTS)
743 739 rmap.connect('delete_repo_fields', '/{repo_name}/settings/fields/{field_id}',
744 740 controller='admin/repos', action='delete_repo_field',
745 741 conditions={'method': ['DELETE'], 'function': check_repo},
746 742 requirements=URL_NAME_REQUIREMENTS)
747 743
748 744 rmap.connect('edit_repo_advanced', '/{repo_name}/settings/advanced',
749 745 controller='admin/repos', action='edit_advanced',
750 746 conditions={'method': ['GET'], 'function': check_repo},
751 747 requirements=URL_NAME_REQUIREMENTS)
752 748
753 749 rmap.connect('edit_repo_advanced_locking', '/{repo_name}/settings/advanced/locking',
754 750 controller='admin/repos', action='edit_advanced_locking',
755 751 conditions={'method': ['PUT'], 'function': check_repo},
756 752 requirements=URL_NAME_REQUIREMENTS)
757 753 rmap.connect('toggle_locking', '/{repo_name}/settings/advanced/locking_toggle',
758 754 controller='admin/repos', action='toggle_locking',
759 755 conditions={'method': ['GET'], 'function': check_repo},
760 756 requirements=URL_NAME_REQUIREMENTS)
761 757
762 758 rmap.connect('edit_repo_advanced_journal', '/{repo_name}/settings/advanced/journal',
763 759 controller='admin/repos', action='edit_advanced_journal',
764 760 conditions={'method': ['PUT'], 'function': check_repo},
765 761 requirements=URL_NAME_REQUIREMENTS)
766 762
767 763 rmap.connect('edit_repo_advanced_fork', '/{repo_name}/settings/advanced/fork',
768 764 controller='admin/repos', action='edit_advanced_fork',
769 765 conditions={'method': ['PUT'], 'function': check_repo},
770 766 requirements=URL_NAME_REQUIREMENTS)
771 767
772 768 rmap.connect('edit_repo_caches', '/{repo_name}/settings/caches',
773 769 controller='admin/repos', action='edit_caches_form',
774 770 conditions={'method': ['GET'], 'function': check_repo},
775 771 requirements=URL_NAME_REQUIREMENTS)
776 772 rmap.connect('edit_repo_caches', '/{repo_name}/settings/caches',
777 773 controller='admin/repos', action='edit_caches',
778 774 conditions={'method': ['PUT'], 'function': check_repo},
779 775 requirements=URL_NAME_REQUIREMENTS)
780 776
781 777 rmap.connect('edit_repo_remote', '/{repo_name}/settings/remote',
782 778 controller='admin/repos', action='edit_remote_form',
783 779 conditions={'method': ['GET'], 'function': check_repo},
784 780 requirements=URL_NAME_REQUIREMENTS)
785 781 rmap.connect('edit_repo_remote', '/{repo_name}/settings/remote',
786 782 controller='admin/repos', action='edit_remote',
787 783 conditions={'method': ['PUT'], 'function': check_repo},
788 784 requirements=URL_NAME_REQUIREMENTS)
789 785
790 786 rmap.connect('edit_repo_statistics', '/{repo_name}/settings/statistics',
791 787 controller='admin/repos', action='edit_statistics_form',
792 788 conditions={'method': ['GET'], 'function': check_repo},
793 789 requirements=URL_NAME_REQUIREMENTS)
794 790 rmap.connect('edit_repo_statistics', '/{repo_name}/settings/statistics',
795 791 controller='admin/repos', action='edit_statistics',
796 792 conditions={'method': ['PUT'], 'function': check_repo},
797 793 requirements=URL_NAME_REQUIREMENTS)
798 794 rmap.connect('repo_settings_issuetracker',
799 795 '/{repo_name}/settings/issue-tracker',
800 796 controller='admin/repos', action='repo_issuetracker',
801 797 conditions={'method': ['GET'], 'function': check_repo},
802 798 requirements=URL_NAME_REQUIREMENTS)
803 799 rmap.connect('repo_issuetracker_test',
804 800 '/{repo_name}/settings/issue-tracker/test',
805 801 controller='admin/repos', action='repo_issuetracker_test',
806 802 conditions={'method': ['POST'], 'function': check_repo},
807 803 requirements=URL_NAME_REQUIREMENTS)
808 804 rmap.connect('repo_issuetracker_delete',
809 805 '/{repo_name}/settings/issue-tracker/delete',
810 806 controller='admin/repos', action='repo_issuetracker_delete',
811 807 conditions={'method': ['DELETE'], 'function': check_repo},
812 808 requirements=URL_NAME_REQUIREMENTS)
813 809 rmap.connect('repo_issuetracker_save',
814 810 '/{repo_name}/settings/issue-tracker/save',
815 811 controller='admin/repos', action='repo_issuetracker_save',
816 812 conditions={'method': ['POST'], 'function': check_repo},
817 813 requirements=URL_NAME_REQUIREMENTS)
818 814 rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs',
819 815 controller='admin/repos', action='repo_settings_vcs_update',
820 816 conditions={'method': ['POST'], 'function': check_repo},
821 817 requirements=URL_NAME_REQUIREMENTS)
822 818 rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs',
823 819 controller='admin/repos', action='repo_settings_vcs',
824 820 conditions={'method': ['GET'], 'function': check_repo},
825 821 requirements=URL_NAME_REQUIREMENTS)
826 822 rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs',
827 823 controller='admin/repos', action='repo_delete_svn_pattern',
828 824 conditions={'method': ['DELETE'], 'function': check_repo},
829 825 requirements=URL_NAME_REQUIREMENTS)
830 826 rmap.connect('repo_pullrequest_settings', '/{repo_name}/settings/pullrequest',
831 827 controller='admin/repos', action='repo_settings_pullrequest',
832 828 conditions={'method': ['GET', 'POST'], 'function': check_repo},
833 829 requirements=URL_NAME_REQUIREMENTS)
834 830
835 831 # still-working URL kept for backward compatibility
836 832 rmap.connect('raw_changeset_home_depraced',
837 833 '/{repo_name}/raw-changeset/{revision}',
838 834 controller='changeset', action='changeset_raw',
839 835 revision='tip', conditions={'function': check_repo},
840 836 requirements=URL_NAME_REQUIREMENTS)
841 837
842 838 # new URLs
843 839 rmap.connect('changeset_raw_home',
844 840 '/{repo_name}/changeset-diff/{revision}',
845 841 controller='changeset', action='changeset_raw',
846 842 revision='tip', conditions={'function': check_repo},
847 843 requirements=URL_NAME_REQUIREMENTS)
848 844
849 845 rmap.connect('changeset_patch_home',
850 846 '/{repo_name}/changeset-patch/{revision}',
851 847 controller='changeset', action='changeset_patch',
852 848 revision='tip', conditions={'function': check_repo},
853 849 requirements=URL_NAME_REQUIREMENTS)
854 850
855 851 rmap.connect('changeset_download_home',
856 852 '/{repo_name}/changeset-download/{revision}',
857 853 controller='changeset', action='changeset_download',
858 854 revision='tip', conditions={'function': check_repo},
859 855 requirements=URL_NAME_REQUIREMENTS)
860 856
861 857 rmap.connect('changeset_comment',
862 858 '/{repo_name}/changeset/{revision}/comment', jsroute=True,
863 859 controller='changeset', revision='tip', action='comment',
864 860 conditions={'function': check_repo},
865 861 requirements=URL_NAME_REQUIREMENTS)
866 862
867 863 rmap.connect('changeset_comment_preview',
868 864 '/{repo_name}/changeset/comment/preview', jsroute=True,
869 865 controller='changeset', action='preview_comment',
870 866 conditions={'function': check_repo, 'method': ['POST']},
871 867 requirements=URL_NAME_REQUIREMENTS)
872 868
873 869 rmap.connect('changeset_comment_delete',
874 870 '/{repo_name}/changeset/comment/{comment_id}/delete',
875 871 controller='changeset', action='delete_comment',
876 872 conditions={'function': check_repo, 'method': ['DELETE']},
877 873 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
878 874
879 875 rmap.connect('changeset_info', '/{repo_name}/changeset_info/{revision}',
880 876 controller='changeset', action='changeset_info',
881 877 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
882 878
883 879 rmap.connect('compare_home',
884 880 '/{repo_name}/compare',
885 881 controller='compare', action='index',
886 882 conditions={'function': check_repo},
887 883 requirements=URL_NAME_REQUIREMENTS)
888 884
889 885 rmap.connect('compare_url',
890 886 '/{repo_name}/compare/{source_ref_type}@{source_ref:.*?}...{target_ref_type}@{target_ref:.*?}',
891 887 controller='compare', action='compare',
892 888 conditions={'function': check_repo},
893 889 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
894 890
895 891 rmap.connect('pullrequest_home',
896 892 '/{repo_name}/pull-request/new', controller='pullrequests',
897 893 action='index', conditions={'function': check_repo,
898 894 'method': ['GET']},
899 895 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
900 896
901 897 rmap.connect('pullrequest',
902 898 '/{repo_name}/pull-request/new', controller='pullrequests',
903 899 action='create', conditions={'function': check_repo,
904 900 'method': ['POST']},
905 901 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
906 902
907 903 rmap.connect('pullrequest_repo_refs',
908 904 '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}',
909 905 controller='pullrequests',
910 906 action='get_repo_refs',
911 907 conditions={'function': check_repo, 'method': ['GET']},
912 908 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
913 909
914 910 rmap.connect('pullrequest_repo_destinations',
915 911 '/{repo_name}/pull-request/repo-destinations',
916 912 controller='pullrequests',
917 913 action='get_repo_destinations',
918 914 conditions={'function': check_repo, 'method': ['GET']},
919 915 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
920 916
921 917 rmap.connect('pullrequest_show',
922 918 '/{repo_name}/pull-request/{pull_request_id}',
923 919 controller='pullrequests',
924 920 action='show', conditions={'function': check_repo,
925 921 'method': ['GET']},
926 922 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
927 923
928 924 rmap.connect('pullrequest_update',
929 925 '/{repo_name}/pull-request/{pull_request_id}',
930 926 controller='pullrequests',
931 927 action='update', conditions={'function': check_repo,
932 928 'method': ['PUT']},
933 929 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
934 930
935 931 rmap.connect('pullrequest_merge',
936 932 '/{repo_name}/pull-request/{pull_request_id}',
937 933 controller='pullrequests',
938 934 action='merge', conditions={'function': check_repo,
939 935 'method': ['POST']},
940 936 requirements=URL_NAME_REQUIREMENTS)
941 937
942 938 rmap.connect('pullrequest_delete',
943 939 '/{repo_name}/pull-request/{pull_request_id}',
944 940 controller='pullrequests',
945 941 action='delete', conditions={'function': check_repo,
946 942 'method': ['DELETE']},
947 943 requirements=URL_NAME_REQUIREMENTS)
948 944
949 945 rmap.connect('pullrequest_show_all',
950 946 '/{repo_name}/pull-request',
951 947 controller='pullrequests',
952 948 action='show_all', conditions={'function': check_repo,
953 949 'method': ['GET']},
954 950 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
955 951
956 952 rmap.connect('pullrequest_comment',
957 953 '/{repo_name}/pull-request-comment/{pull_request_id}',
958 954 controller='pullrequests',
959 955 action='comment', conditions={'function': check_repo,
960 956 'method': ['POST']},
961 957 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
962 958
963 959 rmap.connect('pullrequest_comment_delete',
964 960 '/{repo_name}/pull-request-comment/{comment_id}/delete',
965 961 controller='pullrequests', action='delete_comment',
966 962 conditions={'function': check_repo, 'method': ['DELETE']},
967 963 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
968 964
969 965 rmap.connect('summary_home_explicit', '/{repo_name}/summary',
970 966 controller='summary', conditions={'function': check_repo},
971 967 requirements=URL_NAME_REQUIREMENTS)
972 968
973 969 rmap.connect('branches_home', '/{repo_name}/branches',
974 970 controller='branches', conditions={'function': check_repo},
975 971 requirements=URL_NAME_REQUIREMENTS)
976 972
977 973 rmap.connect('tags_home', '/{repo_name}/tags',
978 974 controller='tags', conditions={'function': check_repo},
979 975 requirements=URL_NAME_REQUIREMENTS)
980 976
981 977 rmap.connect('bookmarks_home', '/{repo_name}/bookmarks',
982 978 controller='bookmarks', conditions={'function': check_repo},
983 979 requirements=URL_NAME_REQUIREMENTS)
984 980
985 981 rmap.connect('changelog_home', '/{repo_name}/changelog', jsroute=True,
986 982 controller='changelog', conditions={'function': check_repo},
987 983 requirements=URL_NAME_REQUIREMENTS)
988 984
989 985 rmap.connect('changelog_summary_home', '/{repo_name}/changelog_summary',
990 986 controller='changelog', action='changelog_summary',
991 987 conditions={'function': check_repo},
992 988 requirements=URL_NAME_REQUIREMENTS)
993 989
994 990 rmap.connect('changelog_file_home',
995 991 '/{repo_name}/changelog/{revision}/{f_path}',
996 992 controller='changelog', f_path=None,
997 993 conditions={'function': check_repo},
998 994 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
999 995
1000 996 rmap.connect('changelog_elements', '/{repo_name}/changelog_details',
1001 997 controller='changelog', action='changelog_elements',
1002 998 conditions={'function': check_repo},
1003 999 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
1004 1000
1005 1001 rmap.connect('files_home', '/{repo_name}/files/{revision}/{f_path}',
1006 1002 controller='files', revision='tip', f_path='',
1007 1003 conditions={'function': check_repo},
1008 1004 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
1009 1005
1010 1006 rmap.connect('files_home_simple_catchrev',
1011 1007 '/{repo_name}/files/{revision}',
1012 1008 controller='files', revision='tip', f_path='',
1013 1009 conditions={'function': check_repo},
1014 1010 requirements=URL_NAME_REQUIREMENTS)
1015 1011
1016 1012 rmap.connect('files_home_simple_catchall',
1017 1013 '/{repo_name}/files',
1018 1014 controller='files', revision='tip', f_path='',
1019 1015 conditions={'function': check_repo},
1020 1016 requirements=URL_NAME_REQUIREMENTS)
1021 1017
1022 1018 rmap.connect('files_history_home',
1023 1019 '/{repo_name}/history/{revision}/{f_path}',
1024 1020 controller='files', action='history', revision='tip', f_path='',
1025 1021 conditions={'function': check_repo},
1026 1022 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
1027 1023
1028 1024 rmap.connect('files_authors_home',
1029 1025 '/{repo_name}/authors/{revision}/{f_path}',
1030 1026 controller='files', action='authors', revision='tip', f_path='',
1031 1027 conditions={'function': check_repo},
1032 1028 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
1033 1029
1034 1030 rmap.connect('files_diff_home', '/{repo_name}/diff/{f_path}',
1035 1031 controller='files', action='diff', f_path='',
1036 1032 conditions={'function': check_repo},
1037 1033 requirements=URL_NAME_REQUIREMENTS)
1038 1034
1039 1035 rmap.connect('files_diff_2way_home',
1040 1036 '/{repo_name}/diff-2way/{f_path}',
1041 1037 controller='files', action='diff_2way', f_path='',
1042 1038 conditions={'function': check_repo},
1043 1039 requirements=URL_NAME_REQUIREMENTS)
1044 1040
1045 1041 rmap.connect('files_rawfile_home',
1046 1042 '/{repo_name}/rawfile/{revision}/{f_path}',
1047 1043 controller='files', action='rawfile', revision='tip',
1048 1044 f_path='', conditions={'function': check_repo},
1049 1045 requirements=URL_NAME_REQUIREMENTS)
1050 1046
1051 1047 rmap.connect('files_raw_home',
1052 1048 '/{repo_name}/raw/{revision}/{f_path}',
1053 1049 controller='files', action='raw', revision='tip', f_path='',
1054 1050 conditions={'function': check_repo},
1055 1051 requirements=URL_NAME_REQUIREMENTS)
1056 1052
1057 1053 rmap.connect('files_render_home',
1058 1054 '/{repo_name}/render/{revision}/{f_path}',
1059 1055 controller='files', action='index', revision='tip', f_path='',
1060 1056 rendered=True, conditions={'function': check_repo},
1061 1057 requirements=URL_NAME_REQUIREMENTS)
1062 1058
1063 1059 rmap.connect('files_annotate_home',
1064 1060 '/{repo_name}/annotate/{revision}/{f_path}',
1065 1061 controller='files', action='index', revision='tip',
1066 1062 f_path='', annotate=True, conditions={'function': check_repo},
1067 1063 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
1068 1064
1069 1065 rmap.connect('files_annotate_previous',
1070 1066 '/{repo_name}/annotate-previous/{revision}/{f_path}',
1071 1067 controller='files', action='annotate_previous', revision='tip',
1072 1068 f_path='', annotate=True, conditions={'function': check_repo},
1073 1069 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
1074 1070
1075 1071 rmap.connect('files_edit',
1076 1072 '/{repo_name}/edit/{revision}/{f_path}',
1077 1073 controller='files', action='edit', revision='tip',
1078 1074 f_path='',
1079 1075 conditions={'function': check_repo, 'method': ['POST']},
1080 1076 requirements=URL_NAME_REQUIREMENTS)
1081 1077
1082 1078 rmap.connect('files_edit_home',
1083 1079 '/{repo_name}/edit/{revision}/{f_path}',
1084 1080 controller='files', action='edit_home', revision='tip',
1085 1081 f_path='', conditions={'function': check_repo},
1086 1082 requirements=URL_NAME_REQUIREMENTS)
1087 1083
1088 1084 rmap.connect('files_add',
1089 1085 '/{repo_name}/add/{revision}/{f_path}',
1090 1086 controller='files', action='add', revision='tip',
1091 1087 f_path='',
1092 1088 conditions={'function': check_repo, 'method': ['POST']},
1093 1089 requirements=URL_NAME_REQUIREMENTS)
1094 1090
1095 1091 rmap.connect('files_add_home',
1096 1092 '/{repo_name}/add/{revision}/{f_path}',
1097 1093 controller='files', action='add_home', revision='tip',
1098 1094 f_path='', conditions={'function': check_repo},
1099 1095 requirements=URL_NAME_REQUIREMENTS)
1100 1096
1101 1097 rmap.connect('files_delete',
1102 1098 '/{repo_name}/delete/{revision}/{f_path}',
1103 1099 controller='files', action='delete', revision='tip',
1104 1100 f_path='',
1105 1101 conditions={'function': check_repo, 'method': ['POST']},
1106 1102 requirements=URL_NAME_REQUIREMENTS)
1107 1103
1108 1104 rmap.connect('files_delete_home',
1109 1105 '/{repo_name}/delete/{revision}/{f_path}',
1110 1106 controller='files', action='delete_home', revision='tip',
1111 1107 f_path='', conditions={'function': check_repo},
1112 1108 requirements=URL_NAME_REQUIREMENTS)
1113 1109
1114 1110 rmap.connect('files_archive_home', '/{repo_name}/archive/{fname}',
1115 1111 controller='files', action='archivefile',
1116 1112 conditions={'function': check_repo},
1117 1113 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
1118 1114
1119 1115 rmap.connect('files_nodelist_home',
1120 1116 '/{repo_name}/nodelist/{revision}/{f_path}',
1121 1117 controller='files', action='nodelist',
1122 1118 conditions={'function': check_repo},
1123 1119 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
1124 1120
1125 1121 rmap.connect('files_nodetree_full',
1126 1122 '/{repo_name}/nodetree_full/{commit_id}/{f_path}',
1127 1123 controller='files', action='nodetree_full',
1128 1124 conditions={'function': check_repo},
1129 1125 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
1130 1126
1131 1127 rmap.connect('repo_fork_create_home', '/{repo_name}/fork',
1132 1128 controller='forks', action='fork_create',
1133 1129 conditions={'function': check_repo, 'method': ['POST']},
1134 1130 requirements=URL_NAME_REQUIREMENTS)
1135 1131
1136 1132 rmap.connect('repo_fork_home', '/{repo_name}/fork',
1137 1133 controller='forks', action='fork',
1138 1134 conditions={'function': check_repo},
1139 1135 requirements=URL_NAME_REQUIREMENTS)
1140 1136
1141 1137 rmap.connect('repo_forks_home', '/{repo_name}/forks',
1142 1138 controller='forks', action='forks',
1143 1139 conditions={'function': check_repo},
1144 1140 requirements=URL_NAME_REQUIREMENTS)
1145 1141
1146 1142 rmap.connect('repo_followers_home', '/{repo_name}/followers',
1147 1143 controller='followers', action='followers',
1148 1144 conditions={'function': check_repo},
1149 1145 requirements=URL_NAME_REQUIREMENTS)
1150 1146
1151 1147 # must be here for the proper group/repo catch-all pattern
1152 1148 _connect_with_slash(
1153 1149 rmap, 'repo_group_home', '/{group_name}',
1154 1150 controller='home', action='index_repo_group',
1155 1151 conditions={'function': check_group},
1156 1152 requirements=URL_NAME_REQUIREMENTS)
1157 1153
1158 1154 # catch all, at the end
1159 1155 _connect_with_slash(
1160 1156 rmap, 'summary_home', '/{repo_name}', jsroute=True,
1161 1157 controller='summary', action='index',
1162 1158 conditions={'function': check_repo},
1163 1159 requirements=URL_NAME_REQUIREMENTS)
1164 1160
1165 1161 return rmap
1166 1162
1167 1163
1168 1164 def _connect_with_slash(mapper, name, path, *args, **kwargs):
1169 1165 """
1170 1166 Connect a route with an optional trailing slash in `path`.
1171 1167 """
1172 1168 mapper.connect(name + '_slash', path + '/', *args, **kwargs)
1173 1169 mapper.connect(name, path, *args, **kwargs)
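A minimal sketch of what `_connect_with_slash` effectively expands to, using the `summary_home` catch-all registered above; the mapper, route name and keyword values are all taken from this file, nothing new is introduced:

# Hand-expanded equivalent of the `summary_home` catch-all above:
rmap.connect('summary_home_slash', '/{repo_name}/', jsroute=True,
             controller='summary', action='index',
             conditions={'function': check_repo},
             requirements=URL_NAME_REQUIREMENTS)
rmap.connect('summary_home', '/{repo_name}', jsroute=True,
             controller='summary', action='index',
             conditions={'function': check_repo},
             requirements=URL_NAME_REQUIREMENTS)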
@@ -1,2019 +1,2019 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Helper functions
23 23
24 24 Consists of functions typically used within templates, but also
25 25 available to Controllers. This module is available to both as 'h'.
26 26 """
27 27
28 28 import random
29 29 import hashlib
30 30 import StringIO
31 31 import urllib
32 32 import math
33 33 import logging
34 34 import re
35 35 import urlparse
36 36 import time
37 37 import string
38 38 import hashlib
39 39 import pygments
40 40 import itertools
41 41 import fnmatch
42 42
43 43 from datetime import datetime
44 44 from functools import partial
45 45 from pygments.formatters.html import HtmlFormatter
46 46 from pygments import highlight as code_highlight
47 47 from pygments.lexers import (
48 48 get_lexer_by_name, get_lexer_for_filename, get_lexer_for_mimetype)
49 49 from pylons import url as pylons_url
50 50 from pylons.i18n.translation import _, ungettext
51 51 from pyramid.threadlocal import get_current_request
52 52
53 53 from webhelpers.html import literal, HTML, escape
54 54 from webhelpers.html.tools import *
55 55 from webhelpers.html.builder import make_tag
56 56 from webhelpers.html.tags import auto_discovery_link, checkbox, css_classes, \
57 57 end_form, file, form as wh_form, hidden, image, javascript_link, link_to, \
58 58 link_to_if, link_to_unless, ol, required_legend, select, stylesheet_link, \
59 59 submit, text, password, textarea, title, ul, xml_declaration, radio
60 60 from webhelpers.html.tools import auto_link, button_to, highlight, \
61 61 js_obfuscate, mail_to, strip_links, strip_tags, tag_re
62 62 from webhelpers.pylonslib import Flash as _Flash
63 63 from webhelpers.text import chop_at, collapse, convert_accented_entities, \
64 64 convert_misc_entities, lchop, plural, rchop, remove_formatting, \
65 65 replace_whitespace, urlify, truncate, wrap_paragraphs
66 66 from webhelpers.date import time_ago_in_words
67 67 from webhelpers.paginate import Page as _Page
68 68 from webhelpers.html.tags import _set_input_attrs, _set_id_attr, \
69 69 convert_boolean_attrs, NotGiven, _make_safe_id_component
70 70 from webhelpers2.number import format_byte_size
71 71
72 72 from rhodecode.lib.action_parser import action_parser
73 73 from rhodecode.lib.ext_json import json
74 74 from rhodecode.lib.utils import repo_name_slug, get_custom_lexer
75 75 from rhodecode.lib.utils2 import str2bool, safe_unicode, safe_str, \
76 76 get_commit_safe, datetime_to_time, time_to_datetime, time_to_utcdatetime, \
77 77 AttributeDict, safe_int, md5, md5_safe
78 78 from rhodecode.lib.markup_renderer import MarkupRenderer
79 79 from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError
80 80 from rhodecode.lib.vcs.backends.base import BaseChangeset, EmptyCommit
81 81 from rhodecode.config.conf import DATE_FORMAT, DATETIME_FORMAT
82 82 from rhodecode.model.changeset_status import ChangesetStatusModel
83 83 from rhodecode.model.db import Permission, User, Repository
84 84 from rhodecode.model.repo_group import RepoGroupModel
85 85 from rhodecode.model.settings import IssueTrackerSettingsModel
86 86
87 87 log = logging.getLogger(__name__)
88 88
89 89
90 90 DEFAULT_USER = User.DEFAULT_USER
91 91 DEFAULT_USER_EMAIL = User.DEFAULT_USER_EMAIL
92 92
93 93
94 94 def url(*args, **kw):
95 95 return pylons_url(*args, **kw)
96 96
97 97
98 98 def pylons_url_current(*args, **kw):
99 99 """
100 100 This function overrides pylons.url.current() which returns the current
101 101 path so that it will also work from a pyramid only context. This
102 102 should be removed once port to pyramid is complete.
103 103 """
104 104 if not args and not kw:
105 105 request = get_current_request()
106 106 return request.path
107 107 return pylons_url.current(*args, **kw)
108 108
109 109 url.current = pylons_url_current
110 110
111 111
112 112 def url_replace(**qargs):
113 113 """ Returns the current request url while replacing query string args """
114 114
115 115 request = get_current_request()
116 116 new_args = request.GET.mixed()
117 117 new_args.update(qargs)
118 118 return url('', **new_args)
119 119
120 120
121 121 def asset(path, ver=None, **kwargs):
122 122 """
123 123 Helper to generate a static asset file path for rhodecode assets
124 124
125 125 eg. h.asset('images/image.png', ver='3923')
126 126
127 127 :param path: path of asset
128 128 :param ver: optional version query param to append as ?ver=
129 129 """
130 130 request = get_current_request()
131 131 query = {}
132 132 query.update(kwargs)
133 133 if ver:
134 134 query = {'ver': ver}
135 135 return request.static_path(
136 136 'rhodecode:public/{}'.format(path), _query=query)
137 137
138 138
139 139 default_html_escape_table = {
140 140 ord('&'): u'&amp;',
141 141 ord('<'): u'&lt;',
142 142 ord('>'): u'&gt;',
143 143 ord('"'): u'&quot;',
144 144 ord("'"): u'&#39;',
145 145 }
146 146
147 147
148 148 def html_escape(text, html_escape_table=default_html_escape_table):
149 149 """Produce entities within text."""
150 150 return text.translate(html_escape_table)
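A quick illustrative call (the input string is made up for the example); because the table keys are code points, this works as written on unicode input:

# Illustrative input only -- not taken from the changeset.
assert html_escape(u'<a href="#">Jim & Bob\'s</a>') == (
    u'&lt;a href=&quot;#&quot;&gt;Jim &amp; Bob&#39;s&lt;/a&gt;')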
151 151
152 152
153 153 def chop_at_smart(s, sub, inclusive=False, suffix_if_chopped=None):
154 154 """
155 155 Truncate string ``s`` at the first occurrence of ``sub``.
156 156
157 157 If ``inclusive`` is true, truncate just after ``sub`` rather than at it.
158 158 """
159 159 suffix_if_chopped = suffix_if_chopped or ''
160 160 pos = s.find(sub)
161 161 if pos == -1:
162 162 return s
163 163
164 164 if inclusive:
165 165 pos += len(sub)
166 166
167 167 chopped = s[:pos]
168 168 left = s[pos:].strip()
169 169
170 170 if left and suffix_if_chopped:
171 171 chopped += suffix_if_chopped
172 172
173 173 return chopped
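For illustration (made-up strings), the suffix is only appended when something was actually chopped off:

# Illustrative values only.
assert chop_at_smart('foo bar baz', 'bar', suffix_if_chopped='...') == 'foo ...'
assert chop_at_smart('foo bar baz', 'missing', suffix_if_chopped='...') == 'foo bar baz'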
174 174
175 175
176 176 def shorter(text, size=20):
177 177 postfix = '...'
178 178 if len(text) > size:
179 179 return text[:size - len(postfix)] + postfix
180 180 return text
181 181
182 182
183 183 def _reset(name, value=None, id=NotGiven, type="reset", **attrs):
184 184 """
185 185 Reset button
186 186 """
187 187 _set_input_attrs(attrs, type, name, value)
188 188 _set_id_attr(attrs, id, name)
189 189 convert_boolean_attrs(attrs, ["disabled"])
190 190 return HTML.input(**attrs)
191 191
192 192 reset = _reset
193 193 safeid = _make_safe_id_component
194 194
195 195
196 196 def branding(name, length=40):
197 197 return truncate(name, length, indicator="")
198 198
199 199
200 200 def FID(raw_id, path):
201 201 """
202 202 Creates a unique ID for a filenode based on the hash of its path and commit;
203 203 it is safe to use in URLs
204 204
205 205 :param raw_id:
206 206 :param path:
207 207 """
208 208
209 209 return 'c-%s-%s' % (short_id(raw_id), md5_safe(path)[:12])
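A sketch of the shape of the generated ID (the commit hash and path below are made up); the second part is the first 12 hex characters of the path's md5, left symbolic here on purpose:

# Illustrative values only.
# FID('9fae21a5a2194f1ef08d205acfdb4e65b6e1e9b7', 'docs/index.rst')
# -> 'c-9fae21a5a219-<first 12 hex chars of md5("docs/index.rst")>'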
210 210
211 211
212 212 class _GetError(object):
213 213 """Get error from form_errors, and represent it as span wrapped error
214 214 message
215 215
216 216 :param field_name: field to fetch errors for
217 217 :param form_errors: form errors dict
218 218 """
219 219
220 220 def __call__(self, field_name, form_errors):
221 221 tmpl = """<span class="error_msg">%s</span>"""
222 222 if form_errors and field_name in form_errors:
223 223 return literal(tmpl % form_errors.get(field_name))
224 224
225 225 get_error = _GetError()
226 226
227 227
228 228 class _ToolTip(object):
229 229
230 230 def __call__(self, tooltip_title, trim_at=50):
231 231 """
232 232 Special function to wrap our text into nicely formatted,
233 233 auto-wrapped tooltip text
234 234
235 235 :param tooltip_title:
236 236 """
237 237 tooltip_title = escape(tooltip_title)
238 238 tooltip_title = tooltip_title.replace('<', '&lt;').replace('>', '&gt;')
239 239 return tooltip_title
240 240 tooltip = _ToolTip()
241 241
242 242
243 243 def files_breadcrumbs(repo_name, commit_id, file_path):
244 244 if isinstance(file_path, str):
245 245 file_path = safe_unicode(file_path)
246 246
247 247 # TODO: johbo: Is this always a url like path, or is this operating
248 248 # system dependent?
249 249 path_segments = file_path.split('/')
250 250
251 251 repo_name_html = escape(repo_name)
252 252 if len(path_segments) == 1 and path_segments[0] == '':
253 253 url_segments = [repo_name_html]
254 254 else:
255 255 url_segments = [
256 256 link_to(
257 257 repo_name_html,
258 258 url('files_home',
259 259 repo_name=repo_name,
260 260 revision=commit_id,
261 261 f_path=''),
262 262 class_='pjax-link')]
263 263
264 264 last_cnt = len(path_segments) - 1
265 265 for cnt, segment in enumerate(path_segments):
266 266 if not segment:
267 267 continue
268 268 segment_html = escape(segment)
269 269
270 270 if cnt != last_cnt:
271 271 url_segments.append(
272 272 link_to(
273 273 segment_html,
274 274 url('files_home',
275 275 repo_name=repo_name,
276 276 revision=commit_id,
277 277 f_path='/'.join(path_segments[:cnt + 1])),
278 278 class_='pjax-link'))
279 279 else:
280 280 url_segments.append(segment_html)
281 281
282 282 return literal('/'.join(url_segments))
283 283
284 284
285 285 class CodeHtmlFormatter(HtmlFormatter):
286 286 """
287 287 My code Html Formatter for source codes
288 288 """
289 289
290 290 def wrap(self, source, outfile):
291 291 return self._wrap_div(self._wrap_pre(self._wrap_code(source)))
292 292
293 293 def _wrap_code(self, source):
294 294 for cnt, it in enumerate(source):
295 295 i, t = it
296 296 t = '<div id="L%s">%s</div>' % (cnt + 1, t)
297 297 yield i, t
298 298
299 299 def _wrap_tablelinenos(self, inner):
300 300 dummyoutfile = StringIO.StringIO()
301 301 lncount = 0
302 302 for t, line in inner:
303 303 if t:
304 304 lncount += 1
305 305 dummyoutfile.write(line)
306 306
307 307 fl = self.linenostart
308 308 mw = len(str(lncount + fl - 1))
309 309 sp = self.linenospecial
310 310 st = self.linenostep
311 311 la = self.lineanchors
312 312 aln = self.anchorlinenos
313 313 nocls = self.noclasses
314 314 if sp:
315 315 lines = []
316 316
317 317 for i in range(fl, fl + lncount):
318 318 if i % st == 0:
319 319 if i % sp == 0:
320 320 if aln:
321 321 lines.append('<a href="#%s%d" class="special">%*d</a>' %
322 322 (la, i, mw, i))
323 323 else:
324 324 lines.append('<span class="special">%*d</span>' % (mw, i))
325 325 else:
326 326 if aln:
327 327 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
328 328 else:
329 329 lines.append('%*d' % (mw, i))
330 330 else:
331 331 lines.append('')
332 332 ls = '\n'.join(lines)
333 333 else:
334 334 lines = []
335 335 for i in range(fl, fl + lncount):
336 336 if i % st == 0:
337 337 if aln:
338 338 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
339 339 else:
340 340 lines.append('%*d' % (mw, i))
341 341 else:
342 342 lines.append('')
343 343 ls = '\n'.join(lines)
344 344
345 345 # in case you wonder about the seemingly redundant <div> here: since the
346 346 # content in the other cell also is wrapped in a div, some browsers in
347 347 # some configurations seem to mess up the formatting...
348 348 if nocls:
349 349 yield 0, ('<table class="%stable">' % self.cssclass +
350 350 '<tr><td><div class="linenodiv" '
351 351 'style="background-color: #f0f0f0; padding-right: 10px">'
352 352 '<pre style="line-height: 125%">' +
353 353 ls + '</pre></div></td><td id="hlcode" class="code">')
354 354 else:
355 355 yield 0, ('<table class="%stable">' % self.cssclass +
356 356 '<tr><td class="linenos"><div class="linenodiv"><pre>' +
357 357 ls + '</pre></div></td><td id="hlcode" class="code">')
358 358 yield 0, dummyoutfile.getvalue()
359 359 yield 0, '</td></tr></table>'
360 360
361 361
362 362 class SearchContentCodeHtmlFormatter(CodeHtmlFormatter):
363 363 def __init__(self, **kw):
364 364 # only show these line numbers if set
365 365 self.only_lines = kw.pop('only_line_numbers', [])
366 366 self.query_terms = kw.pop('query_terms', [])
367 367 self.max_lines = kw.pop('max_lines', 5)
368 368 self.line_context = kw.pop('line_context', 3)
369 369 self.url = kw.pop('url', None)
370 370
371 371 super(CodeHtmlFormatter, self).__init__(**kw)
372 372
373 373 def _wrap_code(self, source):
374 374 for cnt, it in enumerate(source):
375 375 i, t = it
376 376 t = '<pre>%s</pre>' % t
377 377 yield i, t
378 378
379 379 def _wrap_tablelinenos(self, inner):
380 380 yield 0, '<table class="code-highlight %stable">' % self.cssclass
381 381
382 382 last_shown_line_number = 0
383 383 current_line_number = 1
384 384
385 385 for t, line in inner:
386 386 if not t:
387 387 yield t, line
388 388 continue
389 389
390 390 if current_line_number in self.only_lines:
391 391 if last_shown_line_number + 1 != current_line_number:
392 392 yield 0, '<tr>'
393 393 yield 0, '<td class="line">...</td>'
394 394 yield 0, '<td id="hlcode" class="code"></td>'
395 395 yield 0, '</tr>'
396 396
397 397 yield 0, '<tr>'
398 398 if self.url:
399 399 yield 0, '<td class="line"><a href="%s#L%i">%i</a></td>' % (
400 400 self.url, current_line_number, current_line_number)
401 401 else:
402 402 yield 0, '<td class="line"><a href="">%i</a></td>' % (
403 403 current_line_number)
404 404 yield 0, '<td id="hlcode" class="code">' + line + '</td>'
405 405 yield 0, '</tr>'
406 406
407 407 last_shown_line_number = current_line_number
408 408
409 409 current_line_number += 1
410 410
411 411
412 412 yield 0, '</table>'
413 413
414 414
415 415 def extract_phrases(text_query):
416 416 """
417 417 Extracts phrases from a search term string, making sure phrases
418 418 contained in double quotes are kept together, and discarding empty
419 419 or whitespace-only values, eg.
420 420
421 421 'some text "a phrase" more' => ['some', 'text', 'a phrase', 'more']
422 422
423 423 """
424 424
425 425 in_phrase = False
426 426 buf = ''
427 427 phrases = []
428 428 for char in text_query:
429 429 if in_phrase:
430 430 if char == '"': # end phrase
431 431 phrases.append(buf)
432 432 buf = ''
433 433 in_phrase = False
434 434 continue
435 435 else:
436 436 buf += char
437 437 continue
438 438 else:
439 439 if char == '"': # start phrase
440 440 in_phrase = True
441 441 phrases.append(buf)
442 442 buf = ''
443 443 continue
444 444 elif char == ' ':
445 445 phrases.append(buf)
446 446 buf = ''
447 447 continue
448 448 else:
449 449 buf += char
450 450
451 451 phrases.append(buf)
452 452 phrases = [phrase.strip() for phrase in phrases if phrase.strip()]
453 453 return phrases
454 454
455 455
456 456 def get_matching_offsets(text, phrases):
457 457 """
458 458 Returns a list of string offsets in `text` that the list of `phrases` match
459 459
460 460 >>> get_matching_offsets('some text here', ['some', 'here'])
461 461 [(0, 4), (10, 14)]
462 462
463 463 """
464 464 offsets = []
465 465 for phrase in phrases:
466 466 for match in re.finditer(phrase, text):
467 467 offsets.append((match.start(), match.end()))
468 468
469 469 return offsets
470 470
471 471
472 472 def normalize_text_for_matching(x):
473 473 """
474 474 Replaces all non alnum characters to spaces and lower cases the string,
475 475 useful for comparing two text strings without punctuation
476 476 """
477 477 return re.sub(r'[^\w]', ' ', x.lower())
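For example (made-up input), every non-word character is replaced by a space and case is folded:

# Illustrative input only.
assert normalize_text_for_matching('Some-Text.Here') == 'some text here'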
478 478
479 479
480 480 def get_matching_line_offsets(lines, terms):
481 481 """ Return a mapping of `lines` indices (starting from 1) matching a
482 482 text search query, along with `context` lines above/below matching lines
483 483
484 484 :param lines: list of strings representing lines
485 485 :param terms: search term string to match in lines eg. 'some text'
486 486 :param context: number of lines above/below a matching line to add to result
487 487 :param max_lines: cut off for lines of interest
488 488 eg.
489 489
490 490 text = '''
491 491 words words words
492 492 words words words
493 493 some text some
494 494 words words words
495 495 words words words
496 496 text here what
497 497 '''
498 498 get_matching_line_offsets(text, 'text', context=1)
499 499 {3: [(5, 9)], 6: [(0, 4)]}
500 500
501 501 """
502 502 matching_lines = {}
503 503 phrases = [normalize_text_for_matching(phrase)
504 504 for phrase in extract_phrases(terms)]
505 505
506 506 for line_index, line in enumerate(lines, start=1):
507 507 match_offsets = get_matching_offsets(
508 508 normalize_text_for_matching(line), phrases)
509 509 if match_offsets:
510 510 matching_lines[line_index] = match_offsets
511 511
512 512 return matching_lines
513 513
514 514
515 515 def hsv_to_rgb(h, s, v):
516 516 """ Convert hsv color values to rgb """
517 517
518 518 if s == 0.0:
519 519 return v, v, v
520 520 i = int(h * 6.0) # XXX assume int() truncates!
521 521 f = (h * 6.0) - i
522 522 p = v * (1.0 - s)
523 523 q = v * (1.0 - s * f)
524 524 t = v * (1.0 - s * (1.0 - f))
525 525 i = i % 6
526 526 if i == 0:
527 527 return v, t, p
528 528 if i == 1:
529 529 return q, v, p
530 530 if i == 2:
531 531 return p, v, t
532 532 if i == 3:
533 533 return p, q, v
534 534 if i == 4:
535 535 return t, p, v
536 536 if i == 5:
537 537 return v, p, q
538 538
539 539
540 540 def unique_color_generator(n=10000, saturation=0.10, lightness=0.95):
541 541 """
542 542 Generator for getting n evenly distributed colors using
543 543 hsv color and the golden ratio. It always returns the same order of colors
544 544
545 545 :param n: number of colors to generate
546 546 :param saturation: saturation of returned colors
547 547 :param lightness: lightness of returned colors
548 548 :returns: RGB tuple
549 549 """
550 550
551 551 golden_ratio = 0.618033988749895
552 552 h = 0.22717784590367374
553 553
554 554 for _ in xrange(n):
555 555 h += golden_ratio
556 556 h %= 1
557 557 HSV_tuple = [h, saturation, lightness]
558 558 RGB_tuple = hsv_to_rgb(*HSV_tuple)
559 559 yield map(lambda x: str(int(x * 256)), RGB_tuple)
560 560
561 561
562 562 def color_hasher(n=10000, saturation=0.10, lightness=0.95):
563 563 """
564 564 Returns a function which, when called with an argument, returns a unique
565 565 color for that argument, eg.
566 566
567 567 :param n: number of colors to generate
568 568 :param saturation: saturation of returned colors
569 569 :param lightness: lightness of returned colors
570 570 :returns: css RGB string
571 571
572 572 >>> color_hash = color_hasher()
573 573 >>> color_hash('hello')
574 574 'rgb(34, 12, 59)'
575 575 >>> color_hash('hello')
576 576 'rgb(34, 12, 59)'
577 577 >>> color_hash('other')
578 578 'rgb(90, 224, 159)'
579 579 """
580 580
581 581 color_dict = {}
582 582 cgenerator = unique_color_generator(
583 583 saturation=saturation, lightness=lightness)
584 584
585 585 def get_color_string(thing):
586 586 if thing in color_dict:
587 587 col = color_dict[thing]
588 588 else:
589 589 col = color_dict[thing] = cgenerator.next()
590 590 return "rgb(%s)" % (', '.join(col))
591 591
592 592 return get_color_string
593 593
594 594
595 595 def get_lexer_safe(mimetype=None, filepath=None):
596 596 """
597 597 Tries to return a relevant pygments lexer using mimetype/filepath name,
598 598 defaulting to plain text if none could be found
599 599 """
600 600 lexer = None
601 601 try:
602 602 if mimetype:
603 603 lexer = get_lexer_for_mimetype(mimetype)
604 604 if not lexer:
605 605 lexer = get_lexer_for_filename(filepath)
606 606 except pygments.util.ClassNotFound:
607 607 pass
608 608
609 609 if not lexer:
610 610 lexer = get_lexer_by_name('text')
611 611
612 612 return lexer
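A hedged usage sketch (the mimetype and file names are examples, not from the changeset); with typical Pygments registrations these resolve roughly as commented:

# Example inputs only; the resolved lexer classes depend on the installed
# Pygments version.
get_lexer_safe(mimetype='text/x-python')    # usually PythonLexer
get_lexer_safe(filepath='setup.cfg')        # usually IniLexer
get_lexer_safe(filepath='notes.weird-ext')  # unknown -> plain 'text' lexer fallback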
613 613
614 614
615 615 def get_lexer_for_filenode(filenode):
616 616 lexer = get_custom_lexer(filenode.extension) or filenode.lexer
617 617 return lexer
618 618
619 619
620 620 def pygmentize(filenode, **kwargs):
621 621 """
622 622 pygmentize function using pygments
623 623
624 624 :param filenode:
625 625 """
626 626 lexer = get_lexer_for_filenode(filenode)
627 627 return literal(code_highlight(filenode.content, lexer,
628 628 CodeHtmlFormatter(**kwargs)))
629 629
630 630
631 631 def is_following_repo(repo_name, user_id):
632 632 from rhodecode.model.scm import ScmModel
633 633 return ScmModel().is_following_repo(repo_name, user_id)
634 634
635 635
636 636 class _Message(object):
637 637 """A message returned by ``Flash.pop_messages()``.
638 638
639 639 Converting the message to a string returns the message text. Instances
640 640 also have the following attributes:
641 641
642 642 * ``message``: the message text.
643 643 * ``category``: the category specified when the message was created.
644 644 """
645 645
646 646 def __init__(self, category, message):
647 647 self.category = category
648 648 self.message = message
649 649
650 650 def __str__(self):
651 651 return self.message
652 652
653 653 __unicode__ = __str__
654 654
655 655 def __html__(self):
656 656 return escape(safe_unicode(self.message))
657 657
658 658
659 659 class Flash(_Flash):
660 660
661 661 def pop_messages(self):
662 662 """Return all accumulated messages and delete them from the session.
663 663
664 664 The return value is a list of ``Message`` objects.
665 665 """
666 666 from pylons import session
667 667
668 668 messages = []
669 669
670 670 # Pop the 'old' pylons flash messages. They are tuples of the form
671 671 # (category, message)
672 672 for cat, msg in session.pop(self.session_key, []):
673 673 messages.append(_Message(cat, msg))
674 674
675 675 # Pop the 'new' pyramid flash messages for each category as list
676 676 # of strings.
677 677 for cat in self.categories:
678 678 for msg in session.pop_flash(queue=cat):
679 679 messages.append(_Message(cat, msg))
680 680 # Map messages from the default queue to the 'notice' category.
681 681 for msg in session.pop_flash():
682 682 messages.append(_Message('notice', msg))
683 683
684 684 session.save()
685 685 return messages
686 686
687 687 def json_alerts(self):
688 688 payloads = []
689 689 messages = flash.pop_messages()
690 690 if messages:
691 691 for message in messages:
692 692 subdata = {}
693 693 if hasattr(message.message, 'rsplit'):
694 694 flash_data = message.message.rsplit('|DELIM|', 1)
695 695 org_message = flash_data[0]
696 696 if len(flash_data) > 1:
697 697 subdata = json.loads(flash_data[1])
698 698 else:
699 699 org_message = message.message
700 700 payloads.append({
701 701 'message': {
702 702 'message': u'{}'.format(org_message),
703 703 'level': message.category,
704 704 'force': True,
705 705 'subdata': subdata
706 706 }
707 707 })
708 708 return json.dumps(payloads)
709 709
710 710 flash = Flash()
711 711
712 712 #==============================================================================
713 713 # SCM FILTERS available via h.
714 714 #==============================================================================
715 715 from rhodecode.lib.vcs.utils import author_name, author_email
716 716 from rhodecode.lib.utils2 import credentials_filter, age as _age
717 717 from rhodecode.model.db import User, ChangesetStatus
718 718
719 719 age = _age
720 720 capitalize = lambda x: x.capitalize()
721 721 email = author_email
722 722 short_id = lambda x: x[:12]
723 723 hide_credentials = lambda x: ''.join(credentials_filter(x))
724 724
725 725
726 726 def age_component(datetime_iso, value=None, time_is_local=False):
727 727 title = value or format_date(datetime_iso)
728 728 tzinfo = '+00:00'
729 729
730 730 # detect if we have a timezone info, otherwise, add it
731 731 if isinstance(datetime_iso, datetime) and not datetime_iso.tzinfo:
732 732 if time_is_local:
733 733 tzinfo = time.strftime("+%H:%M",
734 734 time.gmtime(
735 735 (datetime.now() - datetime.utcnow()).seconds + 1
736 736 )
737 737 )
738 738
739 739 return literal(
740 740 '<time class="timeago tooltip" '
741 741 'title="{1}{2}" datetime="{0}{2}">{1}</time>'.format(
742 742 datetime_iso, title, tzinfo))
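When called with an ISO string and an explicit value (both made up here), the rendered markup looks like this; the +00:00 suffix is the default when no local-timezone handling kicks in:

# Illustrative call; the argument values are assumptions.
age_component('2017-01-01T10:00:00', value='3 months ago')
# -> <time class="timeago tooltip" title="3 months ago+00:00"
#         datetime="2017-01-01T10:00:00+00:00">3 months ago</time>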
743 743
744 744
745 745 def _shorten_commit_id(commit_id):
746 746 from rhodecode import CONFIG
747 747 def_len = safe_int(CONFIG.get('rhodecode_show_sha_length', 12))
748 748 return commit_id[:def_len]
749 749
750 750
751 751 def show_id(commit):
752 752 """
753 753 Configurable function that shows ID
754 754 by default it's r123:fffeeefffeee
755 755
756 756 :param commit: commit instance
757 757 """
758 758 from rhodecode import CONFIG
759 759 show_idx = str2bool(CONFIG.get('rhodecode_show_revision_number', True))
760 760
761 761 raw_id = _shorten_commit_id(commit.raw_id)
762 762 if show_idx:
763 763 return 'r%s:%s' % (commit.idx, raw_id)
764 764 else:
765 765 return '%s' % (raw_id, )
766 766
767 767
768 768 def format_date(date):
769 769 """
770 770 use a standardized formatting for dates used in RhodeCode
771 771
772 772 :param date: date/datetime object
773 773 :return: formatted date
774 774 """
775 775
776 776 if date:
777 777 _fmt = "%a, %d %b %Y %H:%M:%S"
778 778 return safe_unicode(date.strftime(_fmt))
779 779
780 780 return u""
781 781
782 782
783 783 class _RepoChecker(object):
784 784
785 785 def __init__(self, backend_alias):
786 786 self._backend_alias = backend_alias
787 787
788 788 def __call__(self, repository):
789 789 if hasattr(repository, 'alias'):
790 790 _type = repository.alias
791 791 elif hasattr(repository, 'repo_type'):
792 792 _type = repository.repo_type
793 793 else:
794 794 _type = repository
795 795 return _type == self._backend_alias
796 796
797 797 is_git = _RepoChecker('git')
798 798 is_hg = _RepoChecker('hg')
799 799 is_svn = _RepoChecker('svn')
800 800
801 801
802 802 def get_repo_type_by_name(repo_name):
803 803 repo = Repository.get_by_repo_name(repo_name)
804 804 return repo.repo_type
805 805
806 806
807 807 def is_svn_without_proxy(repository):
808 808 if is_svn(repository):
809 809 from rhodecode.model.settings import VcsSettingsModel
810 810 conf = VcsSettingsModel().get_ui_settings_as_config_obj()
811 811 return not str2bool(conf.get('vcs_svn_proxy', 'http_requests_enabled'))
812 812 return False
813 813
814 814
815 815 def discover_user(author):
816 816 """
817 817 Tries to discover a RhodeCode User based on the author string. The author string
818 818 is typically `FirstName LastName <email@address.com>`
819 819 """
820 820
821 821 # if author is already an instance use it for extraction
822 822 if isinstance(author, User):
823 823 return author
824 824
825 825 # Valid email in the attribute passed, see if they're in the system
826 826 _email = author_email(author)
827 827 if _email != '':
828 828 user = User.get_by_email(_email, case_insensitive=True, cache=True)
829 829 if user is not None:
830 830 return user
831 831
832 832 # Maybe it's a username; try to extract it and fetch the user by username
833 833 _author = author_name(author)
834 834 user = User.get_by_username(_author, case_insensitive=True, cache=True)
835 835 if user is not None:
836 836 return user
837 837
838 838 return None
839 839
840 840
841 841 def email_or_none(author):
842 842 # extract email from the commit string
843 843 _email = author_email(author)
844 844
845 845 # If we have an email, use it, otherwise
846 846 # see if it contains a username we can get an email from
847 847 if _email != '':
848 848 return _email
849 849 else:
850 850 user = User.get_by_username(
851 851 author_name(author), case_insensitive=True, cache=True)
852 852
853 853 if user is not None:
854 854 return user.email
855 855
856 856 # No valid email, not a valid user in the system, none!
857 857 return None
858 858
859 859
860 860 def link_to_user(author, length=0, **kwargs):
861 861 user = discover_user(author)
862 862 # user can be None, but if we have it already it means we can re-use it
863 863 # in the person() function, so we save 1 intensive-query
864 864 if user:
865 865 author = user
866 866
867 867 display_person = person(author, 'username_or_name_or_email')
868 868 if length:
869 869 display_person = shorter(display_person, length)
870 870
871 871 if user:
872 872 return link_to(
873 873 escape(display_person),
874 url('user_profile', username=user.username),
874 route_path('user_profile', username=user.username),
875 875 **kwargs)
876 876 else:
877 877 return escape(display_person)
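The hunk above swaps the pylons-style `url()` helper for `route_path()` when linking to a user profile. As a sketch of the assumption behind that change, `route_path` can be thought of as a thin wrapper over Pyramid's `request.route_path`; the wrapper below is illustrative only, its real definition is not part of the lines shown here:

# Assumed shape only -- not the actual helper from this file.
def route_path(route_name, **kwargs):
    # get_current_request is already imported at the top of this module
    return get_current_request().route_path(route_name, **kwargs)

# Usage as in link_to_user() above (the username value is made up):
# route_path('user_profile', username='some-user')
# resolves against whatever pattern the named Pyramid route was registered with.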
878 878
879 879
880 880 def person(author, show_attr="username_and_name"):
881 881 user = discover_user(author)
882 882 if user:
883 883 return getattr(user, show_attr)
884 884 else:
885 885 _author = author_name(author)
886 886 _email = email(author)
887 887 return _author or _email
888 888
889 889
890 890 def author_string(email):
891 891 if email:
892 892 user = User.get_by_email(email, case_insensitive=True, cache=True)
893 893 if user:
894 894 if user.firstname or user.lastname:
895 895 return '%s %s &lt;%s&gt;' % (user.firstname, user.lastname, email)
896 896 else:
897 897 return email
898 898 else:
899 899 return email
900 900 else:
901 901 return None
902 902
903 903
904 904 def person_by_id(id_, show_attr="username_and_name"):
905 905 # attr to return from fetched user
906 906 person_getter = lambda usr: getattr(usr, show_attr)
907 907
908 908 # maybe it's an ID?
909 909 if str(id_).isdigit() or isinstance(id_, int):
910 910 id_ = int(id_)
911 911 user = User.get(id_)
912 912 if user is not None:
913 913 return person_getter(user)
914 914 return id_
915 915
916 916
917 917 def gravatar_with_user(author, show_disabled=False):
918 918 from rhodecode.lib.utils import PartialRenderer
919 919 _render = PartialRenderer('base/base.mako')
920 920 return _render('gravatar_with_user', author, show_disabled=show_disabled)
921 921
922 922
923 923 def desc_stylize(value):
924 924 """
925 925 converts tags from value into html equivalent
926 926
927 927 :param value:
928 928 """
929 929 if not value:
930 930 return ''
931 931
932 932 value = re.sub(r'\[see\ \=\>\ *([a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\]',
933 933 '<div class="metatag" tag="see">see =&gt; \\1 </div>', value)
934 934 value = re.sub(r'\[license\ \=\>\ *([a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\]',
935 935 '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/\\1">\\1</a></div>', value)
936 936 value = re.sub(r'\[(requires|recommends|conflicts|base)\ \=\>\ *([a-zA-Z0-9\-\/]*)\]',
937 937 '<div class="metatag" tag="\\1">\\1 =&gt; <a href="/\\2">\\2</a></div>', value)
938 938 value = re.sub(r'\[(lang|language)\ \=\>\ *([a-zA-Z\-\/\#\+]*)\]',
939 939 '<div class="metatag" tag="lang">\\2</div>', value)
940 940 value = re.sub(r'\[([a-z]+)\]',
941 941 '<div class="metatag" tag="\\1">\\1</div>', value)
942 942
943 943 return value
944 944
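# For illustration only, a description using the metatag syntax handled above
# (the text is invented):
#
#   '[lang => python] [requires => some-lib] [stable]'
#
# is rewritten by desc_stylize() roughly into:
#
#   <div class="metatag" tag="lang">python</div>
#   <div class="metatag" tag="requires">requires =&gt; <a href="/some-lib">some-lib</a></div>
#   <div class="metatag" tag="stable">stable</div>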
945 945
946 946 def escaped_stylize(value):
947 947 """
948 948 converts metatags in the given value into their html equivalent, escaping the value first
949 949 """
950 950 if not value:
951 951 return ''
952 952
953 953 # Using the default webhelpers escape method, but we have to force it to a
954 954 # plain unicode instead of a markup tag so it can be used in regex expressions
955 955 value = unicode(escape(safe_unicode(value)))
956 956
957 957 value = re.sub(r'\[see\ \=\&gt;\ *([a-zA-Z0-9\/\=\?\&amp;\ \:\/\.\-]*)\]',
958 958 '<div class="metatag" tag="see">see =&gt; \\1 </div>', value)
959 959 value = re.sub(r'\[license\ \=\&gt;\ *([a-zA-Z0-9\/\=\?\&amp;\ \:\/\.\-]*)\]',
960 960 '<div class="metatag" tag="license"><a href="http://www.opensource.org/licenses/\\1">\\1</a></div>', value)
961 961 value = re.sub(r'\[(requires|recommends|conflicts|base)\ \=\&gt;\ *([a-zA-Z0-9\-\/]*)\]',
962 962 '<div class="metatag" tag="\\1">\\1 =&gt; <a href="/\\2">\\2</a></div>', value)
963 963 value = re.sub(r'\[(lang|language)\ \=\&gt;\ *([a-zA-Z\-\/\#\+]*)\]',
964 964 '<div class="metatag" tag="lang">\\2</div>', value)
965 965 value = re.sub(r'\[([a-z]+)\]',
966 966 '<div class="metatag" tag="\\1">\\1</div>', value)
967 967
968 968 return value
969 969
970 970
971 971 def bool2icon(value):
972 972 """
973 973 Returns the boolean value of a given value, represented as an html element
974 974 with classes representing icons
975 975
976 976 :param value: given value to convert to html node
977 977 """
978 978
979 979 if value: # does bool conversion
980 980 return HTML.tag('i', class_="icon-true")
981 981 else: # not true as bool
982 982 return HTML.tag('i', class_="icon-false")
983 983
984 984
985 985 #==============================================================================
986 986 # PERMS
987 987 #==============================================================================
988 988 from rhodecode.lib.auth import HasPermissionAny, HasPermissionAll, \
989 989 HasRepoPermissionAny, HasRepoPermissionAll, HasRepoGroupPermissionAll, \
990 990 HasRepoGroupPermissionAny, HasRepoPermissionAnyApi, get_csrf_token, \
991 991 csrf_token_key
992 992
993 993
994 994 #==============================================================================
995 995 # GRAVATAR URL
996 996 #==============================================================================
997 997 class InitialsGravatar(object):
998 998 def __init__(self, email_address, first_name, last_name, size=30,
999 999 background=None, text_color='#fff'):
1000 1000 self.size = size
1001 1001 self.first_name = first_name
1002 1002 self.last_name = last_name
1003 1003 self.email_address = email_address
1004 1004 self.background = background or self.str2color(email_address)
1005 1005 self.text_color = text_color
1006 1006
1007 1007 def get_color_bank(self):
1008 1008 """
1009 1009 returns a predefined list of colors that gravatars can use.
1010 1010 Those are randomized distinct colors that guarantee readability and
1011 1011 uniqueness.
1012 1012
1013 1013 generated with: http://phrogz.net/css/distinct-colors.html
1014 1014 """
1015 1015 return [
1016 1016 '#bf3030', '#a67f53', '#00ff00', '#5989b3', '#392040', '#d90000',
1017 1017 '#402910', '#204020', '#79baf2', '#a700b3', '#bf6060', '#7f5320',
1018 1018 '#008000', '#003059', '#ee00ff', '#ff0000', '#8c4b00', '#007300',
1019 1019 '#005fb3', '#de73e6', '#ff4040', '#ffaa00', '#3df255', '#203140',
1020 1020 '#47004d', '#591616', '#664400', '#59b365', '#0d2133', '#83008c',
1021 1021 '#592d2d', '#bf9f60', '#73e682', '#1d3f73', '#73006b', '#402020',
1022 1022 '#b2862d', '#397341', '#597db3', '#e600d6', '#a60000', '#736039',
1023 1023 '#00b318', '#79aaf2', '#330d30', '#ff8080', '#403010', '#16591f',
1024 1024 '#002459', '#8c4688', '#e50000', '#ffbf40', '#00732e', '#102340',
1025 1025 '#bf60ac', '#8c4646', '#cc8800', '#00a642', '#1d3473', '#b32d98',
1026 1026 '#660e00', '#ffd580', '#80ffb2', '#7391e6', '#733967', '#d97b6c',
1027 1027 '#8c5e00', '#59b389', '#3967e6', '#590047', '#73281d', '#665200',
1028 1028 '#00e67a', '#2d50b3', '#8c2377', '#734139', '#b2982d', '#16593a',
1029 1029 '#001859', '#ff00aa', '#a65e53', '#ffcc00', '#0d3321', '#2d3959',
1030 1030 '#731d56', '#401610', '#4c3d00', '#468c6c', '#002ca6', '#d936a3',
1031 1031 '#d94c36', '#403920', '#36d9a3', '#0d1733', '#592d4a', '#993626',
1032 1032 '#cca300', '#00734d', '#46598c', '#8c005e', '#7f1100', '#8c7000',
1033 1033 '#00a66f', '#7382e6', '#b32d74', '#d9896c', '#ffe680', '#1d7362',
1034 1034 '#364cd9', '#73003d', '#d93a00', '#998a4d', '#59b3a1', '#5965b3',
1035 1035 '#e5007a', '#73341d', '#665f00', '#00b38f', '#0018b3', '#59163a',
1036 1036 '#b2502d', '#bfb960', '#00ffcc', '#23318c', '#a6537f', '#734939',
1037 1037 '#b2a700', '#104036', '#3d3df2', '#402031', '#e56739', '#736f39',
1038 1038 '#79f2ea', '#000059', '#401029', '#4c1400', '#ffee00', '#005953',
1039 1039 '#101040', '#990052', '#402820', '#403d10', '#00ffee', '#0000d9',
1040 1040 '#ff80c4', '#a66953', '#eeff00', '#00ccbe', '#8080ff', '#e673a1',
1041 1041 '#a62c00', '#474d00', '#1a3331', '#46468c', '#733950', '#662900',
1042 1042 '#858c23', '#238c85', '#0f0073', '#b20047', '#d9986c', '#becc00',
1043 1043 '#396f73', '#281d73', '#ff0066', '#ff6600', '#dee673', '#59adb3',
1044 1044 '#6559b3', '#590024', '#b2622d', '#98b32d', '#36ced9', '#332d59',
1045 1045 '#40001a', '#733f1d', '#526600', '#005359', '#242040', '#bf6079',
1046 1046 '#735039', '#cef23d', '#007780', '#5630bf', '#66001b', '#b24700',
1047 1047 '#acbf60', '#1d6273', '#25008c', '#731d34', '#a67453', '#50592d',
1048 1048 '#00ccff', '#6600ff', '#ff0044', '#4c1f00', '#8a994d', '#79daf2',
1049 1049 '#a173e6', '#d93662', '#402310', '#aaff00', '#2d98b3', '#8c40ff',
1050 1050 '#592d39', '#ff8c40', '#354020', '#103640', '#1a0040', '#331a20',
1051 1051 '#331400', '#334d00', '#1d5673', '#583973', '#7f0022', '#4c3626',
1052 1052 '#88cc00', '#36a3d9', '#3d0073', '#d9364c', '#33241a', '#698c23',
1053 1053 '#5995b3', '#300059', '#e57382', '#7f3300', '#366600', '#00aaff',
1054 1054 '#3a1659', '#733941', '#663600', '#74b32d', '#003c59', '#7f53a6',
1055 1055 '#73000f', '#ff8800', '#baf279', '#79caf2', '#291040', '#a6293a',
1056 1056 '#b2742d', '#587339', '#0077b3', '#632699', '#400009', '#d9a66c',
1057 1057 '#294010', '#2d4a59', '#aa00ff', '#4c131b', '#b25f00', '#5ce600',
1058 1058 '#267399', '#a336d9', '#990014', '#664e33', '#86bf60', '#0088ff',
1059 1059 '#7700b3', '#593a16', '#073300', '#1d4b73', '#ac60bf', '#e59539',
1060 1060 '#4f8c46', '#368dd9', '#5c0073'
1061 1061 ]
1062 1062
1063 1063 def rgb_to_hex_color(self, rgb_tuple):
1064 1064 """
1065 1065 Converts the passed rgb_tuple to a hex color.
1066 1066
1067 1067 :param rgb_tuple: tuple of 3 ints representing an rgb color
1068 1068 """
1069 1069 return '#' + ("".join(map(chr, rgb_tuple)).encode('hex'))
1070 1070
1071 1071 def email_to_int_list(self, email_str):
1072 1072 """
1073 1073 Get every byte of the hex md5 digest of the email and turn it into an integer.
1074 1074 Each value is always between 0 and 255.
1075 1075 """
1076 1076 digest = md5_safe(email_str.lower())
1077 1077 return [int(digest[i * 2:i * 2 + 2], 16) for i in range(16)]
1078 1078
1079 1079 def pick_color_bank_index(self, email_str, color_bank):
1080 1080 return self.email_to_int_list(email_str)[0] % len(color_bank)
1081 1081
1082 1082 def str2color(self, email_str):
1083 1083 """
1084 1084 Tries to map an email to a color using a stable algorithm
1085 1085
1086 1086 :param email_str:
1087 1087 """
1088 1088 color_bank = self.get_color_bank()
1089 1089 # pick a position (modulo its length so we always find it in the
1090 1090 # bank even if it's smaller than 256 values)
1091 1091 pos = self.pick_color_bank_index(email_str, color_bank)
1092 1092 return color_bank[pos]
1093 1093
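    # A rough equivalent of str2color() for illustration, using hashlib
    # directly in place of md5_safe(): the first byte of the digest, modulo
    # the bank size, gives a stable index, so the same address always
    # resolves to the same color.
    #
    #   bank = self.get_color_bank()
    #   idx = int(hashlib.md5(email.lower()).hexdigest()[:2], 16) % len(bank)
    #   color = bank[idx]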
1094 1094 def normalize_email(self, email_address):
1095 1095 import unicodedata
1096 1096 # default host used to fill in the fake/missing email
1097 1097 default_host = u'localhost'
1098 1098
1099 1099 if not email_address:
1100 1100 email_address = u'%s@%s' % (User.DEFAULT_USER, default_host)
1101 1101
1102 1102 email_address = safe_unicode(email_address)
1103 1103
1104 1104 if u'@' not in email_address:
1105 1105 email_address = u'%s@%s' % (email_address, default_host)
1106 1106
1107 1107 if email_address.endswith(u'@'):
1108 1108 email_address = u'%s%s' % (email_address, default_host)
1109 1109
1110 1110 email_address = unicodedata.normalize('NFKD', email_address)\
1111 1111 .encode('ascii', 'ignore')
1112 1112 return email_address
1113 1113
1114 1114 def get_initials(self):
1115 1115 """
1116 1116 Returns 2-letter initials calculated from the input.
1117 1117 The algorithm starts from the email address: it takes the first letter
1118 1118 of the part before the @ and the first letter of the server name. If
1119 1119 the part before the @ has the form `somestring.somestring2`, the server
1120 1120 letter is replaced with the first letter of somestring2.
1121 1121
1122 1122 If the function was initialized with both a first and a last name, these
1123 1123 override the extraction from the email: we take the first letter of the
1124 1124 first and last name instead. There is extra logic for compound full
1125 1125 names like Guido Von Rossum, where we use the last part of the last name
1126 1126 (Von Rossum), picking `R`.
1127 1127
1128 1128 The function also normalizes non-ascii characters to their ascii
1129 1129 representation, eg Ą => A
1130 1130 """
1131 1131 import unicodedata
1132 1132 # replace non-ascii to ascii
1133 1133 first_name = unicodedata.normalize(
1134 1134 'NFKD', safe_unicode(self.first_name)).encode('ascii', 'ignore')
1135 1135 last_name = unicodedata.normalize(
1136 1136 'NFKD', safe_unicode(self.last_name)).encode('ascii', 'ignore')
1137 1137
1138 1138 # do NFKD encoding, and also make sure email has proper format
1139 1139 email_address = self.normalize_email(self.email_address)
1140 1140
1141 1141 # first push the email initials
1142 1142 prefix, server = email_address.split('@', 1)
1143 1143
1144 1144 # check if prefix is maybe a 'firstname.lastname' syntax
1145 1145 _dot_split = prefix.rsplit('.', 1)
1146 1146 if len(_dot_split) == 2:
1147 1147 initials = [_dot_split[0][0], _dot_split[1][0]]
1148 1148 else:
1149 1149 initials = [prefix[0], server[0]]
1150 1150
1151 1151 # then try to replace either firstname or lastname
1152 1152 fn_letter = (first_name or " ")[0].strip()
1153 1153 ln_letter = (last_name.split(' ', 1)[-1] or " ")[0].strip()
1154 1154
1155 1155 if fn_letter:
1156 1156 initials[0] = fn_letter
1157 1157
1158 1158 if ln_letter:
1159 1159 initials[1] = ln_letter
1160 1160
1161 1161 return ''.join(initials).upper()
1162 1162
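    # A few worked examples of get_initials() (all inputs invented):
    #
    #   email 'marcin@example.com', no names     -> 'ME'  (prefix + server letter)
    #   email 'john.doe@example.com', no names   -> 'JD'  (dot-split prefix)
    #   names 'Guido' / 'Von Rossum', any email  -> 'GR'  (last part of the
    #                                                      compound last name)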
1163 1163 def get_img_data_by_type(self, font_family, img_type):
1164 1164 default_user = """
1165 1165 <svg xmlns="http://www.w3.org/2000/svg"
1166 1166 version="1.1" x="0px" y="0px" width="{size}" height="{size}"
1167 1167 viewBox="-15 -10 439.165 429.164"
1168 1168
1169 1169 xml:space="preserve"
1170 1170 style="background:{background};" >
1171 1171
1172 1172 <path d="M204.583,216.671c50.664,0,91.74-48.075,
1173 1173 91.74-107.378c0-82.237-41.074-107.377-91.74-107.377
1174 1174 c-50.668,0-91.74,25.14-91.74,107.377C112.844,
1175 1175 168.596,153.916,216.671,
1176 1176 204.583,216.671z" fill="{text_color}"/>
1177 1177 <path d="M407.164,374.717L360.88,
1178 1178 270.454c-2.117-4.771-5.836-8.728-10.465-11.138l-71.83-37.392
1179 1179 c-1.584-0.823-3.502-0.663-4.926,0.415c-20.316,
1180 1180 15.366-44.203,23.488-69.076,23.488c-24.877,
1181 1181 0-48.762-8.122-69.078-23.488
1182 1182 c-1.428-1.078-3.346-1.238-4.93-0.415L58.75,
1183 1183 259.316c-4.631,2.41-8.346,6.365-10.465,11.138L2.001,374.717
1184 1184 c-3.191,7.188-2.537,15.412,1.75,22.005c4.285,
1185 1185 6.592,11.537,10.526,19.4,10.526h362.861c7.863,0,15.117-3.936,
1186 1186 19.402-10.527 C409.699,390.129,
1187 1187 410.355,381.902,407.164,374.717z" fill="{text_color}"/>
1188 1188 </svg>""".format(
1189 1189 size=self.size,
1190 1190 background='#979797', # @grey4
1191 1191 text_color=self.text_color,
1192 1192 font_family=font_family)
1193 1193
1194 1194 return {
1195 1195 "default_user": default_user
1196 1196 }[img_type]
1197 1197
1198 1198 def get_img_data(self, svg_type=None):
1199 1199 """
1200 1200 generates the svg image data
1201 1201 """
1202 1202
1203 1203 font_family = ','.join([
1204 1204 'proximanovaregular',
1205 1205 'Proxima Nova Regular',
1206 1206 'Proxima Nova',
1207 1207 'Arial',
1208 1208 'Lucida Grande',
1209 1209 'sans-serif'
1210 1210 ])
1211 1211 if svg_type:
1212 1212 return self.get_img_data_by_type(font_family, svg_type)
1213 1213
1214 1214 initials = self.get_initials()
1215 1215 img_data = """
1216 1216 <svg xmlns="http://www.w3.org/2000/svg" pointer-events="none"
1217 1217 width="{size}" height="{size}"
1218 1218 style="width: 100%; height: 100%; background-color: {background}"
1219 1219 viewBox="0 0 {size} {size}">
1220 1220 <text text-anchor="middle" y="50%" x="50%" dy="0.35em"
1221 1221 pointer-events="auto" fill="{text_color}"
1222 1222 font-family="{font_family}"
1223 1223 style="font-weight: 400; font-size: {f_size}px;">{text}
1224 1224 </text>
1225 1225 </svg>""".format(
1226 1226 size=self.size,
1227 1227 f_size=self.size/1.85, # scale the text inside the box nicely
1228 1228 background=self.background,
1229 1229 text_color=self.text_color,
1230 1230 text=initials.upper(),
1231 1231 font_family=font_family)
1232 1232
1233 1233 return img_data
1234 1234
1235 1235 def generate_svg(self, svg_type=None):
1236 1236 img_data = self.get_img_data(svg_type)
1237 1237 return "data:image/svg+xml;base64,%s" % img_data.encode('base64')
1238 1238
1239 1239
1240 1240 def initials_gravatar(email_address, first_name, last_name, size=30):
1241 1241 svg_type = None
1242 1242 if email_address == User.DEFAULT_USER_EMAIL:
1243 1243 svg_type = 'default_user'
1244 1244 klass = InitialsGravatar(email_address, first_name, last_name, size)
1245 1245 return klass.generate_svg(svg_type=svg_type)
1246 1246
1247 1247
1248 1248 def gravatar_url(email_address, size=30):
1249 1249 # doh, we need to re-import those to mock it later
1250 1250 from pylons import tmpl_context as c
1251 1251
1252 1252 _use_gravatar = c.visual.use_gravatar
1253 1253 _gravatar_url = c.visual.gravatar_url or User.DEFAULT_GRAVATAR_URL
1254 1254
1255 1255 email_address = email_address or User.DEFAULT_USER_EMAIL
1256 1256 if isinstance(email_address, unicode):
1257 1257 # hashlib crashes on unicode items
1258 1258 email_address = safe_str(email_address)
1259 1259
1260 1260 # empty email or default user
1261 1261 if not email_address or email_address == User.DEFAULT_USER_EMAIL:
1262 1262 return initials_gravatar(User.DEFAULT_USER_EMAIL, '', '', size=size)
1263 1263
1264 1264 if _use_gravatar:
1265 1265 # TODO: Disuse pyramid thread locals. Think about another solution to
1266 1266 # get the host and scheme here.
1267 1267 request = get_current_request()
1268 1268 tmpl = safe_str(_gravatar_url)
1269 1269 tmpl = tmpl.replace('{email}', email_address)\
1270 1270 .replace('{md5email}', md5_safe(email_address.lower())) \
1271 1271 .replace('{netloc}', request.host)\
1272 1272 .replace('{scheme}', request.scheme)\
1273 1273 .replace('{size}', safe_str(size))
1274 1274 return tmpl
1275 1275 else:
1276 1276 return initials_gravatar(email_address, '', '', size=size)
1277 1277
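# Self-contained sketch of the template expansion above, with an invented
# custom gravatar template (the real template and host come from c.visual and
# the current request; md5_safe is assumed to be a plain md5 hexdigest):
import hashlib

def _expand_gravatar_tmpl_sketch(tmpl, email, host, scheme, size):
    return (tmpl
            .replace('{email}', email)
            .replace('{md5email}', hashlib.md5(email.lower()).hexdigest())
            .replace('{netloc}', host)
            .replace('{scheme}', scheme)
            .replace('{size}', str(size)))

# _expand_gravatar_tmpl_sketch(
#     '{scheme}://avatars.example.com/{md5email}?s={size}',
#     'someone@example.com', 'code.example.com', 'https', 30)
# -> 'https://avatars.example.com/<md5 of the address>?s=30'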
1278 1278
1279 1279 class Page(_Page):
1280 1280 """
1281 1281 Custom pager to match rendering style with paginator
1282 1282 """
1283 1283
1284 1284 def _get_pos(self, cur_page, max_page, items):
1285 1285 edge = (items / 2) + 1
1286 1286 if (cur_page <= edge):
1287 1287 radius = max(items / 2, items - cur_page)
1288 1288 elif (max_page - cur_page) < edge:
1289 1289 radius = (items - 1) - (max_page - cur_page)
1290 1290 else:
1291 1291 radius = items / 2
1292 1292
1293 1293 left = max(1, (cur_page - (radius)))
1294 1294 right = min(max_page, cur_page + (radius))
1295 1295 return left, cur_page, right
1296 1296
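    # Worked example for the '1 .. 5 6 [7] 8 9 .. 12' case described in
    # _range() below (page 7 of 12 with radius 2, i.e. items=5):
    #   edge = 5/2 + 1 = 3; neither boundary branch applies, so radius = 5/2 = 2
    #   left = max(1, 7 - 2) = 5, right = min(12, 7 + 2) = 9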
1297 1297 def _range(self, regexp_match):
1298 1298 """
1299 1299 Return range of linked pages (e.g. '1 2 [3] 4 5 6 7 8').
1300 1300
1301 1301 Arguments:
1302 1302
1303 1303 regexp_match
1304 1304 A "re" (regular expressions) match object containing the
1305 1305 radius of linked pages around the current page in
1306 1306 regexp_match.group(1) as a string
1307 1307
1308 1308 This function is supposed to be called as a callable in
1309 1309 re.sub.
1310 1310
1311 1311 """
1312 1312 radius = int(regexp_match.group(1))
1313 1313
1314 1314 # Compute the first and last page number within the radius
1315 1315 # e.g. '1 .. 5 6 [7] 8 9 .. 12'
1316 1316 # -> leftmost_page = 5
1317 1317 # -> rightmost_page = 9
1318 1318 leftmost_page, _cur, rightmost_page = self._get_pos(self.page,
1319 1319 self.last_page,
1320 1320 (radius * 2) + 1)
1321 1321 nav_items = []
1322 1322
1323 1323 # Create a link to the first page (unless we are on the first page
1324 1324 # or there would be no need to insert '..' spacers)
1325 1325 if self.page != self.first_page and self.first_page < leftmost_page:
1326 1326 nav_items.append(self._pagerlink(self.first_page, self.first_page))
1327 1327
1328 1328 # Insert dots if there are pages between the first page
1329 1329 # and the currently displayed page range
1330 1330 if leftmost_page - self.first_page > 1:
1331 1331 # Wrap it in a SPAN tag if dotdot_attr is set
1332 1332 text = '..'
1333 1333 if self.dotdot_attr:
1334 1334 text = HTML.span(c=text, **self.dotdot_attr)
1335 1335 nav_items.append(text)
1336 1336
1337 1337 for thispage in xrange(leftmost_page, rightmost_page + 1):
1338 1338 # Highlight the current page number and do not use a link
1339 1339 if thispage == self.page:
1340 1340 text = '%s' % (thispage,)
1341 1341 # Wrap it in a SPAN tag if curpage_attr is set
1342 1342 if self.curpage_attr:
1343 1343 text = HTML.span(c=text, **self.curpage_attr)
1344 1344 nav_items.append(text)
1345 1345 # Otherwise create just a link to that page
1346 1346 else:
1347 1347 text = '%s' % (thispage,)
1348 1348 nav_items.append(self._pagerlink(thispage, text))
1349 1349
1350 1350 # Insert dots if there are pages between the displayed
1351 1351 # page numbers and the end of the page range
1352 1352 if self.last_page - rightmost_page > 1:
1353 1353 text = '..'
1354 1354 # Wrap it in a SPAN tag if dotdot_attr is set
1355 1355 if self.dotdot_attr:
1356 1356 text = HTML.span(c=text, **self.dotdot_attr)
1357 1357 nav_items.append(text)
1358 1358
1359 1359 # Create a link to the very last page (unless we are on the last
1360 1360 # page or there would be no need to insert '..' spacers)
1361 1361 if self.page != self.last_page and rightmost_page < self.last_page:
1362 1362 nav_items.append(self._pagerlink(self.last_page, self.last_page))
1363 1363
1364 1364 ## prerender links
1365 1365 #_page_link = url.current()
1366 1366 #nav_items.append(literal('<link rel="prerender" href="%s?page=%s">' % (_page_link, str(int(self.page)+1))))
1367 1367 #nav_items.append(literal('<link rel="prefetch" href="%s?page=%s">' % (_page_link, str(int(self.page)+1))))
1368 1368 return self.separator.join(nav_items)
1369 1369
1370 1370 def pager(self, format='~2~', page_param='page', partial_param='partial',
1371 1371 show_if_single_page=False, separator=' ', onclick=None,
1372 1372 symbol_first='<<', symbol_last='>>',
1373 1373 symbol_previous='<', symbol_next='>',
1374 1374 link_attr={'class': 'pager_link', 'rel': 'prerender'},
1375 1375 curpage_attr={'class': 'pager_curpage'},
1376 1376 dotdot_attr={'class': 'pager_dotdot'}, **kwargs):
1377 1377
1378 1378 self.curpage_attr = curpage_attr
1379 1379 self.separator = separator
1380 1380 self.pager_kwargs = kwargs
1381 1381 self.page_param = page_param
1382 1382 self.partial_param = partial_param
1383 1383 self.onclick = onclick
1384 1384 self.link_attr = link_attr
1385 1385 self.dotdot_attr = dotdot_attr
1386 1386
1387 1387 # Don't show navigator if there is no more than one page
1388 1388 if self.page_count == 0 or (self.page_count == 1 and not show_if_single_page):
1389 1389 return ''
1390 1390
1391 1391 from string import Template
1392 1392 # Replace ~...~ in token format by range of pages
1393 1393 result = re.sub(r'~(\d+)~', self._range, format)
1394 1394
1395 1395 # Interpolate '$' template variables
1396 1396 result = Template(result).safe_substitute({
1397 1397 'first_page': self.first_page,
1398 1398 'last_page': self.last_page,
1399 1399 'page': self.page,
1400 1400 'page_count': self.page_count,
1401 1401 'items_per_page': self.items_per_page,
1402 1402 'first_item': self.first_item,
1403 1403 'last_item': self.last_item,
1404 1404 'item_count': self.item_count,
1405 1405 'link_first': self.page > self.first_page and \
1406 1406 self._pagerlink(self.first_page, symbol_first) or '',
1407 1407 'link_last': self.page < self.last_page and \
1408 1408 self._pagerlink(self.last_page, symbol_last) or '',
1409 1409 'link_previous': self.previous_page and \
1410 1410 self._pagerlink(self.previous_page, symbol_previous) \
1411 1411 or HTML.span(symbol_previous, class_="pg-previous disabled"),
1412 1412 'link_next': self.next_page and \
1413 1413 self._pagerlink(self.next_page, symbol_next) \
1414 1414 or HTML.span(symbol_next, class_="pg-next disabled")
1415 1415 })
1416 1416
1417 1417 return literal(result)
1418 1418
1419 1419
1420 1420 #==============================================================================
1421 1421 # REPO PAGER, PAGER FOR REPOSITORY
1422 1422 #==============================================================================
1423 1423 class RepoPage(Page):
1424 1424
1425 1425 def __init__(self, collection, page=1, items_per_page=20,
1426 1426 item_count=None, url=None, **kwargs):
1427 1427
1428 1428 """Create a "RepoPage" instance. special pager for paging
1429 1429 repository
1430 1430 """
1431 1431 self._url_generator = url
1432 1432
1433 1433 # Save the kwargs class-wide so they can be used in the pager() method
1434 1434 self.kwargs = kwargs
1435 1435
1436 1436 # Save a reference to the collection
1437 1437 self.original_collection = collection
1438 1438
1439 1439 self.collection = collection
1440 1440
1441 1441 # The self.page is the number of the current page.
1442 1442 # The first page has the number 1!
1443 1443 try:
1444 1444 self.page = int(page) # make it int() if we get it as a string
1445 1445 except (ValueError, TypeError):
1446 1446 self.page = 1
1447 1447
1448 1448 self.items_per_page = items_per_page
1449 1449
1450 1450 # Unless the user tells us how many items the collections has
1451 1451 # we calculate that ourselves.
1452 1452 if item_count is not None:
1453 1453 self.item_count = item_count
1454 1454 else:
1455 1455 self.item_count = len(self.collection)
1456 1456
1457 1457 # Compute the number of the first and last available page
1458 1458 if self.item_count > 0:
1459 1459 self.first_page = 1
1460 1460 self.page_count = int(math.ceil(float(self.item_count) /
1461 1461 self.items_per_page))
1462 1462 self.last_page = self.first_page + self.page_count - 1
1463 1463
1464 1464 # Make sure that the requested page number is in the range of
1465 1465 # valid pages
1466 1466 if self.page > self.last_page:
1467 1467 self.page = self.last_page
1468 1468 elif self.page < self.first_page:
1469 1469 self.page = self.first_page
1470 1470
1471 1471 # Note: the number of items on this page can be less than
1472 1472 # items_per_page if the last page is not full
1473 1473 self.first_item = max(0, (self.item_count) - (self.page *
1474 1474 items_per_page))
1475 1475 self.last_item = ((self.item_count - 1) - items_per_page *
1476 1476 (self.page - 1))
1477 1477
1478 1478 self.items = list(self.collection[self.first_item:self.last_item + 1])
1479 1479
1480 1480 # Links to previous and next page
1481 1481 if self.page > self.first_page:
1482 1482 self.previous_page = self.page - 1
1483 1483 else:
1484 1484 self.previous_page = None
1485 1485
1486 1486 if self.page < self.last_page:
1487 1487 self.next_page = self.page + 1
1488 1488 else:
1489 1489 self.next_page = None
1490 1490
1491 1491 # No items available
1492 1492 else:
1493 1493 self.first_page = None
1494 1494 self.page_count = 0
1495 1495 self.last_page = None
1496 1496 self.first_item = None
1497 1497 self.last_item = None
1498 1498 self.previous_page = None
1499 1499 self.next_page = None
1500 1500 self.items = []
1501 1501
1502 1502 # This is a subclass of the 'list' type. Initialise the list now.
1503 1503 list.__init__(self, reversed(self.items))
1504 1504
1505 1505
1506 1506 def changed_tooltip(nodes):
1507 1507 """
1508 1508 Generates an html string for changed nodes on the commit page.
1509 1509 It limits the output to 30 entries.
1510 1510
1511 1511 :param nodes: LazyNodesGenerator
1512 1512 """
1513 1513 if nodes:
1514 1514 pref = ': <br/> '
1515 1515 suf = ''
1516 1516 if len(nodes) > 30:
1517 1517 suf = '<br/>' + _(' and %s more') % (len(nodes) - 30)
1518 1518 return literal(pref + '<br/> '.join([safe_unicode(x.path)
1519 1519 for x in nodes[:30]]) + suf)
1520 1520 else:
1521 1521 return ': ' + _('No Files')
1522 1522
1523 1523
1524 1524 def breadcrumb_repo_link(repo):
1525 1525 """
1526 1526 Makes a breadcrumbs path link to repo
1527 1527
1528 1528 ex::
1529 1529 group >> subgroup >> repo
1530 1530
1531 1531 :param repo: a Repository instance
1532 1532 """
1533 1533
1534 1534 path = [
1535 1535 link_to(group.name, url('repo_group_home', group_name=group.group_name))
1536 1536 for group in repo.groups_with_parents
1537 1537 ] + [
1538 1538 link_to(repo.just_name, url('summary_home', repo_name=repo.repo_name))
1539 1539 ]
1540 1540
1541 1541 return literal(' &raquo; '.join(path))
1542 1542
1543 1543
1544 1544 def format_byte_size_binary(file_size):
1545 1545 """
1546 1546 Formats file/folder sizes using binary (base-2) units.
1547 1547 """
1548 1548 formatted_size = format_byte_size(file_size, binary=True)
1549 1549 return formatted_size
1550 1550
1551 1551
1552 1552 def fancy_file_stats(stats):
1553 1553 """
1554 1554 Displays a fancy two-colored bar for the number of added/deleted
1555 1555 lines of code in a file
1556 1556
1557 1557 :param stats: two element list of added/deleted lines of code
1558 1558 """
1559 1559 from rhodecode.lib.diffs import NEW_FILENODE, DEL_FILENODE, \
1560 1560 MOD_FILENODE, RENAMED_FILENODE, CHMOD_FILENODE, BIN_FILENODE
1561 1561
1562 1562 def cgen(l_type, a_v, d_v):
1563 1563 mapping = {'tr': 'top-right-rounded-corner-mid',
1564 1564 'tl': 'top-left-rounded-corner-mid',
1565 1565 'br': 'bottom-right-rounded-corner-mid',
1566 1566 'bl': 'bottom-left-rounded-corner-mid'}
1567 1567 map_getter = lambda x: mapping[x]
1568 1568
1569 1569 if l_type == 'a' and d_v:
1570 1570 # case when both added and deleted are present
1571 1571 return ' '.join(map(map_getter, ['tl', 'bl']))
1572 1572
1573 1573 if l_type == 'a' and not d_v:
1574 1574 return ' '.join(map(map_getter, ['tr', 'br', 'tl', 'bl']))
1575 1575
1576 1576 if l_type == 'd' and a_v:
1577 1577 return ' '.join(map(map_getter, ['tr', 'br']))
1578 1578
1579 1579 if l_type == 'd' and not a_v:
1580 1580 return ' '.join(map(map_getter, ['tr', 'br', 'tl', 'bl']))
1581 1581
1582 1582 a, d = stats['added'], stats['deleted']
1583 1583 width = 100
1584 1584
1585 1585 if stats['binary']: # binary operations like chmod/rename etc
1586 1586 lbl = []
1587 1587 bin_op = 0 # undefined
1588 1588
1589 1589 # prefix with bin for binary files
1590 1590 if BIN_FILENODE in stats['ops']:
1591 1591 lbl += ['bin']
1592 1592
1593 1593 if NEW_FILENODE in stats['ops']:
1594 1594 lbl += [_('new file')]
1595 1595 bin_op = NEW_FILENODE
1596 1596 elif MOD_FILENODE in stats['ops']:
1597 1597 lbl += [_('mod')]
1598 1598 bin_op = MOD_FILENODE
1599 1599 elif DEL_FILENODE in stats['ops']:
1600 1600 lbl += [_('del')]
1601 1601 bin_op = DEL_FILENODE
1602 1602 elif RENAMED_FILENODE in stats['ops']:
1603 1603 lbl += [_('rename')]
1604 1604 bin_op = RENAMED_FILENODE
1605 1605
1606 1606 # chmod can go with other operations, so we add a + to lbl if needed
1607 1607 if CHMOD_FILENODE in stats['ops']:
1608 1608 lbl += [_('chmod')]
1609 1609 if bin_op == 0:
1610 1610 bin_op = CHMOD_FILENODE
1611 1611
1612 1612 lbl = '+'.join(lbl)
1613 1613 b_a = '<div class="bin bin%s %s" style="width:100%%">%s</div>' \
1614 1614 % (bin_op, cgen('a', a_v='', d_v=0), lbl)
1615 1615 b_d = '<div class="bin bin1" style="width:0%%"></div>'
1616 1616 return literal('<div style="width:%spx">%s%s</div>' % (width, b_a, b_d))
1617 1617
1618 1618 t = stats['added'] + stats['deleted']
1619 1619 unit = float(width) / (t or 1)
1620 1620
1621 1621 # needs at least 9% of the width to be visible, or 0 to be hidden
1622 1622 a_p = max(9, unit * a) if a > 0 else 0
1623 1623 d_p = max(9, unit * d) if d > 0 else 0
1624 1624 p_sum = a_p + d_p
1625 1625
1626 1626 if p_sum > width:
1627 1627 # adjust the percentages to sum to 100% since we clamped to the 9% minimum
1628 1628 if a_p > d_p:
1629 1629 a_p = a_p - (p_sum - width)
1630 1630 else:
1631 1631 d_p = d_p - (p_sum - width)
1632 1632
1633 1633 a_v = a if a > 0 else ''
1634 1634 d_v = d if d > 0 else ''
1635 1635
1636 1636 d_a = '<div class="added %s" style="width:%s%%">%s</div>' % (
1637 1637 cgen('a', a_v, d_v), a_p, a_v
1638 1638 )
1639 1639 d_d = '<div class="deleted %s" style="width:%s%%">%s</div>' % (
1640 1640 cgen('d', a_v, d_v), d_p, d_v
1641 1641 )
1642 1642 return literal('<div style="width:%spx">%s%s</div>' % (width, d_a, d_d))
1643 1643
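# Self-contained sketch of the width maths above: each side gets a share of
# the 100px bar proportional to its line count, but never below 9% when
# non-zero, and any overflow from that minimum is taken back from the larger
# side.
def _bar_widths_sketch(added, deleted, width=100):
    unit = float(width) / ((added + deleted) or 1)
    a_p = max(9, unit * added) if added > 0 else 0
    d_p = max(9, unit * deleted) if deleted > 0 else 0
    if a_p + d_p > width:
        if a_p > d_p:
            a_p -= (a_p + d_p) - width
        else:
            d_p -= (a_p + d_p) - width
    return a_p, d_p

# e.g. _bar_widths_sketch(1, 99) == (9, 91): the added sliver stays visible.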
1644 1644
1645 1645 def urlify_text(text_, safe=True):
1646 1646 """
1647 1647 Extract urls from text and make html links out of them
1648 1648
1649 1649 :param text_:
1650 1650 """
1651 1651
1652 1652 url_pat = re.compile(r'''(http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@#.&+]'''
1653 1653 '''|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)''')
1654 1654
1655 1655 def url_func(match_obj):
1656 1656 url_full = match_obj.groups()[0]
1657 1657 return '<a href="%(url)s">%(url)s</a>' % ({'url': url_full})
1658 1658 _newtext = url_pat.sub(url_func, text_)
1659 1659 if safe:
1660 1660 return literal(_newtext)
1661 1661 return _newtext
1662 1662
1663 1663
1664 1664 def urlify_commits(text_, repository):
1665 1665 """
1666 1666 Extract commit ids from text and make link from them
1667 1667
1668 1668 :param text_:
1669 1669 :param repository: repo name to build the URL with
1670 1670 """
1671 1671 from pylons import url # doh, we need to re-import url to mock it later
1672 1672 URL_PAT = re.compile(r'(^|\s)([0-9a-fA-F]{12,40})($|\s)')
1673 1673
1674 1674 def url_func(match_obj):
1675 1675 commit_id = match_obj.groups()[1]
1676 1676 pref = match_obj.groups()[0]
1677 1677 suf = match_obj.groups()[2]
1678 1678
1679 1679 tmpl = (
1680 1680 '%(pref)s<a class="%(cls)s" href="%(url)s">'
1681 1681 '%(commit_id)s</a>%(suf)s'
1682 1682 )
1683 1683 return tmpl % {
1684 1684 'pref': pref,
1685 1685 'cls': 'revision-link',
1686 1686 'url': url('changeset_home', repo_name=repository,
1687 1687 revision=commit_id, qualified=True),
1688 1688 'commit_id': commit_id,
1689 1689 'suf': suf
1690 1690 }
1691 1691
1692 1692 newtext = URL_PAT.sub(url_func, text_)
1693 1693
1694 1694 return newtext
1695 1695
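# Rough, self-contained illustration of the linking above, with a stand-in
# link builder (the real code uses url('changeset_home', ...)); the regex only
# picks up whitespace-delimited 12-40 character hex strings:
import re

def _link_hashes_sketch(text, make_link=lambda sha: '/example-repo/changeset/' + sha):
    pat = re.compile(r'(^|\s)([0-9a-fA-F]{12,40})($|\s)')
    return pat.sub(
        lambda m: '%s<a class="revision-link" href="%s">%s</a>%s' % (
            m.group(1), make_link(m.group(2)), m.group(2), m.group(3)),
        text)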
1696 1696
1697 1697 def _process_url_func(match_obj, repo_name, uid, entry,
1698 1698 return_raw_data=False):
1699 1699 pref = ''
1700 1700 if match_obj.group().startswith(' '):
1701 1701 pref = ' '
1702 1702
1703 1703 issue_id = ''.join(match_obj.groups())
1704 1704 tmpl = (
1705 1705 '%(pref)s<a class="%(cls)s" href="%(url)s">'
1706 1706 '%(issue-prefix)s%(id-repr)s'
1707 1707 '</a>')
1708 1708
1709 1709 (repo_name_cleaned,
1710 1710 parent_group_name) = RepoGroupModel().\
1711 1711 _get_group_name_and_parent(repo_name)
1712 1712
1713 1713 # variables replacement
1714 1714 named_vars = {
1715 1715 'id': issue_id,
1716 1716 'repo': repo_name,
1717 1717 'repo_name': repo_name_cleaned,
1718 1718 'group_name': parent_group_name
1719 1719 }
1720 1720 # named regex variables
1721 1721 named_vars.update(match_obj.groupdict())
1722 1722 _url = string.Template(entry['url']).safe_substitute(**named_vars)
1723 1723
1724 1724 data = {
1725 1725 'pref': pref,
1726 1726 'cls': 'issue-tracker-link',
1727 1727 'url': _url,
1728 1728 'id-repr': issue_id,
1729 1729 'issue-prefix': entry['pref'],
1730 1730 'serv': entry['url'],
1731 1731 }
1732 1732 if return_raw_data:
1733 1733 return {
1734 1734 'id': issue_id,
1735 1735 'url': _url
1736 1736 }
1737 1737 return tmpl % data
1738 1738
1739 1739
1740 1740 def process_patterns(text_string, repo_name, config=None):
1741 1741 repo = None
1742 1742 if repo_name:
1743 1743 # Retrieve the repo to avoid an invalid repo_name blowing up in
1744 1744 # IssueTrackerSettingsModel, while still passing the invalid name further down
1745 1745 repo = Repository.get_by_repo_name(repo_name, cache=True)
1746 1746
1747 1747 settings_model = IssueTrackerSettingsModel(repo=repo)
1748 1748 active_entries = settings_model.get_settings(cache=True)
1749 1749
1750 1750 issues_data = []
1751 1751 newtext = text_string
1752 1752 for uid, entry in active_entries.items():
1753 1753 log.debug('found issue tracker entry with uid %s' % (uid,))
1754 1754
1755 1755 if not (entry['pat'] and entry['url']):
1756 1756 log.debug('skipping due to missing data')
1757 1757 continue
1758 1758
1759 1759 log.debug('issue tracker entry: uid: `%s` PAT:%s URL:%s PREFIX:%s'
1760 1760 % (uid, entry['pat'], entry['url'], entry['pref']))
1761 1761
1762 1762 try:
1763 1763 pattern = re.compile(r'%s' % entry['pat'])
1764 1764 except re.error:
1765 1765 log.exception(
1766 1766 'issue tracker pattern: `%s` failed to compile',
1767 1767 entry['pat'])
1768 1768 continue
1769 1769
1770 1770 data_func = partial(
1771 1771 _process_url_func, repo_name=repo_name, entry=entry, uid=uid,
1772 1772 return_raw_data=True)
1773 1773
1774 1774 for match_obj in pattern.finditer(text_string):
1775 1775 issues_data.append(data_func(match_obj))
1776 1776
1777 1777 url_func = partial(
1778 1778 _process_url_func, repo_name=repo_name, entry=entry, uid=uid)
1779 1779
1780 1780 newtext = pattern.sub(url_func, newtext)
1781 1781 log.debug('processed prefix:uid `%s`' % (uid,))
1782 1782
1783 1783 return newtext, issues_data
1784 1784
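# Example of the data process_patterns() consumes (all values invented): an
# active entry maps a regex pattern to a string.Template URL, where ${id},
# ${repo}, ${repo_name}, ${group_name} and any named groups from the pattern
# are filled in by _process_url_func() above.
_EXAMPLE_ISSUE_TRACKER_ENTRY = {
    'pat': r'#(?P<issue_id>\d+)',
    'url': 'https://issues.example.com/${repo}/issue/${id}',
    'pref': '#',
}
# With such an entry, 'fixes #42' in a commit message for repo `my-repo` would
# be linked to https://issues.example.com/my-repo/issue/42.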
1785 1785
1786 1786 def urlify_commit_message(commit_text, repository=None):
1787 1787 """
1788 1788 Parses the given text message and makes proper links.
1789 1789 Issues are linked to the given issue server, and the rest become commit links.
1790 1790
1791 1791 :param commit_text:
1792 1792 :param repository:
1793 1793 """
1794 1794 from pylons import url # doh, we need to re-import url to mock it later
1795 1795
1796 1796 def escaper(string):
1797 1797 return string.replace('<', '&lt;').replace('>', '&gt;')
1798 1798
1799 1799 newtext = escaper(commit_text)
1800 1800
1801 1801 # extract http/https links and make them real urls
1802 1802 newtext = urlify_text(newtext, safe=False)
1803 1803
1804 1804 # urlify commits - extract commit ids and make link out of them, if we have
1805 1805 # the scope of repository present.
1806 1806 if repository:
1807 1807 newtext = urlify_commits(newtext, repository)
1808 1808
1809 1809 # process issue tracker patterns
1810 1810 newtext, issues = process_patterns(newtext, repository or '')
1811 1811
1812 1812 return literal(newtext)
1813 1813
1814 1814
1815 1815 def renderer_from_filename(filename, exclude=None):
1816 1816 """
1817 1817 choose a renderer based on filename
1818 1818 """
1819 1819
1820 1820 # ipython
1821 1821 for ext in ['*.ipynb']:
1822 1822 if fnmatch.fnmatch(filename, pat=ext):
1823 1823 return 'jupyter'
1824 1824
1825 1825 is_markup = MarkupRenderer.renderer_from_filename(filename, exclude=exclude)
1826 1826 if is_markup:
1827 1827 return is_markup
1828 1828 return None
1829 1829
1830 1830
1831 1831 def render(source, renderer='rst', mentions=False):
1832 1832 if renderer == 'rst':
1833 1833 return literal(
1834 1834 '<div class="rst-block">%s</div>' %
1835 1835 MarkupRenderer.rst(source, mentions=mentions))
1836 1836 elif renderer == 'markdown':
1837 1837 return literal(
1838 1838 '<div class="markdown-block">%s</div>' %
1839 1839 MarkupRenderer.markdown(source, flavored=True, mentions=mentions))
1840 1840 elif renderer == 'jupyter':
1841 1841 return literal(
1842 1842 '<div class="ipynb">%s</div>' %
1843 1843 MarkupRenderer.jupyter(source))
1844 1844
1845 1845 # None means just show the file-source
1846 1846 return None
1847 1847
1848 1848
1849 1849 def commit_status(repo, commit_id):
1850 1850 return ChangesetStatusModel().get_status(repo, commit_id)
1851 1851
1852 1852
1853 1853 def commit_status_lbl(commit_status):
1854 1854 return dict(ChangesetStatus.STATUSES).get(commit_status)
1855 1855
1856 1856
1857 1857 def commit_time(repo_name, commit_id):
1858 1858 repo = Repository.get_by_repo_name(repo_name)
1859 1859 commit = repo.get_commit(commit_id=commit_id)
1860 1860 return commit.date
1861 1861
1862 1862
1863 1863 def get_permission_name(key):
1864 1864 return dict(Permission.PERMS).get(key)
1865 1865
1866 1866
1867 1867 def journal_filter_help():
1868 1868 return _(
1869 1869 'Example filter terms:\n' +
1870 1870 ' repository:vcs\n' +
1871 1871 ' username:marcin\n' +
1872 1872 ' action:*push*\n' +
1873 1873 ' ip:127.0.0.1\n' +
1874 1874 ' date:20120101\n' +
1875 1875 ' date:[20120101100000 TO 20120102]\n' +
1876 1876 '\n' +
1877 1877 'Generate wildcards using \'*\' character:\n' +
1878 1878 ' "repository:vcs*" - search everything starting with \'vcs\'\n' +
1879 1879 ' "repository:*vcs*" - search for repository containing \'vcs\'\n' +
1880 1880 '\n' +
1881 1881 'Optional AND / OR operators in queries\n' +
1882 1882 ' "repository:vcs OR repository:test"\n' +
1883 1883 ' "username:test AND repository:test*"\n'
1884 1884 )
1885 1885
1886 1886
1887 1887 def not_mapped_error(repo_name):
1888 1888 flash(_('%s repository is not mapped to db; perhaps'
1889 1889 ' it was created or renamed from the filesystem.'
1890 1890 ' Please run the application again'
1891 1891 ' in order to rescan repositories') % repo_name, category='error')
1892 1892
1893 1893
1894 1894 def ip_range(ip_addr):
1895 1895 from rhodecode.model.db import UserIpMap
1896 1896 s, e = UserIpMap._get_ip_range(ip_addr)
1897 1897 return '%s - %s' % (s, e)
1898 1898
1899 1899
1900 1900 def form(url, method='post', needs_csrf_token=True, **attrs):
1901 1901 """Wrapper around webhelpers.tags.form to prevent CSRF attacks."""
1902 1902 if method.lower() != 'get' and needs_csrf_token:
1903 1903 raise Exception(
1904 1904 'Forms to POST/PUT/DELETE endpoints should have (in general) a ' +
1905 1905 'CSRF token. If the endpoint does not require such token you can ' +
1906 1906 'explicitly set the parameter needs_csrf_token to false.')
1907 1907
1908 1908 return wh_form(url, method=method, **attrs)
1909 1909
1910 1910
1911 1911 def secure_form(url, method="POST", multipart=False, **attrs):
1912 1912 """Start a form tag that points the action to an url. This
1913 1913 form tag will also include the hidden field containing
1914 1914 the auth token.
1915 1915
1916 1916 The url options should be given either as a string, or as a
1917 1917 ``url()`` function. The method for the form defaults to POST.
1918 1918
1919 1919 Options:
1920 1920
1921 1921 ``multipart``
1922 1922 If set to True, the enctype is set to "multipart/form-data".
1923 1923 ``method``
1924 1924 The method to use when submitting the form, usually either
1925 1925 "GET" or "POST". If "PUT", "DELETE", or another verb is used, a
1926 1926 hidden input with name _method is added to simulate the verb
1927 1927 over POST.
1928 1928
1929 1929 """
1930 1930 from webhelpers.pylonslib.secure_form import insecure_form
1931 1931 form = insecure_form(url, method, multipart, **attrs)
1932 1932 token = csrf_input()
1933 1933 return literal("%s\n%s" % (form, token))
1934 1934
1935 1935 def csrf_input():
1936 1936 return literal(
1937 1937 '<input type="hidden" id="{}" name="{}" value="{}">'.format(
1938 1938 csrf_token_key, csrf_token_key, get_csrf_token()))
1939 1939
1940 1940 def dropdownmenu(name, selected, options, enable_filter=False, **attrs):
1941 1941 select_html = select(name, selected, options, **attrs)
1942 1942 select2 = """
1943 1943 <script>
1944 1944 $(document).ready(function() {
1945 1945 $('#%s').select2({
1946 1946 containerCssClass: 'drop-menu',
1947 1947 dropdownCssClass: 'drop-menu-dropdown',
1948 1948 dropdownAutoWidth: true%s
1949 1949 });
1950 1950 });
1951 1951 </script>
1952 1952 """
1953 1953 filter_option = """,
1954 1954 minimumResultsForSearch: -1
1955 1955 """
1956 1956 input_id = attrs.get('id') or name
1957 1957 filter_enabled = "" if enable_filter else filter_option
1958 1958 select_script = literal(select2 % (input_id, filter_enabled))
1959 1959
1960 1960 return literal(select_html+select_script)
1961 1961
1962 1962
1963 1963 def get_visual_attr(tmpl_context_var, attr_name):
1964 1964 """
1965 1965 A safe way to get an attribute from the visual variable of the template context
1966 1966
1967 1967 :param tmpl_context_var: instance of tmpl_context, usually present as `c`
1968 1968 :param attr_name: name of the attribute we fetch from the c.visual
1969 1969 """
1970 1970 visual = getattr(tmpl_context_var, 'visual', None)
1971 1971 if not visual:
1972 1972 return
1973 1973 else:
1974 1974 return getattr(visual, attr_name, None)
1975 1975
1976 1976
1977 1977 def get_last_path_part(file_node):
1978 1978 if not file_node.path:
1979 1979 return u''
1980 1980
1981 1981 path = safe_unicode(file_node.path.split('/')[-1])
1982 1982 return u'../' + path
1983 1983
1984 1984
1985 1985 def route_path(*args, **kwds):
1986 1986 """
1987 1987 Wrapper around pyramids `route_path` function. It is used to generate
1988 1988 URLs from within pylons views or templates. This will be removed when
1989 1989 the pyramid migration is finished.
1990 1990 """
1991 1991 req = get_current_request()
1992 1992 return req.route_path(*args, **kwds)
1993 1993
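# For example, with the `user_profile` route this change introduces,
# route_path('user_profile', username='test_admin') resolves to
# '/_profiles/test_admin', which is what the updated gist tests below expect.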
1994 1994
1995 1995 def route_path_or_none(*args, **kwargs):
1996 1996 try:
1997 1997 return route_path(*args, **kwargs)
1998 1998 except KeyError:
1999 1999 return None
2000 2000
2001 2001
2002 2002 def static_url(*args, **kwds):
2003 2003 """
2004 2004 Wrapper around pyramids `static_url` function. It is used to generate
2005 2005 URLs from within pylons views or templates. This will be removed when
2006 2006 the pyramid migration is finished.
2007 2007 """
2008 2008 req = get_current_request()
2009 2009 return req.static_url(*args, **kwds)
2010 2010
2011 2011
2012 2012 def resource_path(*args, **kwds):
2013 2013 """
2014 2014 Wrapper around pyramids `resource_path` function. It is used to generate
2015 2015 URLs from within pylons views or templates. This will be removed when
2016 2016 the pyramid migration is finished.
2017 2017 """
2018 2018 req = get_current_request()
2019 2019 return req.resource_path(*args, **kwds)
@@ -1,27 +1,27 b''
1 1 ## -*- coding: utf-8 -*-
2 2 <%inherit file="base.mako"/>
3 3
4 4 <%def name="subject()" filter="n,trim">
5 5 RhodeCode new user registration: ${user.username}
6 6 </%def>
7 7
8 8 <%def name="body_plaintext()" filter="n,trim">
9 9
10 10 A new user `${user.username}` has registered on ${h.format_date(date)}
11 11
12 12 - Username: ${user.username}
13 13 - Full Name: ${user.firstname} ${user.lastname}
14 14 - Email: ${user.email}
15 - Profile link: ${h.url('user_profile', username=user.username, qualified=True)}
15 - Profile link: ${h.route_path('user_profile', username=user.username, qualified=True)}
16 16
17 17 ${self.plaintext_footer()}
18 18 </%def>
19 19
20 20 ## BODY GOES BELOW
21 21 <table style="text-align:left;vertical-align:middle;">
22 <tr><td colspan="2" style="width:100%;padding-bottom:15px;border-bottom:1px solid #dbd9da;"><h4><a href="${h.url('user_profile', username=user.username, qualified=True)}" style="color:#427cc9;text-decoration:none;cursor:pointer">${_('New user %(user)s has registered on %(date)s') % {'user': user.username, 'date': h.format_date(date)}}</a></h4></td></tr>
22 <tr><td colspan="2" style="width:100%;padding-bottom:15px;border-bottom:1px solid #dbd9da;"><h4><a href="${h.route_path('user_profile', username=user.username, qualified=True)}" style="color:#427cc9;text-decoration:none;cursor:pointer">${_('New user %(user)s has registered on %(date)s') % {'user': user.username, 'date': h.format_date(date)}}</a></h4></td></tr>
23 23 <tr><td style="padding-right:20px;padding-top:20px;">${_('Username')}</td><td style="line-height:1;padding-top:20px;"><img style="margin-bottom:-5px;text-align:left;border:1px solid #dbd9da" src="${h.gravatar_url(user.email, 16)}" height="16" width="16">&nbsp;${user.username}</td></tr>
24 24 <tr><td style="padding-right:20px;">${_('Full Name')}</td><td>${user.firstname} ${user.lastname}</td></tr>
25 25 <tr><td style="padding-right:20px;">${_('Email')}</td><td>${user.email}</td></tr>
26 <tr><td style="padding-right:20px;">${_('Profile')}</td><td><a href="${h.url('user_profile', username=user.username, qualified=True)}">${h.url('user_profile', username=user.username, qualified=True)}</a></td></tr>
26 <tr><td style="padding-right:20px;">${_('Profile')}</td><td><a href="${h.route_path('user_profile', username=user.username, qualified=True)}">${h.route_path('user_profile', username=user.username, qualified=True)}</a></td></tr>
27 27 </table> No newline at end of file
@@ -1,46 +1,46 b''
1 1 <%inherit file="/base/base.mako"/>
2 2
3 3 <%def name="title()">
4 4 ${c.user.username}
5 5 %if c.rhodecode_name:
6 6 &middot; ${h.branding(c.rhodecode_name)}
7 7 %endif
8 8 </%def>
9 9
10 10 <%def name="breadcrumbs_links()">
11 11 ${c.user.username}
12 12 </%def>
13 13
14 14 <%def name="menu_bar_nav()">
15 ${self.menu_items(active='admin')}
15 ${self.menu_items(active='my_account')}
16 16 </%def>
17 17
18 18 <%def name="main()">
19 19 <div class="box">
20 20 <div class="title">
21 21 ${self.breadcrumbs()}
22 22 </div>
23 23
24 24 <div class="sidebar-col-wrapper scw-small">
25 25 ##main
26 26 <div class="sidebar">
27 27 <ul class="nav nav-pills nav-stacked">
28 28 <li class="${'active' if c.active=='user_profile' else ''}">
29 <a href="${h.url('user_profile', username=c.user.username)}">${_('Profile')}</a></li>
29 <a href="${h.route_path('user_profile', username=c.user.username)}">${_('Profile')}</a></li>
30 30 ## These placeholders are here only for styling purposes. For every new item added to the list, you should remove one placeholder
31 31 <li class="placeholder"><a href="#" style="visibility: hidden;">placeholder</a></li>
32 32 <li class="placeholder"><a href="#" style="visibility: hidden;">placeholder</a></li>
33 33 <li class="placeholder"><a href="#" style="visibility: hidden;">placeholder</a></li>
34 34 <li class="placeholder"><a href="#" style="visibility: hidden;">placeholder</a></li>
35 35 <li class="placeholder"><a href="#" style="visibility: hidden;">placeholder</a></li>
36 36 <li class="placeholder"><a href="#" style="visibility: hidden;">placeholder</a></li>
37 37 </ul>
38 38 </div>
39 39
40 40 <div class="main-content-full-width">
41 41 <%include file="/users/${c.active}.mako"/>
42 42 </div>
43 43 </div>
44 44 </div>
45 45
46 46 </%def>
@@ -1,359 +1,359 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import mock
22 22 import pytest
23 23
24 24 from rhodecode.lib import helpers as h
25 25 from rhodecode.model.db import User, Gist
26 26 from rhodecode.model.gist import GistModel
27 27 from rhodecode.model.meta import Session
28 28 from rhodecode.tests import (
29 29 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS,
30 30 TestController, assert_session_flash, url)
31 31 from rhodecode.tests.utils import AssertResponse
32 32
33 33
34 34 class GistUtility(object):
35 35
36 36 def __init__(self):
37 37 self._gist_ids = []
38 38
39 39 def __call__(
40 40 self, f_name, content='some gist', lifetime=-1,
41 41 description='gist-desc', gist_type='public',
42 42 acl_level=Gist.GIST_PUBLIC, owner=TEST_USER_ADMIN_LOGIN):
43 43 gist_mapping = {
44 44 f_name: {'content': content}
45 45 }
46 46 user = User.get_by_username(owner)
47 47 gist = GistModel().create(
48 48 description, owner=user, gist_mapping=gist_mapping,
49 49 gist_type=gist_type, lifetime=lifetime, gist_acl_level=acl_level)
50 50 Session().commit()
51 51 self._gist_ids.append(gist.gist_id)
52 52 return gist
53 53
54 54 def cleanup(self):
55 55 for gist_id in self._gist_ids:
56 56 gist = Gist.get(gist_id)
57 57 if gist:
58 58 Session().delete(gist)
59 59
60 60 Session().commit()
61 61
62 62
63 63 @pytest.fixture
64 64 def create_gist(request):
65 65 gist_utility = GistUtility()
66 66 request.addfinalizer(gist_utility.cleanup)
67 67 return gist_utility
68 68
69 69
70 70 class TestGistsController(TestController):
71 71
72 72 def test_index_empty(self, create_gist):
73 73 self.log_user()
74 74 response = self.app.get(url('gists'))
75 75 response.mustcontain('data: [],')
76 76
77 77 def test_index(self, create_gist):
78 78 self.log_user()
79 79 g1 = create_gist('gist1')
80 80 g2 = create_gist('gist2', lifetime=1400)
81 81 g3 = create_gist('gist3', description='gist3-desc')
82 82 g4 = create_gist('gist4', gist_type='private').gist_access_id
83 83 response = self.app.get(url('gists'))
84 84
85 85 response.mustcontain('gist: %s' % g1.gist_access_id)
86 86 response.mustcontain('gist: %s' % g2.gist_access_id)
87 87 response.mustcontain('gist: %s' % g3.gist_access_id)
88 88 response.mustcontain('gist3-desc')
89 89 response.mustcontain(no=['gist: %s' % g4])
90 90
91 91 # Expiration information should be visible
92 92 expires_tag = '%s' % h.age_component(
93 93 h.time_to_utcdatetime(g2.gist_expires))
94 94 response.mustcontain(expires_tag.replace('"', '\\"'))
95 95
96 96 def test_index_private_gists(self, create_gist):
97 97 self.log_user()
98 98 gist = create_gist('gist5', gist_type='private')
99 99 response = self.app.get(url('gists', private=1))
100 100
101 101 # and privates
102 102 response.mustcontain('gist: %s' % gist.gist_access_id)
103 103
104 104 def test_index_show_all(self, create_gist):
105 105 self.log_user()
106 106 create_gist('gist1')
107 107 create_gist('gist2', lifetime=1400)
108 108 create_gist('gist3', description='gist3-desc')
109 109 create_gist('gist4', gist_type='private')
110 110
111 111 response = self.app.get(url('gists', all=1))
112 112
113 113 assert len(GistModel.get_all()) == 4
114 114 # and privates
115 115 for gist in GistModel.get_all():
116 116 response.mustcontain('gist: %s' % gist.gist_access_id)
117 117
118 118 def test_index_show_all_hidden_from_regular(self, create_gist):
119 119 self.log_user(TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
120 120 create_gist('gist2', gist_type='private')
121 121 create_gist('gist3', gist_type='private')
122 122 create_gist('gist4', gist_type='private')
123 123
124 124 response = self.app.get(url('gists', all=1))
125 125
126 126 assert len(GistModel.get_all()) == 3
127 127 # since we don't have access to private in this view, we
128 128 # should see nothing
129 129 for gist in GistModel.get_all():
130 130 response.mustcontain(no=['gist: %s' % gist.gist_access_id])
131 131
132 132 def test_create(self):
133 133 self.log_user()
134 134 response = self.app.post(
135 135 url('gists'),
136 136 params={'lifetime': -1,
137 137 'content': 'gist test',
138 138 'filename': 'foo',
139 139 'public': 'public',
140 140 'gist_acl_level': Gist.ACL_LEVEL_PUBLIC,
141 141 'csrf_token': self.csrf_token},
142 142 status=302)
143 143 response = response.follow()
144 144 response.mustcontain('added file: foo')
145 145 response.mustcontain('gist test')
146 146
147 147 def test_create_with_path_with_dirs(self):
148 148 self.log_user()
149 149 response = self.app.post(
150 150 url('gists'),
151 151 params={'lifetime': -1,
152 152 'content': 'gist test',
153 153 'filename': '/home/foo',
154 154 'public': 'public',
155 155 'gist_acl_level': Gist.ACL_LEVEL_PUBLIC,
156 156 'csrf_token': self.csrf_token},
157 157 status=200)
158 158 response.mustcontain('Filename /home/foo cannot be inside a directory')
159 159
160 160 def test_access_expired_gist(self, create_gist):
161 161 self.log_user()
162 162 gist = create_gist('never-see-me')
163 163 gist.gist_expires = 0 # 1970
164 164 Session().add(gist)
165 165 Session().commit()
166 166
167 167 self.app.get(url('gist', gist_id=gist.gist_access_id), status=404)
168 168
169 169 def test_create_private(self):
170 170 self.log_user()
171 171 response = self.app.post(
172 172 url('gists'),
173 173 params={'lifetime': -1,
174 174 'content': 'private gist test',
175 175 'filename': 'private-foo',
176 176 'private': 'private',
177 177 'gist_acl_level': Gist.ACL_LEVEL_PUBLIC,
178 178 'csrf_token': self.csrf_token},
179 179 status=302)
180 180 response = response.follow()
181 181 response.mustcontain('added file: private-foo<')
182 182 response.mustcontain('private gist test')
183 183 response.mustcontain('Private Gist')
184 184 # Make sure private gists are not indexed by robots
185 185 response.mustcontain(
186 186 '<meta name="robots" content="noindex, nofollow">')
187 187
188 188 def test_create_private_acl_private(self):
189 189 self.log_user()
190 190 response = self.app.post(
191 191 url('gists'),
192 192 params={'lifetime': -1,
193 193 'content': 'private gist test',
194 194 'filename': 'private-foo',
195 195 'private': 'private',
196 196 'gist_acl_level': Gist.ACL_LEVEL_PRIVATE,
197 197 'csrf_token': self.csrf_token},
198 198 status=302)
199 199 response = response.follow()
200 200 response.mustcontain('added file: private-foo<')
201 201 response.mustcontain('private gist test')
202 202 response.mustcontain('Private Gist')
203 203 # Make sure private gists are not indexed by robots
204 204 response.mustcontain(
205 205 '<meta name="robots" content="noindex, nofollow">')
206 206
207 207 def test_create_with_description(self):
208 208 self.log_user()
209 209 response = self.app.post(
210 210 url('gists'),
211 211 params={'lifetime': -1,
212 212 'content': 'gist test',
213 213 'filename': 'foo-desc',
214 214 'description': 'gist-desc',
215 215 'public': 'public',
216 216 'gist_acl_level': Gist.ACL_LEVEL_PUBLIC,
217 217 'csrf_token': self.csrf_token},
218 218 status=302)
219 219 response = response.follow()
220 220 response.mustcontain('added file: foo-desc')
221 221 response.mustcontain('gist test')
222 222 response.mustcontain('gist-desc')
223 223
224 224 def test_create_public_with_anonymous_access(self):
225 225 self.log_user()
226 226 params = {
227 227 'lifetime': -1,
228 228 'content': 'gist test',
229 229 'filename': 'foo-desc',
230 230 'description': 'gist-desc',
231 231 'public': 'public',
232 232 'gist_acl_level': Gist.ACL_LEVEL_PUBLIC,
233 233 'csrf_token': self.csrf_token
234 234 }
235 235 response = self.app.post(url('gists'), params=params, status=302)
236 236 self.logout_user()
237 237 response = response.follow()
238 238 response.mustcontain('added file: foo-desc')
239 239 response.mustcontain('gist test')
240 240 response.mustcontain('gist-desc')
241 241
242 242 def test_new(self):
243 243 self.log_user()
244 244 self.app.get(url('new_gist'))
245 245
246 246 def test_delete(self, create_gist):
247 247 self.log_user()
248 248 gist = create_gist('delete-me')
249 249 response = self.app.post(
250 250 url('gist', gist_id=gist.gist_id),
251 251 params={'_method': 'delete', 'csrf_token': self.csrf_token})
252 252 assert_session_flash(response, 'Deleted gist %s' % gist.gist_id)
253 253
254 254 def test_delete_normal_user_his_gist(self, create_gist):
255 255 self.log_user(TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
256 256 gist = create_gist('delete-me', owner=TEST_USER_REGULAR_LOGIN)
257 257 response = self.app.post(
258 258 url('gist', gist_id=gist.gist_id),
259 259 params={'_method': 'delete', 'csrf_token': self.csrf_token})
260 260 assert_session_flash(response, 'Deleted gist %s' % gist.gist_id)
261 261
262 262 def test_delete_normal_user_not_his_own_gist(self, create_gist):
263 263 self.log_user(TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
264 264 gist = create_gist('delete-me')
265 265 self.app.post(
266 266 url('gist', gist_id=gist.gist_id),
267 267 params={'_method': 'delete', 'csrf_token': self.csrf_token},
268 268 status=403)
269 269
270 270 def test_show(self, create_gist):
271 271 gist = create_gist('gist-show-me')
272 272 response = self.app.get(url('gist', gist_id=gist.gist_access_id))
273 273
274 274 response.mustcontain('added file: gist-show-me<')
275 275
276 276 assert_response = AssertResponse(response)
277 277 assert_response.element_equals_to(
278 278 'div.rc-user span.user',
279 '<span class="user"> %s</span>' % h.link_to_user('test_admin'))
279 '<span class="user"> <a href="/_profiles/test_admin">test_admin</a></span>')
280 280
281 281 response.mustcontain('gist-desc')
282 282
283 283 def test_show_without_hg(self, create_gist):
284 284 with mock.patch(
285 285 'rhodecode.lib.vcs.settings.ALIASES', ['git']):
286 286 gist = create_gist('gist-show-me-again')
287 287 self.app.get(url('gist', gist_id=gist.gist_access_id), status=200)
288 288
289 289 def test_show_acl_private(self, create_gist):
290 290 gist = create_gist('gist-show-me-only-when-im-logged-in',
291 291 acl_level=Gist.ACL_LEVEL_PRIVATE)
292 292 self.app.get(url('gist', gist_id=gist.gist_access_id), status=404)
293 293
294 294 # now that we log in, we should see this gist
295 295 self.log_user()
296 296 response = self.app.get(url('gist', gist_id=gist.gist_access_id))
297 297 response.mustcontain('added file: gist-show-me-only-when-im-logged-in')
298 298
299 299 assert_response = AssertResponse(response)
300 300 assert_response.element_equals_to(
301 301 'div.rc-user span.user',
302 '<span class="user"> %s</span>' % h.link_to_user('test_admin'))
302 '<span class="user"> <a href="/_profiles/test_admin">test_admin</a></span>')
303 303 response.mustcontain('gist-desc')
304 304
305 305 def test_show_as_raw(self, create_gist):
306 306 gist = create_gist('gist-show-me', content='GIST CONTENT')
307 307 response = self.app.get(url('formatted_gist',
308 308 gist_id=gist.gist_access_id, format='raw'))
309 309 assert response.body == 'GIST CONTENT'
310 310
311 311 def test_show_as_raw_individual_file(self, create_gist):
312 312 gist = create_gist('gist-show-me-raw', content='GIST BODY')
313 313 response = self.app.get(url('formatted_gist_file',
314 314 gist_id=gist.gist_access_id, format='raw',
315 315 revision='tip', f_path='gist-show-me-raw'))
316 316 assert response.body == 'GIST BODY'
317 317
318 318 def test_edit_page(self, create_gist):
319 319 self.log_user()
320 320 gist = create_gist('gist-for-edit', content='GIST EDIT BODY')
321 321 response = self.app.get(url('edit_gist', gist_id=gist.gist_access_id))
322 322 response.mustcontain('GIST EDIT BODY')
323 323
324 324 def test_edit_page_non_logged_user(self, create_gist):
325 325 gist = create_gist('gist-for-edit', content='GIST EDIT BODY')
326 326 self.app.get(url('edit_gist', gist_id=gist.gist_access_id), status=302)
327 327
328 328 def test_edit_normal_user_his_gist(self, create_gist):
329 329 self.log_user(TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
330 330 gist = create_gist('gist-for-edit', owner=TEST_USER_REGULAR_LOGIN)
331 331 self.app.get(url('edit_gist', gist_id=gist.gist_access_id), status=200)
332 332
333 333 def test_edit_normal_user_not_his_own_gist(self, create_gist):
334 334 self.log_user(TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
335 335 gist = create_gist('delete-me')
336 336 self.app.get(url('edit_gist', gist_id=gist.gist_access_id), status=403)
337 337
338 338 def test_user_first_name_is_escaped(self, user_util, create_gist):
339 339 xss_atack_string = '"><script>alert(\'First Name\')</script>'
340 340 xss_escaped_string = (
341 341 '&#34;&gt;&lt;script&gt;alert(&#39;First Name&#39;)&lt;/script'
342 342 '&gt;')
343 343 password = 'test'
344 344 user = user_util.create_user(
345 345 firstname=xss_atack_string, password=password)
346 346 create_gist('gist', gist_type='public', owner=user.username)
347 347 response = self.app.get(url('gists'))
348 348 response.mustcontain(xss_escaped_string)
349 349
350 350 def test_user_last_name_is_escaped(self, user_util, create_gist):
351 351 xss_atack_string = '"><script>alert(\'Last Name\')</script>'
352 352 xss_escaped_string = (
353 353 '&#34;&gt;&lt;script&gt;alert(&#39;Last Name&#39;)&lt;/script&gt;')
354 354 password = 'test'
355 355 user = user_util.create_user(
356 356 lastname=xss_atack_string, password=password)
357 357 create_gist('gist', gist_type='public', owner=user.username)
358 358 response = self.app.get(url('gists'))
359 359 response.mustcontain(xss_escaped_string)
1 NO CONTENT: file was removed
1 NO CONTENT: file was removed
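The updated assertions above expect the gist owner to be rendered as a plain profile link of the form `/_profiles/<username>` instead of markup built with `h.link_to_user()`. As a rough, hedged sketch (not the project's actual implementation), a Pyramid route/view pair serving such a profile page and a helper producing that href could look like the following; the view name, renderer, and `profile_link` helper are hypothetical, and the route name `user_profile` with pattern `/_profiles/{username}` is assumed:

```python
from pyramid.view import view_config


@view_config(route_name='user_profile', renderer='json')
def user_profile_view(request):
    # ``username`` is taken from the assumed ``/_profiles/{username}`` pattern.
    username = request.matchdict['username']
    return {'username': username}


def profile_link(request, username):
    # Hypothetical helper: builds the same href the tests assert on,
    # e.g. '/_profiles/test_admin'.
    return request.route_path('user_profile', username=username)
```

Under these assumptions, a template that previously called `h.link_to_user(owner)` would instead emit `<a href="${profile_link(request, owner)}">${owner}</a>`, which matches the expected string in the assertions above.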