diffs: move diffmode to template global context and add it to session...
Author: dan
Revision: r1137:14bd0daf (branch: default)
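The first hunk below touches the base controller module: attach_context_attributes() gains a block that resolves the diff view mode, exposes it on the template context as c.diffmode, and persists it in the session. As a quick orientation, here is a minimal standalone sketch of that resolution order, assuming plain dicts in place of request.GET and request.session; the function name is illustrative and not part of the RhodeCode API.

# Minimal sketch (not RhodeCode API) of the resolution order implemented by the
# new block in attach_context_attributes(). Plain dicts stand in for
# request.GET and request.session.

DEFAULT_DIFFMODE = 'sideside'


def resolve_diffmode(get_params, session):
    diffmode = DEFAULT_DIFFMODE

    # an explicit ?diffmode=unified query parameter wins ...
    if get_params.get('diffmode'):
        if get_params['diffmode'] == 'unified':
            diffmode = 'unified'
    # ... otherwise reuse whatever the session remembers
    elif session.get('diffmode'):
        diffmode = session['diffmode']

    # write the effective mode back so later requests keep it
    if session.get('diffmode') != diffmode:
        session['diffmode'] = diffmode

    return diffmode


# precedence: query parameter > session > 'sideside' default
session = {}
assert resolve_diffmode({}, session) == 'sideside'
assert resolve_diffmode({'diffmode': 'unified'}, session) == 'unified'
assert resolve_diffmode({}, session) == 'unified'  # remembered from the session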
@@ -1,592 +1,604 @[email protected]
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
21 """
22 The base Controller API
23 Provides the BaseController class for subclassing. And usage in different
24 controllers
25 """
26
27 import logging
28 import socket
29
30 import ipaddress
31 import pyramid.threadlocal
32
33 from paste.auth.basic import AuthBasicAuthenticator
34 from paste.httpexceptions import HTTPUnauthorized, HTTPForbidden, get_exception
35 from paste.httpheaders import WWW_AUTHENTICATE, AUTHORIZATION
36 from pylons import config, tmpl_context as c, request, session, url
37 from pylons.controllers import WSGIController
38 from pylons.controllers.util import redirect
39 from pylons.i18n import translation
40 # marcink: don't remove this import
41 from pylons.templating import render_mako as render  # noqa
42 from pylons.i18n.translation import _
43 from webob.exc import HTTPFound
44
45
46 import rhodecode
47 from rhodecode.authentication.base import VCS_TYPE
48 from rhodecode.lib import auth, utils2
49 from rhodecode.lib import helpers as h
50 from rhodecode.lib.auth import AuthUser, CookieStoreWrapper
51 from rhodecode.lib.exceptions import UserCreationError
52 from rhodecode.lib.utils import (
53     get_repo_slug, set_rhodecode_config, password_changed,
54     get_enabled_hook_classes)
55 from rhodecode.lib.utils2 import (
56     str2bool, safe_unicode, AttributeDict, safe_int, md5, aslist)
57 from rhodecode.lib.vcs.exceptions import RepositoryRequirementError
58 from rhodecode.model import meta
59 from rhodecode.model.db import Repository, User
60 from rhodecode.model.notification import NotificationModel
61 from rhodecode.model.scm import ScmModel
62 from rhodecode.model.settings import VcsSettingsModel, SettingsModel
63
64
65 log = logging.getLogger(__name__)
66
67
68 def _filter_proxy(ip):
69     """
70     Passed in IP addresses in HEADERS can be in a special format of multiple
71     ips. Those comma separated IPs are passed from various proxies in the
72     chain of request processing. The left-most being the original client.
73     We only care about the first IP which came from the org. client.
74
75     :param ip: ip string from headers
76     """
77     if ',' in ip:
78         _ips = ip.split(',')
79         _first_ip = _ips[0].strip()
80         log.debug('Got multiple IPs %s, using %s', ','.join(_ips), _first_ip)
81         return _first_ip
82     return ip
83
84
85 def _filter_port(ip):
86     """
87     Removes a port from ip, there are 4 main cases to handle here.
88     - ipv4 eg. 127.0.0.1
89     - ipv6 eg. ::1
90     - ipv4+port eg. 127.0.0.1:8080
91     - ipv6+port eg. [::1]:8080
92
93     :param ip:
94     """
95     def is_ipv6(ip_addr):
96         if hasattr(socket, 'inet_pton'):
97             try:
98                 socket.inet_pton(socket.AF_INET6, ip_addr)
99             except socket.error:
100                 return False
101         else:
102             # fallback to ipaddress
103             try:
104                 ipaddress.IPv6Address(ip_addr)
105             except Exception:
106                 return False
107         return True
108
109     if ':' not in ip:  # must be ipv4 pure ip
110         return ip
111
112     if '[' in ip and ']' in ip:  # ipv6 with port
113         return ip.split(']')[0][1:].lower()
114
115     # must be ipv6 or ipv4 with port
116     if is_ipv6(ip):
117         return ip
118     else:
119         ip, _port = ip.split(':')[:2]  # means ipv4+port
120         return ip
121
122
123 def get_ip_addr(environ):
124     proxy_key = 'HTTP_X_REAL_IP'
125     proxy_key2 = 'HTTP_X_FORWARDED_FOR'
126     def_key = 'REMOTE_ADDR'
127     _filters = lambda x: _filter_port(_filter_proxy(x))
128
129     ip = environ.get(proxy_key)
130     if ip:
131         return _filters(ip)
132
133     ip = environ.get(proxy_key2)
134     if ip:
135         return _filters(ip)
136
137     ip = environ.get(def_key, '0.0.0.0')
138     return _filters(ip)
139
140
141 def get_server_ip_addr(environ, log_errors=True):
142     hostname = environ.get('SERVER_NAME')
143     try:
144         return socket.gethostbyname(hostname)
145     except Exception as e:
146         if log_errors:
147             # in some cases this lookup is not possible, and we don't want to
148             # make it an exception in logs
149             log.exception('Could not retrieve server ip address: %s', e)
150         return hostname
151
152
153 def get_server_port(environ):
154     return environ.get('SERVER_PORT')
155
156
157 def get_access_path(environ):
158     path = environ.get('PATH_INFO')
159     org_req = environ.get('pylons.original_request')
160     if org_req:
161         path = org_req.environ.get('PATH_INFO')
162     return path
163
164
165 def vcs_operation_context(
166         environ, repo_name, username, action, scm, check_locking=True,
167         is_shadow_repo=False):
168     """
169     Generate the context for a vcs operation, e.g. push or pull.
170
171     This context is passed over the layers so that hooks triggered by the
172     vcs operation know details like the user, the user's IP address etc.
173
174     :param check_locking: Allows to switch of the computation of the locking
175         data. This serves mainly the need of the simplevcs middleware to be
176         able to disable this for certain operations.
177
178     """
179     # Tri-state value: False: unlock, None: nothing, True: lock
180     make_lock = None
181     locked_by = [None, None, None]
182     is_anonymous = username == User.DEFAULT_USER
183     if not is_anonymous and check_locking:
184         log.debug('Checking locking on repository "%s"', repo_name)
185         user = User.get_by_username(username)
186         repo = Repository.get_by_repo_name(repo_name)
187         make_lock, __, locked_by = repo.get_locking_state(
188             action, user.user_id)
189
190     settings_model = VcsSettingsModel(repo=repo_name)
191     ui_settings = settings_model.get_ui_settings()
192
193     extras = {
194         'ip': get_ip_addr(environ),
195         'username': username,
196         'action': action,
197         'repository': repo_name,
198         'scm': scm,
199         'config': rhodecode.CONFIG['__file__'],
200         'make_lock': make_lock,
201         'locked_by': locked_by,
202         'server_url': utils2.get_server_url(environ),
203         'hooks': get_enabled_hook_classes(ui_settings),
204         'is_shadow_repo': is_shadow_repo,
205     }
206     return extras
207
208
209 class BasicAuth(AuthBasicAuthenticator):
210
211     def __init__(self, realm, authfunc, registry, auth_http_code=None,
212                  initial_call_detection=False):
213         self.realm = realm
214         self.initial_call = initial_call_detection
215         self.authfunc = authfunc
216         self.registry = registry
217         self._rc_auth_http_code = auth_http_code
218
219     def _get_response_from_code(self, http_code):
220         try:
221             return get_exception(safe_int(http_code))
222         except Exception:
223             log.exception('Failed to fetch response for code %s' % http_code)
224             return HTTPForbidden
225
226     def build_authentication(self):
227         head = WWW_AUTHENTICATE.tuples('Basic realm="%s"' % self.realm)
228         if self._rc_auth_http_code and not self.initial_call:
229             # return alternative HTTP code if alternative http return code
230             # is specified in RhodeCode config, but ONLY if it's not the
231             # FIRST call
232             custom_response_klass = self._get_response_from_code(
233                 self._rc_auth_http_code)
234             return custom_response_klass(headers=head)
235         return HTTPUnauthorized(headers=head)
236
237     def authenticate(self, environ):
238         authorization = AUTHORIZATION(environ)
239         if not authorization:
240             return self.build_authentication()
241         (authmeth, auth) = authorization.split(' ', 1)
242         if 'basic' != authmeth.lower():
243             return self.build_authentication()
244         auth = auth.strip().decode('base64')
245         _parts = auth.split(':', 1)
246         if len(_parts) == 2:
247             username, password = _parts
248             if self.authfunc(
249                     username, password, environ, VCS_TYPE,
250                     registry=self.registry):
251                 return username
252             if username and password:
253                 # we mark that we actually executed authentication once, at
254                 # that point we can use the alternative auth code
255                 self.initial_call = False
256
257         return self.build_authentication()
258
259     __call__ = authenticate
260
261
262 def attach_context_attributes(context, request):
263     """
264     Attach variables into template context called `c`, please note that
265     request could be pylons or pyramid request in here.
266     """
267     rc_config = SettingsModel().get_all_settings(cache=True)
268
269     context.rhodecode_version = rhodecode.__version__
270     context.rhodecode_edition = config.get('rhodecode.edition')
271     # unique secret + version does not leak the version but keep consistency
272     context.rhodecode_version_hash = md5(
273         config.get('beaker.session.secret', '') +
274         rhodecode.__version__)[:8]
275
276     # Default language set for the incoming request
277     context.language = translation.get_lang()[0]
278
279     # Visual options
280     context.visual = AttributeDict({})
281
282     # DB stored Visual Items
283     context.visual.show_public_icon = str2bool(
284         rc_config.get('rhodecode_show_public_icon'))
285     context.visual.show_private_icon = str2bool(
286         rc_config.get('rhodecode_show_private_icon'))
287     context.visual.stylify_metatags = str2bool(
288         rc_config.get('rhodecode_stylify_metatags'))
289     context.visual.dashboard_items = safe_int(
290         rc_config.get('rhodecode_dashboard_items', 100))
291     context.visual.admin_grid_items = safe_int(
292         rc_config.get('rhodecode_admin_grid_items', 100))
293     context.visual.repository_fields = str2bool(
294         rc_config.get('rhodecode_repository_fields'))
295     context.visual.show_version = str2bool(
296         rc_config.get('rhodecode_show_version'))
297     context.visual.use_gravatar = str2bool(
298         rc_config.get('rhodecode_use_gravatar'))
299     context.visual.gravatar_url = rc_config.get('rhodecode_gravatar_url')
300     context.visual.default_renderer = rc_config.get(
301         'rhodecode_markup_renderer', 'rst')
302     context.visual.rhodecode_support_url = \
303         rc_config.get('rhodecode_support_url') or url('rhodecode_support')
304
305     context.pre_code = rc_config.get('rhodecode_pre_code')
306     context.post_code = rc_config.get('rhodecode_post_code')
307     context.rhodecode_name = rc_config.get('rhodecode_title')
308     context.default_encodings = aslist(config.get('default_encoding'), sep=',')
309     # if we have specified default_encoding in the request, it has more
310     # priority
311     if request.GET.get('default_encoding'):
312         context.default_encodings.insert(0, request.GET.get('default_encoding'))
313     context.clone_uri_tmpl = rc_config.get('rhodecode_clone_uri_tmpl')
314
315     # INI stored
316     context.labs_active = str2bool(
317         config.get('labs_settings_active', 'false'))
318     context.visual.allow_repo_location_change = str2bool(
319         config.get('allow_repo_location_change', True))
320     context.visual.allow_custom_hooks_settings = str2bool(
321         config.get('allow_custom_hooks_settings', True))
322     context.debug_style = str2bool(config.get('debug_style', False))
323
324     context.rhodecode_instanceid = config.get('instance_id')
325
326     # AppEnlight
327     context.appenlight_enabled = str2bool(config.get('appenlight', 'false'))
328     context.appenlight_api_public_key = config.get(
329         'appenlight.api_public_key', '')
330     context.appenlight_server_url = config.get('appenlight.server_url', '')
331
332     # JS template context
333     context.template_context = {
334         'repo_name': None,
335         'repo_type': None,
336         'repo_landing_commit': None,
337         'rhodecode_user': {
338             'username': None,
339             'email': None,
340             'notification_status': False
341         },
342         'visual': {
343             'default_renderer': None
344         },
345         'commit_data': {
346             'commit_id': None
347         },
348         'pull_request_data': {'pull_request_id': None},
349         'timeago': {
350             'refresh_time': 120 * 1000,
351             'cutoff_limit': 1000 * 60 * 60 * 24 * 7
352         },
353         'pylons_dispatch': {
354             # 'controller': request.environ['pylons.routes_dict']['controller'],
355             # 'action': request.environ['pylons.routes_dict']['action'],
356         },
357         'pyramid_dispatch': {
358
359         },
360         'extra': {'plugins': {}}
361     }
362     # END CONFIG VARS
363
364     # TODO: This dosn't work when called from pylons compatibility tween.
365     # Fix this and remove it from base controller.
366     # context.repo_name = get_repo_slug(request)  # can be empty
367
368 diffmode = 'sideside'
369 if request.GET.get('diffmode'):
370 if request.GET['diffmode'] == 'unified':
371 diffmode = 'unified'
372 elif request.session.get('diffmode'):
373 diffmode = request.session['diffmode']
374
375 context.diffmode = diffmode
376
377 if request.session.get('diffmode') != diffmode:
378 request.session['diffmode'] = diffmode
379
380     context.csrf_token = auth.get_csrf_token()
381     context.backends = rhodecode.BACKENDS.keys()
382     context.backends.sort()
383     context.unread_notifications = NotificationModel().get_unread_cnt_for_user(
384         context.rhodecode_user.user_id)
385
386     context.pyramid_request = pyramid.threadlocal.get_current_request()
387
388
389 def get_auth_user(environ):
390     ip_addr = get_ip_addr(environ)
391     # make sure that we update permissions each time we call controller
392     _auth_token = (request.GET.get('auth_token', '') or
393                    request.GET.get('api_key', ''))
394
395     if _auth_token:
396         # when using API_KEY we are sure user exists.
397         auth_user = AuthUser(api_key=_auth_token, ip_addr=ip_addr)
398         authenticated = False
399     else:
400         cookie_store = CookieStoreWrapper(session.get('rhodecode_user'))
401         try:
402             auth_user = AuthUser(user_id=cookie_store.get('user_id', None),
403                                  ip_addr=ip_addr)
404         except UserCreationError as e:
405             h.flash(e, 'error')
406             # container auth or other auth functions that create users
407             # on the fly can throw this exception signaling that there's
408             # issue with user creation, explanation should be provided
409             # in Exception itself. We then create a simple blank
410             # AuthUser
411             auth_user = AuthUser(ip_addr=ip_addr)
412
413         if password_changed(auth_user, session):
414             session.invalidate()
415             cookie_store = CookieStoreWrapper(
416                 session.get('rhodecode_user'))
417             auth_user = AuthUser(ip_addr=ip_addr)
418
419         authenticated = cookie_store.get('is_authenticated')
420
421     if not auth_user.is_authenticated and auth_user.is_user_object:
422         # user is not authenticated and not empty
423         auth_user.set_authenticated(authenticated)
424
425     return auth_user
426
427
428 class BaseController(WSGIController):
429
430     def __before__(self):
431         """
432         __before__ is called before controller methods and after __call__
433         """
434         # on each call propagate settings calls into global settings.
435         set_rhodecode_config(config)
436         attach_context_attributes(c, request)
437
438         # TODO: Remove this when fixed in attach_context_attributes()
439         c.repo_name = get_repo_slug(request)  # can be empty
440
441         self.cut_off_limit_diff = safe_int(config.get('cut_off_limit_diff'))
442         self.cut_off_limit_file = safe_int(config.get('cut_off_limit_file'))
443         self.sa = meta.Session
444         self.scm_model = ScmModel(self.sa)
445
446         default_lang = c.language
447         user_lang = c.language
448         try:
449             user_obj = self._rhodecode_user.get_instance()
450             if user_obj:
451                 user_lang = user_obj.user_data.get('language')
452         except Exception:
453             log.exception('Failed to fetch user language for user %s',
454                           self._rhodecode_user)
455
456         if user_lang and user_lang != default_lang:
457             log.debug('set language to %s for user %s', user_lang,
458                       self._rhodecode_user)
459             translation.set_lang(user_lang)
460
461     def _dispatch_redirect(self, with_url, environ, start_response):
462         resp = HTTPFound(with_url)
463         environ['SCRIPT_NAME'] = ''  # handle prefix middleware
464         environ['PATH_INFO'] = with_url
465         return resp(environ, start_response)
466
467     def __call__(self, environ, start_response):
468         """Invoke the Controller"""
469         # WSGIController.__call__ dispatches to the Controller method
470         # the request is routed to. This routing information is
471         # available in environ['pylons.routes_dict']
472         from rhodecode.lib import helpers as h
473
474         # Provide the Pylons context to Pyramid's debugtoolbar if it asks
475         if environ.get('debugtoolbar.wants_pylons_context', False):
476             environ['debugtoolbar.pylons_context'] = c._current_obj()
477
478         _route_name = '.'.join([environ['pylons.routes_dict']['controller'],
479                                 environ['pylons.routes_dict']['action']])
480
481         self.rc_config = SettingsModel().get_all_settings(cache=True)
482         self.ip_addr = get_ip_addr(environ)
483
484         # The rhodecode auth user is looked up and passed through the
485         # environ by the pylons compatibility tween in pyramid.
486         # So we can just grab it from there.
487         auth_user = environ['rc_auth_user']
488
489         # set globals for auth user
490         request.user = auth_user
491         c.rhodecode_user = self._rhodecode_user = auth_user
492
493         log.info('IP: %s User: %s accessed %s [%s]' % (
494             self.ip_addr, auth_user, safe_unicode(get_access_path(environ)),
495             _route_name)
496         )
497
498         # TODO: Maybe this should be move to pyramid to cover all views.
499         # check user attributes for password change flag
500         user_obj = auth_user.get_instance()
501         if user_obj and user_obj.user_data.get('force_password_change'):
502             h.flash('You are required to change your password', 'warning',
503                     ignore_duplicate=True)
504
505             skip_user_check_urls = [
506                 'error.document', 'login.logout', 'login.index',
507                 'admin/my_account.my_account_password',
508                 'admin/my_account.my_account_password_update'
509             ]
510             if _route_name not in skip_user_check_urls:
511                 return self._dispatch_redirect(
512                     url('my_account_password'), environ, start_response)
513
514         return WSGIController.__call__(self, environ, start_response)
515
516
517 class BaseRepoController(BaseController):
518     """
519     Base class for controllers responsible for loading all needed data for
520     repository loaded items are
521
522     c.rhodecode_repo: instance of scm repository
523     c.rhodecode_db_repo: instance of db
524     c.repository_requirements_missing: shows that repository specific data
525         could not be displayed due to the missing requirements
526     c.repository_pull_requests: show number of open pull requests
527     """
528
529     def __before__(self):
530         super(BaseRepoController, self).__before__()
531         if c.repo_name:  # extracted from routes
532             db_repo = Repository.get_by_repo_name(c.repo_name)
533             if not db_repo:
534                 return
535
536             log.debug(
537                 'Found repository in database %s with state `%s`',
538                 safe_unicode(db_repo), safe_unicode(db_repo.repo_state))
539             route = getattr(request.environ.get('routes.route'), 'name', '')
540
541             # allow to delete repos that are somehow damages in filesystem
542             if route in ['delete_repo']:
543                 return
544
545             if db_repo.repo_state in [Repository.STATE_PENDING]:
546                 if route in ['repo_creating_home']:
547                     return
548                 check_url = url('repo_creating_home', repo_name=c.repo_name)
549                 return redirect(check_url)
550
551             self.rhodecode_db_repo = db_repo
552
553             missing_requirements = False
554             try:
555                 self.rhodecode_repo = self.rhodecode_db_repo.scm_instance()
556             except RepositoryRequirementError as e:
557                 missing_requirements = True
558                 self._handle_missing_requirements(e)
559
560             if self.rhodecode_repo is None and not missing_requirements:
561                 log.error('%s this repository is present in database but it '
562                           'cannot be created as an scm instance', c.repo_name)
563
564                 h.flash(_(
565                     "The repository at %(repo_name)s cannot be located.") %
566                     {'repo_name': c.repo_name},
567                     category='error', ignore_duplicate=True)
568                 redirect(url('home'))
569
570             # update last change according to VCS data
571             if not missing_requirements:
572                 commit = db_repo.get_commit(
573                     pre_load=["author", "date", "message", "parents"])
574                 db_repo.update_commit_cache(commit)
575
576             # Prepare context
577             c.rhodecode_db_repo = db_repo
578             c.rhodecode_repo = self.rhodecode_repo
579             c.repository_requirements_missing = missing_requirements
580
581             self._update_global_counters(self.scm_model, db_repo)
582
583     def _update_global_counters(self, scm_model, db_repo):
584         """
585         Base variables that are exposed to every page of repository
586         """
587         c.repository_pull_requests = scm_model.get_pull_requests(db_repo)
588
589     def _handle_missing_requirements(self, error):
590         self.rhodecode_repo = None
591         log.error(
592             'Requirements are missing for repository %s: %s',
593             c.repo_name, error.message)
594
595         summary_url = url('summary_home', repo_name=c.repo_name)
596         statistics_url = url('edit_repo_statistics', repo_name=c.repo_name)
597         settings_update_url = url('repo', repo_name=c.repo_name)
598         path = request.path
599         should_redirect = (
600             path not in (summary_url, settings_update_url)
601             and '/settings' not in path or path == statistics_url
602         )
603         if should_redirect:
604             redirect(summary_url)
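Since the mode now lives on the global template context (c.diffmode) and in the session, any view or template can branch on it without passing a parameter around. A hedged sketch of such a consumer follows; render_unified and render_sideside are hypothetical placeholders, not functions from this commit.

# Hypothetical consumer of the new global: choose a renderer from c.diffmode.
# render_unified / render_sideside are placeholders, not RhodeCode functions.


def render_unified(diff_text):
    return '<pre class="diff-unified">%s</pre>' % diff_text


def render_sideside(diff_text):
    return '<table class="diff-sideside"><!-- %s --></table>' % diff_text


def render_diff(diff_text, diffmode='sideside'):
    # in a real view, diffmode would come from c.diffmode
    if diffmode == 'unified':
        return render_unified(diff_text)
    return render_sideside(diff_text)


assert render_diff('@@ -1 +1 @@', 'unified').startswith('<pre')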
@@ -1,1995 +1,2004 @@
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
21 """
22 Helper functions
23
24 Consists of functions to typically be used within templates, but also
25 available to Controllers. This module is available to both as 'h'.
26 """
27
28 import random
29 import hashlib
30 import StringIO
31 import urllib
32 import math
33 import logging
34 import re
35 import urlparse
36 import time
37 import string
38 import hashlib
39 import pygments
40
41 from datetime import datetime
42 from functools import partial
43 from pygments.formatters.html import HtmlFormatter
44 from pygments import highlight as code_highlight
45 from pygments.lexers import (
46     get_lexer_by_name, get_lexer_for_filename, get_lexer_for_mimetype)
47 from pylons import url as pylons_url
48 from pylons.i18n.translation import _, ungettext
49 from pyramid.threadlocal import get_current_request
50
51 from webhelpers.html import literal, HTML, escape
52 from webhelpers.html.tools import *
53 from webhelpers.html.builder import make_tag
54 from webhelpers.html.tags import auto_discovery_link, checkbox, css_classes, \
55     end_form, file, form as wh_form, hidden, image, javascript_link, link_to, \
56     link_to_if, link_to_unless, ol, required_legend, select, stylesheet_link, \
57     submit, text, password, textarea, title, ul, xml_declaration, radio
58 from webhelpers.html.tools import auto_link, button_to, highlight, \
59     js_obfuscate, mail_to, strip_links, strip_tags, tag_re
60 from webhelpers.pylonslib import Flash as _Flash
61 from webhelpers.text import chop_at, collapse, convert_accented_entities, \
62     convert_misc_entities, lchop, plural, rchop, remove_formatting, \
63     replace_whitespace, urlify, truncate, wrap_paragraphs
64 from webhelpers.date import time_ago_in_words
65 from webhelpers.paginate import Page as _Page
66 from webhelpers.html.tags import _set_input_attrs, _set_id_attr, \
67     convert_boolean_attrs, NotGiven, _make_safe_id_component
68 from webhelpers2.number import format_byte_size
69
70 from rhodecode.lib.action_parser import action_parser
71 from rhodecode.lib.ext_json import json
72 from rhodecode.lib.utils import repo_name_slug, get_custom_lexer
73 from rhodecode.lib.utils2 import str2bool, safe_unicode, safe_str, \
74     get_commit_safe, datetime_to_time, time_to_datetime, time_to_utcdatetime, \
75     AttributeDict, safe_int, md5, md5_safe
76 from rhodecode.lib.markup_renderer import MarkupRenderer
77 from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError
78 from rhodecode.lib.vcs.backends.base import BaseChangeset, EmptyCommit
79 from rhodecode.config.conf import DATE_FORMAT, DATETIME_FORMAT
80 from rhodecode.model.changeset_status import ChangesetStatusModel
81 from rhodecode.model.db import Permission, User, Repository
82 from rhodecode.model.repo_group import RepoGroupModel
83 from rhodecode.model.settings import IssueTrackerSettingsModel
84
85 log = logging.getLogger(__name__)
86
87
88 DEFAULT_USER = User.DEFAULT_USER
89 DEFAULT_USER_EMAIL = User.DEFAULT_USER_EMAIL
90
91
92 def url(*args, **kw):
93     return pylons_url(*args, **kw)
94
95
96 def pylons_url_current(*args, **kw):
97     """
98     This function overrides pylons.url.current() which returns the current
99     path so that it will also work from a pyramid only context. This
100     should be removed once port to pyramid is complete.
101     """
102     if not args and not kw:
103         request = get_current_request()
104         return request.path
105     return pylons_url.current(*args, **kw)
106
107 url.current = pylons_url_current
108
109
110 def url_replace(**qargs):
111 """ Returns the current request url while replacing query string args """
112
113 request = get_current_request()
114 new_args = request.GET.mixed()
115 new_args.update(qargs)
116 return url('', **new_args)
117
118
110 def asset(path, ver=None):
119 def asset(path, ver=None):
111 """
120 """
112 Helper to generate a static asset file path for rhodecode assets
121 Helper to generate a static asset file path for rhodecode assets
113
122
114 eg. h.asset('images/image.png', ver='3923')
123 eg. h.asset('images/image.png', ver='3923')
115
124
116 :param path: path of asset
125 :param path: path of asset
117 :param ver: optional version query param to append as ?ver=
126 :param ver: optional version query param to append as ?ver=
118 """
127 """
119 request = get_current_request()
128 request = get_current_request()
120 query = {}
129 query = {}
121 if ver:
130 if ver:
122 query = {'ver': ver}
131 query = {'ver': ver}
123 return request.static_path(
132 return request.static_path(
124 'rhodecode:public/{}'.format(path), _query=query)
133 'rhodecode:public/{}'.format(path), _query=query)
125
134
126
135
127 default_html_escape_table = {
136 default_html_escape_table = {
128 ord('&'): u'&amp;',
137 ord('&'): u'&amp;',
129 ord('<'): u'&lt;',
138 ord('<'): u'&lt;',
130 ord('>'): u'&gt;',
139 ord('>'): u'&gt;',
131 ord('"'): u'&quot;',
140 ord('"'): u'&quot;',
132 ord("'"): u'&#39;',
141 ord("'"): u'&#39;',
133 }
142 }
134
143
135
144
136 def html_escape(text, html_escape_table=default_html_escape_table):
145 def html_escape(text, html_escape_table=default_html_escape_table):
137 """Produce entities within text."""
146 """Produce entities within text."""
138 return text.translate(html_escape_table)
147 return text.translate(html_escape_table)
139
148
140
149
141 def chop_at_smart(s, sub, inclusive=False, suffix_if_chopped=None):
150 def chop_at_smart(s, sub, inclusive=False, suffix_if_chopped=None):
142 """
151 """
143 Truncate string ``s`` at the first occurrence of ``sub``.
152 Truncate string ``s`` at the first occurrence of ``sub``.
144
153
145 If ``inclusive`` is true, truncate just after ``sub`` rather than at it.
154 If ``inclusive`` is true, truncate just after ``sub`` rather than at it.
146 """
155 """
147 suffix_if_chopped = suffix_if_chopped or ''
156 suffix_if_chopped = suffix_if_chopped or ''
148 pos = s.find(sub)
157 pos = s.find(sub)
149 if pos == -1:
158 if pos == -1:
150 return s
159 return s
151
160
152 if inclusive:
161 if inclusive:
153 pos += len(sub)
162 pos += len(sub)
154
163
155 chopped = s[:pos]
164 chopped = s[:pos]
156 left = s[pos:].strip()
165 left = s[pos:].strip()
157
166
158 if left and suffix_if_chopped:
167 if left and suffix_if_chopped:
159 chopped += suffix_if_chopped
168 chopped += suffix_if_chopped
160
169
161 return chopped
170 return chopped
162
171
163
172
164 def shorter(text, size=20):
173 def shorter(text, size=20):
165 postfix = '...'
174 postfix = '...'
166 if len(text) > size:
175 if len(text) > size:
167 return text[:size - len(postfix)] + postfix
176 return text[:size - len(postfix)] + postfix
168 return text
177 return text
169
178
170
179
171 def _reset(name, value=None, id=NotGiven, type="reset", **attrs):
180 def _reset(name, value=None, id=NotGiven, type="reset", **attrs):
172 """
181 """
173 Reset button
182 Reset button
174 """
183 """
175 _set_input_attrs(attrs, type, name, value)
184 _set_input_attrs(attrs, type, name, value)
176 _set_id_attr(attrs, id, name)
185 _set_id_attr(attrs, id, name)
177 convert_boolean_attrs(attrs, ["disabled"])
186 convert_boolean_attrs(attrs, ["disabled"])
178 return HTML.input(**attrs)
187 return HTML.input(**attrs)
179
188
180 reset = _reset
189 reset = _reset
181 safeid = _make_safe_id_component
190 safeid = _make_safe_id_component
182
191
183
192
184 def branding(name, length=40):
193 def branding(name, length=40):
185 return truncate(name, length, indicator="")
194 return truncate(name, length, indicator="")
186
195
187
196
188 def FID(raw_id, path):
197 def FID(raw_id, path):
189 """
198 """
190 Creates a unique ID for filenode based on it's hash of path and commit
199 Creates a unique ID for filenode based on it's hash of path and commit
191 it's safe to use in urls
200 it's safe to use in urls
192
201
193 :param raw_id:
202 :param raw_id:
194 :param path:
203 :param path:
195 """
204 """
196
205
197 return 'c-%s-%s' % (short_id(raw_id), md5_safe(path)[:12])
206 return 'c-%s-%s' % (short_id(raw_id), md5_safe(path)[:12])
198
207
199
208
200 class _GetError(object):
209 class _GetError(object):
201 """Get error from form_errors, and represent it as span wrapped error
210 """Get error from form_errors, and represent it as span wrapped error
202 message
211 message
203
212
204 :param field_name: field to fetch errors for
213 :param field_name: field to fetch errors for
205 :param form_errors: form errors dict
214 :param form_errors: form errors dict
206 """
215 """
207
216
208 def __call__(self, field_name, form_errors):
217 def __call__(self, field_name, form_errors):
209 tmpl = """<span class="error_msg">%s</span>"""
218 tmpl = """<span class="error_msg">%s</span>"""
210 if form_errors and field_name in form_errors:
219 if form_errors and field_name in form_errors:
211 return literal(tmpl % form_errors.get(field_name))
220 return literal(tmpl % form_errors.get(field_name))
212
221
213 get_error = _GetError()
222 get_error = _GetError()
214
223
215
224
216 class _ToolTip(object):
225 class _ToolTip(object):
217
226
218 def __call__(self, tooltip_title, trim_at=50):
227 def __call__(self, tooltip_title, trim_at=50):
219 """
228 """
220 Special function just to wrap our text into nice formatted
229 Special function just to wrap our text into nice formatted
221 autowrapped text
230 autowrapped text
222
231
223 :param tooltip_title:
232 :param tooltip_title:
224 """
233 """
225 tooltip_title = escape(tooltip_title)
234 tooltip_title = escape(tooltip_title)
226 tooltip_title = tooltip_title.replace('<', '&lt;').replace('>', '&gt;')
235 tooltip_title = tooltip_title.replace('<', '&lt;').replace('>', '&gt;')
227 return tooltip_title
236 return tooltip_title
228 tooltip = _ToolTip()
237 tooltip = _ToolTip()
229
238
230
239
def files_breadcrumbs(repo_name, commit_id, file_path):
    if isinstance(file_path, str):
        file_path = safe_unicode(file_path)

    # TODO: johbo: Is this always a url like path, or is this operating
    # system dependent?
    path_segments = file_path.split('/')

    repo_name_html = escape(repo_name)
    if len(path_segments) == 1 and path_segments[0] == '':
        url_segments = [repo_name_html]
    else:
        url_segments = [
            link_to(
                repo_name_html,
                url('files_home',
                    repo_name=repo_name,
                    revision=commit_id,
                    f_path=''),
                class_='pjax-link')]

    last_cnt = len(path_segments) - 1
    for cnt, segment in enumerate(path_segments):
        if not segment:
            continue
        segment_html = escape(segment)

        if cnt != last_cnt:
            url_segments.append(
                link_to(
                    segment_html,
                    url('files_home',
                        repo_name=repo_name,
                        revision=commit_id,
                        f_path='/'.join(path_segments[:cnt + 1])),
                    class_='pjax-link'))
        else:
            url_segments.append(segment_html)

    return literal('/'.join(url_segments))


class CodeHtmlFormatter(HtmlFormatter):
    """
    Custom HTML formatter for source code
    """

    def wrap(self, source, outfile):
        return self._wrap_div(self._wrap_pre(self._wrap_code(source)))

    def _wrap_code(self, source):
        for cnt, it in enumerate(source):
            i, t = it
            t = '<div id="L%s">%s</div>' % (cnt + 1, t)
            yield i, t

    def _wrap_tablelinenos(self, inner):
        dummyoutfile = StringIO.StringIO()
        lncount = 0
        for t, line in inner:
            if t:
                lncount += 1
            dummyoutfile.write(line)

        fl = self.linenostart
        mw = len(str(lncount + fl - 1))
        sp = self.linenospecial
        st = self.linenostep
        la = self.lineanchors
        aln = self.anchorlinenos
        nocls = self.noclasses
        if sp:
            lines = []

            for i in range(fl, fl + lncount):
                if i % st == 0:
                    if i % sp == 0:
                        if aln:
                            lines.append('<a href="#%s%d" class="special">%*d</a>' %
                                         (la, i, mw, i))
                        else:
                            lines.append('<span class="special">%*d</span>' % (mw, i))
                    else:
                        if aln:
                            lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
                        else:
                            lines.append('%*d' % (mw, i))
                else:
                    lines.append('')
            ls = '\n'.join(lines)
        else:
            lines = []
            for i in range(fl, fl + lncount):
                if i % st == 0:
                    if aln:
                        lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
                    else:
                        lines.append('%*d' % (mw, i))
                else:
                    lines.append('')
            ls = '\n'.join(lines)

        # in case you wonder about the seemingly redundant <div> here: since the
        # content in the other cell also is wrapped in a div, some browsers in
        # some configurations seem to mess up the formatting...
        if nocls:
            yield 0, ('<table class="%stable">' % self.cssclass +
                      '<tr><td><div class="linenodiv" '
                      'style="background-color: #f0f0f0; padding-right: 10px">'
                      '<pre style="line-height: 125%">' +
                      ls + '</pre></div></td><td id="hlcode" class="code">')
        else:
            yield 0, ('<table class="%stable">' % self.cssclass +
                      '<tr><td class="linenos"><div class="linenodiv"><pre>' +
                      ls + '</pre></div></td><td id="hlcode" class="code">')
        yield 0, dummyoutfile.getvalue()
        yield 0, '</td></tr></table>'


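# Usage sketch for CodeHtmlFormatter (illustrative only): the formatter is
# driven by pygments' ``highlight`` entry point, which calls the ``wrap`` /
# ``_wrap_tablelinenos`` hooks above.
#
#     from pygments import highlight
#     from pygments.lexers import PythonLexer
#     html = highlight("print 'hello'", PythonLexer(),
#                      CodeHtmlFormatter(linenos='table', lineanchors='L'))
#
# ``linenos='table'`` selects the two-column <table> layout and
# ``lineanchors`` sets the prefix of the per-line anchors in the markup.
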
class SearchContentCodeHtmlFormatter(CodeHtmlFormatter):
    def __init__(self, **kw):
        # only show these line numbers if set
        self.only_lines = kw.pop('only_line_numbers', [])
        self.query_terms = kw.pop('query_terms', [])
        self.max_lines = kw.pop('max_lines', 5)
        self.line_context = kw.pop('line_context', 3)
        self.url = kw.pop('url', None)

        super(CodeHtmlFormatter, self).__init__(**kw)

    def _wrap_code(self, source):
        for cnt, it in enumerate(source):
            i, t = it
            t = '<pre>%s</pre>' % t
            yield i, t

    def _wrap_tablelinenos(self, inner):
        yield 0, '<table class="code-highlight %stable">' % self.cssclass

        last_shown_line_number = 0
        current_line_number = 1

        for t, line in inner:
            if not t:
                yield t, line
                continue

            if current_line_number in self.only_lines:
                if last_shown_line_number + 1 != current_line_number:
                    yield 0, '<tr>'
                    yield 0, '<td class="line">...</td>'
                    yield 0, '<td id="hlcode" class="code"></td>'
                    yield 0, '</tr>'

                yield 0, '<tr>'
                if self.url:
                    yield 0, '<td class="line"><a href="%s#L%i">%i</a></td>' % (
                        self.url, current_line_number, current_line_number)
                else:
                    yield 0, '<td class="line"><a href="">%i</a></td>' % (
                        current_line_number)
                yield 0, '<td id="hlcode" class="code">' + line + '</td>'
                yield 0, '</tr>'

                last_shown_line_number = current_line_number

            current_line_number += 1

        yield 0, '</table>'


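# Usage sketch for SearchContentCodeHtmlFormatter (illustrative; the url and
# ``file_content`` below are stand-ins): render only the matched lines of a
# search hit, with a "..." row marking each gap between non-adjacent matches.
#
#     formatter = SearchContentCodeHtmlFormatter(
#         linenos='table', only_line_numbers=[3, 10],
#         url='/myrepo/files/tip/setup.py')
#     html = highlight(file_content, PythonLexer(), formatter)
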
def extract_phrases(text_query):
    """
    Extracts phrases from a search term string, making sure phrases
    contained in double quotes are kept together, and discarding empty
    or whitespace-only values, eg.

    'some text "a phrase" more' => ['some', 'text', 'a phrase', 'more']

    """

    in_phrase = False
    buf = ''
    phrases = []
    for char in text_query:
        if in_phrase:
            if char == '"':  # end phrase
                phrases.append(buf)
                buf = ''
                in_phrase = False
                continue
            else:
                buf += char
                continue
        else:
            if char == '"':  # start phrase
                in_phrase = True
                phrases.append(buf)
                buf = ''
                continue
            elif char == ' ':
                phrases.append(buf)
                buf = ''
                continue
            else:
                buf += char

    phrases.append(buf)
    phrases = [phrase.strip() for phrase in phrases if phrase.strip()]
    return phrases


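# Example for extract_phrases (doctest-style sketch): quoted phrases survive
# intact and runs of whitespace produce no empty entries.
#
#     >>> extract_phrases('grep "exact phrase"   loose')
#     ['grep', 'exact phrase', 'loose']
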
def get_matching_offsets(text, phrases):
    """
    Returns a list of string offsets in `text` that the list of `phrases` match

    >>> get_matching_offsets('some text here', ['some', 'here'])
    [(0, 4), (10, 14)]

    """
    offsets = []
    for phrase in phrases:
        for match in re.finditer(phrase, text):
            offsets.append((match.start(), match.end()))

    return offsets


def normalize_text_for_matching(x):
    """
    Replaces all non-alphanumeric characters with spaces and lowercases the
    string, useful for comparing two text strings without punctuation
    """
    return re.sub(r'[^\w]', ' ', x.lower())


def get_matching_line_offsets(lines, terms):
    """ Return a dict of `lines` indices (starting from 1) that match a
    text search query, mapped to the matched character offsets in each line

    :param lines: list of strings representing lines
    :param terms: search term string to match in lines eg. 'some text'
    eg.

    text = '''
    words words words
    words words words
    some text some
    words words words
    words words words
    text here what
    '''
    get_matching_line_offsets(text.strip().splitlines(), 'text')
    {3: [(5, 9)], 6: [(0, 4)]}

    """
    matching_lines = {}
    phrases = [normalize_text_for_matching(phrase)
               for phrase in extract_phrases(terms)]

    for line_index, line in enumerate(lines, start=1):
        match_offsets = get_matching_offsets(
            normalize_text_for_matching(line), phrases)
        if match_offsets:
            matching_lines[line_index] = match_offsets

    return matching_lines


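# Worked example (sketch) tying the three helpers above together: the term is
# normalized, each line is normalized, and the regex offsets of every phrase
# hit are collected per 1-based line index.
#
#     >>> get_matching_line_offsets(
#     ...     ['first line', 'some text here', 'last line'], 'text')
#     {2: [(5, 9)]}
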
def hsv_to_rgb(h, s, v):
    """ Convert hsv color values to rgb """

    if s == 0.0:
        return v, v, v
    i = int(h * 6.0)  # XXX assume int() truncates!
    f = (h * 6.0) - i
    p = v * (1.0 - s)
    q = v * (1.0 - s * f)
    t = v * (1.0 - s * (1.0 - f))
    i = i % 6
    if i == 0:
        return v, t, p
    if i == 1:
        return q, v, p
    if i == 2:
        return p, v, t
    if i == 3:
        return p, q, v
    if i == 4:
        return t, p, v
    if i == 5:
        return v, p, q


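# Quick check (sketch): primary hues come back as the expected unit RGB
# triples.
#
#     >>> hsv_to_rgb(0.0, 1.0, 1.0)   # pure red
#     (1.0, 0.0, 0.0)
#     >>> hsv_to_rgb(0.0, 0.0, 0.5)   # no saturation -> grey
#     (0.5, 0.5, 0.5)
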
def unique_color_generator(n=10000, saturation=0.10, lightness=0.95):
    """
    Generator for getting n evenly distributed colors using
    hsv color and the golden ratio. It always returns the same order of colors

    :param n: number of colors to generate
    :param saturation: saturation of returned colors
    :param lightness: lightness of returned colors
    :returns: RGB tuple
    """

    golden_ratio = 0.618033988749895
    h = 0.22717784590367374

    for _ in xrange(n):
        h += golden_ratio
        h %= 1
        HSV_tuple = [h, saturation, lightness]
        RGB_tuple = hsv_to_rgb(*HSV_tuple)
        yield map(lambda x: str(int(x * 256)), RGB_tuple)


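# Sketch: the generator yields colors as lists of three stringified 0-255
# channel values, always in the same order (the numbers shown below are
# illustrative, hence the skip).
#
#     >>> gen = unique_color_generator()
#     >>> gen.next()  # doctest: +SKIP
#     ['243', '218', '241']
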
def color_hasher(n=10000, saturation=0.10, lightness=0.95):
    """
    Returns a function which when called with an argument returns a unique
    color for that argument, eg.

    :param n: number of colors to generate
    :param saturation: saturation of returned colors
    :param lightness: lightness of returned colors
    :returns: css RGB string

    >>> color_hash = color_hasher()
    >>> color_hash('hello')
    'rgb(34, 12, 59)'
    >>> color_hash('hello')
    'rgb(34, 12, 59)'
    >>> color_hash('other')
    'rgb(90, 224, 159)'
    """

    color_dict = {}
    cgenerator = unique_color_generator(
        saturation=saturation, lightness=lightness)

    def get_color_string(thing):
        if thing in color_dict:
            col = color_dict[thing]
        else:
            col = color_dict[thing] = cgenerator.next()
        return "rgb(%s)" % (', '.join(col))

    return get_color_string


def get_lexer_safe(mimetype=None, filepath=None):
    """
    Tries to return a relevant pygments lexer using mimetype/filepath name,
    defaulting to plain text if none could be found
    """
    lexer = None
    try:
        if mimetype:
            lexer = get_lexer_for_mimetype(mimetype)
        if not lexer:
            lexer = get_lexer_for_filename(filepath)
    except pygments.util.ClassNotFound:
        pass

    if not lexer:
        lexer = get_lexer_by_name('text')

    return lexer


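# Sketch of the fallback order in get_lexer_safe: a mimetype is tried first
# when given, otherwise the filename, and anything unknown degrades to the
# plain text lexer instead of raising.
#
#     >>> get_lexer_safe(mimetype='text/x-python').name
#     'Python'
#     >>> get_lexer_safe(filepath='setup.cfg').name
#     'INI'
#     >>> get_lexer_safe(filepath='notes.unknown-ext').name
#     'Text only'
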
def get_lexer_for_filenode(filenode):
    lexer = get_custom_lexer(filenode.extension) or filenode.lexer
    return lexer


def pygmentize(filenode, **kwargs):
    """
    Highlights the content of the given filenode using pygments

    :param filenode:
    """
    lexer = get_lexer_for_filenode(filenode)
    return literal(code_highlight(filenode.content, lexer,
                                  CodeHtmlFormatter(**kwargs)))


def is_following_repo(repo_name, user_id):
    from rhodecode.model.scm import ScmModel
    return ScmModel().is_following_repo(repo_name, user_id)


class _Message(object):
    """A message returned by ``Flash.pop_messages()``.

    Converting the message to a string returns the message text. Instances
    also have the following attributes:

    * ``message``: the message text.
    * ``category``: the category specified when the message was created.
    """

    def __init__(self, category, message):
        self.category = category
        self.message = message

    def __str__(self):
        return self.message

    __unicode__ = __str__

    def __html__(self):
        return escape(safe_unicode(self.message))


class Flash(_Flash):

    def pop_messages(self):
        """Return all accumulated messages and delete them from the session.

        The return value is a list of ``Message`` objects.
        """
        from pylons import session

        messages = []

        # Pop the 'old' pylons flash messages. They are tuples of the form
        # (category, message)
        for cat, msg in session.pop(self.session_key, []):
            messages.append(_Message(cat, msg))

        # Pop the 'new' pyramid flash messages for each category as list
        # of strings.
        for cat in self.categories:
            for msg in session.pop_flash(queue=cat):
                messages.append(_Message(cat, msg))
        # Map messages from the default queue to the 'notice' category.
        for msg in session.pop_flash():
            messages.append(_Message('notice', msg))

        session.save()
        return messages

    def json_alerts(self):
        payloads = []
        messages = flash.pop_messages()
        if messages:
            for message in messages:
                subdata = {}
                if hasattr(message.message, 'rsplit'):
                    flash_data = message.message.rsplit('|DELIM|', 1)
                    org_message = flash_data[0]
                    if len(flash_data) > 1:
                        subdata = json.loads(flash_data[1])
                else:
                    org_message = message.message
                payloads.append({
                    'message': {
                        'message': u'{}'.format(org_message),
                        'level': message.category,
                        'force': True,
                        'subdata': subdata
                    }
                })
        return json.dumps(payloads)

flash = Flash()

#==============================================================================
# SCM FILTERS available via h.
#==============================================================================
from rhodecode.lib.vcs.utils import author_name, author_email
from rhodecode.lib.utils2 import credentials_filter, age as _age
from rhodecode.model.db import User, ChangesetStatus

age = _age
capitalize = lambda x: x.capitalize()
email = author_email
short_id = lambda x: x[:12]
hide_credentials = lambda x: ''.join(credentials_filter(x))


def age_component(datetime_iso, value=None, time_is_local=False):
    title = value or format_date(datetime_iso)
    # default to UTC so the generated datetime attribute is always defined
    tzinfo = '+00:00'

    # detect if we have a timezone info, otherwise, add it
    if isinstance(datetime_iso, datetime) and not datetime_iso.tzinfo:
        if time_is_local:
            tzinfo = time.strftime("+%H:%M",
                time.gmtime(
                    (datetime.now() - datetime.utcnow()).seconds + 1
                )
            )

    return literal(
        '<time class="timeago tooltip" '
        'title="{1}" datetime="{0}{2}">{1}</time>'.format(
            datetime_iso, title, tzinfo))


def _shorten_commit_id(commit_id):
    from rhodecode import CONFIG
    def_len = safe_int(CONFIG.get('rhodecode_show_sha_length', 12))
    return commit_id[:def_len]


def show_id(commit):
    """
    Configurable function that shows the commit ID,
    by default in the form r123:fffeeefffeee

    :param commit: commit instance
    """
    from rhodecode import CONFIG
    show_idx = str2bool(CONFIG.get('rhodecode_show_revision_number', True))

    raw_id = _shorten_commit_id(commit.raw_id)
    if show_idx:
        return 'r%s:%s' % (commit.idx, raw_id)
    else:
        return '%s' % (raw_id, )


def format_date(date):
    """
    use a standardized formatting for dates used in RhodeCode

    :param date: date/datetime object
    :return: formatted date
    """

    if date:
        _fmt = "%a, %d %b %Y %H:%M:%S"
        return safe_unicode(date.strftime(_fmt))

    return u""


class _RepoChecker(object):

    def __init__(self, backend_alias):
        self._backend_alias = backend_alias

    def __call__(self, repository):
        if hasattr(repository, 'alias'):
            _type = repository.alias
        elif hasattr(repository, 'repo_type'):
            _type = repository.repo_type
        else:
            _type = repository
        return _type == self._backend_alias

is_git = _RepoChecker('git')
is_hg = _RepoChecker('hg')
is_svn = _RepoChecker('svn')


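# Sketch: the checkers accept a Repository db object, a vcs backend instance,
# or a plain alias string, so they work in templates and plain code alike.
#
#     >>> is_git('git'), is_hg('git'), is_svn('svn')
#     (True, False, True)
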
def get_repo_type_by_name(repo_name):
    repo = Repository.get_by_repo_name(repo_name)
    return repo.repo_type


def is_svn_without_proxy(repository):
    if is_svn(repository):
        from rhodecode.model.settings import VcsSettingsModel
        conf = VcsSettingsModel().get_ui_settings_as_config_obj()
        return not str2bool(conf.get('vcs_svn_proxy', 'http_requests_enabled'))
    return False


def discover_user(author):
    """
    Tries to discover a RhodeCode User based on the author string. The author
    string is typically `FirstName LastName <email@address.com>`
    """

    # if author is already an instance use it for extraction
    if isinstance(author, User):
        return author

    # Valid email in the attribute passed, see if they're in the system
    _email = author_email(author)
    if _email != '':
        user = User.get_by_email(_email, case_insensitive=True, cache=True)
        if user is not None:
            return user

    # Maybe it's a username, we try to extract it and fetch by username ?
    _author = author_name(author)
    user = User.get_by_username(_author, case_insensitive=True, cache=True)
    if user is not None:
        return user

    return None


def email_or_none(author):
    # extract email from the commit string
    _email = author_email(author)

    # If we have an email, use it, otherwise
    # see if it contains a username we can get an email from
    if _email != '':
        return _email
    else:
        user = User.get_by_username(
            author_name(author), case_insensitive=True, cache=True)

        if user is not None:
            return user.email

    # No valid email, not a valid user in the system, none!
    return None


def link_to_user(author, length=0, **kwargs):
    user = discover_user(author)
    # user can be None, but if we have it already it means we can re-use it
    # in the person() function, so we save one intensive query
    if user:
        author = user

    display_person = person(author, 'username_or_name_or_email')
    if length:
        display_person = shorter(display_person, length)

    if user:
        return link_to(
            escape(display_person),
            url('user_profile', username=user.username),
            **kwargs)
    else:
        return escape(display_person)


def person(author, show_attr="username_and_name"):
    user = discover_user(author)
    if user:
        return getattr(user, show_attr)
    else:
        _author = author_name(author)
        _email = email(author)
        return _author or _email


def author_string(email):
    if email:
        user = User.get_by_email(email, case_insensitive=True, cache=True)
        if user:
            if user.firstname or user.lastname:
                return '%s %s &lt;%s&gt;' % (user.firstname, user.lastname, email)
            else:
                return email
        else:
            return email
    else:
        return None


def person_by_id(id_, show_attr="username_and_name"):
    # attr to return from fetched user
    person_getter = lambda usr: getattr(usr, show_attr)

    # maybe it's an ID?
    if str(id_).isdigit() or isinstance(id_, int):
        id_ = int(id_)
        user = User.get(id_)
        if user is not None:
            return person_getter(user)
    return id_


def gravatar_with_user(author, show_disabled=False):
    from rhodecode.lib.utils import PartialRenderer
    _render = PartialRenderer('base/base.html')
    return _render('gravatar_with_user', author, show_disabled=show_disabled)


def desc_stylize(value):
    """
    converts tags from the value into their html equivalent

    :param value:
    """
    if not value:
        return ''

    value = re.sub(r'\[see\ \=\>\ *([a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\]',
                   '<div class="metatag" tag="see">see =&gt; \\1 </div>', value)
    value = re.sub(r'\[license\ \=\>\ *([a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\]',
                   '<div class="metatag" tag="license"><a href="http://www.opensource.org/licenses/\\1">\\1</a></div>', value)
    value = re.sub(r'\[(requires|recommends|conflicts|base)\ \=\>\ *([a-zA-Z0-9\-\/]*)\]',
                   '<div class="metatag" tag="\\1">\\1 =&gt; <a href="/\\2">\\2</a></div>', value)
    value = re.sub(r'\[(lang|language)\ \=\>\ *([a-zA-Z\-\/\#\+]*)\]',
                   '<div class="metatag" tag="lang">\\2</div>', value)
    value = re.sub(r'\[([a-z]+)\]',
                   '<div class="metatag" tag="\\1">\\1</div>', value)

    return value


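# Example for desc_stylize (sketch): recognized [tag] markers in a repository
# description become metatag <div> elements, everything else is left alone.
#
#     >>> desc_stylize('[lang => python] [featured]')
#     '<div class="metatag" tag="lang">python</div> <div class="metatag" tag="featured">featured</div>'
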
def escaped_stylize(value):
    """
    converts tags from the value into their html equivalent, but escapes the
    value first
    """
    if not value:
        return ''

    # Using default webhelper escape method, but has to force it as a
    # plain unicode instead of a markup tag to be used in regex expressions
    value = unicode(escape(safe_unicode(value)))

    value = re.sub(r'\[see\ \=\&gt;\ *([a-zA-Z0-9\/\=\?\&amp;\ \:\/\.\-]*)\]',
                   '<div class="metatag" tag="see">see =&gt; \\1 </div>', value)
    value = re.sub(r'\[license\ \=\&gt;\ *([a-zA-Z0-9\/\=\?\&amp;\ \:\/\.\-]*)\]',
                   '<div class="metatag" tag="license"><a href="http://www.opensource.org/licenses/\\1">\\1</a></div>', value)
    value = re.sub(r'\[(requires|recommends|conflicts|base)\ \=\&gt;\ *([a-zA-Z0-9\-\/]*)\]',
                   '<div class="metatag" tag="\\1">\\1 =&gt; <a href="/\\2">\\2</a></div>', value)
    value = re.sub(r'\[(lang|language)\ \=\&gt;\ *([a-zA-Z\-\/\#\+]*)\]',
                   '<div class="metatag" tag="lang">\\2</div>', value)
    value = re.sub(r'\[([a-z]+)\]',
                   '<div class="metatag" tag="\\1">\\1</div>', value)

    return value


def bool2icon(value):
    """
    Returns an HTML element representing the boolean value of the given
    input, using CSS classes that render as icons

    :param value: given value to convert to an html node
    """

    if value:  # does bool conversion
        return HTML.tag('i', class_="icon-true")
    else:  # not true as bool
        return HTML.tag('i', class_="icon-false")


#==============================================================================
# PERMS
#==============================================================================
from rhodecode.lib.auth import HasPermissionAny, HasPermissionAll, \
    HasRepoPermissionAny, HasRepoPermissionAll, HasRepoGroupPermissionAll, \
    HasRepoGroupPermissionAny, HasRepoPermissionAnyApi, get_csrf_token, \
    csrf_token_key


#==============================================================================
# GRAVATAR URL
#==============================================================================
class InitialsGravatar(object):
    def __init__(self, email_address, first_name, last_name, size=30,
                 background=None, text_color='#fff'):
        self.size = size
        self.first_name = first_name
        self.last_name = last_name
        self.email_address = email_address
        self.background = background or self.str2color(email_address)
        self.text_color = text_color

    def get_color_bank(self):
        """
        returns a predefined list of colors that gravatars can use.
        Those are randomized distinct colors that guarantee readability and
        uniqueness.

        generated with: http://phrogz.net/css/distinct-colors.html
        """
        return [
            '#bf3030', '#a67f53', '#00ff00', '#5989b3', '#392040', '#d90000',
            '#402910', '#204020', '#79baf2', '#a700b3', '#bf6060', '#7f5320',
            '#008000', '#003059', '#ee00ff', '#ff0000', '#8c4b00', '#007300',
            '#005fb3', '#de73e6', '#ff4040', '#ffaa00', '#3df255', '#203140',
            '#47004d', '#591616', '#664400', '#59b365', '#0d2133', '#83008c',
            '#592d2d', '#bf9f60', '#73e682', '#1d3f73', '#73006b', '#402020',
            '#b2862d', '#397341', '#597db3', '#e600d6', '#a60000', '#736039',
            '#00b318', '#79aaf2', '#330d30', '#ff8080', '#403010', '#16591f',
            '#002459', '#8c4688', '#e50000', '#ffbf40', '#00732e', '#102340',
            '#bf60ac', '#8c4646', '#cc8800', '#00a642', '#1d3473', '#b32d98',
            '#660e00', '#ffd580', '#80ffb2', '#7391e6', '#733967', '#d97b6c',
            '#8c5e00', '#59b389', '#3967e6', '#590047', '#73281d', '#665200',
            '#00e67a', '#2d50b3', '#8c2377', '#734139', '#b2982d', '#16593a',
            '#001859', '#ff00aa', '#a65e53', '#ffcc00', '#0d3321', '#2d3959',
            '#731d56', '#401610', '#4c3d00', '#468c6c', '#002ca6', '#d936a3',
            '#d94c36', '#403920', '#36d9a3', '#0d1733', '#592d4a', '#993626',
            '#cca300', '#00734d', '#46598c', '#8c005e', '#7f1100', '#8c7000',
            '#00a66f', '#7382e6', '#b32d74', '#d9896c', '#ffe680', '#1d7362',
            '#364cd9', '#73003d', '#d93a00', '#998a4d', '#59b3a1', '#5965b3',
            '#e5007a', '#73341d', '#665f00', '#00b38f', '#0018b3', '#59163a',
            '#b2502d', '#bfb960', '#00ffcc', '#23318c', '#a6537f', '#734939',
            '#b2a700', '#104036', '#3d3df2', '#402031', '#e56739', '#736f39',
            '#79f2ea', '#000059', '#401029', '#4c1400', '#ffee00', '#005953',
            '#101040', '#990052', '#402820', '#403d10', '#00ffee', '#0000d9',
            '#ff80c4', '#a66953', '#eeff00', '#00ccbe', '#8080ff', '#e673a1',
            '#a62c00', '#474d00', '#1a3331', '#46468c', '#733950', '#662900',
            '#858c23', '#238c85', '#0f0073', '#b20047', '#d9986c', '#becc00',
            '#396f73', '#281d73', '#ff0066', '#ff6600', '#dee673', '#59adb3',
            '#6559b3', '#590024', '#b2622d', '#98b32d', '#36ced9', '#332d59',
            '#40001a', '#733f1d', '#526600', '#005359', '#242040', '#bf6079',
            '#735039', '#cef23d', '#007780', '#5630bf', '#66001b', '#b24700',
            '#acbf60', '#1d6273', '#25008c', '#731d34', '#a67453', '#50592d',
            '#00ccff', '#6600ff', '#ff0044', '#4c1f00', '#8a994d', '#79daf2',
            '#a173e6', '#d93662', '#402310', '#aaff00', '#2d98b3', '#8c40ff',
            '#592d39', '#ff8c40', '#354020', '#103640', '#1a0040', '#331a20',
            '#331400', '#334d00', '#1d5673', '#583973', '#7f0022', '#4c3626',
            '#88cc00', '#36a3d9', '#3d0073', '#d9364c', '#33241a', '#698c23',
            '#5995b3', '#300059', '#e57382', '#7f3300', '#366600', '#00aaff',
            '#3a1659', '#733941', '#663600', '#74b32d', '#003c59', '#7f53a6',
            '#73000f', '#ff8800', '#baf279', '#79caf2', '#291040', '#a6293a',
            '#b2742d', '#587339', '#0077b3', '#632699', '#400009', '#d9a66c',
            '#294010', '#2d4a59', '#aa00ff', '#4c131b', '#b25f00', '#5ce600',
            '#267399', '#a336d9', '#990014', '#664e33', '#86bf60', '#0088ff',
            '#7700b3', '#593a16', '#073300', '#1d4b73', '#ac60bf', '#e59539',
            '#4f8c46', '#368dd9', '#5c0073'
        ]

    def rgb_to_hex_color(self, rgb_tuple):
        """
        Converts the given rgb_tuple to a hex color.

        :param rgb_tuple: tuple with 3 ints represents rgb color space
        """
        return '#' + ("".join(map(chr, rgb_tuple)).encode('hex'))

    def email_to_int_list(self, email_str):
        """
        Get every byte of the hex digest value of the email and turn it into
        an integer. Each value is always between 0-255
        """
        digest = md5_safe(email_str.lower())
        return [int(digest[i * 2:i * 2 + 2], 16) for i in range(16)]

    def pick_color_bank_index(self, email_str, color_bank):
        return self.email_to_int_list(email_str)[0] % len(color_bank)

    def str2color(self, email_str):
        """
        Tries to map an email to a color using a stable algorithm

        :param email_str:
        """
        color_bank = self.get_color_bank()
        # pick position (modulo its length so we always find it in the
        # bank even if it's smaller than 256 values)
        pos = self.pick_color_bank_index(email_str, color_bank)
        return color_bank[pos]

    def normalize_email(self, email_address):
        import unicodedata
        # default host used to fill in the fake/missing email
        default_host = u'localhost'

        if not email_address:
            email_address = u'%s@%s' % (User.DEFAULT_USER, default_host)

        email_address = safe_unicode(email_address)

        if u'@' not in email_address:
            email_address = u'%s@%s' % (email_address, default_host)

        if email_address.endswith(u'@'):
            email_address = u'%s%s' % (email_address, default_host)

        email_address = unicodedata.normalize('NFKD', email_address)\
            .encode('ascii', 'ignore')
        return email_address

1103 def get_initials(self):
1112 def get_initials(self):
1104 """
1113 """
1105 Returns 2 letter initials calculated based on the input.
1114 Returns 2 letter initials calculated based on the input.
1106 The algorithm picks first given email address, and takes first letter
1115 The algorithm picks first given email address, and takes first letter
1107 of part before @, and then the first letter of server name. In case
1116 of part before @, and then the first letter of server name. In case
1108 the part before @ is in a format of `somestring.somestring2` it replaces
1117 the part before @ is in a format of `somestring.somestring2` it replaces
1109 the server letter with first letter of somestring2
1118 the server letter with first letter of somestring2
1110
1119
1111 In case function was initialized with both first and lastname, this
1120 In case function was initialized with both first and lastname, this
1112 overrides the extraction from email by first letter of the first and
1121 overrides the extraction from email by first letter of the first and
1113 last name. We add special logic to that functionality, In case Full name
1122 last name. We add special logic to that functionality, In case Full name
1114 is compound, like Guido Von Rossum, we use last part of the last name
1123 is compound, like Guido Von Rossum, we use last part of the last name
1115 (Von Rossum) picking `R`.
1124 (Von Rossum) picking `R`.
1116
1125
1117 Function also normalizes the non-ascii characters to they ascii
1126 Function also normalizes the non-ascii characters to they ascii
1118 representation, eg Δ„ => A
1127 representation, eg Δ„ => A
1119 """
1128 """
1120 import unicodedata
1129 import unicodedata
1121 # replace non-ascii to ascii
1130 # replace non-ascii to ascii
1122 first_name = unicodedata.normalize(
1131 first_name = unicodedata.normalize(
1123 'NFKD', safe_unicode(self.first_name)).encode('ascii', 'ignore')
1132 'NFKD', safe_unicode(self.first_name)).encode('ascii', 'ignore')
1124 last_name = unicodedata.normalize(
1133 last_name = unicodedata.normalize(
1125 'NFKD', safe_unicode(self.last_name)).encode('ascii', 'ignore')
1134 'NFKD', safe_unicode(self.last_name)).encode('ascii', 'ignore')
1126
1135
1127 # do NFKD encoding, and also make sure email has proper format
1136 # do NFKD encoding, and also make sure email has proper format
1128 email_address = self.normalize_email(self.email_address)
1137 email_address = self.normalize_email(self.email_address)
1129
1138
1130 # first push the email initials
1139 # first push the email initials
1131 prefix, server = email_address.split('@', 1)
1140 prefix, server = email_address.split('@', 1)
1132
1141
1133 # check if prefix is maybe a 'firstname.lastname' syntax
1142 # check if prefix is maybe a 'firstname.lastname' syntax
1134 _dot_split = prefix.rsplit('.', 1)
1143 _dot_split = prefix.rsplit('.', 1)
1135 if len(_dot_split) == 2:
1144 if len(_dot_split) == 2:
1136 initials = [_dot_split[0][0], _dot_split[1][0]]
1145 initials = [_dot_split[0][0], _dot_split[1][0]]
1137 else:
1146 else:
1138 initials = [prefix[0], server[0]]
1147 initials = [prefix[0], server[0]]
1139
1148
1140 # then try to replace either firstname or lastname
1149 # then try to replace either firstname or lastname
1141 fn_letter = (first_name or " ")[0].strip()
1150 fn_letter = (first_name or " ")[0].strip()
1142 ln_letter = (last_name.split(' ', 1)[-1] or " ")[0].strip()
1151 ln_letter = (last_name.split(' ', 1)[-1] or " ")[0].strip()
1143
1152
1144 if fn_letter:
1153 if fn_letter:
1145 initials[0] = fn_letter
1154 initials[0] = fn_letter
1146
1155
1147 if ln_letter:
1156 if ln_letter:
1148 initials[1] = ln_letter
1157 initials[1] = ln_letter
1149
1158
1150 return ''.join(initials).upper()
1159 return ''.join(initials).upper()
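
For the extraction rules described in this docstring, a few illustrative calls (not part of the original source; the constructor signature is the one used by initials_gravatar() below):

    InitialsGravatar(u'john.doe@example.com', u'', u'', 30).get_initials()            # 'JD'
    InitialsGravatar(u'admin@server.com', u'', u'', 30).get_initials()                # 'AS'
    InitialsGravatar(u'gvr@example.com', u'Guido', u'Von Rossum', 30).get_initials()  # 'GR'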
1151
1160
1152 def get_img_data_by_type(self, font_family, img_type):
1161 def get_img_data_by_type(self, font_family, img_type):
1153 default_user = """
1162 default_user = """
1154 <svg xmlns="http://www.w3.org/2000/svg"
1163 <svg xmlns="http://www.w3.org/2000/svg"
1155 version="1.1" x="0px" y="0px" width="{size}" height="{size}"
1164 version="1.1" x="0px" y="0px" width="{size}" height="{size}"
1156 viewBox="-15 -10 439.165 429.164"
1165 viewBox="-15 -10 439.165 429.164"
1157
1166
1158 xml:space="preserve"
1167 xml:space="preserve"
1159 style="background:{background};" >
1168 style="background:{background};" >
1160
1169
1161 <path d="M204.583,216.671c50.664,0,91.74-48.075,
1170 <path d="M204.583,216.671c50.664,0,91.74-48.075,
1162 91.74-107.378c0-82.237-41.074-107.377-91.74-107.377
1171 91.74-107.378c0-82.237-41.074-107.377-91.74-107.377
1163 c-50.668,0-91.74,25.14-91.74,107.377C112.844,
1172 c-50.668,0-91.74,25.14-91.74,107.377C112.844,
1164 168.596,153.916,216.671,
1173 168.596,153.916,216.671,
1165 204.583,216.671z" fill="{text_color}"/>
1174 204.583,216.671z" fill="{text_color}"/>
1166 <path d="M407.164,374.717L360.88,
1175 <path d="M407.164,374.717L360.88,
1167 270.454c-2.117-4.771-5.836-8.728-10.465-11.138l-71.83-37.392
1176 270.454c-2.117-4.771-5.836-8.728-10.465-11.138l-71.83-37.392
1168 c-1.584-0.823-3.502-0.663-4.926,0.415c-20.316,
1177 c-1.584-0.823-3.502-0.663-4.926,0.415c-20.316,
1169 15.366-44.203,23.488-69.076,23.488c-24.877,
1178 15.366-44.203,23.488-69.076,23.488c-24.877,
1170 0-48.762-8.122-69.078-23.488
1179 0-48.762-8.122-69.078-23.488
1171 c-1.428-1.078-3.346-1.238-4.93-0.415L58.75,
1180 c-1.428-1.078-3.346-1.238-4.93-0.415L58.75,
1172 259.316c-4.631,2.41-8.346,6.365-10.465,11.138L2.001,374.717
1181 259.316c-4.631,2.41-8.346,6.365-10.465,11.138L2.001,374.717
1173 c-3.191,7.188-2.537,15.412,1.75,22.005c4.285,
1182 c-3.191,7.188-2.537,15.412,1.75,22.005c4.285,
1174 6.592,11.537,10.526,19.4,10.526h362.861c7.863,0,15.117-3.936,
1183 6.592,11.537,10.526,19.4,10.526h362.861c7.863,0,15.117-3.936,
1175 19.402-10.527 C409.699,390.129,
1184 19.402-10.527 C409.699,390.129,
1176 410.355,381.902,407.164,374.717z" fill="{text_color}"/>
1185 410.355,381.902,407.164,374.717z" fill="{text_color}"/>
1177 </svg>""".format(
1186 </svg>""".format(
1178 size=self.size,
1187 size=self.size,
1179 background='#979797', # @grey4
1188 background='#979797', # @grey4
1180 text_color=self.text_color,
1189 text_color=self.text_color,
1181 font_family=font_family)
1190 font_family=font_family)
1182
1191
1183 return {
1192 return {
1184 "default_user": default_user
1193 "default_user": default_user
1185 }[img_type]
1194 }[img_type]
1186
1195
1187 def get_img_data(self, svg_type=None):
1196 def get_img_data(self, svg_type=None):
1188 """
1197 """
1189 generates the svg metadata for image
1198 generates the svg metadata for image
1190 """
1199 """
1191
1200
1192 font_family = ','.join([
1201 font_family = ','.join([
1193 'proximanovaregular',
1202 'proximanovaregular',
1194 'Proxima Nova Regular',
1203 'Proxima Nova Regular',
1195 'Proxima Nova',
1204 'Proxima Nova',
1196 'Arial',
1205 'Arial',
1197 'Lucida Grande',
1206 'Lucida Grande',
1198 'sans-serif'
1207 'sans-serif'
1199 ])
1208 ])
1200 if svg_type:
1209 if svg_type:
1201 return self.get_img_data_by_type(font_family, svg_type)
1210 return self.get_img_data_by_type(font_family, svg_type)
1202
1211
1203 initials = self.get_initials()
1212 initials = self.get_initials()
1204 img_data = """
1213 img_data = """
1205 <svg xmlns="http://www.w3.org/2000/svg" pointer-events="none"
1214 <svg xmlns="http://www.w3.org/2000/svg" pointer-events="none"
1206 width="{size}" height="{size}"
1215 width="{size}" height="{size}"
1207 style="width: 100%; height: 100%; background-color: {background}"
1216 style="width: 100%; height: 100%; background-color: {background}"
1208 viewBox="0 0 {size} {size}">
1217 viewBox="0 0 {size} {size}">
1209 <text text-anchor="middle" y="50%" x="50%" dy="0.35em"
1218 <text text-anchor="middle" y="50%" x="50%" dy="0.35em"
1210 pointer-events="auto" fill="{text_color}"
1219 pointer-events="auto" fill="{text_color}"
1211 font-family="{font_family}"
1220 font-family="{font_family}"
1212 style="font-weight: 400; font-size: {f_size}px;">{text}
1221 style="font-weight: 400; font-size: {f_size}px;">{text}
1213 </text>
1222 </text>
1214 </svg>""".format(
1223 </svg>""".format(
1215 size=self.size,
1224 size=self.size,
1216 f_size=self.size/1.85, # scale the text inside the box nicely
1225 f_size=self.size/1.85, # scale the text inside the box nicely
1217 background=self.background,
1226 background=self.background,
1218 text_color=self.text_color,
1227 text_color=self.text_color,
1219 text=initials.upper(),
1228 text=initials.upper(),
1220 font_family=font_family)
1229 font_family=font_family)
1221
1230
1222 return img_data
1231 return img_data
1223
1232
1224 def generate_svg(self, svg_type=None):
1233 def generate_svg(self, svg_type=None):
1225 img_data = self.get_img_data(svg_type)
1234 img_data = self.get_img_data(svg_type)
1226 return "data:image/svg+xml;base64,%s" % img_data.encode('base64')
1235 return "data:image/svg+xml;base64,%s" % img_data.encode('base64')
1227
1236
1228
1237
1229 def initials_gravatar(email_address, first_name, last_name, size=30):
1238 def initials_gravatar(email_address, first_name, last_name, size=30):
1230 svg_type = None
1239 svg_type = None
1231 if email_address == User.DEFAULT_USER_EMAIL:
1240 if email_address == User.DEFAULT_USER_EMAIL:
1232 svg_type = 'default_user'
1241 svg_type = 'default_user'
1233 klass = InitialsGravatar(email_address, first_name, last_name, size)
1242 klass = InitialsGravatar(email_address, first_name, last_name, size)
1234 return klass.generate_svg(svg_type=svg_type)
1243 return klass.generate_svg(svg_type=svg_type)
1235
1244
1236
1245
1237 def gravatar_url(email_address, size=30):
1246 def gravatar_url(email_address, size=30):
1238 # doh, we need to re-import those to mock it later
1247 # doh, we need to re-import those to mock it later
1239 from pylons import tmpl_context as c
1248 from pylons import tmpl_context as c
1240
1249
1241 _use_gravatar = c.visual.use_gravatar
1250 _use_gravatar = c.visual.use_gravatar
1242 _gravatar_url = c.visual.gravatar_url or User.DEFAULT_GRAVATAR_URL
1251 _gravatar_url = c.visual.gravatar_url or User.DEFAULT_GRAVATAR_URL
1243
1252
1244 email_address = email_address or User.DEFAULT_USER_EMAIL
1253 email_address = email_address or User.DEFAULT_USER_EMAIL
1245 if isinstance(email_address, unicode):
1254 if isinstance(email_address, unicode):
1246 # hashlib crashes on unicode items
1255 # hashlib crashes on unicode items
1247 email_address = safe_str(email_address)
1256 email_address = safe_str(email_address)
1248
1257
1249 # empty email or default user
1258 # empty email or default user
1250 if not email_address or email_address == User.DEFAULT_USER_EMAIL:
1259 if not email_address or email_address == User.DEFAULT_USER_EMAIL:
1251 return initials_gravatar(User.DEFAULT_USER_EMAIL, '', '', size=size)
1260 return initials_gravatar(User.DEFAULT_USER_EMAIL, '', '', size=size)
1252
1261
1253 if _use_gravatar:
1262 if _use_gravatar:
1254 # TODO: Disuse pyramid thread locals. Think about another solution to
1263 # TODO: Disuse pyramid thread locals. Think about another solution to
1255 # get the host and schema here.
1264 # get the host and schema here.
1256 request = get_current_request()
1265 request = get_current_request()
1257 tmpl = safe_str(_gravatar_url)
1266 tmpl = safe_str(_gravatar_url)
1258 tmpl = tmpl.replace('{email}', email_address)\
1267 tmpl = tmpl.replace('{email}', email_address)\
1259 .replace('{md5email}', md5_safe(email_address.lower())) \
1268 .replace('{md5email}', md5_safe(email_address.lower())) \
1260 .replace('{netloc}', request.host)\
1269 .replace('{netloc}', request.host)\
1261 .replace('{scheme}', request.scheme)\
1270 .replace('{scheme}', request.scheme)\
1262 .replace('{size}', safe_str(size))
1271 .replace('{size}', safe_str(size))
1263 return tmpl
1272 return tmpl
1264 else:
1273 else:
1265 return initials_gravatar(email_address, '', '', size=size)
1274 return initials_gravatar(email_address, '', '', size=size)
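
Because gravatar_url() needs the pylons tmpl_context and a live request, the placeholder substitution it performs is easier to show standalone. A minimal sketch, with an assumed gravatar URL template standing in for c.visual.gravatar_url (hashlib is used here only to keep the sketch self-contained; the code above uses md5_safe):

    import hashlib

    email = 'marcin@example.com'
    tmpl = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
    url = (tmpl
           .replace('{email}', email)
           .replace('{md5email}', hashlib.md5(email.lower()).hexdigest())
           .replace('{size}', str(30)))
    # -> 'https://secure.gravatar.com/avatar/<md5 of the lowercased email>?d=identicon&s=30'
    # the real code additionally fills {netloc} and {scheme} from the current request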
1266
1275
1267
1276
1268 class Page(_Page):
1277 class Page(_Page):
1269 """
1278 """
1270 Custom pager to match rendering style with paginator
1279 Custom pager to match rendering style with paginator
1271 """
1280 """
1272
1281
1273 def _get_pos(self, cur_page, max_page, items):
1282 def _get_pos(self, cur_page, max_page, items):
1274 edge = (items / 2) + 1
1283 edge = (items / 2) + 1
1275 if (cur_page <= edge):
1284 if (cur_page <= edge):
1276 radius = max(items / 2, items - cur_page)
1285 radius = max(items / 2, items - cur_page)
1277 elif (max_page - cur_page) < edge:
1286 elif (max_page - cur_page) < edge:
1278 radius = (items - 1) - (max_page - cur_page)
1287 radius = (items - 1) - (max_page - cur_page)
1279 else:
1288 else:
1280 radius = items / 2
1289 radius = items / 2
1281
1290
1282 left = max(1, (cur_page - (radius)))
1291 left = max(1, (cur_page - (radius)))
1283 right = min(max_page, cur_page + (radius))
1292 right = min(max_page, cur_page + (radius))
1284 return left, cur_page, right
1293 return left, cur_page, right
1285
1294
1286 def _range(self, regexp_match):
1295 def _range(self, regexp_match):
1287 """
1296 """
1288 Return range of linked pages (e.g. '1 2 [3] 4 5 6 7 8').
1297 Return range of linked pages (e.g. '1 2 [3] 4 5 6 7 8').
1289
1298
1290 Arguments:
1299 Arguments:
1291
1300
1292 regexp_match
1301 regexp_match
1293 A "re" (regular expressions) match object containing the
1302 A "re" (regular expressions) match object containing the
1294 radius of linked pages around the current page in
1303 radius of linked pages around the current page in
1295 regexp_match.group(1) as a string
1304 regexp_match.group(1) as a string
1296
1305
1297 This function is supposed to be called as a callable in
1306 This function is supposed to be called as a callable in
1298 re.sub.
1307 re.sub.
1299
1308
1300 """
1309 """
1301 radius = int(regexp_match.group(1))
1310 radius = int(regexp_match.group(1))
1302
1311
1303 # Compute the first and last page number within the radius
1312 # Compute the first and last page number within the radius
1304 # e.g. '1 .. 5 6 [7] 8 9 .. 12'
1313 # e.g. '1 .. 5 6 [7] 8 9 .. 12'
1305 # -> leftmost_page = 5
1314 # -> leftmost_page = 5
1306 # -> rightmost_page = 9
1315 # -> rightmost_page = 9
1307 leftmost_page, _cur, rightmost_page = self._get_pos(self.page,
1316 leftmost_page, _cur, rightmost_page = self._get_pos(self.page,
1308 self.last_page,
1317 self.last_page,
1309 (radius * 2) + 1)
1318 (radius * 2) + 1)
1310 nav_items = []
1319 nav_items = []
1311
1320
1312 # Create a link to the first page (unless we are on the first page
1321 # Create a link to the first page (unless we are on the first page
1313 # or there would be no need to insert '..' spacers)
1322 # or there would be no need to insert '..' spacers)
1314 if self.page != self.first_page and self.first_page < leftmost_page:
1323 if self.page != self.first_page and self.first_page < leftmost_page:
1315 nav_items.append(self._pagerlink(self.first_page, self.first_page))
1324 nav_items.append(self._pagerlink(self.first_page, self.first_page))
1316
1325
1317 # Insert dots if there are pages between the first page
1326 # Insert dots if there are pages between the first page
1318 # and the currently displayed page range
1327 # and the currently displayed page range
1319 if leftmost_page - self.first_page > 1:
1328 if leftmost_page - self.first_page > 1:
1320 # Wrap in a SPAN tag if nolink_attr is set
1329 # Wrap in a SPAN tag if nolink_attr is set
1321 text = '..'
1330 text = '..'
1322 if self.dotdot_attr:
1331 if self.dotdot_attr:
1323 text = HTML.span(c=text, **self.dotdot_attr)
1332 text = HTML.span(c=text, **self.dotdot_attr)
1324 nav_items.append(text)
1333 nav_items.append(text)
1325
1334
1326 for thispage in xrange(leftmost_page, rightmost_page + 1):
1335 for thispage in xrange(leftmost_page, rightmost_page + 1):
1327 # Highlight the current page number and do not use a link
1336 # Highlight the current page number and do not use a link
1328 if thispage == self.page:
1337 if thispage == self.page:
1329 text = '%s' % (thispage,)
1338 text = '%s' % (thispage,)
1330 # Wrap in a SPAN tag if nolink_attr is set
1339 # Wrap in a SPAN tag if nolink_attr is set
1331 if self.curpage_attr:
1340 if self.curpage_attr:
1332 text = HTML.span(c=text, **self.curpage_attr)
1341 text = HTML.span(c=text, **self.curpage_attr)
1333 nav_items.append(text)
1342 nav_items.append(text)
1334 # Otherwise create just a link to that page
1343 # Otherwise create just a link to that page
1335 else:
1344 else:
1336 text = '%s' % (thispage,)
1345 text = '%s' % (thispage,)
1337 nav_items.append(self._pagerlink(thispage, text))
1346 nav_items.append(self._pagerlink(thispage, text))
1338
1347
1339 # Insert dots if there are pages between the displayed
1348 # Insert dots if there are pages between the displayed
1340 # page numbers and the end of the page range
1349 # page numbers and the end of the page range
1341 if self.last_page - rightmost_page > 1:
1350 if self.last_page - rightmost_page > 1:
1342 text = '..'
1351 text = '..'
1343 # Wrap in a SPAN tag if nolink_attr is set
1352 # Wrap in a SPAN tag if nolink_attr is set
1344 if self.dotdot_attr:
1353 if self.dotdot_attr:
1345 text = HTML.span(c=text, **self.dotdot_attr)
1354 text = HTML.span(c=text, **self.dotdot_attr)
1346 nav_items.append(text)
1355 nav_items.append(text)
1347
1356
1348 # Create a link to the very last page (unless we are on the last
1357 # Create a link to the very last page (unless we are on the last
1349 # page or there would be no need to insert '..' spacers)
1358 # page or there would be no need to insert '..' spacers)
1350 if self.page != self.last_page and rightmost_page < self.last_page:
1359 if self.page != self.last_page and rightmost_page < self.last_page:
1351 nav_items.append(self._pagerlink(self.last_page, self.last_page))
1360 nav_items.append(self._pagerlink(self.last_page, self.last_page))
1352
1361
1353 ## prerender links
1362 ## prerender links
1354 #_page_link = url.current()
1363 #_page_link = url.current()
1355 #nav_items.append(literal('<link rel="prerender" href="%s?page=%s">' % (_page_link, str(int(self.page)+1))))
1364 #nav_items.append(literal('<link rel="prerender" href="%s?page=%s">' % (_page_link, str(int(self.page)+1))))
1356 #nav_items.append(literal('<link rel="prefetch" href="%s?page=%s">' % (_page_link, str(int(self.page)+1))))
1365 #nav_items.append(literal('<link rel="prefetch" href="%s?page=%s">' % (_page_link, str(int(self.page)+1))))
1357 return self.separator.join(nav_items)
1366 return self.separator.join(nav_items)
1358
1367
1359 def pager(self, format='~2~', page_param='page', partial_param='partial',
1368 def pager(self, format='~2~', page_param='page', partial_param='partial',
1360 show_if_single_page=False, separator=' ', onclick=None,
1369 show_if_single_page=False, separator=' ', onclick=None,
1361 symbol_first='<<', symbol_last='>>',
1370 symbol_first='<<', symbol_last='>>',
1362 symbol_previous='<', symbol_next='>',
1371 symbol_previous='<', symbol_next='>',
1363 link_attr={'class': 'pager_link', 'rel': 'prerender'},
1372 link_attr={'class': 'pager_link', 'rel': 'prerender'},
1364 curpage_attr={'class': 'pager_curpage'},
1373 curpage_attr={'class': 'pager_curpage'},
1365 dotdot_attr={'class': 'pager_dotdot'}, **kwargs):
1374 dotdot_attr={'class': 'pager_dotdot'}, **kwargs):
1366
1375
1367 self.curpage_attr = curpage_attr
1376 self.curpage_attr = curpage_attr
1368 self.separator = separator
1377 self.separator = separator
1369 self.pager_kwargs = kwargs
1378 self.pager_kwargs = kwargs
1370 self.page_param = page_param
1379 self.page_param = page_param
1371 self.partial_param = partial_param
1380 self.partial_param = partial_param
1372 self.onclick = onclick
1381 self.onclick = onclick
1373 self.link_attr = link_attr
1382 self.link_attr = link_attr
1374 self.dotdot_attr = dotdot_attr
1383 self.dotdot_attr = dotdot_attr
1375
1384
1376 # Don't show navigator if there is no more than one page
1385 # Don't show navigator if there is no more than one page
1377 if self.page_count == 0 or (self.page_count == 1 and not show_if_single_page):
1386 if self.page_count == 0 or (self.page_count == 1 and not show_if_single_page):
1378 return ''
1387 return ''
1379
1388
1380 from string import Template
1389 from string import Template
1381 # Replace ~...~ in token format by range of pages
1390 # Replace ~...~ in token format by range of pages
1382 result = re.sub(r'~(\d+)~', self._range, format)
1391 result = re.sub(r'~(\d+)~', self._range, format)
1383
1392
1384 # Interpolate '%' variables
1393 # Interpolate '%' variables
1385 result = Template(result).safe_substitute({
1394 result = Template(result).safe_substitute({
1386 'first_page': self.first_page,
1395 'first_page': self.first_page,
1387 'last_page': self.last_page,
1396 'last_page': self.last_page,
1388 'page': self.page,
1397 'page': self.page,
1389 'page_count': self.page_count,
1398 'page_count': self.page_count,
1390 'items_per_page': self.items_per_page,
1399 'items_per_page': self.items_per_page,
1391 'first_item': self.first_item,
1400 'first_item': self.first_item,
1392 'last_item': self.last_item,
1401 'last_item': self.last_item,
1393 'item_count': self.item_count,
1402 'item_count': self.item_count,
1394 'link_first': self.page > self.first_page and \
1403 'link_first': self.page > self.first_page and \
1395 self._pagerlink(self.first_page, symbol_first) or '',
1404 self._pagerlink(self.first_page, symbol_first) or '',
1396 'link_last': self.page < self.last_page and \
1405 'link_last': self.page < self.last_page and \
1397 self._pagerlink(self.last_page, symbol_last) or '',
1406 self._pagerlink(self.last_page, symbol_last) or '',
1398 'link_previous': self.previous_page and \
1407 'link_previous': self.previous_page and \
1399 self._pagerlink(self.previous_page, symbol_previous) \
1408 self._pagerlink(self.previous_page, symbol_previous) \
1400 or HTML.span(symbol_previous, class_="pg-previous disabled"),
1409 or HTML.span(symbol_previous, class_="pg-previous disabled"),
1401 'link_next': self.next_page and \
1410 'link_next': self.next_page and \
1402 self._pagerlink(self.next_page, symbol_next) \
1411 self._pagerlink(self.next_page, symbol_next) \
1403 or HTML.span(symbol_next, class_="pg-next disabled")
1412 or HTML.span(symbol_next, class_="pg-next disabled")
1404 })
1413 })
1405
1414
1406 return literal(result)
1415 return literal(result)
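
A usage sketch of the format string handled above; the values are hypothetical and rendering the actual anchors still requires the usual webhelpers URL generator to be set up:

    page = Page(range(500), page=7, items_per_page=20)
    page.pager('$link_previous ~3~ $link_next')
    # `~3~` expands via _range() to roughly: < 1 .. 4 5 6 [7] 8 9 10 .. 25 >
    # where [7] is the current page wrapped in pager_curpage and the rest are pager_link anchors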
1407
1416
1408
1417
1409 #==============================================================================
1418 #==============================================================================
1410 # REPO PAGER, PAGER FOR REPOSITORY
1419 # REPO PAGER, PAGER FOR REPOSITORY
1411 #==============================================================================
1420 #==============================================================================
1412 class RepoPage(Page):
1421 class RepoPage(Page):
1413
1422
1414 def __init__(self, collection, page=1, items_per_page=20,
1423 def __init__(self, collection, page=1, items_per_page=20,
1415 item_count=None, url=None, **kwargs):
1424 item_count=None, url=None, **kwargs):
1416
1425
1417 """Create a "RepoPage" instance. special pager for paging
1426 """Create a "RepoPage" instance. special pager for paging
1418 repository
1427 repository
1419 """
1428 """
1420 self._url_generator = url
1429 self._url_generator = url
1421
1430
1422 # Save the kwargs class-wide so they can be used in the pager() method
1431 # Save the kwargs class-wide so they can be used in the pager() method
1423 self.kwargs = kwargs
1432 self.kwargs = kwargs
1424
1433
1425 # Save a reference to the collection
1434 # Save a reference to the collection
1426 self.original_collection = collection
1435 self.original_collection = collection
1427
1436
1428 self.collection = collection
1437 self.collection = collection
1429
1438
1430 # The self.page is the number of the current page.
1439 # The self.page is the number of the current page.
1431 # The first page has the number 1!
1440 # The first page has the number 1!
1432 try:
1441 try:
1433 self.page = int(page) # make it int() if we get it as a string
1442 self.page = int(page) # make it int() if we get it as a string
1434 except (ValueError, TypeError):
1443 except (ValueError, TypeError):
1435 self.page = 1
1444 self.page = 1
1436
1445
1437 self.items_per_page = items_per_page
1446 self.items_per_page = items_per_page
1438
1447
1439 # Unless the user tells us how many items the collection has
1448 # Unless the user tells us how many items the collection has
1440 # we calculate that ourselves.
1449 # we calculate that ourselves.
1441 if item_count is not None:
1450 if item_count is not None:
1442 self.item_count = item_count
1451 self.item_count = item_count
1443 else:
1452 else:
1444 self.item_count = len(self.collection)
1453 self.item_count = len(self.collection)
1445
1454
1446 # Compute the number of the first and last available page
1455 # Compute the number of the first and last available page
1447 if self.item_count > 0:
1456 if self.item_count > 0:
1448 self.first_page = 1
1457 self.first_page = 1
1449 self.page_count = int(math.ceil(float(self.item_count) /
1458 self.page_count = int(math.ceil(float(self.item_count) /
1450 self.items_per_page))
1459 self.items_per_page))
1451 self.last_page = self.first_page + self.page_count - 1
1460 self.last_page = self.first_page + self.page_count - 1
1452
1461
1453 # Make sure that the requested page number is in the range of
1462 # Make sure that the requested page number is in the range of
1454 # valid pages
1463 # valid pages
1455 if self.page > self.last_page:
1464 if self.page > self.last_page:
1456 self.page = self.last_page
1465 self.page = self.last_page
1457 elif self.page < self.first_page:
1466 elif self.page < self.first_page:
1458 self.page = self.first_page
1467 self.page = self.first_page
1459
1468
1460 # Note: the number of items on this page can be less than
1469 # Note: the number of items on this page can be less than
1461 # items_per_page if the last page is not full
1470 # items_per_page if the last page is not full
1462 self.first_item = max(0, (self.item_count) - (self.page *
1471 self.first_item = max(0, (self.item_count) - (self.page *
1463 items_per_page))
1472 items_per_page))
1464 self.last_item = ((self.item_count - 1) - items_per_page *
1473 self.last_item = ((self.item_count - 1) - items_per_page *
1465 (self.page - 1))
1474 (self.page - 1))
1466
1475
1467 self.items = list(self.collection[self.first_item:self.last_item + 1])
1476 self.items = list(self.collection[self.first_item:self.last_item + 1])
1468
1477
1469 # Links to previous and next page
1478 # Links to previous and next page
1470 if self.page > self.first_page:
1479 if self.page > self.first_page:
1471 self.previous_page = self.page - 1
1480 self.previous_page = self.page - 1
1472 else:
1481 else:
1473 self.previous_page = None
1482 self.previous_page = None
1474
1483
1475 if self.page < self.last_page:
1484 if self.page < self.last_page:
1476 self.next_page = self.page + 1
1485 self.next_page = self.page + 1
1477 else:
1486 else:
1478 self.next_page = None
1487 self.next_page = None
1479
1488
1480 # No items available
1489 # No items available
1481 else:
1490 else:
1482 self.first_page = None
1491 self.first_page = None
1483 self.page_count = 0
1492 self.page_count = 0
1484 self.last_page = None
1493 self.last_page = None
1485 self.first_item = None
1494 self.first_item = None
1486 self.last_item = None
1495 self.last_item = None
1487 self.previous_page = None
1496 self.previous_page = None
1488 self.next_page = None
1497 self.next_page = None
1489 self.items = []
1498 self.items = []
1490
1499
1491 # This is a subclass of the 'list' type. Initialise the list now.
1500 # This is a subclass of the 'list' type. Initialise the list now.
1492 list.__init__(self, reversed(self.items))
1501 list.__init__(self, reversed(self.items))
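
The slicing above walks the collection from the end, so a worked example helps; this is plain arithmetic and needs no application context:

    # 45 commits, 20 per page, requesting page 1
    item_count, items_per_page, page = 45, 20, 1
    first_item = max(0, item_count - page * items_per_page)       # 25
    last_item = (item_count - 1) - items_per_page * (page - 1)    # 44
    # items = collection[25:45], then reversed() puts the newest first
    # (assuming the collection is ordered oldest to newest); for page 3 the
    # same formulas give first_item=0, last_item=4, i.e. the 5 oldest items.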
1493
1502
1494
1503
1495 def changed_tooltip(nodes):
1504 def changed_tooltip(nodes):
1496 """
1505 """
1497 Generates an HTML string for changed nodes on the commit page.
1506 Generates an HTML string for changed nodes on the commit page.
1498 It limits the output to 30 entries.
1507 It limits the output to 30 entries.
1499
1508
1500 :param nodes: LazyNodesGenerator
1509 :param nodes: LazyNodesGenerator
1501 """
1510 """
1502 if nodes:
1511 if nodes:
1503 pref = ': <br/> '
1512 pref = ': <br/> '
1504 suf = ''
1513 suf = ''
1505 if len(nodes) > 30:
1514 if len(nodes) > 30:
1506 suf = '<br/>' + _(' and %s more') % (len(nodes) - 30)
1515 suf = '<br/>' + _(' and %s more') % (len(nodes) - 30)
1507 return literal(pref + '<br/> '.join([safe_unicode(x.path)
1516 return literal(pref + '<br/> '.join([safe_unicode(x.path)
1508 for x in nodes[:30]]) + suf)
1517 for x in nodes[:30]]) + suf)
1509 else:
1518 else:
1510 return ': ' + _('No Files')
1519 return ': ' + _('No Files')
1511
1520
1512
1521
1513 def breadcrumb_repo_link(repo):
1522 def breadcrumb_repo_link(repo):
1514 """
1523 """
1515 Makes a breadcrumb path link to a repo
1524 Makes a breadcrumb path link to a repo
1516
1525
1517 ex::
1526 ex::
1518 group >> subgroup >> repo
1527 group >> subgroup >> repo
1519
1528
1520 :param repo: a Repository instance
1529 :param repo: a Repository instance
1521 """
1530 """
1522
1531
1523 path = [
1532 path = [
1524 link_to(group.name, url('repo_group_home', group_name=group.group_name))
1533 link_to(group.name, url('repo_group_home', group_name=group.group_name))
1525 for group in repo.groups_with_parents
1534 for group in repo.groups_with_parents
1526 ] + [
1535 ] + [
1527 link_to(repo.just_name, url('summary_home', repo_name=repo.repo_name))
1536 link_to(repo.just_name, url('summary_home', repo_name=repo.repo_name))
1528 ]
1537 ]
1529
1538
1530 return literal(' &raquo; '.join(path))
1539 return literal(' &raquo; '.join(path))
1531
1540
1532
1541
1533 def format_byte_size_binary(file_size):
1542 def format_byte_size_binary(file_size):
1534 """
1543 """
1535 Formats file/folder sizes using binary (IEC) units.
1544 Formats file/folder sizes using binary (IEC) units.
1536 """
1545 """
1537 formatted_size = format_byte_size(file_size, binary=True)
1546 formatted_size = format_byte_size(file_size, binary=True)
1538 return formatted_size
1547 return formatted_size
1539
1548
1540
1549
1541 def fancy_file_stats(stats):
1550 def fancy_file_stats(stats):
1542 """
1551 """
1543 Displays a fancy two colored bar for number of added/deleted
1552 Displays a fancy two colored bar for number of added/deleted
1544 lines of code on file
1553 lines of code on file
1545
1554
1546 :param stats: two element list of added/deleted lines of code
1555 :param stats: two element list of added/deleted lines of code
1547 """
1556 """
1548 from rhodecode.lib.diffs import NEW_FILENODE, DEL_FILENODE, \
1557 from rhodecode.lib.diffs import NEW_FILENODE, DEL_FILENODE, \
1549 MOD_FILENODE, RENAMED_FILENODE, CHMOD_FILENODE, BIN_FILENODE
1558 MOD_FILENODE, RENAMED_FILENODE, CHMOD_FILENODE, BIN_FILENODE
1550
1559
1551 def cgen(l_type, a_v, d_v):
1560 def cgen(l_type, a_v, d_v):
1552 mapping = {'tr': 'top-right-rounded-corner-mid',
1561 mapping = {'tr': 'top-right-rounded-corner-mid',
1553 'tl': 'top-left-rounded-corner-mid',
1562 'tl': 'top-left-rounded-corner-mid',
1554 'br': 'bottom-right-rounded-corner-mid',
1563 'br': 'bottom-right-rounded-corner-mid',
1555 'bl': 'bottom-left-rounded-corner-mid'}
1564 'bl': 'bottom-left-rounded-corner-mid'}
1556 map_getter = lambda x: mapping[x]
1565 map_getter = lambda x: mapping[x]
1557
1566
1558 if l_type == 'a' and d_v:
1567 if l_type == 'a' and d_v:
1559 #case when added and deleted are present
1568 #case when added and deleted are present
1560 return ' '.join(map(map_getter, ['tl', 'bl']))
1569 return ' '.join(map(map_getter, ['tl', 'bl']))
1561
1570
1562 if l_type == 'a' and not d_v:
1571 if l_type == 'a' and not d_v:
1563 return ' '.join(map(map_getter, ['tr', 'br', 'tl', 'bl']))
1572 return ' '.join(map(map_getter, ['tr', 'br', 'tl', 'bl']))
1564
1573
1565 if l_type == 'd' and a_v:
1574 if l_type == 'd' and a_v:
1566 return ' '.join(map(map_getter, ['tr', 'br']))
1575 return ' '.join(map(map_getter, ['tr', 'br']))
1567
1576
1568 if l_type == 'd' and not a_v:
1577 if l_type == 'd' and not a_v:
1569 return ' '.join(map(map_getter, ['tr', 'br', 'tl', 'bl']))
1578 return ' '.join(map(map_getter, ['tr', 'br', 'tl', 'bl']))
1570
1579
1571 a, d = stats['added'], stats['deleted']
1580 a, d = stats['added'], stats['deleted']
1572 width = 100
1581 width = 100
1573
1582
1574 if stats['binary']: # binary operations like chmod/rename etc
1583 if stats['binary']: # binary operations like chmod/rename etc
1575 lbl = []
1584 lbl = []
1576 bin_op = 0 # undefined
1585 bin_op = 0 # undefined
1577
1586
1578 # prefix with bin for binary files
1587 # prefix with bin for binary files
1579 if BIN_FILENODE in stats['ops']:
1588 if BIN_FILENODE in stats['ops']:
1580 lbl += ['bin']
1589 lbl += ['bin']
1581
1590
1582 if NEW_FILENODE in stats['ops']:
1591 if NEW_FILENODE in stats['ops']:
1583 lbl += [_('new file')]
1592 lbl += [_('new file')]
1584 bin_op = NEW_FILENODE
1593 bin_op = NEW_FILENODE
1585 elif MOD_FILENODE in stats['ops']:
1594 elif MOD_FILENODE in stats['ops']:
1586 lbl += [_('mod')]
1595 lbl += [_('mod')]
1587 bin_op = MOD_FILENODE
1596 bin_op = MOD_FILENODE
1588 elif DEL_FILENODE in stats['ops']:
1597 elif DEL_FILENODE in stats['ops']:
1589 lbl += [_('del')]
1598 lbl += [_('del')]
1590 bin_op = DEL_FILENODE
1599 bin_op = DEL_FILENODE
1591 elif RENAMED_FILENODE in stats['ops']:
1600 elif RENAMED_FILENODE in stats['ops']:
1592 lbl += [_('rename')]
1601 lbl += [_('rename')]
1593 bin_op = RENAMED_FILENODE
1602 bin_op = RENAMED_FILENODE
1594
1603
1595 # chmod can go with other operations, so we add a + to lbl if needed
1604 # chmod can go with other operations, so we add a + to lbl if needed
1596 if CHMOD_FILENODE in stats['ops']:
1605 if CHMOD_FILENODE in stats['ops']:
1597 lbl += [_('chmod')]
1606 lbl += [_('chmod')]
1598 if bin_op == 0:
1607 if bin_op == 0:
1599 bin_op = CHMOD_FILENODE
1608 bin_op = CHMOD_FILENODE
1600
1609
1601 lbl = '+'.join(lbl)
1610 lbl = '+'.join(lbl)
1602 b_a = '<div class="bin bin%s %s" style="width:100%%">%s</div>' \
1611 b_a = '<div class="bin bin%s %s" style="width:100%%">%s</div>' \
1603 % (bin_op, cgen('a', a_v='', d_v=0), lbl)
1612 % (bin_op, cgen('a', a_v='', d_v=0), lbl)
1604 b_d = '<div class="bin bin1" style="width:0%%"></div>'
1613 b_d = '<div class="bin bin1" style="width:0%%"></div>'
1605 return literal('<div style="width:%spx">%s%s</div>' % (width, b_a, b_d))
1614 return literal('<div style="width:%spx">%s%s</div>' % (width, b_a, b_d))
1606
1615
1607 t = stats['added'] + stats['deleted']
1616 t = stats['added'] + stats['deleted']
1608 unit = float(width) / (t or 1)
1617 unit = float(width) / (t or 1)
1609
1618
1610 # needs > 9% of width to be visible or 0 to be hidden
1619 # needs > 9% of width to be visible or 0 to be hidden
1611 a_p = max(9, unit * a) if a > 0 else 0
1620 a_p = max(9, unit * a) if a > 0 else 0
1612 d_p = max(9, unit * d) if d > 0 else 0
1621 d_p = max(9, unit * d) if d > 0 else 0
1613 p_sum = a_p + d_p
1622 p_sum = a_p + d_p
1614
1623
1615 if p_sum > width:
1624 if p_sum > width:
1616 #adjust the percentage to be == 100% since we adjusted to 9
1625 #adjust the percentage to be == 100% since we adjusted to 9
1617 if a_p > d_p:
1626 if a_p > d_p:
1618 a_p = a_p - (p_sum - width)
1627 a_p = a_p - (p_sum - width)
1619 else:
1628 else:
1620 d_p = d_p - (p_sum - width)
1629 d_p = d_p - (p_sum - width)
1621
1630
1622 a_v = a if a > 0 else ''
1631 a_v = a if a > 0 else ''
1623 d_v = d if d > 0 else ''
1632 d_v = d if d > 0 else ''
1624
1633
1625 d_a = '<div class="added %s" style="width:%s%%">%s</div>' % (
1634 d_a = '<div class="added %s" style="width:%s%%">%s</div>' % (
1626 cgen('a', a_v, d_v), a_p, a_v
1635 cgen('a', a_v, d_v), a_p, a_v
1627 )
1636 )
1628 d_d = '<div class="deleted %s" style="width:%s%%">%s</div>' % (
1637 d_d = '<div class="deleted %s" style="width:%s%%">%s</div>' % (
1629 cgen('d', a_v, d_v), d_p, d_v
1638 cgen('d', a_v, d_v), d_p, d_v
1630 )
1639 )
1631 return literal('<div style="width:%spx">%s%s</div>' % (width, d_a, d_d))
1640 return literal('<div style="width:%spx">%s%s</div>' % (width, d_a, d_d))
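
A worked example of the width math above (pure arithmetic), showing why the 9% floor and the follow-up trim are both needed:

    width, a, d = 100, 1, 30                 # 1 line added, 30 deleted
    unit = float(width) / (a + d)            # ~3.23 px per changed line
    a_p = max(9, unit * a)                   # 9.0   (bumped up to the 9% floor)
    d_p = max(9, unit * d)                   # ~96.8
    # p_sum is ~105.8 and exceeds the 100px bar; d_p is the larger side,
    # so it is trimmed to ~91.0 and the two segments again fill exactly 100px.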
1632
1641
1633
1642
1634 def urlify_text(text_, safe=True):
1643 def urlify_text(text_, safe=True):
1635 """
1644 """
1636 Extract urls from text and make HTML links out of them
1645 Extract urls from text and make HTML links out of them
1637
1646
1638 :param text_:
1647 :param text_:
1639 """
1648 """
1640
1649
1641 url_pat = re.compile(r'''(http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@#.&+]'''
1650 url_pat = re.compile(r'''(http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@#.&+]'''
1642 '''|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)''')
1651 '''|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)''')
1643
1652
1644 def url_func(match_obj):
1653 def url_func(match_obj):
1645 url_full = match_obj.groups()[0]
1654 url_full = match_obj.groups()[0]
1646 return '<a href="%(url)s">%(url)s</a>' % ({'url': url_full})
1655 return '<a href="%(url)s">%(url)s</a>' % ({'url': url_full})
1647 _newtext = url_pat.sub(url_func, text_)
1656 _newtext = url_pat.sub(url_func, text_)
1648 if safe:
1657 if safe:
1649 return literal(_newtext)
1658 return literal(_newtext)
1650 return _newtext
1659 return _newtext
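
For illustration, the pattern above turns bare http/https URLs into anchors and leaves the rest of the text untouched:

    urlify_text('docs live at https://docs.example.com/install now')
    # -> literal('docs live at <a href="https://docs.example.com/install">'
    #            'https://docs.example.com/install</a> now')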
1651
1660
1652
1661
1653 def urlify_commits(text_, repository):
1662 def urlify_commits(text_, repository):
1654 """
1663 """
1655 Extract commit ids from text and make link from them
1664 Extract commit ids from text and make link from them
1656
1665
1657 :param text_:
1666 :param text_:
1658 :param repository: repo name to build the URL with
1667 :param repository: repo name to build the URL with
1659 """
1668 """
1660 from pylons import url # doh, we need to re-import url to mock it later
1669 from pylons import url # doh, we need to re-import url to mock it later
1661 URL_PAT = re.compile(r'(^|\s)([0-9a-fA-F]{12,40})($|\s)')
1670 URL_PAT = re.compile(r'(^|\s)([0-9a-fA-F]{12,40})($|\s)')
1662
1671
1663 def url_func(match_obj):
1672 def url_func(match_obj):
1664 commit_id = match_obj.groups()[1]
1673 commit_id = match_obj.groups()[1]
1665 pref = match_obj.groups()[0]
1674 pref = match_obj.groups()[0]
1666 suf = match_obj.groups()[2]
1675 suf = match_obj.groups()[2]
1667
1676
1668 tmpl = (
1677 tmpl = (
1669 '%(pref)s<a class="%(cls)s" href="%(url)s">'
1678 '%(pref)s<a class="%(cls)s" href="%(url)s">'
1670 '%(commit_id)s</a>%(suf)s'
1679 '%(commit_id)s</a>%(suf)s'
1671 )
1680 )
1672 return tmpl % {
1681 return tmpl % {
1673 'pref': pref,
1682 'pref': pref,
1674 'cls': 'revision-link',
1683 'cls': 'revision-link',
1675 'url': url('changeset_home', repo_name=repository,
1684 'url': url('changeset_home', repo_name=repository,
1676 revision=commit_id, qualified=True),
1685 revision=commit_id, qualified=True),
1677 'commit_id': commit_id,
1686 'commit_id': commit_id,
1678 'suf': suf
1687 'suf': suf
1679 }
1688 }
1680
1689
1681 newtext = URL_PAT.sub(url_func, text_)
1690 newtext = URL_PAT.sub(url_func, text_)
1682
1691
1683 return newtext
1692 return newtext
1684
1693
1685
1694
1686 def _process_url_func(match_obj, repo_name, uid, entry,
1695 def _process_url_func(match_obj, repo_name, uid, entry,
1687 return_raw_data=False):
1696 return_raw_data=False):
1688 pref = ''
1697 pref = ''
1689 if match_obj.group().startswith(' '):
1698 if match_obj.group().startswith(' '):
1690 pref = ' '
1699 pref = ' '
1691
1700
1692 issue_id = ''.join(match_obj.groups())
1701 issue_id = ''.join(match_obj.groups())
1693 tmpl = (
1702 tmpl = (
1694 '%(pref)s<a class="%(cls)s" href="%(url)s">'
1703 '%(pref)s<a class="%(cls)s" href="%(url)s">'
1695 '%(issue-prefix)s%(id-repr)s'
1704 '%(issue-prefix)s%(id-repr)s'
1696 '</a>')
1705 '</a>')
1697
1706
1698 (repo_name_cleaned,
1707 (repo_name_cleaned,
1699 parent_group_name) = RepoGroupModel().\
1708 parent_group_name) = RepoGroupModel().\
1700 _get_group_name_and_parent(repo_name)
1709 _get_group_name_and_parent(repo_name)
1701
1710
1702 # variables replacement
1711 # variables replacement
1703 named_vars = {
1712 named_vars = {
1704 'id': issue_id,
1713 'id': issue_id,
1705 'repo': repo_name,
1714 'repo': repo_name,
1706 'repo_name': repo_name_cleaned,
1715 'repo_name': repo_name_cleaned,
1707 'group_name': parent_group_name
1716 'group_name': parent_group_name
1708 }
1717 }
1709 # named regex variables
1718 # named regex variables
1710 named_vars.update(match_obj.groupdict())
1719 named_vars.update(match_obj.groupdict())
1711 _url = string.Template(entry['url']).safe_substitute(**named_vars)
1720 _url = string.Template(entry['url']).safe_substitute(**named_vars)
1712
1721
1713 data = {
1722 data = {
1714 'pref': pref,
1723 'pref': pref,
1715 'cls': 'issue-tracker-link',
1724 'cls': 'issue-tracker-link',
1716 'url': _url,
1725 'url': _url,
1717 'id-repr': issue_id,
1726 'id-repr': issue_id,
1718 'issue-prefix': entry['pref'],
1727 'issue-prefix': entry['pref'],
1719 'serv': entry['url'],
1728 'serv': entry['url'],
1720 }
1729 }
1721 if return_raw_data:
1730 if return_raw_data:
1722 return {
1731 return {
1723 'id': issue_id,
1732 'id': issue_id,
1724 'url': _url
1733 'url': _url
1725 }
1734 }
1726 return tmpl % data
1735 return tmpl % data
1727
1736
1728
1737
1729 def process_patterns(text_string, repo_name, config=None):
1738 def process_patterns(text_string, repo_name, config=None):
1730 repo = None
1739 repo = None
1731 if repo_name:
1740 if repo_name:
1732 # Retrieve the repo so that an invalid repo_name does not explode in
1741 # Retrieve the repo so that an invalid repo_name does not explode in
1733 # IssueTrackerSettingsModel, while still passing the (possibly invalid) name further down
1742 # IssueTrackerSettingsModel, while still passing the (possibly invalid) name further down
1734 repo = Repository.get_by_repo_name(repo_name, cache=True)
1743 repo = Repository.get_by_repo_name(repo_name, cache=True)
1735
1744
1736 settings_model = IssueTrackerSettingsModel(repo=repo)
1745 settings_model = IssueTrackerSettingsModel(repo=repo)
1737 active_entries = settings_model.get_settings(cache=True)
1746 active_entries = settings_model.get_settings(cache=True)
1738
1747
1739 issues_data = []
1748 issues_data = []
1740 newtext = text_string
1749 newtext = text_string
1741 for uid, entry in active_entries.items():
1750 for uid, entry in active_entries.items():
1742 log.debug('found issue tracker entry with uid %s' % (uid,))
1751 log.debug('found issue tracker entry with uid %s' % (uid,))
1743
1752
1744 if not (entry['pat'] and entry['url']):
1753 if not (entry['pat'] and entry['url']):
1745 log.debug('skipping due to missing data')
1754 log.debug('skipping due to missing data')
1746 continue
1755 continue
1747
1756
1748 log.debug('issue tracker entry: uid: `%s` PAT:%s URL:%s PREFIX:%s'
1757 log.debug('issue tracker entry: uid: `%s` PAT:%s URL:%s PREFIX:%s'
1749 % (uid, entry['pat'], entry['url'], entry['pref']))
1758 % (uid, entry['pat'], entry['url'], entry['pref']))
1750
1759
1751 try:
1760 try:
1752 pattern = re.compile(r'%s' % entry['pat'])
1761 pattern = re.compile(r'%s' % entry['pat'])
1753 except re.error:
1762 except re.error:
1754 log.exception(
1763 log.exception(
1755 'issue tracker pattern: `%s` failed to compile',
1764 'issue tracker pattern: `%s` failed to compile',
1756 entry['pat'])
1765 entry['pat'])
1757 continue
1766 continue
1758
1767
1759 data_func = partial(
1768 data_func = partial(
1760 _process_url_func, repo_name=repo_name, entry=entry, uid=uid,
1769 _process_url_func, repo_name=repo_name, entry=entry, uid=uid,
1761 return_raw_data=True)
1770 return_raw_data=True)
1762
1771
1763 for match_obj in pattern.finditer(text_string):
1772 for match_obj in pattern.finditer(text_string):
1764 issues_data.append(data_func(match_obj))
1773 issues_data.append(data_func(match_obj))
1765
1774
1766 url_func = partial(
1775 url_func = partial(
1767 _process_url_func, repo_name=repo_name, entry=entry, uid=uid)
1776 _process_url_func, repo_name=repo_name, entry=entry, uid=uid)
1768
1777
1769 newtext = pattern.sub(url_func, newtext)
1778 newtext = pattern.sub(url_func, newtext)
1770 log.debug('processed prefix:uid `%s`' % (uid,))
1779 log.debug('processed prefix:uid `%s`' % (uid,))
1771
1780
1772 return newtext, issues_data
1781 return newtext, issues_data
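
The entry dicts consumed above come from IssueTrackerSettingsModel; a standalone sketch of one such entry and of the ${...} substitution follows. The pattern and URL template are made up, only the 'pat'/'url'/'pref' keys and the Template-based replacement mirror the code above:

    import re
    import string

    entry = {
        'pat': r'#(?P<issue_id>\d+)',                                   # assumed pattern
        'url': 'https://issues.example.com/${repo}/issue/${issue_id}',  # assumed template
        'pref': '#',
    }
    match = re.compile(entry['pat']).search('fixes #42')
    named_vars = {'repo': 'rhodecode/vcs'}       # normally repo / group names etc.
    named_vars.update(match.groupdict())         # named regex groups win, as above
    string.Template(entry['url']).safe_substitute(**named_vars)
    # -> 'https://issues.example.com/rhodecode/vcs/issue/42'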
1773
1782
1774
1783
1775 def urlify_commit_message(commit_text, repository=None):
1784 def urlify_commit_message(commit_text, repository=None):
1776 """
1785 """
1777 Parses given text message and makes proper links.
1786 Parses given text message and makes proper links.
1778 issues are linked to the configured issue tracker, and commit ids become commit links
1787 issues are linked to the configured issue tracker, and commit ids become commit links
1779
1788
1780 :param commit_text:
1789 :param commit_text:
1781 :param repository:
1790 :param repository:
1782 """
1791 """
1783 from pylons import url # doh, we need to re-import url to mock it later
1792 from pylons import url # doh, we need to re-import url to mock it later
1784
1793
1785 def escaper(string):
1794 def escaper(string):
1786 return string.replace('<', '&lt;').replace('>', '&gt;')
1795 return string.replace('<', '&lt;').replace('>', '&gt;')
1787
1796
1788 newtext = escaper(commit_text)
1797 newtext = escaper(commit_text)
1789
1798
1790 # extract http/https links and make them real urls
1799 # extract http/https links and make them real urls
1791 newtext = urlify_text(newtext, safe=False)
1800 newtext = urlify_text(newtext, safe=False)
1792
1801
1793 # urlify commits - extract commit ids and make link out of them, if we have
1802 # urlify commits - extract commit ids and make link out of them, if we have
1794 # the scope of repository present.
1803 # the scope of repository present.
1795 if repository:
1804 if repository:
1796 newtext = urlify_commits(newtext, repository)
1805 newtext = urlify_commits(newtext, repository)
1797
1806
1798 # process issue tracker patterns
1807 # process issue tracker patterns
1799 newtext, issues = process_patterns(newtext, repository or '')
1808 newtext, issues = process_patterns(newtext, repository or '')
1800
1809
1801 return literal(newtext)
1810 return literal(newtext)
1802
1811
1803
1812
1804 def rst(source, mentions=False):
1813 def rst(source, mentions=False):
1805 return literal('<div class="rst-block">%s</div>' %
1814 return literal('<div class="rst-block">%s</div>' %
1806 MarkupRenderer.rst(source, mentions=mentions))
1815 MarkupRenderer.rst(source, mentions=mentions))
1807
1816
1808
1817
1809 def markdown(source, mentions=False):
1818 def markdown(source, mentions=False):
1810 return literal('<div class="markdown-block">%s</div>' %
1819 return literal('<div class="markdown-block">%s</div>' %
1811 MarkupRenderer.markdown(source, flavored=True,
1820 MarkupRenderer.markdown(source, flavored=True,
1812 mentions=mentions))
1821 mentions=mentions))
1813
1822
1814 def renderer_from_filename(filename, exclude=None):
1823 def renderer_from_filename(filename, exclude=None):
1815 return MarkupRenderer.renderer_from_filename(filename, exclude=exclude)
1824 return MarkupRenderer.renderer_from_filename(filename, exclude=exclude)
1816
1825
1817
1826
1818 def render(source, renderer='rst', mentions=False):
1827 def render(source, renderer='rst', mentions=False):
1819 if renderer == 'rst':
1828 if renderer == 'rst':
1820 return rst(source, mentions=mentions)
1829 return rst(source, mentions=mentions)
1821 if renderer == 'markdown':
1830 if renderer == 'markdown':
1822 return markdown(source, mentions=mentions)
1831 return markdown(source, mentions=mentions)
1823
1832
1824
1833
1825 def commit_status(repo, commit_id):
1834 def commit_status(repo, commit_id):
1826 return ChangesetStatusModel().get_status(repo, commit_id)
1835 return ChangesetStatusModel().get_status(repo, commit_id)
1827
1836
1828
1837
1829 def commit_status_lbl(commit_status):
1838 def commit_status_lbl(commit_status):
1830 return dict(ChangesetStatus.STATUSES).get(commit_status)
1839 return dict(ChangesetStatus.STATUSES).get(commit_status)
1831
1840
1832
1841
1833 def commit_time(repo_name, commit_id):
1842 def commit_time(repo_name, commit_id):
1834 repo = Repository.get_by_repo_name(repo_name)
1843 repo = Repository.get_by_repo_name(repo_name)
1835 commit = repo.get_commit(commit_id=commit_id)
1844 commit = repo.get_commit(commit_id=commit_id)
1836 return commit.date
1845 return commit.date
1837
1846
1838
1847
1839 def get_permission_name(key):
1848 def get_permission_name(key):
1840 return dict(Permission.PERMS).get(key)
1849 return dict(Permission.PERMS).get(key)
1841
1850
1842
1851
1843 def journal_filter_help():
1852 def journal_filter_help():
1844 return _(
1853 return _(
1845 'Example filter terms:\n' +
1854 'Example filter terms:\n' +
1846 ' repository:vcs\n' +
1855 ' repository:vcs\n' +
1847 ' username:marcin\n' +
1856 ' username:marcin\n' +
1848 ' action:*push*\n' +
1857 ' action:*push*\n' +
1849 ' ip:127.0.0.1\n' +
1858 ' ip:127.0.0.1\n' +
1850 ' date:20120101\n' +
1859 ' date:20120101\n' +
1851 ' date:[20120101100000 TO 20120102]\n' +
1860 ' date:[20120101100000 TO 20120102]\n' +
1852 '\n' +
1861 '\n' +
1853 'Generate wildcards using \'*\' character:\n' +
1862 'Generate wildcards using \'*\' character:\n' +
1854 ' "repository:vcs*" - search everything starting with \'vcs\'\n' +
1863 ' "repository:vcs*" - search everything starting with \'vcs\'\n' +
1855 ' "repository:*vcs*" - search for repository containing \'vcs\'\n' +
1864 ' "repository:*vcs*" - search for repository containing \'vcs\'\n' +
1856 '\n' +
1865 '\n' +
1857 'Optional AND / OR operators in queries\n' +
1866 'Optional AND / OR operators in queries\n' +
1858 ' "repository:vcs OR repository:test"\n' +
1867 ' "repository:vcs OR repository:test"\n' +
1859 ' "username:test AND repository:test*"\n'
1868 ' "username:test AND repository:test*"\n'
1860 )
1869 )
1861
1870
1862
1871
1863 def not_mapped_error(repo_name):
1872 def not_mapped_error(repo_name):
1864 flash(_('%s repository is not mapped to db perhaps'
1873 flash(_('%s repository is not mapped to db perhaps'
1865 ' it was created or renamed from the filesystem'
1874 ' it was created or renamed from the filesystem'
1866 ' please run the application again'
1875 ' please run the application again'
1867 ' in order to rescan repositories') % repo_name, category='error')
1876 ' in order to rescan repositories') % repo_name, category='error')
1868
1877
1869
1878
1870 def ip_range(ip_addr):
1879 def ip_range(ip_addr):
1871 from rhodecode.model.db import UserIpMap
1880 from rhodecode.model.db import UserIpMap
1872 s, e = UserIpMap._get_ip_range(ip_addr)
1881 s, e = UserIpMap._get_ip_range(ip_addr)
1873 return '%s - %s' % (s, e)
1882 return '%s - %s' % (s, e)
1874
1883
1875
1884
1876 def form(url, method='post', needs_csrf_token=True, **attrs):
1885 def form(url, method='post', needs_csrf_token=True, **attrs):
1877 """Wrapper around webhelpers.tags.form to prevent CSRF attacks."""
1886 """Wrapper around webhelpers.tags.form to prevent CSRF attacks."""
1878 if method.lower() != 'get' and needs_csrf_token:
1887 if method.lower() != 'get' and needs_csrf_token:
1879 raise Exception(
1888 raise Exception(
1880 'Forms to POST/PUT/DELETE endpoints should have (in general) a ' +
1889 'Forms to POST/PUT/DELETE endpoints should have (in general) a ' +
1881 'CSRF token. If the endpoint does not require such token you can ' +
1890 'CSRF token. If the endpoint does not require such token you can ' +
1882 'explicitly set the parameter needs_csrf_token to false.')
1891 'explicitly set the parameter needs_csrf_token to false.')
1883
1892
1884 return wh_form(url, method=method, **attrs)
1893 return wh_form(url, method=method, **attrs)
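
In practice the guard above means plain form() is only for GET forms or explicit opt-outs; a short sketch (the paths are placeholders):

    form('/repo/settings', method='get')                          # fine, GET needs no token
    form('/repo/delete', method='post')                           # raises Exception
    form('/repo/delete', method='post', needs_csrf_token=False)   # explicit opt-out
    secure_form('/repo/delete', method='POST')                    # form + hidden CSRF input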
1885
1894
1886
1895
1887 def secure_form(url, method="POST", multipart=False, **attrs):
1896 def secure_form(url, method="POST", multipart=False, **attrs):
1888 """Start a form tag that points the action to an url. This
1897 """Start a form tag that points the action to an url. This
1889 form tag will also include the hidden field containing
1898 form tag will also include the hidden field containing
1890 the auth token.
1899 the auth token.
1891
1900
1892 The url options should be given either as a string, or as a
1901 The url options should be given either as a string, or as a
1893 ``url()`` function. The method for the form defaults to POST.
1902 ``url()`` function. The method for the form defaults to POST.
1894
1903
1895 Options:
1904 Options:
1896
1905
1897 ``multipart``
1906 ``multipart``
1898 If set to True, the enctype is set to "multipart/form-data".
1907 If set to True, the enctype is set to "multipart/form-data".
1899 ``method``
1908 ``method``
1900 The method to use when submitting the form, usually either
1909 The method to use when submitting the form, usually either
1901 "GET" or "POST". If "PUT", "DELETE", or another verb is used, a
1910 "GET" or "POST". If "PUT", "DELETE", or another verb is used, a
1902 hidden input with name _method is added to simulate the verb
1911 hidden input with name _method is added to simulate the verb
1903 over POST.
1912 over POST.
1904
1913
1905 """
1914 """
1906 from webhelpers.pylonslib.secure_form import insecure_form
1915 from webhelpers.pylonslib.secure_form import insecure_form
1907 form = insecure_form(url, method, multipart, **attrs)
1916 form = insecure_form(url, method, multipart, **attrs)
1908 token = csrf_input()
1917 token = csrf_input()
1909 return literal("%s\n%s" % (form, token))
1918 return literal("%s\n%s" % (form, token))
1910
1919
1911 def csrf_input():
1920 def csrf_input():
1912 return literal(
1921 return literal(
1913 '<input type="hidden" id="{}" name="{}" value="{}">'.format(
1922 '<input type="hidden" id="{}" name="{}" value="{}">'.format(
1914 csrf_token_key, csrf_token_key, get_csrf_token()))
1923 csrf_token_key, csrf_token_key, get_csrf_token()))
1915
1924
1916 def dropdownmenu(name, selected, options, enable_filter=False, **attrs):
1925 def dropdownmenu(name, selected, options, enable_filter=False, **attrs):
1917 select_html = select(name, selected, options, **attrs)
1926 select_html = select(name, selected, options, **attrs)
1918 select2 = """
1927 select2 = """
1919 <script>
1928 <script>
1920 $(document).ready(function() {
1929 $(document).ready(function() {
1921 $('#%s').select2({
1930 $('#%s').select2({
1922 containerCssClass: 'drop-menu',
1931 containerCssClass: 'drop-menu',
1923 dropdownCssClass: 'drop-menu-dropdown',
1932 dropdownCssClass: 'drop-menu-dropdown',
1924 dropdownAutoWidth: true%s
1933 dropdownAutoWidth: true%s
1925 });
1934 });
1926 });
1935 });
1927 </script>
1936 </script>
1928 """
1937 """
1929 filter_option = """,
1938 filter_option = """,
1930 minimumResultsForSearch: -1
1939 minimumResultsForSearch: -1
1931 """
1940 """
1932 input_id = attrs.get('id') or name
1941 input_id = attrs.get('id') or name
1933 filter_enabled = "" if enable_filter else filter_option
1942 filter_enabled = "" if enable_filter else filter_option
1934 select_script = literal(select2 % (input_id, filter_enabled))
1943 select_script = literal(select2 % (input_id, filter_enabled))
1935
1944
1936 return literal(select_html+select_script)
1945 return literal(select_html+select_script)
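
A usage sketch with made-up option values; a diff-mode selector is the kind of control this helper could back:

    dropdownmenu('diffmode', 'sideside',
                 [('sideside', 'Side by Side'), ('unified', 'Unified Diff')])
    # renders webhelpers' <select id="diffmode" ...> plus the select2 init script above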
1937
1946
1938
1947
1939 def get_visual_attr(tmpl_context_var, attr_name):
1948 def get_visual_attr(tmpl_context_var, attr_name):
1940 """
1949 """
1941 A safe way to get a variable from visual variable of template context
1950 A safe way to get a variable from visual variable of template context
1942
1951
1943 :param tmpl_context_var: instance of tmpl_context, usually present as `c`
1952 :param tmpl_context_var: instance of tmpl_context, usually present as `c`
1944 :param attr_name: name of the attribute we fetch from the c.visual
1953 :param attr_name: name of the attribute we fetch from the c.visual
1945 """
1954 """
1946 visual = getattr(tmpl_context_var, 'visual', None)
1955 visual = getattr(tmpl_context_var, 'visual', None)
1947 if not visual:
1956 if not visual:
1948 return
1957 return
1949 else:
1958 else:
1950 return getattr(visual, attr_name, None)
1959 return getattr(visual, attr_name, None)
1951
1960
1952
1961
1953 def get_last_path_part(file_node):
1962 def get_last_path_part(file_node):
1954 if not file_node.path:
1963 if not file_node.path:
1955 return u''
1964 return u''
1956
1965
1957 path = safe_unicode(file_node.path.split('/')[-1])
1966 path = safe_unicode(file_node.path.split('/')[-1])
1958 return u'../' + path
1967 return u'../' + path
1959
1968
1960
1969
1961 def route_path(*args, **kwds):
1970 def route_path(*args, **kwds):
1962 """
1971 """
1963 Wrapper around pyramids `route_path` function. It is used to generate
1972 Wrapper around pyramids `route_path` function. It is used to generate
1964 URLs from within pylons views or templates. This will be removed when
1973 URLs from within pylons views or templates. This will be removed when
1965 the pyramid migration is finished.
1974 the pyramid migration is finished.
1966 """
1975 """
1967 req = get_current_request()
1976 req = get_current_request()
1968 return req.route_path(*args, **kwds)
1977 return req.route_path(*args, **kwds)
1969
1978
1970
1979
1971 def route_path_or_none(*args, **kwargs):
1980 def route_path_or_none(*args, **kwargs):
1972 try:
1981 try:
1973 return route_path(*args, **kwargs)
1982 return route_path(*args, **kwargs)
1974 except KeyError:
1983 except KeyError:
1975 return None
1984 return None
1976
1985
1977
1986
1978 def static_url(*args, **kwds):
1987 def static_url(*args, **kwds):
1979 """
1988 """
1980 Wrapper around pyramids `static_url` function. It is used to generate
1989 Wrapper around pyramids `static_url` function. It is used to generate
1981 URLs from within pylons views or templates. This will be removed when
1990 URLs from within pylons views or templates. This will be removed when
1982 the pyramid migration is finished.
1991 the pyramid migration is finished.
1983 """
1992 """
1984 req = get_current_request()
1993 req = get_current_request()
1985 return req.static_url(*args, **kwds)
1994 return req.static_url(*args, **kwds)
1986
1995
1987
1996
1988 def resource_path(*args, **kwds):
1997 def resource_path(*args, **kwds):
1989 """
1998 """
1990 Wrapper around pyramids `resource_path` function. It is used to generate
1999 Wrapper around pyramids `resource_path` function. It is used to generate
1991 URLs from within pylons views or templates. This will be removed when
2000 URLs from within pylons views or templates. This will be removed when
1992 the pyramid migration is finished.
2001 the pyramid migration is finished.
1993 """
2002 """
1994 req = get_current_request()
2003 req = get_current_request()
1995 return req.resource_path(*args, **kwds)
2004 return req.resource_path(*args, **kwds)
@@ -1,412 +1,404 b''
1 <%def name="diff_line_anchor(filename, line, type)"><%
1 <%def name="diff_line_anchor(filename, line, type)"><%
2 return '%s_%s_%i' % (h.safeid(filename), type, line)
2 return '%s_%s_%i' % (h.safeid(filename), type, line)
3 %></%def>
3 %></%def>
4
4
5 <%def name="action_class(action)"><%
5 <%def name="action_class(action)"><%
6 return {
6 return {
7 '-': 'cb-deletion',
7 '-': 'cb-deletion',
8 '+': 'cb-addition',
8 '+': 'cb-addition',
9 ' ': 'cb-context',
9 ' ': 'cb-context',
10 }.get(action, 'cb-empty')
10 }.get(action, 'cb-empty')
11 %></%def>
11 %></%def>
12
12
13 <%def name="op_class(op_id)"><%
13 <%def name="op_class(op_id)"><%
14 return {
14 return {
15 DEL_FILENODE: 'deletion', # file deleted
15 DEL_FILENODE: 'deletion', # file deleted
16 BIN_FILENODE: 'warning' # binary diff hidden
16 BIN_FILENODE: 'warning' # binary diff hidden
17 }.get(op_id, 'addition')
17 }.get(op_id, 'addition')
18 %></%def>
18 %></%def>
19
19
20 <%def name="link_for(**kw)"><%
20 <%def name="link_for(**kw)"><%
21 new_args = request.GET.mixed()
21 new_args = request.GET.mixed()
22 new_args.update(kw)
22 new_args.update(kw)
23 return h.url('', **new_args)
23 return h.url('', **new_args)
24 %></%def>
24 %></%def>
25
25
26 <%def name="render_diffset(diffset, commit_id=None,
26 <%def name="render_diffset(diffset, commit_id=None,
27
27
28 # collapse all file diff entries when there are more than this number of files in the diff
28 # collapse all file diff entries when there are more than this number of files in the diff
29 collapse_when_files_over=20,
29 collapse_when_files_over=20,
30
30
31 # collapse lines in the diff when more than this number of lines have changed in the file diff
31 # collapse lines in the diff when more than this number of lines have changed in the file diff
32 lines_changed_limit=500,
32 lines_changed_limit=500,
33
33
34 # add a ruler to the output
34 # add a ruler to the output
35 ruler_at_chars=0,
35 ruler_at_chars=0,
36 )">
36 )">
37 <%
37 <%
38 # TODO: dan: move this to an argument - and set a cookie so that it is saved
39 # default option for future requests
40 diff_mode = request.GET.get('diffmode', 'sideside')
41 if diff_mode not in ('sideside', 'unified'):
42 diff_mode = 'sideside'
43
44 collapse_all = len(diffset.files) > collapse_when_files_over
38 collapse_all = len(diffset.files) > collapse_when_files_over
45 %>
39 %>
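With the local diffmode lookup removed above, the template now expects c.diffmode to be set before rendering. The commit message says the mode moves to the template global context and is also stored in the session; a hedged sketch of what the controller side could look like, where the helper name and session key are assumptions rather than code taken from this diff:

    # Hypothetical controller-side helper; not part of this template diff.
    def _resolve_diff_mode(request, session, default='sideside'):
        supported = ('sideside', 'unified')
        mode = request.GET.get('diffmode')
        if mode in supported:
            session['diffmode'] = mode   # remember the explicit choice
        else:
            # fall back to the last choice stored in the session, then the default
            mode = session.get('diffmode', default)
        return mode

    # c.diffmode = _resolve_diff_mode(request, session)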
46
40
47 %if diff_mode == 'sideside':
41 %if c.diffmode == 'sideside':
48 <style>
42 <style>
49 .wrapper {
43 .wrapper {
50 max-width: 1600px !important;
44 max-width: 1600px !important;
51 }
45 }
52 </style>
46 </style>
53 %endif
47 %endif
54 %if ruler_at_chars:
48 %if ruler_at_chars:
55 <style>
49 <style>
56 .diff table.cb .cb-content:after {
50 .diff table.cb .cb-content:after {
57 content: "";
51 content: "";
58 border-left: 1px solid blue;
52 border-left: 1px solid blue;
59 position: absolute;
53 position: absolute;
60 top: 0;
54 top: 0;
61 height: 18px;
55 height: 18px;
62 opacity: .2;
56 opacity: .2;
63 z-index: 10;
57 z-index: 10;
64 ## +5 to account for diff action (+/-)
58 ## +5 to account for diff action (+/-)
65 left: ${ruler_at_chars + 5}ch; }
59 left: ${ruler_at_chars + 5}ch; }
66 </style>
60 </style>
67 %endif
61 %endif
68 % if diffset.limited_diff:
62 % if diffset.limited_diff:
69 <div class="alert alert-warning">
63 <div class="alert alert-warning">
70 ${_('The requested commit is too big and content was truncated.')} <a href="${link_for(fulldiff=1)}" onclick="return confirm('${_("Showing a big diff might take some time and resources, continue?")}')">${_('Show full diff')}</a>
64 ${_('The requested commit is too big and content was truncated.')} <a href="${link_for(fulldiff=1)}" onclick="return confirm('${_("Showing a big diff might take some time and resources, continue?")}')">${_('Show full diff')}</a>
71 </div>
65 </div>
72 % endif
66 % endif
73
67
74 <div class="diffset">
68 <div class="diffset">
75 <div class="diffset-heading">
69 <div class="diffset-heading">
76 %if diffset.files:
77 <div class="pull-right">
70 <div class="pull-right">
78 <div class="btn-group">
71 <div class="btn-group">
79 <a
72 <a
80 class="btn ${diff_mode == 'sideside' and 'btn-primary'} tooltip"
73 class="btn ${c.diffmode == 'sideside' and 'btn-primary'} tooltip"
81 title="${_('View side by side')}"
74 title="${_('View side by side')}"
82 href="${link_for(diffmode='sideside')}">
75 href="${h.url_replace(diffmode='sideside')}">
83 <span>${_('Side by Side')}</span>
76 <span>${_('Side by Side')}</span>
84 </a>
77 </a>
85 <a
78 <a
86 class="btn ${diff_mode == 'unified' and 'btn-primary'} tooltip"
79 class="btn ${c.diffmode == 'unified' and 'btn-primary'} tooltip"
87 title="${_('View unified')}" href="${link_for(diffmode='unified')}">
80 title="${_('View unified')}" href="${h.url_replace(diffmode='unified')}">
88 <span>${_('Unified')}</span>
81 <span>${_('Unified')}</span>
89 </a>
82 </a>
90 </div>
83 </div>
91 </div>
84 </div>
92 <div class="pull-left">
85 <div class="pull-left">
93 <div class="btn-group">
86 <div class="btn-group">
94 <a
87 <a
95 class="btn"
88 class="btn"
96 href="#"
89 href="#"
97 onclick="$('input[class=filediff-collapse-state]').prop('checked', false); return false">${_('Expand All')}</a>
90 onclick="$('input[class=filediff-collapse-state]').prop('checked', false); return false">${_('Expand All')}</a>
98 <a
91 <a
99 class="btn"
92 class="btn"
100 href="#"
93 href="#"
101 onclick="$('input[class=filediff-collapse-state]').prop('checked', true); return false">${_('Collapse All')}</a>
94 onclick="$('input[class=filediff-collapse-state]').prop('checked', true); return false">${_('Collapse All')}</a>
102 </div>
95 </div>
103 </div>
96 </div>
104 %endif
105 <h2 style="padding: 5px; text-align: center;">
97 <h2 style="padding: 5px; text-align: center;">
106 %if diffset.limited_diff:
98 %if diffset.limited_diff:
107 ${ungettext('%(num)s file changed', '%(num)s files changed', diffset.changed_files) % {'num': diffset.changed_files}}
99 ${ungettext('%(num)s file changed', '%(num)s files changed', diffset.changed_files) % {'num': diffset.changed_files}}
108 %else:
100 %else:
109 ${ungettext('%(num)s file changed: %(linesadd)s inserted, ''%(linesdel)s deleted',
101 ${ungettext('%(num)s file changed: %(linesadd)s inserted, ''%(linesdel)s deleted',
110 '%(num)s files changed: %(linesadd)s inserted, %(linesdel)s deleted', diffset.changed_files) % {'num': diffset.changed_files, 'linesadd': diffset.lines_added, 'linesdel': diffset.lines_deleted}}
102 '%(num)s files changed: %(linesadd)s inserted, %(linesdel)s deleted', diffset.changed_files) % {'num': diffset.changed_files, 'linesadd': diffset.lines_added, 'linesdel': diffset.lines_deleted}}
111 %endif
103 %endif
112 </h2>
104 </h2>
113 </div>
105 </div>
114
106
115 %if not diffset.files:
107 %if not diffset.files:
116 <p class="empty_data">${_('No files')}</p>
108 <p class="empty_data">${_('No files')}</p>
117 %endif
109 %endif
118
110
119 <div class="filediffs">
111 <div class="filediffs">
120 %for i, filediff in enumerate(diffset.files):
112 %for i, filediff in enumerate(diffset.files):
121 <%
113 <%
122 lines_changed = filediff['patch']['stats']['added'] + filediff['patch']['stats']['deleted']
114 lines_changed = filediff['patch']['stats']['added'] + filediff['patch']['stats']['deleted']
123 over_lines_changed_limit = lines_changed > lines_changed_limit
115 over_lines_changed_limit = lines_changed > lines_changed_limit
124 %>
116 %>
125 <input ${collapse_all and 'checked' or ''} class="filediff-collapse-state" id="filediff-collapse-${id(filediff)}" type="checkbox">
117 <input ${collapse_all and 'checked' or ''} class="filediff-collapse-state" id="filediff-collapse-${id(filediff)}" type="checkbox">
126 <div
118 <div
127 class="filediff"
119 class="filediff"
128 data-f-path="${filediff['patch']['filename']}"
120 data-f-path="${filediff['patch']['filename']}"
129 id="a_${h.FID(commit_id or '', filediff['patch']['filename'])}">
121 id="a_${h.FID(commit_id or '', filediff['patch']['filename'])}">
130 <label for="filediff-collapse-${id(filediff)}" class="filediff-heading">
122 <label for="filediff-collapse-${id(filediff)}" class="filediff-heading">
131 <div class="filediff-collapse-indicator"></div>
123 <div class="filediff-collapse-indicator"></div>
132 ${diff_ops(filediff)}
124 ${diff_ops(filediff)}
133 </label>
125 </label>
134 ${diff_menu(filediff)}
126 ${diff_menu(filediff)}
135 <table class="cb cb-diff-${diff_mode} code-highlight ${over_lines_changed_limit and 'cb-collapsed' or ''}">
127 <table class="cb cb-diff-${c.diffmode} code-highlight ${over_lines_changed_limit and 'cb-collapsed' or ''}">
136 %if not filediff.hunks:
128 %if not filediff.hunks:
137 %for op_id, op_text in filediff['patch']['stats']['ops'].items():
129 %for op_id, op_text in filediff['patch']['stats']['ops'].items():
138 <tr>
130 <tr>
139 <td class="cb-text cb-${op_class(op_id)}" ${diff_mode == 'unified' and 'colspan=3' or 'colspan=4'}>
131 <td class="cb-text cb-${op_class(op_id)}" ${c.diffmode == 'unified' and 'colspan=3' or 'colspan=4'}>
140 %if op_id == DEL_FILENODE:
132 %if op_id == DEL_FILENODE:
141 ${_('File was deleted')}
133 ${_('File was deleted')}
142 %elif op_id == BIN_FILENODE:
134 %elif op_id == BIN_FILENODE:
143 ${_('Binary file hidden')}
135 ${_('Binary file hidden')}
144 %else:
136 %else:
145 ${op_text}
137 ${op_text}
146 %endif
138 %endif
147 </td>
139 </td>
148 </tr>
140 </tr>
149 %endfor
141 %endfor
150 %endif
142 %endif
151 %if over_lines_changed_limit:
143 %if over_lines_changed_limit:
152 <tr class="cb-warning cb-collapser">
144 <tr class="cb-warning cb-collapser">
153 <td class="cb-text" ${diff_mode == 'unified' and 'colspan=3' or 'colspan=4'}>
145 <td class="cb-text" ${c.diffmode == 'unified' and 'colspan=3' or 'colspan=4'}>
154 ${_('This diff has been collapsed as it changes many lines, (%i lines changed)' % lines_changed)}
146 ${_('This diff has been collapsed as it changes many lines, (%i lines changed)' % lines_changed)}
155 <a href="#" class="cb-expand"
147 <a href="#" class="cb-expand"
156 onclick="$(this).closest('table').removeClass('cb-collapsed'); return false;">${_('Show them')}
148 onclick="$(this).closest('table').removeClass('cb-collapsed'); return false;">${_('Show them')}
157 </a>
149 </a>
158 <a href="#" class="cb-collapse"
150 <a href="#" class="cb-collapse"
159 onclick="$(this).closest('table').addClass('cb-collapsed'); return false;">${_('Hide them')}
151 onclick="$(this).closest('table').addClass('cb-collapsed'); return false;">${_('Hide them')}
160 </a>
152 </a>
161 </td>
153 </td>
162 </tr>
154 </tr>
163 %endif
155 %endif
164 %if filediff.patch['is_limited_diff']:
156 %if filediff.patch['is_limited_diff']:
165 <tr class="cb-warning cb-collapser">
157 <tr class="cb-warning cb-collapser">
166 <td class="cb-text" ${diff_mode == 'unified' and 'colspan=3' or 'colspan=4'}>
158 <td class="cb-text" ${c.diffmode == 'unified' and 'colspan=3' or 'colspan=4'}>
167 ${_('The requested commit is too big and content was truncated.')} <a href="${link_for(fulldiff=1)}" onclick="return confirm('${_("Showing a big diff might take some time and resources, continue?")}')">${_('Show full diff')}</a>
159 ${_('The requested commit is too big and content was truncated.')} <a href="${link_for(fulldiff=1)}" onclick="return confirm('${_("Showing a big diff might take some time and resources, continue?")}')">${_('Show full diff')}</a>
168 </td>
160 </td>
169 </tr>
161 </tr>
170 %endif
162 %endif
171 %for hunk in filediff.hunks:
163 %for hunk in filediff.hunks:
172 <tr class="cb-hunk">
164 <tr class="cb-hunk">
173 <td ${diff_mode == 'unified' and 'colspan=2' or ''}>
165 <td ${c.diffmode == 'unified' and 'colspan=2' or ''}>
174 ## TODO: dan: add ajax loading of more context here
166 ## TODO: dan: add ajax loading of more context here
175 ## <a href="#">
167 ## <a href="#">
176 <i class="icon-more"></i>
168 <i class="icon-more"></i>
177 ## </a>
169 ## </a>
178 </td>
170 </td>
179 <td ${diff_mode == 'sideside' and 'colspan=3' or ''}>
171 <td ${c.diffmode == 'sideside' and 'colspan=3' or ''}>
180 @@
172 @@
181 -${hunk.source_start},${hunk.source_length}
173 -${hunk.source_start},${hunk.source_length}
182 +${hunk.target_start},${hunk.target_length}
174 +${hunk.target_start},${hunk.target_length}
183 ${hunk.section_header}
175 ${hunk.section_header}
184 </td>
176 </td>
185 </tr>
177 </tr>
186 %if diff_mode == 'unified':
178 %if c.diffmode == 'unified':
187 ${render_hunk_lines_unified(hunk)}
179 ${render_hunk_lines_unified(hunk)}
188 %elif diff_mode == 'sideside':
180 %elif c.diffmode == 'sideside':
189 ${render_hunk_lines_sideside(hunk)}
181 ${render_hunk_lines_sideside(hunk)}
190 %else:
182 %else:
191 <tr class="cb-line">
183 <tr class="cb-line">
192 <td>unknown diff mode</td>
184 <td>unknown diff mode</td>
193 </tr>
185 </tr>
194 %endif
186 %endif
195 %endfor
187 %endfor
196 </table>
188 </table>
197 </div>
189 </div>
198 %endfor
190 %endfor
199 </div>
191 </div>
200 </div>
192 </div>
201 </%def>
193 </%def>
202
194
203 <%def name="diff_ops(filediff)">
195 <%def name="diff_ops(filediff)">
204 <%
196 <%
205 stats = filediff['patch']['stats']
197 stats = filediff['patch']['stats']
206 from rhodecode.lib.diffs import NEW_FILENODE, DEL_FILENODE, \
198 from rhodecode.lib.diffs import NEW_FILENODE, DEL_FILENODE, \
207 MOD_FILENODE, RENAMED_FILENODE, CHMOD_FILENODE, BIN_FILENODE
199 MOD_FILENODE, RENAMED_FILENODE, CHMOD_FILENODE, BIN_FILENODE
208 %>
200 %>
209 <span class="pill">
201 <span class="pill">
210 %if filediff.source_file_path and filediff.target_file_path:
202 %if filediff.source_file_path and filediff.target_file_path:
211 %if filediff.source_file_path != filediff.target_file_path: # file was renamed
203 %if filediff.source_file_path != filediff.target_file_path: # file was renamed
212 <strong>${filediff.target_file_path}</strong> ⬅ <del>${filediff.source_file_path}</del>
204 <strong>${filediff.target_file_path}</strong> ⬅ <del>${filediff.source_file_path}</del>
213 %else:
205 %else:
214 ## file was modified
206 ## file was modified
215 <strong>${filediff.source_file_path}</strong>
207 <strong>${filediff.source_file_path}</strong>
216 %endif
208 %endif
217 %else:
209 %else:
218 %if filediff.source_file_path:
210 %if filediff.source_file_path:
219 ## file was deleted
211 ## file was deleted
220 <strong>${filediff.source_file_path}</strong>
212 <strong>${filediff.source_file_path}</strong>
221 %else:
213 %else:
222 ## file was added
214 ## file was added
223 <strong>${filediff.target_file_path}</strong>
215 <strong>${filediff.target_file_path}</strong>
224 %endif
216 %endif
225 %endif
217 %endif
226 </span>
218 </span>
227 <span class="pill-group" style="float: left">
219 <span class="pill-group" style="float: left">
228 %if filediff.patch['is_limited_diff']:
220 %if filediff.patch['is_limited_diff']:
229 <span class="pill tooltip" op="limited" title="The stats for this diff are not complete">limited diff</span>
221 <span class="pill tooltip" op="limited" title="The stats for this diff are not complete">limited diff</span>
230 %endif
222 %endif
231 %if RENAMED_FILENODE in stats['ops']:
223 %if RENAMED_FILENODE in stats['ops']:
232 <span class="pill" op="renamed">renamed</span>
224 <span class="pill" op="renamed">renamed</span>
233 %endif
225 %endif
234
226
235 %if NEW_FILENODE in stats['ops']:
227 %if NEW_FILENODE in stats['ops']:
236 <span class="pill" op="created">created</span>
228 <span class="pill" op="created">created</span>
237 %if filediff['target_mode'].startswith('120'):
229 %if filediff['target_mode'].startswith('120'):
238 <span class="pill" op="symlink">symlink</span>
230 <span class="pill" op="symlink">symlink</span>
239 %else:
231 %else:
240 <span class="pill" op="mode">${nice_mode(filediff['target_mode'])}</span>
232 <span class="pill" op="mode">${nice_mode(filediff['target_mode'])}</span>
241 %endif
233 %endif
242 %endif
234 %endif
243
235
244 %if DEL_FILENODE in stats['ops']:
236 %if DEL_FILENODE in stats['ops']:
245 <span class="pill" op="removed">removed</span>
237 <span class="pill" op="removed">removed</span>
246 %endif
238 %endif
247
239
248 %if CHMOD_FILENODE in stats['ops']:
240 %if CHMOD_FILENODE in stats['ops']:
249 <span class="pill" op="mode">
241 <span class="pill" op="mode">
250 ${nice_mode(filediff['source_mode'])} ➡ ${nice_mode(filediff['target_mode'])}
242 ${nice_mode(filediff['source_mode'])} ➡ ${nice_mode(filediff['target_mode'])}
251 </span>
243 </span>
252 %endif
244 %endif
253 </span>
245 </span>
254
246
255 <a class="pill filediff-anchor" href="#a_${h.FID(commit_id or '', filediff.patch['filename'])}">ΒΆ</a>
247 <a class="pill filediff-anchor" href="#a_${h.FID(commit_id or '', filediff.patch['filename'])}">ΒΆ</a>
256
248
257 <span class="pill-group" style="float: right">
249 <span class="pill-group" style="float: right">
258 %if BIN_FILENODE in stats['ops']:
250 %if BIN_FILENODE in stats['ops']:
259 <span class="pill" op="binary">binary</span>
251 <span class="pill" op="binary">binary</span>
260 %if MOD_FILENODE in stats['ops']:
252 %if MOD_FILENODE in stats['ops']:
261 <span class="pill" op="modified">modified</span>
253 <span class="pill" op="modified">modified</span>
262 %endif
254 %endif
263 %endif
255 %endif
264 %if stats['added']:
256 %if stats['added']:
265 <span class="pill" op="added">+${stats['added']}</span>
257 <span class="pill" op="added">+${stats['added']}</span>
266 %endif
258 %endif
267 %if stats['deleted']:
259 %if stats['deleted']:
268 <span class="pill" op="deleted">-${stats['deleted']}</span>
260 <span class="pill" op="deleted">-${stats['deleted']}</span>
269 %endif
261 %endif
270 </span>
262 </span>
271
263
272 </%def>
264 </%def>
273
265
274 <%def name="nice_mode(filemode)">
266 <%def name="nice_mode(filemode)">
275 ${filemode.startswith('100') and filemode[3:] or filemode}
267 ${filemode.startswith('100') and filemode[3:] or filemode}
276 </%def>
268 </%def>
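nice_mode only strips the '100' regular-file prefix from git-style modes, so ordinary files show just their permission bits while special modes stay intact; the same transformation in plain Python:

    def nice_mode(filemode):
        # '100644' -> '644' (regular file); anything else is shown in full.
        return filemode[3:] if filemode.startswith('100') else filemode

    nice_mode('100644')  # -> '644'
    nice_mode('120000')  # -> '120000' (symlink mode, see the '120' check above)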
277
269
278 <%def name="diff_menu(filediff)">
270 <%def name="diff_menu(filediff)">
279 <div class="filediff-menu">
271 <div class="filediff-menu">
280 %if filediff.diffset.source_ref:
272 %if filediff.diffset.source_ref:
281 %if filediff.patch['operation'] in ['D', 'M']:
273 %if filediff.patch['operation'] in ['D', 'M']:
282 <a
274 <a
283 class="tooltip"
275 class="tooltip"
284 href="${h.url('files_home',repo_name=c.repo_name,f_path=filediff.source_file_path,revision=filediff.diffset.source_ref)}"
276 href="${h.url('files_home',repo_name=c.repo_name,f_path=filediff.source_file_path,revision=filediff.diffset.source_ref)}"
285 title="${h.tooltip(_('Show file at commit: %(commit_id)s') % {'commit_id': filediff.diffset.source_ref[:12]})}"
277 title="${h.tooltip(_('Show file at commit: %(commit_id)s') % {'commit_id': filediff.diffset.source_ref[:12]})}"
286 >
278 >
287 ${_('Show file before')}
279 ${_('Show file before')}
288 </a>
280 </a>
289 %else:
281 %else:
290 <span
282 <span
291 class="tooltip"
283 class="tooltip"
292 title="${h.tooltip(_('File no longer present at commit: %(commit_id)s') % {'commit_id': filediff.diffset.source_ref[:12]})}"
284 title="${h.tooltip(_('File no longer present at commit: %(commit_id)s') % {'commit_id': filediff.diffset.source_ref[:12]})}"
293 >
285 >
294 ${_('Show file before')}
286 ${_('Show file before')}
295 </span>
287 </span>
296 %endif
288 %endif
297 %if filediff.patch['operation'] in ['A', 'M']:
289 %if filediff.patch['operation'] in ['A', 'M']:
298 <a
290 <a
299 class="tooltip"
291 class="tooltip"
300 href="${h.url('files_home',repo_name=c.repo_name,f_path=filediff.target_file_path,revision=filediff.diffset.target_ref)}"
292 href="${h.url('files_home',repo_name=c.repo_name,f_path=filediff.target_file_path,revision=filediff.diffset.target_ref)}"
301 title="${h.tooltip(_('Show file at commit: %(commit_id)s') % {'commit_id': filediff.diffset.target_ref[:12]})}"
293 title="${h.tooltip(_('Show file at commit: %(commit_id)s') % {'commit_id': filediff.diffset.target_ref[:12]})}"
302 >
294 >
303 ${_('Show file after')}
295 ${_('Show file after')}
304 </a>
296 </a>
305 %else:
297 %else:
306 <span
298 <span
307 class="tooltip"
299 class="tooltip"
308 title="${h.tooltip(_('File no longer present at commit: %(commit_id)s') % {'commit_id': filediff.diffset.target_ref[:12]})}"
300 title="${h.tooltip(_('File no longer present at commit: %(commit_id)s') % {'commit_id': filediff.diffset.target_ref[:12]})}"
309 >
301 >
310 ${_('Show file after')}
302 ${_('Show file after')}
311 </span>
303 </span>
312 %endif
304 %endif
313 <a
305 <a
314 class="tooltip"
306 class="tooltip"
315 title="${h.tooltip(_('Raw diff'))}"
307 title="${h.tooltip(_('Raw diff'))}"
316 href="${h.url('files_diff_home',repo_name=c.repo_name,f_path=filediff.target_file_path,diff2=filediff.diffset.target_ref,diff1=filediff.diffset.source_ref,diff='raw')}"
308 href="${h.url('files_diff_home',repo_name=c.repo_name,f_path=filediff.target_file_path,diff2=filediff.diffset.target_ref,diff1=filediff.diffset.source_ref,diff='raw')}"
317 >
309 >
318 ${_('Raw diff')}
310 ${_('Raw diff')}
319 </a>
311 </a>
320 <a
312 <a
321 class="tooltip"
313 class="tooltip"
322 title="${h.tooltip(_('Download diff'))}"
314 title="${h.tooltip(_('Download diff'))}"
323 href="${h.url('files_diff_home',repo_name=c.repo_name,f_path=filediff.target_file_path,diff2=filediff.diffset.target_ref,diff1=filediff.diffset.source_ref,diff='download')}"
315 href="${h.url('files_diff_home',repo_name=c.repo_name,f_path=filediff.target_file_path,diff2=filediff.diffset.target_ref,diff1=filediff.diffset.source_ref,diff='download')}"
324 >
316 >
325 ${_('Download diff')}
317 ${_('Download diff')}
326 </a>
318 </a>
327 %endif
319 %endif
328 </div>
320 </div>
329 </%def>
321 </%def>
330
322
331
323
332 <%def name="render_hunk_lines_sideside(hunk)">
324 <%def name="render_hunk_lines_sideside(hunk)">
333 %for i, line in enumerate(hunk.sideside):
325 %for i, line in enumerate(hunk.sideside):
334 <%
326 <%
335 old_line_anchor, new_line_anchor = None, None
327 old_line_anchor, new_line_anchor = None, None
336 if line.original.lineno:
328 if line.original.lineno:
337 old_line_anchor = diff_line_anchor(hunk.filediff.source_file_path, line.original.lineno, 'o')
329 old_line_anchor = diff_line_anchor(hunk.filediff.source_file_path, line.original.lineno, 'o')
338 if line.modified.lineno:
330 if line.modified.lineno:
339 new_line_anchor = diff_line_anchor(hunk.filediff.target_file_path, line.modified.lineno, 'n')
331 new_line_anchor = diff_line_anchor(hunk.filediff.target_file_path, line.modified.lineno, 'n')
340 %>
332 %>
341 <tr class="cb-line">
333 <tr class="cb-line">
342 <td class="cb-lineno ${action_class(line.original.action)}"
334 <td class="cb-lineno ${action_class(line.original.action)}"
343 data-line-number="${line.original.lineno}"
335 data-line-number="${line.original.lineno}"
344 %if old_line_anchor:
336 %if old_line_anchor:
345 id="${old_line_anchor}"
337 id="${old_line_anchor}"
346 %endif
338 %endif
347 >
339 >
348 %if line.original.lineno:
340 %if line.original.lineno:
349 <a name="${old_line_anchor}" href="#${old_line_anchor}">${line.original.lineno}</a>
341 <a name="${old_line_anchor}" href="#${old_line_anchor}">${line.original.lineno}</a>
350 %endif
342 %endif
351 </td>
343 </td>
352 <td class="cb-content ${action_class(line.original.action)}"
344 <td class="cb-content ${action_class(line.original.action)}"
353 data-line-number="o${line.original.lineno}"
345 data-line-number="o${line.original.lineno}"
354 ><span class="cb-code">${line.original.action} ${line.original.content or '' | n}</span>
346 ><span class="cb-code">${line.original.action} ${line.original.content or '' | n}</span>
355 </td>
347 </td>
356 <td class="cb-lineno ${action_class(line.modified.action)}"
348 <td class="cb-lineno ${action_class(line.modified.action)}"
357 data-line-number="${line.modified.lineno}"
349 data-line-number="${line.modified.lineno}"
358 %if new_line_anchor:
350 %if new_line_anchor:
359 id="${new_line_anchor}"
351 id="${new_line_anchor}"
360 %endif
352 %endif
361 >
353 >
362 %if line.modified.lineno:
354 %if line.modified.lineno:
363 <a name="${new_line_anchor}" href="#${new_line_anchor}">${line.modified.lineno}</a>
355 <a name="${new_line_anchor}" href="#${new_line_anchor}">${line.modified.lineno}</a>
364 %endif
356 %endif
365 </td>
357 </td>
366 <td class="cb-content ${action_class(line.modified.action)}"
358 <td class="cb-content ${action_class(line.modified.action)}"
367 data-line-number="n${line.modified.lineno}"
359 data-line-number="n${line.modified.lineno}"
368 >
360 >
369 <span class="cb-code">${line.modified.action} ${line.modified.content or '' | n}</span>
361 <span class="cb-code">${line.modified.action} ${line.modified.content or '' | n}</span>
370 </td>
362 </td>
371 </tr>
363 </tr>
372 %endfor
364 %endfor
373 </%def>
365 </%def>
374
366
375
367
376 <%def name="render_hunk_lines_unified(hunk)">
368 <%def name="render_hunk_lines_unified(hunk)">
377 %for old_line_no, new_line_no, action, content in hunk.unified:
369 %for old_line_no, new_line_no, action, content in hunk.unified:
378 <%
370 <%
379 old_line_anchor, new_line_anchor = None, None
371 old_line_anchor, new_line_anchor = None, None
380 if old_line_no:
372 if old_line_no:
381 old_line_anchor = diff_line_anchor(hunk.filediff.source_file_path, old_line_no, 'o')
373 old_line_anchor = diff_line_anchor(hunk.filediff.source_file_path, old_line_no, 'o')
382 if new_line_no:
374 if new_line_no:
383 new_line_anchor = diff_line_anchor(hunk.filediff.target_file_path, new_line_no, 'n')
375 new_line_anchor = diff_line_anchor(hunk.filediff.target_file_path, new_line_no, 'n')
384 %>
376 %>
385 <tr class="cb-line">
377 <tr class="cb-line">
386 <td class="cb-lineno ${action_class(action)}"
378 <td class="cb-lineno ${action_class(action)}"
387 data-line-number="${old_line_no}"
379 data-line-number="${old_line_no}"
388 %if old_line_anchor:
380 %if old_line_anchor:
389 id="${old_line_anchor}"
381 id="${old_line_anchor}"
390 %endif
382 %endif
391 >
383 >
392 %if old_line_anchor:
384 %if old_line_anchor:
393 <a name="${old_line_anchor}" href="#${old_line_anchor}">${old_line_no}</a>
385 <a name="${old_line_anchor}" href="#${old_line_anchor}">${old_line_no}</a>
394 %endif
386 %endif
395 </td>
387 </td>
396 <td class="cb-lineno ${action_class(action)}"
388 <td class="cb-lineno ${action_class(action)}"
397 data-line-number="${new_line_no}"
389 data-line-number="${new_line_no}"
398 %if new_line_anchor:
390 %if new_line_anchor:
399 id="${new_line_anchor}"
391 id="${new_line_anchor}"
400 %endif
392 %endif
401 >
393 >
402 %if new_line_anchor:
394 %if new_line_anchor:
403 <a name="${new_line_anchor}" href="#${new_line_anchor}">${new_line_no}</a>
395 <a name="${new_line_anchor}" href="#${new_line_anchor}">${new_line_no}</a>
404 %endif
396 %endif
405 </td>
397 </td>
406 <td class="cb-content ${action_class(action)}"
398 <td class="cb-content ${action_class(action)}"
407 data-line-number="${new_line_no and 'n' or 'o'}${new_line_no or old_line_no}"
399 data-line-number="${new_line_no and 'n' or 'o'}${new_line_no or old_line_no}"
408 ><span class="cb-code">${action} ${content or '' | n}</span>
400 ><span class="cb-code">${action} ${content or '' | n}</span>
409 </td>
401 </td>
410 </tr>
402 </tr>
411 %endfor
403 %endfor
412 </%def>
404 </%def>