renderer: remove usage of old non request PartialRenderer
marcink
r1947:4566477c default
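
Context for the change: instead of constructing the old standalone PartialRenderer, the view now asks the active Pyramid request for a partial renderer, so template globals are bound to that request. A minimal sketch of the pattern, assuming the old constructor-style usage shown in the comments; the function name example_user_actions_renderer is made up for illustration, and only the request-bound call matches users_list_data() in the diff below.

def example_user_actions_renderer(request):
    # Old (assumed) style resolved the request/context through thread-locals:
    #   _render = PartialRenderer('data_table/_dt_elements.mako')
    # New style, as used in users_list_data() below: the renderer is
    # obtained from the request itself.
    _render = request.get_partial_renderer('data_table/_dt_elements.mako')

    def user_actions(user_id, username):
        # renders the "user_actions" def from _dt_elements.mako
        return _render("user_actions", user_id, username)

    return user_actions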
@@ -1,505 +1,505 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2017 RhodeCode GmbH
3 # Copyright (C) 2016-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import logging
21 import logging
22 import datetime
22 import datetime
23 import formencode
23 import formencode
24
24
25 from pyramid.httpexceptions import HTTPFound
25 from pyramid.httpexceptions import HTTPFound
26 from pyramid.view import view_config
26 from pyramid.view import view_config
27 from sqlalchemy.sql.functions import coalesce
27 from sqlalchemy.sql.functions import coalesce
28
28
29 from rhodecode.apps._base import BaseAppView, DataGridAppView
29 from rhodecode.apps._base import BaseAppView, DataGridAppView
30
30
31 from rhodecode.lib import audit_logger
31 from rhodecode.lib import audit_logger
32 from rhodecode.lib.ext_json import json
32 from rhodecode.lib.ext_json import json
33 from rhodecode.lib.auth import (
33 from rhodecode.lib.auth import (
34 LoginRequired, HasPermissionAllDecorator, CSRFRequired)
34 LoginRequired, HasPermissionAllDecorator, CSRFRequired)
35 from rhodecode.lib import helpers as h
35 from rhodecode.lib import helpers as h
36 from rhodecode.lib.utils2 import safe_int, safe_unicode
36 from rhodecode.lib.utils2 import safe_int, safe_unicode
37 from rhodecode.model.auth_token import AuthTokenModel
37 from rhodecode.model.auth_token import AuthTokenModel
38 from rhodecode.model.user import UserModel
38 from rhodecode.model.user import UserModel
39 from rhodecode.model.user_group import UserGroupModel
39 from rhodecode.model.user_group import UserGroupModel
40 from rhodecode.model.db import User, or_, UserIpMap, UserEmailMap, UserApiKeys
40 from rhodecode.model.db import User, or_, UserIpMap, UserEmailMap, UserApiKeys
41 from rhodecode.model.meta import Session
41 from rhodecode.model.meta import Session
42
42
43 log = logging.getLogger(__name__)
43 log = logging.getLogger(__name__)
44
44
45
45
46 class AdminUsersView(BaseAppView, DataGridAppView):
46 class AdminUsersView(BaseAppView, DataGridAppView):
47 ALLOW_SCOPED_TOKENS = False
47 ALLOW_SCOPED_TOKENS = False
48 """
48 """
49 This view has alternative version inside EE, if modified please take a look
49 This view has alternative version inside EE, if modified please take a look
50 in there as well.
50 in there as well.
51 """
51 """
52
52
53 def load_default_context(self):
53 def load_default_context(self):
54 c = self._get_local_tmpl_context()
54 c = self._get_local_tmpl_context()
55 c.allow_scoped_tokens = self.ALLOW_SCOPED_TOKENS
55 c.allow_scoped_tokens = self.ALLOW_SCOPED_TOKENS
56 self._register_global_c(c)
56 self._register_global_c(c)
57 return c
57 return c
58
58
59 def _redirect_for_default_user(self, username):
59 def _redirect_for_default_user(self, username):
60 _ = self.request.translate
60 _ = self.request.translate
61 if username == User.DEFAULT_USER:
61 if username == User.DEFAULT_USER:
62 h.flash(_("You can't edit this user"), category='warning')
62 h.flash(_("You can't edit this user"), category='warning')
63 # TODO(marcink): redirect to 'users' admin panel once this
63 # TODO(marcink): redirect to 'users' admin panel once this
64 # is a pyramid view
64 # is a pyramid view
65 raise HTTPFound('/')
65 raise HTTPFound('/')
66
66
67 @HasPermissionAllDecorator('hg.admin')
67 @HasPermissionAllDecorator('hg.admin')
68 @view_config(
68 @view_config(
69 route_name='users', request_method='GET',
69 route_name='users', request_method='GET',
70 renderer='rhodecode:templates/admin/users/users.mako')
70 renderer='rhodecode:templates/admin/users/users.mako')
71 def users_list(self):
71 def users_list(self):
72 c = self.load_default_context()
72 c = self.load_default_context()
73 return self._get_template_context(c)
73 return self._get_template_context(c)
74
74
75 @HasPermissionAllDecorator('hg.admin')
75 @HasPermissionAllDecorator('hg.admin')
76 @view_config(
76 @view_config(
77 # renderer defined below
77 # renderer defined below
78 route_name='users_data', request_method='GET',
78 route_name='users_data', request_method='GET',
79 renderer='json_ext', xhr=True)
79 renderer='json_ext', xhr=True)
80 def users_list_data(self):
80 def users_list_data(self):
81 draw, start, limit = self._extract_chunk(self.request)
81 draw, start, limit = self._extract_chunk(self.request)
82 search_q, order_by, order_dir = self._extract_ordering(self.request)
82 search_q, order_by, order_dir = self._extract_ordering(self.request)
83
83
84 _render = self.request.get_partial_renderer(
84 _render = self.request.get_partial_renderer(
85 'data_table/_dt_elements.mako')
85 'data_table/_dt_elements.mako')
86
86
87 def user_actions(user_id, username):
87 def user_actions(user_id, username):
88 return _render("user_actions", user_id, username)
88 return _render("user_actions", user_id, username)
89
89
90 users_data_total_count = User.query()\
90 users_data_total_count = User.query()\
91 .filter(User.username != User.DEFAULT_USER) \
91 .filter(User.username != User.DEFAULT_USER) \
92 .count()
92 .count()
93
93
94 # json generate
94 # json generate
95 base_q = User.query().filter(User.username != User.DEFAULT_USER)
95 base_q = User.query().filter(User.username != User.DEFAULT_USER)
96
96
97 if search_q:
97 if search_q:
98 like_expression = u'%{}%'.format(safe_unicode(search_q))
98 like_expression = u'%{}%'.format(safe_unicode(search_q))
99 base_q = base_q.filter(or_(
99 base_q = base_q.filter(or_(
100 User.username.ilike(like_expression),
100 User.username.ilike(like_expression),
101 User._email.ilike(like_expression),
101 User._email.ilike(like_expression),
102 User.name.ilike(like_expression),
102 User.name.ilike(like_expression),
103 User.lastname.ilike(like_expression),
103 User.lastname.ilike(like_expression),
104 ))
104 ))
105
105
106 users_data_total_filtered_count = base_q.count()
106 users_data_total_filtered_count = base_q.count()
107
107
108 sort_col = getattr(User, order_by, None)
108 sort_col = getattr(User, order_by, None)
109 if sort_col:
109 if sort_col:
110 if order_dir == 'asc':
110 if order_dir == 'asc':
111 # handle null values properly to order by NULL last
111 # handle null values properly to order by NULL last
112 if order_by in ['last_activity']:
112 if order_by in ['last_activity']:
113 sort_col = coalesce(sort_col, datetime.date.max)
113 sort_col = coalesce(sort_col, datetime.date.max)
114 sort_col = sort_col.asc()
114 sort_col = sort_col.asc()
115 else:
115 else:
116 # handle null values properly to order by NULL last
116 # handle null values properly to order by NULL last
117 if order_by in ['last_activity']:
117 if order_by in ['last_activity']:
118 sort_col = coalesce(sort_col, datetime.date.min)
118 sort_col = coalesce(sort_col, datetime.date.min)
119 sort_col = sort_col.desc()
119 sort_col = sort_col.desc()
120
120
121 base_q = base_q.order_by(sort_col)
121 base_q = base_q.order_by(sort_col)
122 base_q = base_q.offset(start).limit(limit)
122 base_q = base_q.offset(start).limit(limit)
123
123
124 users_list = base_q.all()
124 users_list = base_q.all()
125
125
126 users_data = []
126 users_data = []
127 for user in users_list:
127 for user in users_list:
128 users_data.append({
128 users_data.append({
129 "username": h.gravatar_with_user(user.username),
129 "username": h.gravatar_with_user(self.request, user.username),
130 "email": user.email,
130 "email": user.email,
131 "first_name": user.first_name,
131 "first_name": user.first_name,
132 "last_name": user.last_name,
132 "last_name": user.last_name,
133 "last_login": h.format_date(user.last_login),
133 "last_login": h.format_date(user.last_login),
134 "last_activity": h.format_date(user.last_activity),
134 "last_activity": h.format_date(user.last_activity),
135 "active": h.bool2icon(user.active),
135 "active": h.bool2icon(user.active),
136 "active_raw": user.active,
136 "active_raw": user.active,
137 "admin": h.bool2icon(user.admin),
137 "admin": h.bool2icon(user.admin),
138 "extern_type": user.extern_type,
138 "extern_type": user.extern_type,
139 "extern_name": user.extern_name,
139 "extern_name": user.extern_name,
140 "action": user_actions(user.user_id, user.username),
140 "action": user_actions(user.user_id, user.username),
141 })
141 })
142
142
143 data = ({
143 data = ({
144 'draw': draw,
144 'draw': draw,
145 'data': users_data,
145 'data': users_data,
146 'recordsTotal': users_data_total_count,
146 'recordsTotal': users_data_total_count,
147 'recordsFiltered': users_data_total_filtered_count,
147 'recordsFiltered': users_data_total_filtered_count,
148 })
148 })
149
149
150 return data
150 return data
151
151
152 @LoginRequired()
152 @LoginRequired()
153 @HasPermissionAllDecorator('hg.admin')
153 @HasPermissionAllDecorator('hg.admin')
154 @view_config(
154 @view_config(
155 route_name='edit_user_auth_tokens', request_method='GET',
155 route_name='edit_user_auth_tokens', request_method='GET',
156 renderer='rhodecode:templates/admin/users/user_edit.mako')
156 renderer='rhodecode:templates/admin/users/user_edit.mako')
157 def auth_tokens(self):
157 def auth_tokens(self):
158 _ = self.request.translate
158 _ = self.request.translate
159 c = self.load_default_context()
159 c = self.load_default_context()
160
160
161 user_id = self.request.matchdict.get('user_id')
161 user_id = self.request.matchdict.get('user_id')
162 c.user = User.get_or_404(user_id, pyramid_exc=True)
162 c.user = User.get_or_404(user_id, pyramid_exc=True)
163 self._redirect_for_default_user(c.user.username)
163 self._redirect_for_default_user(c.user.username)
164
164
165 c.active = 'auth_tokens'
165 c.active = 'auth_tokens'
166
166
167 c.lifetime_values = [
167 c.lifetime_values = [
168 (str(-1), _('forever')),
168 (str(-1), _('forever')),
169 (str(5), _('5 minutes')),
169 (str(5), _('5 minutes')),
170 (str(60), _('1 hour')),
170 (str(60), _('1 hour')),
171 (str(60 * 24), _('1 day')),
171 (str(60 * 24), _('1 day')),
172 (str(60 * 24 * 30), _('1 month')),
172 (str(60 * 24 * 30), _('1 month')),
173 ]
173 ]
174 c.lifetime_options = [(c.lifetime_values, _("Lifetime"))]
174 c.lifetime_options = [(c.lifetime_values, _("Lifetime"))]
175 c.role_values = [
175 c.role_values = [
176 (x, AuthTokenModel.cls._get_role_name(x))
176 (x, AuthTokenModel.cls._get_role_name(x))
177 for x in AuthTokenModel.cls.ROLES]
177 for x in AuthTokenModel.cls.ROLES]
178 c.role_options = [(c.role_values, _("Role"))]
178 c.role_options = [(c.role_values, _("Role"))]
179 c.user_auth_tokens = AuthTokenModel().get_auth_tokens(
179 c.user_auth_tokens = AuthTokenModel().get_auth_tokens(
180 c.user.user_id, show_expired=True)
180 c.user.user_id, show_expired=True)
181 return self._get_template_context(c)
181 return self._get_template_context(c)
182
182
183 def maybe_attach_token_scope(self, token):
183 def maybe_attach_token_scope(self, token):
184 # implemented in EE edition
184 # implemented in EE edition
185 pass
185 pass
186
186
187 @LoginRequired()
187 @LoginRequired()
188 @HasPermissionAllDecorator('hg.admin')
188 @HasPermissionAllDecorator('hg.admin')
189 @CSRFRequired()
189 @CSRFRequired()
190 @view_config(
190 @view_config(
191 route_name='edit_user_auth_tokens_add', request_method='POST')
191 route_name='edit_user_auth_tokens_add', request_method='POST')
192 def auth_tokens_add(self):
192 def auth_tokens_add(self):
193 _ = self.request.translate
193 _ = self.request.translate
194 c = self.load_default_context()
194 c = self.load_default_context()
195
195
196 user_id = self.request.matchdict.get('user_id')
196 user_id = self.request.matchdict.get('user_id')
197 c.user = User.get_or_404(user_id, pyramid_exc=True)
197 c.user = User.get_or_404(user_id, pyramid_exc=True)
198
198
199 self._redirect_for_default_user(c.user.username)
199 self._redirect_for_default_user(c.user.username)
200
200
201 user_data = c.user.get_api_data()
201 user_data = c.user.get_api_data()
202 lifetime = safe_int(self.request.POST.get('lifetime'), -1)
202 lifetime = safe_int(self.request.POST.get('lifetime'), -1)
203 description = self.request.POST.get('description')
203 description = self.request.POST.get('description')
204 role = self.request.POST.get('role')
204 role = self.request.POST.get('role')
205
205
206 token = AuthTokenModel().create(
206 token = AuthTokenModel().create(
207 c.user.user_id, description, lifetime, role)
207 c.user.user_id, description, lifetime, role)
208 token_data = token.get_api_data()
208 token_data = token.get_api_data()
209
209
210 self.maybe_attach_token_scope(token)
210 self.maybe_attach_token_scope(token)
211 audit_logger.store_web(
211 audit_logger.store_web(
212 'user.edit.token.add', action_data={
212 'user.edit.token.add', action_data={
213 'data': {'token': token_data, 'user': user_data}},
213 'data': {'token': token_data, 'user': user_data}},
214 user=self._rhodecode_user, )
214 user=self._rhodecode_user, )
215 Session().commit()
215 Session().commit()
216
216
217 h.flash(_("Auth token successfully created"), category='success')
217 h.flash(_("Auth token successfully created"), category='success')
218 return HTTPFound(h.route_path('edit_user_auth_tokens', user_id=user_id))
218 return HTTPFound(h.route_path('edit_user_auth_tokens', user_id=user_id))
219
219
220 @LoginRequired()
220 @LoginRequired()
221 @HasPermissionAllDecorator('hg.admin')
221 @HasPermissionAllDecorator('hg.admin')
222 @CSRFRequired()
222 @CSRFRequired()
223 @view_config(
223 @view_config(
224 route_name='edit_user_auth_tokens_delete', request_method='POST')
224 route_name='edit_user_auth_tokens_delete', request_method='POST')
225 def auth_tokens_delete(self):
225 def auth_tokens_delete(self):
226 _ = self.request.translate
226 _ = self.request.translate
227 c = self.load_default_context()
227 c = self.load_default_context()
228
228
229 user_id = self.request.matchdict.get('user_id')
229 user_id = self.request.matchdict.get('user_id')
230 c.user = User.get_or_404(user_id, pyramid_exc=True)
230 c.user = User.get_or_404(user_id, pyramid_exc=True)
231 self._redirect_for_default_user(c.user.username)
231 self._redirect_for_default_user(c.user.username)
232 user_data = c.user.get_api_data()
232 user_data = c.user.get_api_data()
233
233
234 del_auth_token = self.request.POST.get('del_auth_token')
234 del_auth_token = self.request.POST.get('del_auth_token')
235
235
236 if del_auth_token:
236 if del_auth_token:
237 token = UserApiKeys.get_or_404(del_auth_token, pyramid_exc=True)
237 token = UserApiKeys.get_or_404(del_auth_token, pyramid_exc=True)
238 token_data = token.get_api_data()
238 token_data = token.get_api_data()
239
239
240 AuthTokenModel().delete(del_auth_token, c.user.user_id)
240 AuthTokenModel().delete(del_auth_token, c.user.user_id)
241 audit_logger.store_web(
241 audit_logger.store_web(
242 'user.edit.token.delete', action_data={
242 'user.edit.token.delete', action_data={
243 'data': {'token': token_data, 'user': user_data}},
243 'data': {'token': token_data, 'user': user_data}},
244 user=self._rhodecode_user,)
244 user=self._rhodecode_user,)
245 Session().commit()
245 Session().commit()
246 h.flash(_("Auth token successfully deleted"), category='success')
246 h.flash(_("Auth token successfully deleted"), category='success')
247
247
248 return HTTPFound(h.route_path('edit_user_auth_tokens', user_id=user_id))
248 return HTTPFound(h.route_path('edit_user_auth_tokens', user_id=user_id))
249
249
250 @LoginRequired()
250 @LoginRequired()
251 @HasPermissionAllDecorator('hg.admin')
251 @HasPermissionAllDecorator('hg.admin')
252 @view_config(
252 @view_config(
253 route_name='edit_user_emails', request_method='GET',
253 route_name='edit_user_emails', request_method='GET',
254 renderer='rhodecode:templates/admin/users/user_edit.mako')
254 renderer='rhodecode:templates/admin/users/user_edit.mako')
255 def emails(self):
255 def emails(self):
256 _ = self.request.translate
256 _ = self.request.translate
257 c = self.load_default_context()
257 c = self.load_default_context()
258
258
259 user_id = self.request.matchdict.get('user_id')
259 user_id = self.request.matchdict.get('user_id')
260 c.user = User.get_or_404(user_id, pyramid_exc=True)
260 c.user = User.get_or_404(user_id, pyramid_exc=True)
261 self._redirect_for_default_user(c.user.username)
261 self._redirect_for_default_user(c.user.username)
262
262
263 c.active = 'emails'
263 c.active = 'emails'
264 c.user_email_map = UserEmailMap.query() \
264 c.user_email_map = UserEmailMap.query() \
265 .filter(UserEmailMap.user == c.user).all()
265 .filter(UserEmailMap.user == c.user).all()
266
266
267 return self._get_template_context(c)
267 return self._get_template_context(c)
268
268
269 @LoginRequired()
269 @LoginRequired()
270 @HasPermissionAllDecorator('hg.admin')
270 @HasPermissionAllDecorator('hg.admin')
271 @CSRFRequired()
271 @CSRFRequired()
272 @view_config(
272 @view_config(
273 route_name='edit_user_emails_add', request_method='POST')
273 route_name='edit_user_emails_add', request_method='POST')
274 def emails_add(self):
274 def emails_add(self):
275 _ = self.request.translate
275 _ = self.request.translate
276 c = self.load_default_context()
276 c = self.load_default_context()
277
277
278 user_id = self.request.matchdict.get('user_id')
278 user_id = self.request.matchdict.get('user_id')
279 c.user = User.get_or_404(user_id, pyramid_exc=True)
279 c.user = User.get_or_404(user_id, pyramid_exc=True)
280 self._redirect_for_default_user(c.user.username)
280 self._redirect_for_default_user(c.user.username)
281
281
282 email = self.request.POST.get('new_email')
282 email = self.request.POST.get('new_email')
283 user_data = c.user.get_api_data()
283 user_data = c.user.get_api_data()
284 try:
284 try:
285 UserModel().add_extra_email(c.user.user_id, email)
285 UserModel().add_extra_email(c.user.user_id, email)
286 audit_logger.store_web(
286 audit_logger.store_web(
287 'user.edit.email.add', action_data={'email': email, 'user': user_data},
287 'user.edit.email.add', action_data={'email': email, 'user': user_data},
288 user=self._rhodecode_user)
288 user=self._rhodecode_user)
289 Session().commit()
289 Session().commit()
290 h.flash(_("Added new email address `%s` for user account") % email,
290 h.flash(_("Added new email address `%s` for user account") % email,
291 category='success')
291 category='success')
292 except formencode.Invalid as error:
292 except formencode.Invalid as error:
293 h.flash(h.escape(error.error_dict['email']), category='error')
293 h.flash(h.escape(error.error_dict['email']), category='error')
294 except Exception:
294 except Exception:
295 log.exception("Exception during email saving")
295 log.exception("Exception during email saving")
296 h.flash(_('An error occurred during email saving'),
296 h.flash(_('An error occurred during email saving'),
297 category='error')
297 category='error')
298 raise HTTPFound(h.route_path('edit_user_emails', user_id=user_id))
298 raise HTTPFound(h.route_path('edit_user_emails', user_id=user_id))
299
299
300 @LoginRequired()
300 @LoginRequired()
301 @HasPermissionAllDecorator('hg.admin')
301 @HasPermissionAllDecorator('hg.admin')
302 @CSRFRequired()
302 @CSRFRequired()
303 @view_config(
303 @view_config(
304 route_name='edit_user_emails_delete', request_method='POST')
304 route_name='edit_user_emails_delete', request_method='POST')
305 def emails_delete(self):
305 def emails_delete(self):
306 _ = self.request.translate
306 _ = self.request.translate
307 c = self.load_default_context()
307 c = self.load_default_context()
308
308
309 user_id = self.request.matchdict.get('user_id')
309 user_id = self.request.matchdict.get('user_id')
310 c.user = User.get_or_404(user_id, pyramid_exc=True)
310 c.user = User.get_or_404(user_id, pyramid_exc=True)
311 self._redirect_for_default_user(c.user.username)
311 self._redirect_for_default_user(c.user.username)
312
312
313 email_id = self.request.POST.get('del_email_id')
313 email_id = self.request.POST.get('del_email_id')
314 user_model = UserModel()
314 user_model = UserModel()
315
315
316 email = UserEmailMap.query().get(email_id).email
316 email = UserEmailMap.query().get(email_id).email
317 user_data = c.user.get_api_data()
317 user_data = c.user.get_api_data()
318 user_model.delete_extra_email(c.user.user_id, email_id)
318 user_model.delete_extra_email(c.user.user_id, email_id)
319 audit_logger.store_web(
319 audit_logger.store_web(
320 'user.edit.email.delete', action_data={'email': email, 'user': user_data},
320 'user.edit.email.delete', action_data={'email': email, 'user': user_data},
321 user=self._rhodecode_user)
321 user=self._rhodecode_user)
322 Session().commit()
322 Session().commit()
323 h.flash(_("Removed email address from user account"),
323 h.flash(_("Removed email address from user account"),
324 category='success')
324 category='success')
325 raise HTTPFound(h.route_path('edit_user_emails', user_id=user_id))
325 raise HTTPFound(h.route_path('edit_user_emails', user_id=user_id))
326
326
327 @LoginRequired()
327 @LoginRequired()
328 @HasPermissionAllDecorator('hg.admin')
328 @HasPermissionAllDecorator('hg.admin')
329 @view_config(
329 @view_config(
330 route_name='edit_user_ips', request_method='GET',
330 route_name='edit_user_ips', request_method='GET',
331 renderer='rhodecode:templates/admin/users/user_edit.mako')
331 renderer='rhodecode:templates/admin/users/user_edit.mako')
332 def ips(self):
332 def ips(self):
333 _ = self.request.translate
333 _ = self.request.translate
334 c = self.load_default_context()
334 c = self.load_default_context()
335
335
336 user_id = self.request.matchdict.get('user_id')
336 user_id = self.request.matchdict.get('user_id')
337 c.user = User.get_or_404(user_id, pyramid_exc=True)
337 c.user = User.get_or_404(user_id, pyramid_exc=True)
338 self._redirect_for_default_user(c.user.username)
338 self._redirect_for_default_user(c.user.username)
339
339
340 c.active = 'ips'
340 c.active = 'ips'
341 c.user_ip_map = UserIpMap.query() \
341 c.user_ip_map = UserIpMap.query() \
342 .filter(UserIpMap.user == c.user).all()
342 .filter(UserIpMap.user == c.user).all()
343
343
344 c.inherit_default_ips = c.user.inherit_default_permissions
344 c.inherit_default_ips = c.user.inherit_default_permissions
345 c.default_user_ip_map = UserIpMap.query() \
345 c.default_user_ip_map = UserIpMap.query() \
346 .filter(UserIpMap.user == User.get_default_user()).all()
346 .filter(UserIpMap.user == User.get_default_user()).all()
347
347
348 return self._get_template_context(c)
348 return self._get_template_context(c)
349
349
350 @LoginRequired()
350 @LoginRequired()
351 @HasPermissionAllDecorator('hg.admin')
351 @HasPermissionAllDecorator('hg.admin')
352 @CSRFRequired()
352 @CSRFRequired()
353 @view_config(
353 @view_config(
354 route_name='edit_user_ips_add', request_method='POST')
354 route_name='edit_user_ips_add', request_method='POST')
355 def ips_add(self):
355 def ips_add(self):
356 _ = self.request.translate
356 _ = self.request.translate
357 c = self.load_default_context()
357 c = self.load_default_context()
358
358
359 user_id = self.request.matchdict.get('user_id')
359 user_id = self.request.matchdict.get('user_id')
360 c.user = User.get_or_404(user_id, pyramid_exc=True)
360 c.user = User.get_or_404(user_id, pyramid_exc=True)
361 # NOTE(marcink): this view is allowed for default users, as we can
361 # NOTE(marcink): this view is allowed for default users, as we can
362 # edit their IP white list
362 # edit their IP white list
363
363
364 user_model = UserModel()
364 user_model = UserModel()
365 desc = self.request.POST.get('description')
365 desc = self.request.POST.get('description')
366 try:
366 try:
367 ip_list = user_model.parse_ip_range(
367 ip_list = user_model.parse_ip_range(
368 self.request.POST.get('new_ip'))
368 self.request.POST.get('new_ip'))
369 except Exception as e:
369 except Exception as e:
370 ip_list = []
370 ip_list = []
371 log.exception("Exception during ip saving")
371 log.exception("Exception during ip saving")
372 h.flash(_('An error occurred during ip saving:%s' % (e,)),
372 h.flash(_('An error occurred during ip saving:%s' % (e,)),
373 category='error')
373 category='error')
374 added = []
374 added = []
375 user_data = c.user.get_api_data()
375 user_data = c.user.get_api_data()
376 for ip in ip_list:
376 for ip in ip_list:
377 try:
377 try:
378 user_model.add_extra_ip(c.user.user_id, ip, desc)
378 user_model.add_extra_ip(c.user.user_id, ip, desc)
379 audit_logger.store_web(
379 audit_logger.store_web(
380 'user.edit.ip.add', action_data={'ip': ip, 'user': user_data},
380 'user.edit.ip.add', action_data={'ip': ip, 'user': user_data},
381 user=self._rhodecode_user)
381 user=self._rhodecode_user)
382 Session().commit()
382 Session().commit()
383 added.append(ip)
383 added.append(ip)
384 except formencode.Invalid as error:
384 except formencode.Invalid as error:
385 msg = error.error_dict['ip']
385 msg = error.error_dict['ip']
386 h.flash(msg, category='error')
386 h.flash(msg, category='error')
387 except Exception:
387 except Exception:
388 log.exception("Exception during ip saving")
388 log.exception("Exception during ip saving")
389 h.flash(_('An error occurred during ip saving'),
389 h.flash(_('An error occurred during ip saving'),
390 category='error')
390 category='error')
391 if added:
391 if added:
392 h.flash(
392 h.flash(
393 _("Added ips %s to user whitelist") % (', '.join(ip_list), ),
393 _("Added ips %s to user whitelist") % (', '.join(ip_list), ),
394 category='success')
394 category='success')
395 if 'default_user' in self.request.POST:
395 if 'default_user' in self.request.POST:
396 # case for editing global IP list we do it for 'DEFAULT' user
396 # case for editing global IP list we do it for 'DEFAULT' user
397 raise HTTPFound(h.route_path('admin_permissions_ips'))
397 raise HTTPFound(h.route_path('admin_permissions_ips'))
398 raise HTTPFound(h.route_path('edit_user_ips', user_id=user_id))
398 raise HTTPFound(h.route_path('edit_user_ips', user_id=user_id))
399
399
400 @LoginRequired()
400 @LoginRequired()
401 @HasPermissionAllDecorator('hg.admin')
401 @HasPermissionAllDecorator('hg.admin')
402 @CSRFRequired()
402 @CSRFRequired()
403 @view_config(
403 @view_config(
404 route_name='edit_user_ips_delete', request_method='POST')
404 route_name='edit_user_ips_delete', request_method='POST')
405 def ips_delete(self):
405 def ips_delete(self):
406 _ = self.request.translate
406 _ = self.request.translate
407 c = self.load_default_context()
407 c = self.load_default_context()
408
408
409 user_id = self.request.matchdict.get('user_id')
409 user_id = self.request.matchdict.get('user_id')
410 c.user = User.get_or_404(user_id, pyramid_exc=True)
410 c.user = User.get_or_404(user_id, pyramid_exc=True)
411 # NOTE(marcink): this view is allowed for default users, as we can
411 # NOTE(marcink): this view is allowed for default users, as we can
412 # edit their IP white list
412 # edit their IP white list
413
413
414 ip_id = self.request.POST.get('del_ip_id')
414 ip_id = self.request.POST.get('del_ip_id')
415 user_model = UserModel()
415 user_model = UserModel()
416 user_data = c.user.get_api_data()
416 user_data = c.user.get_api_data()
417 ip = UserIpMap.query().get(ip_id).ip_addr
417 ip = UserIpMap.query().get(ip_id).ip_addr
418 user_model.delete_extra_ip(c.user.user_id, ip_id)
418 user_model.delete_extra_ip(c.user.user_id, ip_id)
419 audit_logger.store_web(
419 audit_logger.store_web(
420 'user.edit.ip.delete', action_data={'ip': ip, 'user': user_data},
420 'user.edit.ip.delete', action_data={'ip': ip, 'user': user_data},
421 user=self._rhodecode_user)
421 user=self._rhodecode_user)
422 Session().commit()
422 Session().commit()
423 h.flash(_("Removed ip address from user whitelist"), category='success')
423 h.flash(_("Removed ip address from user whitelist"), category='success')
424
424
425 if 'default_user' in self.request.POST:
425 if 'default_user' in self.request.POST:
426 # case for editing global IP list we do it for 'DEFAULT' user
426 # case for editing global IP list we do it for 'DEFAULT' user
427 raise HTTPFound(h.route_path('admin_permissions_ips'))
427 raise HTTPFound(h.route_path('admin_permissions_ips'))
428 raise HTTPFound(h.route_path('edit_user_ips', user_id=user_id))
428 raise HTTPFound(h.route_path('edit_user_ips', user_id=user_id))
429
429
430 @LoginRequired()
430 @LoginRequired()
431 @HasPermissionAllDecorator('hg.admin')
431 @HasPermissionAllDecorator('hg.admin')
432 @view_config(
432 @view_config(
433 route_name='edit_user_groups_management', request_method='GET',
433 route_name='edit_user_groups_management', request_method='GET',
434 renderer='rhodecode:templates/admin/users/user_edit.mako')
434 renderer='rhodecode:templates/admin/users/user_edit.mako')
435 def groups_management(self):
435 def groups_management(self):
436 c = self.load_default_context()
436 c = self.load_default_context()
437
437
438 user_id = self.request.matchdict.get('user_id')
438 user_id = self.request.matchdict.get('user_id')
439 c.user = User.get_or_404(user_id, pyramid_exc=True)
439 c.user = User.get_or_404(user_id, pyramid_exc=True)
440 c.data = c.user.group_member
440 c.data = c.user.group_member
441 self._redirect_for_default_user(c.user.username)
441 self._redirect_for_default_user(c.user.username)
442 groups = [UserGroupModel.get_user_groups_as_dict(group.users_group)
442 groups = [UserGroupModel.get_user_groups_as_dict(group.users_group)
443 for group in c.user.group_member]
443 for group in c.user.group_member]
444 c.groups = json.dumps(groups)
444 c.groups = json.dumps(groups)
445 c.active = 'groups'
445 c.active = 'groups'
446
446
447 return self._get_template_context(c)
447 return self._get_template_context(c)
448
448
449 @LoginRequired()
449 @LoginRequired()
450 @HasPermissionAllDecorator('hg.admin')
450 @HasPermissionAllDecorator('hg.admin')
451 @CSRFRequired()
451 @CSRFRequired()
452 @view_config(
452 @view_config(
453 route_name='edit_user_groups_management_updates', request_method='POST')
453 route_name='edit_user_groups_management_updates', request_method='POST')
454 def groups_management_updates(self):
454 def groups_management_updates(self):
455 _ = self.request.translate
455 _ = self.request.translate
456 c = self.load_default_context()
456 c = self.load_default_context()
457
457
458 user_id = self.request.matchdict.get('user_id')
458 user_id = self.request.matchdict.get('user_id')
459 c.user = User.get_or_404(user_id, pyramid_exc=True)
459 c.user = User.get_or_404(user_id, pyramid_exc=True)
460 self._redirect_for_default_user(c.user.username)
460 self._redirect_for_default_user(c.user.username)
461
461
462 users_groups = set(self.request.POST.getall('users_group_id'))
462 users_groups = set(self.request.POST.getall('users_group_id'))
463 users_groups_model = []
463 users_groups_model = []
464
464
465 for ugid in users_groups:
465 for ugid in users_groups:
466 users_groups_model.append(UserGroupModel().get_group(safe_int(ugid)))
466 users_groups_model.append(UserGroupModel().get_group(safe_int(ugid)))
467 user_group_model = UserGroupModel()
467 user_group_model = UserGroupModel()
468 user_group_model.change_groups(c.user, users_groups_model)
468 user_group_model.change_groups(c.user, users_groups_model)
469
469
470 Session().commit()
470 Session().commit()
471 c.active = 'user_groups_management'
471 c.active = 'user_groups_management'
472 h.flash(_("Groups successfully changed"), category='success')
472 h.flash(_("Groups successfully changed"), category='success')
473
473
474 return HTTPFound(h.route_path(
474 return HTTPFound(h.route_path(
475 'edit_user_groups_management', user_id=user_id))
475 'edit_user_groups_management', user_id=user_id))
476
476
477 @LoginRequired()
477 @LoginRequired()
478 @HasPermissionAllDecorator('hg.admin')
478 @HasPermissionAllDecorator('hg.admin')
479 @view_config(
479 @view_config(
480 route_name='edit_user_audit_logs', request_method='GET',
480 route_name='edit_user_audit_logs', request_method='GET',
481 renderer='rhodecode:templates/admin/users/user_edit.mako')
481 renderer='rhodecode:templates/admin/users/user_edit.mako')
482 def user_audit_logs(self):
482 def user_audit_logs(self):
483 _ = self.request.translate
483 _ = self.request.translate
484 c = self.load_default_context()
484 c = self.load_default_context()
485
485
486 user_id = self.request.matchdict.get('user_id')
486 user_id = self.request.matchdict.get('user_id')
487 c.user = User.get_or_404(user_id, pyramid_exc=True)
487 c.user = User.get_or_404(user_id, pyramid_exc=True)
488 self._redirect_for_default_user(c.user.username)
488 self._redirect_for_default_user(c.user.username)
489 c.active = 'audit'
489 c.active = 'audit'
490
490
491 p = safe_int(self.request.GET.get('page', 1), 1)
491 p = safe_int(self.request.GET.get('page', 1), 1)
492
492
493 filter_term = self.request.GET.get('filter')
493 filter_term = self.request.GET.get('filter')
494 user_log = UserModel().get_user_log(c.user, filter_term)
494 user_log = UserModel().get_user_log(c.user, filter_term)
495
495
496 def url_generator(**kw):
496 def url_generator(**kw):
497 if filter_term:
497 if filter_term:
498 kw['filter'] = filter_term
498 kw['filter'] = filter_term
499 return self.request.current_route_path(_query=kw)
499 return self.request.current_route_path(_query=kw)
500
500
501 c.audit_logs = h.Page(
501 c.audit_logs = h.Page(
502 user_log, page=p, items_per_page=10, url=url_generator)
502 user_log, page=p, items_per_page=10, url=url_generator)
503 c.filter_term = filter_term
503 c.filter_term = filter_term
504 return self._get_template_context(c)
504 return self._get_template_context(c)
505
505
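
The only change visible in the hunk above is inside users_list_data(): h.gravatar_with_user() now receives the current request as its first argument. A short sketch of the call-site difference; user_row is a hypothetical wrapper added for illustration, while both call signatures are taken from the two sides of the diff.

from rhodecode.lib import helpers as h

def user_row(request, user):
    # Old call (left side of the diff):
    #   h.gravatar_with_user(user.username)
    # New call (right side of the diff): the request is passed in explicitly,
    # consistent with dropping the non-request PartialRenderer.
    return {
        "username": h.gravatar_with_user(request, user.username),
        "email": user.email,
    }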
@@ -1,2046 +1,2045 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Helper functions
22 Helper functions
23
23
24 Consists of functions to typically be used within templates, but also
24 Consists of functions to typically be used within templates, but also
25 available to Controllers. This module is available to both as 'h'.
25 available to Controllers. This module is available to both as 'h'.
26 """
26 """
27
27
28 import random
28 import random
29 import hashlib
29 import hashlib
30 import StringIO
30 import StringIO
31 import urllib
31 import urllib
32 import math
32 import math
33 import logging
33 import logging
34 import re
34 import re
35 import urlparse
35 import urlparse
36 import time
36 import time
37 import string
37 import string
38 import hashlib
38 import hashlib
39 from collections import OrderedDict
39 from collections import OrderedDict
40
40
41 import pygments
41 import pygments
42 import itertools
42 import itertools
43 import fnmatch
43 import fnmatch
44
44
45 from datetime import datetime
45 from datetime import datetime
46 from functools import partial
46 from functools import partial
47 from pygments.formatters.html import HtmlFormatter
47 from pygments.formatters.html import HtmlFormatter
48 from pygments import highlight as code_highlight
48 from pygments import highlight as code_highlight
49 from pygments.lexers import (
49 from pygments.lexers import (
50 get_lexer_by_name, get_lexer_for_filename, get_lexer_for_mimetype)
50 get_lexer_by_name, get_lexer_for_filename, get_lexer_for_mimetype)
51
51
52 from pyramid.threadlocal import get_current_request
52 from pyramid.threadlocal import get_current_request
53
53
54 from webhelpers.html import literal, HTML, escape
54 from webhelpers.html import literal, HTML, escape
55 from webhelpers.html.tools import *
55 from webhelpers.html.tools import *
56 from webhelpers.html.builder import make_tag
56 from webhelpers.html.builder import make_tag
57 from webhelpers.html.tags import auto_discovery_link, checkbox, css_classes, \
57 from webhelpers.html.tags import auto_discovery_link, checkbox, css_classes, \
58 end_form, file, form as wh_form, hidden, image, javascript_link, link_to, \
58 end_form, file, form as wh_form, hidden, image, javascript_link, link_to, \
59 link_to_if, link_to_unless, ol, required_legend, select, stylesheet_link, \
59 link_to_if, link_to_unless, ol, required_legend, select, stylesheet_link, \
60 submit, text, password, textarea, title, ul, xml_declaration, radio
60 submit, text, password, textarea, title, ul, xml_declaration, radio
61 from webhelpers.html.tools import auto_link, button_to, highlight, \
61 from webhelpers.html.tools import auto_link, button_to, highlight, \
62 js_obfuscate, mail_to, strip_links, strip_tags, tag_re
62 js_obfuscate, mail_to, strip_links, strip_tags, tag_re
63 from webhelpers.pylonslib import Flash as _Flash
63 from webhelpers.pylonslib import Flash as _Flash
64 from webhelpers.text import chop_at, collapse, convert_accented_entities, \
64 from webhelpers.text import chop_at, collapse, convert_accented_entities, \
65 convert_misc_entities, lchop, plural, rchop, remove_formatting, \
65 convert_misc_entities, lchop, plural, rchop, remove_formatting, \
66 replace_whitespace, urlify, truncate, wrap_paragraphs
66 replace_whitespace, urlify, truncate, wrap_paragraphs
67 from webhelpers.date import time_ago_in_words
67 from webhelpers.date import time_ago_in_words
68 from webhelpers.paginate import Page as _Page
68 from webhelpers.paginate import Page as _Page
69 from webhelpers.html.tags import _set_input_attrs, _set_id_attr, \
69 from webhelpers.html.tags import _set_input_attrs, _set_id_attr, \
70 convert_boolean_attrs, NotGiven, _make_safe_id_component
70 convert_boolean_attrs, NotGiven, _make_safe_id_component
71 from webhelpers2.number import format_byte_size
71 from webhelpers2.number import format_byte_size
72
72
73 from rhodecode.lib.action_parser import action_parser
73 from rhodecode.lib.action_parser import action_parser
74 from rhodecode.lib.ext_json import json
74 from rhodecode.lib.ext_json import json
75 from rhodecode.lib.utils import repo_name_slug, get_custom_lexer
75 from rhodecode.lib.utils import repo_name_slug, get_custom_lexer
76 from rhodecode.lib.utils2 import str2bool, safe_unicode, safe_str, \
76 from rhodecode.lib.utils2 import str2bool, safe_unicode, safe_str, \
77 get_commit_safe, datetime_to_time, time_to_datetime, time_to_utcdatetime, \
77 get_commit_safe, datetime_to_time, time_to_datetime, time_to_utcdatetime, \
78 AttributeDict, safe_int, md5, md5_safe
78 AttributeDict, safe_int, md5, md5_safe
79 from rhodecode.lib.markup_renderer import MarkupRenderer, relative_links
79 from rhodecode.lib.markup_renderer import MarkupRenderer, relative_links
80 from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError
80 from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError
81 from rhodecode.lib.vcs.backends.base import BaseChangeset, EmptyCommit
81 from rhodecode.lib.vcs.backends.base import BaseChangeset, EmptyCommit
82 from rhodecode.config.conf import DATE_FORMAT, DATETIME_FORMAT
82 from rhodecode.config.conf import DATE_FORMAT, DATETIME_FORMAT
83 from rhodecode.model.changeset_status import ChangesetStatusModel
83 from rhodecode.model.changeset_status import ChangesetStatusModel
84 from rhodecode.model.db import Permission, User, Repository
84 from rhodecode.model.db import Permission, User, Repository
85 from rhodecode.model.repo_group import RepoGroupModel
85 from rhodecode.model.repo_group import RepoGroupModel
86 from rhodecode.model.settings import IssueTrackerSettingsModel
86 from rhodecode.model.settings import IssueTrackerSettingsModel
87
87
88 log = logging.getLogger(__name__)
88 log = logging.getLogger(__name__)
89
89
90
90
91 DEFAULT_USER = User.DEFAULT_USER
91 DEFAULT_USER = User.DEFAULT_USER
92 DEFAULT_USER_EMAIL = User.DEFAULT_USER_EMAIL
92 DEFAULT_USER_EMAIL = User.DEFAULT_USER_EMAIL
93
93
94
94
95 def url(*args, **kw):
95 def url(*args, **kw):
96 from pylons import url as pylons_url
96 from pylons import url as pylons_url
97 return pylons_url(*args, **kw)
97 return pylons_url(*args, **kw)
98
98
99
99
100 def pylons_url_current(*args, **kw):
100 def pylons_url_current(*args, **kw):
101 """
101 """
102 This function overrides pylons.url.current() which returns the current
102 This function overrides pylons.url.current() which returns the current
103 path so that it will also work from a pyramid only context. This
103 path so that it will also work from a pyramid only context. This
104 should be removed once port to pyramid is complete.
104 should be removed once port to pyramid is complete.
105 """
105 """
106 from pylons import url as pylons_url
106 from pylons import url as pylons_url
107 if not args and not kw:
107 if not args and not kw:
108 request = get_current_request()
108 request = get_current_request()
109 return request.path
109 return request.path
110 return pylons_url.current(*args, **kw)
110 return pylons_url.current(*args, **kw)
111
111
112 url.current = pylons_url_current
112 url.current = pylons_url_current
113
113
114
114
115 def url_replace(**qargs):
115 def url_replace(**qargs):
116 """ Returns the current request url while replacing query string args """
116 """ Returns the current request url while replacing query string args """
117
117
118 request = get_current_request()
118 request = get_current_request()
119 new_args = request.GET.mixed()
119 new_args = request.GET.mixed()
120 new_args.update(qargs)
120 new_args.update(qargs)
121 return url('', **new_args)
121 return url('', **new_args)
122
122
123
123
124 def asset(path, ver=None, **kwargs):
124 def asset(path, ver=None, **kwargs):
125 """
125 """
126 Helper to generate a static asset file path for rhodecode assets
126 Helper to generate a static asset file path for rhodecode assets
127
127
128 eg. h.asset('images/image.png', ver='3923')
128 eg. h.asset('images/image.png', ver='3923')
129
129
130 :param path: path of asset
130 :param path: path of asset
131 :param ver: optional version query param to append as ?ver=
131 :param ver: optional version query param to append as ?ver=
132 """
132 """
133 request = get_current_request()
133 request = get_current_request()
134 query = {}
134 query = {}
135 query.update(kwargs)
135 query.update(kwargs)
136 if ver:
136 if ver:
137 query = {'ver': ver}
137 query = {'ver': ver}
138 return request.static_path(
138 return request.static_path(
139 'rhodecode:public/{}'.format(path), _query=query)
139 'rhodecode:public/{}'.format(path), _query=query)
140
140
141
141
142 default_html_escape_table = {
142 default_html_escape_table = {
143 ord('&'): u'&amp;',
143 ord('&'): u'&amp;',
144 ord('<'): u'&lt;',
144 ord('<'): u'&lt;',
145 ord('>'): u'&gt;',
145 ord('>'): u'&gt;',
146 ord('"'): u'&quot;',
146 ord('"'): u'&quot;',
147 ord("'"): u'&#39;',
147 ord("'"): u'&#39;',
148 }
148 }
149
149
150
150
151 def html_escape(text, html_escape_table=default_html_escape_table):
151 def html_escape(text, html_escape_table=default_html_escape_table):
152 """Produce entities within text."""
152 """Produce entities within text."""
153 return text.translate(html_escape_table)
153 return text.translate(html_escape_table)
154
154
155
155
156 def chop_at_smart(s, sub, inclusive=False, suffix_if_chopped=None):
156 def chop_at_smart(s, sub, inclusive=False, suffix_if_chopped=None):
157 """
157 """
158 Truncate string ``s`` at the first occurrence of ``sub``.
158 Truncate string ``s`` at the first occurrence of ``sub``.
159
159
160 If ``inclusive`` is true, truncate just after ``sub`` rather than at it.
160 If ``inclusive`` is true, truncate just after ``sub`` rather than at it.
161 """
161 """
162 suffix_if_chopped = suffix_if_chopped or ''
162 suffix_if_chopped = suffix_if_chopped or ''
163 pos = s.find(sub)
163 pos = s.find(sub)
164 if pos == -1:
164 if pos == -1:
165 return s
165 return s
166
166
167 if inclusive:
167 if inclusive:
168 pos += len(sub)
168 pos += len(sub)
169
169
170 chopped = s[:pos]
170 chopped = s[:pos]
171 left = s[pos:].strip()
171 left = s[pos:].strip()
172
172
173 if left and suffix_if_chopped:
173 if left and suffix_if_chopped:
174 chopped += suffix_if_chopped
174 chopped += suffix_if_chopped
175
175
176 return chopped
176 return chopped
177
177
178
178
179 def shorter(text, size=20):
179 def shorter(text, size=20):
180 postfix = '...'
180 postfix = '...'
181 if len(text) > size:
181 if len(text) > size:
182 return text[:size - len(postfix)] + postfix
182 return text[:size - len(postfix)] + postfix
183 return text
183 return text
184
184
185
185
186 def _reset(name, value=None, id=NotGiven, type="reset", **attrs):
186 def _reset(name, value=None, id=NotGiven, type="reset", **attrs):
187 """
187 """
188 Reset button
188 Reset button
189 """
189 """
190 _set_input_attrs(attrs, type, name, value)
190 _set_input_attrs(attrs, type, name, value)
191 _set_id_attr(attrs, id, name)
191 _set_id_attr(attrs, id, name)
192 convert_boolean_attrs(attrs, ["disabled"])
192 convert_boolean_attrs(attrs, ["disabled"])
193 return HTML.input(**attrs)
193 return HTML.input(**attrs)
194
194
195 reset = _reset
195 reset = _reset
196 safeid = _make_safe_id_component
196 safeid = _make_safe_id_component
197
197
198
198
199 def branding(name, length=40):
199 def branding(name, length=40):
200 return truncate(name, length, indicator="")
200 return truncate(name, length, indicator="")
201
201
202
202
203 def FID(raw_id, path):
203 def FID(raw_id, path):
204 """
204 """
205 Creates a unique ID for filenode based on it's hash of path and commit
205 Creates a unique ID for filenode based on it's hash of path and commit
206 it's safe to use in urls
206 it's safe to use in urls
207
207
208 :param raw_id:
208 :param raw_id:
209 :param path:
209 :param path:
210 """
210 """
211
211
212 return 'c-%s-%s' % (short_id(raw_id), md5_safe(path)[:12])
212 return 'c-%s-%s' % (short_id(raw_id), md5_safe(path)[:12])
213
213
214
214
215 class _GetError(object):
215 class _GetError(object):
216 """Get error from form_errors, and represent it as span wrapped error
216 """Get error from form_errors, and represent it as span wrapped error
217 message
217 message
218
218
219 :param field_name: field to fetch errors for
219 :param field_name: field to fetch errors for
220 :param form_errors: form errors dict
220 :param form_errors: form errors dict
221 """
221 """
222
222
223 def __call__(self, field_name, form_errors):
223 def __call__(self, field_name, form_errors):
224 tmpl = """<span class="error_msg">%s</span>"""
224 tmpl = """<span class="error_msg">%s</span>"""
225 if form_errors and field_name in form_errors:
225 if form_errors and field_name in form_errors:
226 return literal(tmpl % form_errors.get(field_name))
226 return literal(tmpl % form_errors.get(field_name))
227
227
228 get_error = _GetError()
228 get_error = _GetError()
229
229
230
230
231 class _ToolTip(object):
231 class _ToolTip(object):
232
232
233 def __call__(self, tooltip_title, trim_at=50):
233 def __call__(self, tooltip_title, trim_at=50):
234 """
234 """
235 Special function just to wrap our text into nice formatted
235 Special function just to wrap our text into nice formatted
236 autowrapped text
236 autowrapped text
237
237
238 :param tooltip_title:
238 :param tooltip_title:
239 """
239 """
240 tooltip_title = escape(tooltip_title)
240 tooltip_title = escape(tooltip_title)
241 tooltip_title = tooltip_title.replace('<', '&lt;').replace('>', '&gt;')
241 tooltip_title = tooltip_title.replace('<', '&lt;').replace('>', '&gt;')
242 return tooltip_title
242 return tooltip_title
243 tooltip = _ToolTip()
243 tooltip = _ToolTip()
244
244
245
245
246 def files_breadcrumbs(repo_name, commit_id, file_path):
246 def files_breadcrumbs(repo_name, commit_id, file_path):
247 if isinstance(file_path, str):
247 if isinstance(file_path, str):
248 file_path = safe_unicode(file_path)
248 file_path = safe_unicode(file_path)
249
249
250 # TODO: johbo: Is this always a url like path, or is this operating
250 # TODO: johbo: Is this always a url like path, or is this operating
251 # system dependent?
251 # system dependent?
252 path_segments = file_path.split('/')
252 path_segments = file_path.split('/')
253
253
254 repo_name_html = escape(repo_name)
254 repo_name_html = escape(repo_name)
255 if len(path_segments) == 1 and path_segments[0] == '':
255 if len(path_segments) == 1 and path_segments[0] == '':
256 url_segments = [repo_name_html]
256 url_segments = [repo_name_html]
257 else:
257 else:
258 url_segments = [
258 url_segments = [
259 link_to(
259 link_to(
260 repo_name_html,
260 repo_name_html,
261 route_path(
261 route_path(
262 'repo_files',
262 'repo_files',
263 repo_name=repo_name,
263 repo_name=repo_name,
264 commit_id=commit_id,
264 commit_id=commit_id,
265 f_path=''),
265 f_path=''),
266 class_='pjax-link')]
266 class_='pjax-link')]
267
267
268 last_cnt = len(path_segments) - 1
268 last_cnt = len(path_segments) - 1
269 for cnt, segment in enumerate(path_segments):
269 for cnt, segment in enumerate(path_segments):
270 if not segment:
270 if not segment:
271 continue
271 continue
272 segment_html = escape(segment)
272 segment_html = escape(segment)
273
273
274 if cnt != last_cnt:
274 if cnt != last_cnt:
275 url_segments.append(
275 url_segments.append(
276 link_to(
276 link_to(
277 segment_html,
277 segment_html,
278 route_path(
278 route_path(
279 'repo_files',
279 'repo_files',
280 repo_name=repo_name,
280 repo_name=repo_name,
281 commit_id=commit_id,
281 commit_id=commit_id,
282 f_path='/'.join(path_segments[:cnt + 1])),
282 f_path='/'.join(path_segments[:cnt + 1])),
283 class_='pjax-link'))
283 class_='pjax-link'))
284 else:
284 else:
285 url_segments.append(segment_html)
285 url_segments.append(segment_html)
286
286
287 return literal('/'.join(url_segments))
287 return literal('/'.join(url_segments))
288
288
289
289
290 class CodeHtmlFormatter(HtmlFormatter):
290 class CodeHtmlFormatter(HtmlFormatter):
291 """
291 """
292 My code Html Formatter for source codes
292 My code Html Formatter for source codes
293 """
293 """
294
294
295 def wrap(self, source, outfile):
295 def wrap(self, source, outfile):
296 return self._wrap_div(self._wrap_pre(self._wrap_code(source)))
296 return self._wrap_div(self._wrap_pre(self._wrap_code(source)))
297
297
298 def _wrap_code(self, source):
298 def _wrap_code(self, source):
299 for cnt, it in enumerate(source):
299 for cnt, it in enumerate(source):
300 i, t = it
300 i, t = it
301 t = '<div id="L%s">%s</div>' % (cnt + 1, t)
301 t = '<div id="L%s">%s</div>' % (cnt + 1, t)
302 yield i, t
302 yield i, t
303
303
304 def _wrap_tablelinenos(self, inner):
304 def _wrap_tablelinenos(self, inner):
305 dummyoutfile = StringIO.StringIO()
305 dummyoutfile = StringIO.StringIO()
306 lncount = 0
306 lncount = 0
307 for t, line in inner:
307 for t, line in inner:
308 if t:
308 if t:
309 lncount += 1
309 lncount += 1
310 dummyoutfile.write(line)
310 dummyoutfile.write(line)
311
311
312 fl = self.linenostart
312 fl = self.linenostart
313 mw = len(str(lncount + fl - 1))
313 mw = len(str(lncount + fl - 1))
314 sp = self.linenospecial
314 sp = self.linenospecial
315 st = self.linenostep
315 st = self.linenostep
316 la = self.lineanchors
316 la = self.lineanchors
317 aln = self.anchorlinenos
317 aln = self.anchorlinenos
318 nocls = self.noclasses
318 nocls = self.noclasses
319 if sp:
319 if sp:
320 lines = []
320 lines = []
321
321
322 for i in range(fl, fl + lncount):
322 for i in range(fl, fl + lncount):
323 if i % st == 0:
323 if i % st == 0:
324 if i % sp == 0:
324 if i % sp == 0:
325 if aln:
325 if aln:
326 lines.append('<a href="#%s%d" class="special">%*d</a>' %
326 lines.append('<a href="#%s%d" class="special">%*d</a>' %
327 (la, i, mw, i))
327 (la, i, mw, i))
328 else:
328 else:
329 lines.append('<span class="special">%*d</span>' % (mw, i))
329 lines.append('<span class="special">%*d</span>' % (mw, i))
330 else:
330 else:
331 if aln:
331 if aln:
332 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
332 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
333 else:
333 else:
334 lines.append('%*d' % (mw, i))
334 lines.append('%*d' % (mw, i))
335 else:
335 else:
336 lines.append('')
336 lines.append('')
337 ls = '\n'.join(lines)
337 ls = '\n'.join(lines)
338 else:
338 else:
339 lines = []
339 lines = []
340 for i in range(fl, fl + lncount):
340 for i in range(fl, fl + lncount):
341 if i % st == 0:
341 if i % st == 0:
342 if aln:
342 if aln:
343 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
343 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
344 else:
344 else:
345 lines.append('%*d' % (mw, i))
345 lines.append('%*d' % (mw, i))
346 else:
346 else:
347 lines.append('')
347 lines.append('')
348 ls = '\n'.join(lines)
348 ls = '\n'.join(lines)
349
349
350 # in case you wonder about the seemingly redundant <div> here: since the
350 # in case you wonder about the seemingly redundant <div> here: since the
351 # content in the other cell also is wrapped in a div, some browsers in
351 # content in the other cell also is wrapped in a div, some browsers in
352 # some configurations seem to mess up the formatting...
352 # some configurations seem to mess up the formatting...
353 if nocls:
353 if nocls:
354 yield 0, ('<table class="%stable">' % self.cssclass +
354 yield 0, ('<table class="%stable">' % self.cssclass +
355 '<tr><td><div class="linenodiv" '
355 '<tr><td><div class="linenodiv" '
356 'style="background-color: #f0f0f0; padding-right: 10px">'
356 'style="background-color: #f0f0f0; padding-right: 10px">'
357 '<pre style="line-height: 125%">' +
357 '<pre style="line-height: 125%">' +
358 ls + '</pre></div></td><td id="hlcode" class="code">')
358 ls + '</pre></div></td><td id="hlcode" class="code">')
359 else:
359 else:
360 yield 0, ('<table class="%stable">' % self.cssclass +
360 yield 0, ('<table class="%stable">' % self.cssclass +
361 '<tr><td class="linenos"><div class="linenodiv"><pre>' +
361 '<tr><td class="linenos"><div class="linenodiv"><pre>' +
362 ls + '</pre></div></td><td id="hlcode" class="code">')
362 ls + '</pre></div></td><td id="hlcode" class="code">')
363 yield 0, dummyoutfile.getvalue()
363 yield 0, dummyoutfile.getvalue()
364 yield 0, '</td></tr></table>'
364 yield 0, '</td></tr></table>'
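
# Rough usage sketch (illustrative only): the formatter above is normally
# driven through code_highlight()/pygmentize() further down in this module,
# but it can also be exercised directly with pygments; option names are the
# standard HtmlFormatter ones.
#
#   from pygments import highlight
#   from pygments.lexers import PythonLexer
#
#   html = highlight("print 'hello'", PythonLexer(),
#                    CodeHtmlFormatter(linenos='table', lineanchors='L',
#                                      anchorlinenos=True))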
365
365
366
366
367 class SearchContentCodeHtmlFormatter(CodeHtmlFormatter):
367 class SearchContentCodeHtmlFormatter(CodeHtmlFormatter):
368 def __init__(self, **kw):
368 def __init__(self, **kw):
369 # only show these line numbers if set
369 # only show these line numbers if set
370 self.only_lines = kw.pop('only_line_numbers', [])
370 self.only_lines = kw.pop('only_line_numbers', [])
371 self.query_terms = kw.pop('query_terms', [])
371 self.query_terms = kw.pop('query_terms', [])
372 self.max_lines = kw.pop('max_lines', 5)
372 self.max_lines = kw.pop('max_lines', 5)
373 self.line_context = kw.pop('line_context', 3)
373 self.line_context = kw.pop('line_context', 3)
374 self.url = kw.pop('url', None)
374 self.url = kw.pop('url', None)
375
375
376 super(CodeHtmlFormatter, self).__init__(**kw)
376 super(CodeHtmlFormatter, self).__init__(**kw)
377
377
378 def _wrap_code(self, source):
378 def _wrap_code(self, source):
379 for cnt, it in enumerate(source):
379 for cnt, it in enumerate(source):
380 i, t = it
380 i, t = it
381 t = '<pre>%s</pre>' % t
381 t = '<pre>%s</pre>' % t
382 yield i, t
382 yield i, t
383
383
384 def _wrap_tablelinenos(self, inner):
384 def _wrap_tablelinenos(self, inner):
385 yield 0, '<table class="code-highlight %stable">' % self.cssclass
385 yield 0, '<table class="code-highlight %stable">' % self.cssclass
386
386
387 last_shown_line_number = 0
387 last_shown_line_number = 0
388 current_line_number = 1
388 current_line_number = 1
389
389
390 for t, line in inner:
390 for t, line in inner:
391 if not t:
391 if not t:
392 yield t, line
392 yield t, line
393 continue
393 continue
394
394
395 if current_line_number in self.only_lines:
395 if current_line_number in self.only_lines:
396 if last_shown_line_number + 1 != current_line_number:
396 if last_shown_line_number + 1 != current_line_number:
397 yield 0, '<tr>'
397 yield 0, '<tr>'
398 yield 0, '<td class="line">...</td>'
398 yield 0, '<td class="line">...</td>'
399 yield 0, '<td id="hlcode" class="code"></td>'
399 yield 0, '<td id="hlcode" class="code"></td>'
400 yield 0, '</tr>'
400 yield 0, '</tr>'
401
401
402 yield 0, '<tr>'
402 yield 0, '<tr>'
403 if self.url:
403 if self.url:
404 yield 0, '<td class="line"><a href="%s#L%i">%i</a></td>' % (
404 yield 0, '<td class="line"><a href="%s#L%i">%i</a></td>' % (
405 self.url, current_line_number, current_line_number)
405 self.url, current_line_number, current_line_number)
406 else:
406 else:
407 yield 0, '<td class="line"><a href="">%i</a></td>' % (
407 yield 0, '<td class="line"><a href="">%i</a></td>' % (
408 current_line_number)
408 current_line_number)
409 yield 0, '<td id="hlcode" class="code">' + line + '</td>'
409 yield 0, '<td id="hlcode" class="code">' + line + '</td>'
410 yield 0, '</tr>'
410 yield 0, '</tr>'
411
411
412 last_shown_line_number = current_line_number
412 last_shown_line_number = current_line_number
413
413
414 current_line_number += 1
414 current_line_number += 1
415
415
416
416
417 yield 0, '</table>'
417 yield 0, '</table>'
418
418
419
419
420 def extract_phrases(text_query):
420 def extract_phrases(text_query):
421 """
421 """
422 Extracts phrases from a search term string, making sure phrases
422 Extracts phrases from a search term string, making sure phrases
423 contained in double quotes are kept together, and discarding empty
423 contained in double quotes are kept together, and discarding empty
424 or whitespace-only values, eg.
424 or whitespace-only values, eg.
425
425
426 'some text "a phrase" more' => ['some', 'text', 'a phrase', 'more']
426 'some text "a phrase" more' => ['some', 'text', 'a phrase', 'more']
427
427
428 """
428 """
429
429
430 in_phrase = False
430 in_phrase = False
431 buf = ''
431 buf = ''
432 phrases = []
432 phrases = []
433 for char in text_query:
433 for char in text_query:
434 if in_phrase:
434 if in_phrase:
435 if char == '"': # end phrase
435 if char == '"': # end phrase
436 phrases.append(buf)
436 phrases.append(buf)
437 buf = ''
437 buf = ''
438 in_phrase = False
438 in_phrase = False
439 continue
439 continue
440 else:
440 else:
441 buf += char
441 buf += char
442 continue
442 continue
443 else:
443 else:
444 if char == '"': # start phrase
444 if char == '"': # start phrase
445 in_phrase = True
445 in_phrase = True
446 phrases.append(buf)
446 phrases.append(buf)
447 buf = ''
447 buf = ''
448 continue
448 continue
449 elif char == ' ':
449 elif char == ' ':
450 phrases.append(buf)
450 phrases.append(buf)
451 buf = ''
451 buf = ''
452 continue
452 continue
453 else:
453 else:
454 buf += char
454 buf += char
455
455
456 phrases.append(buf)
456 phrases.append(buf)
457 phrases = [phrase.strip() for phrase in phrases if phrase.strip()]
457 phrases = [phrase.strip() for phrase in phrases if phrase.strip()]
458 return phrases
458 return phrases
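
# A minimal sketch of the expected behaviour (query value is illustrative):
#
#   >>> extract_phrases('author:"John Doe" fix bug')
#   ['author:', 'John Doe', 'fix', 'bug']
#
# Quoted phrases survive as single entries; everything else is split on
# whitespace and empty chunks are dropped.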
459
459
460
460
461 def get_matching_offsets(text, phrases):
461 def get_matching_offsets(text, phrases):
462 """
462 """
463 Returns a list of string offsets in `text` that the list of `phrases` match
463 Returns a list of string offsets in `text` that the list of `phrases` match
464
464
465 >>> get_matching_offsets('some text here', ['some', 'here'])
465 >>> get_matching_offsets('some text here', ['some', 'here'])
466 [(0, 4), (10, 14)]
466 [(0, 4), (10, 14)]
467
467
468 """
468 """
469 offsets = []
469 offsets = []
470 for phrase in phrases:
470 for phrase in phrases:
471 for match in re.finditer(phrase, text):
471 for match in re.finditer(phrase, text):
472 offsets.append((match.start(), match.end()))
472 offsets.append((match.start(), match.end()))
473
473
474 return offsets
474 return offsets
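
# Note: the phrases are fed to re.finditer() as-is, so they are treated as
# regular expressions. Within this module that is safe because callers pass
# them through normalize_text_for_matching() first, which turns regex
# metacharacters into spaces; a hypothetical direct caller with raw user
# input would need re.escape(), eg:
#
#   offsets = get_matching_offsets(text, [re.escape(p) for p in raw_phrases])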
475
475
476
476
477 def normalize_text_for_matching(x):
477 def normalize_text_for_matching(x):
478 """
478 """
479 Replaces all non-alphanumeric characters with spaces and lower-cases the string,
479 Replaces all non-alphanumeric characters with spaces and lower-cases the string,
480 useful for comparing two text strings without punctuation
480 useful for comparing two text strings without punctuation
481 """
481 """
482 return re.sub(r'[^\w]', ' ', x.lower())
482 return re.sub(r'[^\w]', ' ', x.lower())
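
# For example (underscores count as word characters for the \w class):
#
#   >>> normalize_text_for_matching('Foo-Bar_baz!')
#   'foo bar_baz '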
483
483
484
484
485 def get_matching_line_offsets(lines, terms):
485 def get_matching_line_offsets(lines, terms):
486 """ Return a set of `lines` indices (starting from 1) matching a
486 """ Return a set of `lines` indices (starting from 1) matching a
487 text search query, along with `context` lines above/below matching lines
487 text search query, along with `context` lines above/below matching lines
488
488
489 :param lines: list of strings representing lines
489 :param lines: list of strings representing lines
490 :param terms: search term string to match in lines eg. 'some text'
490 :param terms: search term string to match in lines eg. 'some text'
491 :param context: number of lines above/below a matching line to add to result
491 :param context: number of lines above/below a matching line to add to result
492 :param max_lines: cut off for lines of interest
492 :param max_lines: cut off for lines of interest
493 eg.
493 eg.
494
494
495 text = '''
495 text = '''
496 words words words
496 words words words
497 words words words
497 words words words
498 some text some
498 some text some
499 words words words
499 words words words
500 words words words
500 words words words
501 text here what
501 text here what
502 '''
502 '''
503 get_matching_line_offsets(text.strip().splitlines(), 'text')
503 get_matching_line_offsets(text.strip().splitlines(), 'text')
504 {3: [(5, 9)], 6: [(0, 4)]}
504 {3: [(5, 9)], 6: [(0, 4)]}
505
505
506 """
506 """
507 matching_lines = {}
507 matching_lines = {}
508 phrases = [normalize_text_for_matching(phrase)
508 phrases = [normalize_text_for_matching(phrase)
509 for phrase in extract_phrases(terms)]
509 for phrase in extract_phrases(terms)]
510
510
511 for line_index, line in enumerate(lines, start=1):
511 for line_index, line in enumerate(lines, start=1):
512 match_offsets = get_matching_offsets(
512 match_offsets = get_matching_offsets(
513 normalize_text_for_matching(line), phrases)
513 normalize_text_for_matching(line), phrases)
514 if match_offsets:
514 if match_offsets:
515 matching_lines[line_index] = match_offsets
515 matching_lines[line_index] = match_offsets
516
516
517 return matching_lines
517 return matching_lines
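
# Rough sketch of how this ties into SearchContentCodeHtmlFormatter above;
# `file_content` and `query` are illustrative names, not part of this module:
#
#   lines = file_content.splitlines()
#   matches = get_matching_line_offsets(lines, query)
#   formatter = SearchContentCodeHtmlFormatter(
#       linenos=True,
#       only_line_numbers=list(matches.keys()),
#       query_terms=extract_phrases(query))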
518
518
519
519
520 def hsv_to_rgb(h, s, v):
520 def hsv_to_rgb(h, s, v):
521 """ Convert hsv color values to rgb """
521 """ Convert hsv color values to rgb """
522
522
523 if s == 0.0:
523 if s == 0.0:
524 return v, v, v
524 return v, v, v
525 i = int(h * 6.0) # XXX assume int() truncates!
525 i = int(h * 6.0) # XXX assume int() truncates!
526 f = (h * 6.0) - i
526 f = (h * 6.0) - i
527 p = v * (1.0 - s)
527 p = v * (1.0 - s)
528 q = v * (1.0 - s * f)
528 q = v * (1.0 - s * f)
529 t = v * (1.0 - s * (1.0 - f))
529 t = v * (1.0 - s * (1.0 - f))
530 i = i % 6
530 i = i % 6
531 if i == 0:
531 if i == 0:
532 return v, t, p
532 return v, t, p
533 if i == 1:
533 if i == 1:
534 return q, v, p
534 return q, v, p
535 if i == 2:
535 if i == 2:
536 return p, v, t
536 return p, v, t
537 if i == 3:
537 if i == 3:
538 return p, q, v
538 return p, q, v
539 if i == 4:
539 if i == 4:
540 return t, p, v
540 return t, p, v
541 if i == 5:
541 if i == 5:
542 return v, p, q
542 return v, p, q
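
# Sanity examples (h, s, v all expected in the 0..1 range):
#
#   >>> hsv_to_rgb(0.0, 1.0, 1.0)    # pure red
#   (1.0, 0.0, 0.0)
#   >>> hsv_to_rgb(0.5, 0.0, 0.75)   # zero saturation -> grey
#   (0.75, 0.75, 0.75)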
543
543
544
544
545 def unique_color_generator(n=10000, saturation=0.10, lightness=0.95):
545 def unique_color_generator(n=10000, saturation=0.10, lightness=0.95):
546 """
546 """
547 Generator yielding n evenly distributed colors, using
547 Generator yielding n evenly distributed colors, using
548 hsv color and the golden ratio. It always returns the same order of colors
548 hsv color and the golden ratio. It always returns the same order of colors
549
549
550 :param n: number of colors to generate
550 :param n: number of colors to generate
551 :param saturation: saturation of returned colors
551 :param saturation: saturation of returned colors
552 :param lightness: lightness of returned colors
552 :param lightness: lightness of returned colors
553 :returns: RGB tuple
553 :returns: RGB tuple
554 """
554 """
555
555
556 golden_ratio = 0.618033988749895
556 golden_ratio = 0.618033988749895
557 h = 0.22717784590367374
557 h = 0.22717784590367374
558
558
559 for _ in xrange(n):
559 for _ in xrange(n):
560 h += golden_ratio
560 h += golden_ratio
561 h %= 1
561 h %= 1
562 HSV_tuple = [h, saturation, lightness]
562 HSV_tuple = [h, saturation, lightness]
563 RGB_tuple = hsv_to_rgb(*HSV_tuple)
563 RGB_tuple = hsv_to_rgb(*HSV_tuple)
564 yield map(lambda x: str(int(x * 256)), RGB_tuple)
564 yield map(lambda x: str(int(x * 256)), RGB_tuple)
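
# Usage sketch: each call to .next() yields a list of three stringified
# 0-255 channel values (pastel tones, given the low default saturation), and
# the sequence is deterministic across runs:
#
#   color_gen = unique_color_generator()
#   'rgb(%s)' % ', '.join(color_gen.next())   # eg 'rgb(243, 218, 241)'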
565
565
566
566
567 def color_hasher(n=10000, saturation=0.10, lightness=0.95):
567 def color_hasher(n=10000, saturation=0.10, lightness=0.95):
568 """
568 """
569 Returns a function which, when called with an argument, returns a unique
569 Returns a function which, when called with an argument, returns a unique
570 color for that argument, eg.
570 color for that argument, eg.
571
571
572 :param n: number of colors to generate
572 :param n: number of colors to generate
573 :param saturation: saturation of returned colors
573 :param saturation: saturation of returned colors
574 :param lightness: lightness of returned colors
574 :param lightness: lightness of returned colors
575 :returns: css RGB string
575 :returns: css RGB string
576
576
577 >>> color_hash = color_hasher()
577 >>> color_hash = color_hasher()
578 >>> color_hash('hello')
578 >>> color_hash('hello')
579 'rgb(34, 12, 59)'
579 'rgb(34, 12, 59)'
580 >>> color_hash('hello')
580 >>> color_hash('hello')
581 'rgb(34, 12, 59)'
581 'rgb(34, 12, 59)'
582 >>> color_hash('other')
582 >>> color_hash('other')
583 'rgb(90, 224, 159)'
583 'rgb(90, 224, 159)'
584 """
584 """
585
585
586 color_dict = {}
586 color_dict = {}
587 cgenerator = unique_color_generator(
587 cgenerator = unique_color_generator(
588 saturation=saturation, lightness=lightness)
588 saturation=saturation, lightness=lightness)
589
589
590 def get_color_string(thing):
590 def get_color_string(thing):
591 if thing in color_dict:
591 if thing in color_dict:
592 col = color_dict[thing]
592 col = color_dict[thing]
593 else:
593 else:
594 col = color_dict[thing] = cgenerator.next()
594 col = color_dict[thing] = cgenerator.next()
595 return "rgb(%s)" % (', '.join(col))
595 return "rgb(%s)" % (', '.join(col))
596
596
597 return get_color_string
597 return get_color_string
598
598
599
599
600 def get_lexer_safe(mimetype=None, filepath=None):
600 def get_lexer_safe(mimetype=None, filepath=None):
601 """
601 """
602 Tries to return a relevant pygments lexer using mimetype/filepath name,
602 Tries to return a relevant pygments lexer using mimetype/filepath name,
603 defaulting to plain text if none could be found
603 defaulting to plain text if none could be found
604 """
604 """
605 lexer = None
605 lexer = None
606 try:
606 try:
607 if mimetype:
607 if mimetype:
608 lexer = get_lexer_for_mimetype(mimetype)
608 lexer = get_lexer_for_mimetype(mimetype)
609 if not lexer:
609 if not lexer:
610 lexer = get_lexer_for_filename(filepath)
610 lexer = get_lexer_for_filename(filepath)
611 except pygments.util.ClassNotFound:
611 except pygments.util.ClassNotFound:
612 pass
612 pass
613
613
614 if not lexer:
614 if not lexer:
615 lexer = get_lexer_by_name('text')
615 lexer = get_lexer_by_name('text')
616
616
617 return lexer
617 return lexer
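
# For example (assuming the standard pygments lexer mappings):
#
#   get_lexer_safe(mimetype='text/x-python')      # -> PythonLexer
#   get_lexer_safe(filepath='setup.py')           # -> PythonLexer
#   get_lexer_safe(filepath='no-such.extension')  # -> plain text fallback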
618
618
619
619
620 def get_lexer_for_filenode(filenode):
620 def get_lexer_for_filenode(filenode):
621 lexer = get_custom_lexer(filenode.extension) or filenode.lexer
621 lexer = get_custom_lexer(filenode.extension) or filenode.lexer
622 return lexer
622 return lexer
623
623
624
624
625 def pygmentize(filenode, **kwargs):
625 def pygmentize(filenode, **kwargs):
626 """
626 """
627 pygmentize function using pygments
627 pygmentize function using pygments
628
628
629 :param filenode: file node whose content will be highlighted
629 :param filenode: file node whose content will be highlighted
630 """
630 """
631 lexer = get_lexer_for_filenode(filenode)
631 lexer = get_lexer_for_filenode(filenode)
632 return literal(code_highlight(filenode.content, lexer,
632 return literal(code_highlight(filenode.content, lexer,
633 CodeHtmlFormatter(**kwargs)))
633 CodeHtmlFormatter(**kwargs)))
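
# The **kwargs are handed straight to CodeHtmlFormatter, which builds on the
# pygments HtmlFormatter machinery, so the standard formatter options apply;
# a sketch:
#
#   rendered = pygmentize(filenode, linenos='table', lineanchors='L',
#                         anchorlinenos=True, cssclass='code-highlight')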
634
634
635
635
636 def is_following_repo(repo_name, user_id):
636 def is_following_repo(repo_name, user_id):
637 from rhodecode.model.scm import ScmModel
637 from rhodecode.model.scm import ScmModel
638 return ScmModel().is_following_repo(repo_name, user_id)
638 return ScmModel().is_following_repo(repo_name, user_id)
639
639
640
640
641 class _Message(object):
641 class _Message(object):
642 """A message returned by ``Flash.pop_messages()``.
642 """A message returned by ``Flash.pop_messages()``.
643
643
644 Converting the message to a string returns the message text. Instances
644 Converting the message to a string returns the message text. Instances
645 also have the following attributes:
645 also have the following attributes:
646
646
647 * ``message``: the message text.
647 * ``message``: the message text.
648 * ``category``: the category specified when the message was created.
648 * ``category``: the category specified when the message was created.
649 """
649 """
650
650
651 def __init__(self, category, message):
651 def __init__(self, category, message):
652 self.category = category
652 self.category = category
653 self.message = message
653 self.message = message
654
654
655 def __str__(self):
655 def __str__(self):
656 return self.message
656 return self.message
657
657
658 __unicode__ = __str__
658 __unicode__ = __str__
659
659
660 def __html__(self):
660 def __html__(self):
661 return escape(safe_unicode(self.message))
661 return escape(safe_unicode(self.message))
662
662
663
663
664 class Flash(_Flash):
664 class Flash(_Flash):
665
665
666 def pop_messages(self, request=None):
666 def pop_messages(self, request=None):
667 """Return all accumulated messages and delete them from the session.
667 """Return all accumulated messages and delete them from the session.
668
668
669 The return value is a list of ``Message`` objects.
669 The return value is a list of ``Message`` objects.
670 """
670 """
671 messages = []
671 messages = []
672
672
673 if request:
673 if request:
674 session = request.session
674 session = request.session
675 else:
675 else:
676 from pylons import session
676 from pylons import session
677
677
678 # Pop the 'old' pylons flash messages. They are tuples of the form
678 # Pop the 'old' pylons flash messages. They are tuples of the form
679 # (category, message)
679 # (category, message)
680 for cat, msg in session.pop(self.session_key, []):
680 for cat, msg in session.pop(self.session_key, []):
681 messages.append(_Message(cat, msg))
681 messages.append(_Message(cat, msg))
682
682
683 # Pop the 'new' pyramid flash messages for each category as list
683 # Pop the 'new' pyramid flash messages for each category as list
684 # of strings.
684 # of strings.
685 for cat in self.categories:
685 for cat in self.categories:
686 for msg in session.pop_flash(queue=cat):
686 for msg in session.pop_flash(queue=cat):
687 messages.append(_Message(cat, msg))
687 messages.append(_Message(cat, msg))
688 # Map messages from the default queue to the 'notice' category.
688 # Map messages from the default queue to the 'notice' category.
689 for msg in session.pop_flash():
689 for msg in session.pop_flash():
690 messages.append(_Message('notice', msg))
690 messages.append(_Message('notice', msg))
691
691
692 session.save()
692 session.save()
693 return messages
693 return messages
694
694
695 def json_alerts(self, request=None):
695 def json_alerts(self, request=None):
696 payloads = []
696 payloads = []
697 messages = flash.pop_messages(request=request)
697 messages = flash.pop_messages(request=request)
698 if messages:
698 if messages:
699 for message in messages:
699 for message in messages:
700 subdata = {}
700 subdata = {}
701 if hasattr(message.message, 'rsplit'):
701 if hasattr(message.message, 'rsplit'):
702 flash_data = message.message.rsplit('|DELIM|', 1)
702 flash_data = message.message.rsplit('|DELIM|', 1)
703 org_message = flash_data[0]
703 org_message = flash_data[0]
704 if len(flash_data) > 1:
704 if len(flash_data) > 1:
705 subdata = json.loads(flash_data[1])
705 subdata = json.loads(flash_data[1])
706 else:
706 else:
707 org_message = message.message
707 org_message = message.message
708 payloads.append({
708 payloads.append({
709 'message': {
709 'message': {
710 'message': u'{}'.format(org_message),
710 'message': u'{}'.format(org_message),
711 'level': message.category,
711 'level': message.category,
712 'force': True,
712 'force': True,
713 'subdata': subdata
713 'subdata': subdata
714 }
714 }
715 })
715 })
716 return json.dumps(payloads)
716 return json.dumps(payloads)
717
717
718 flash = Flash()
718 flash = Flash()
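
# Sketch of the '|DELIM|' convention consumed by json_alerts() above; the
# payload key is illustrative and the call assumes the usual callable
# Flash interface (message, category):
#
#   flash(u'Repository created|DELIM|' + json.dumps({'redirect_url': '/foo'}),
#         category='success')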
719
719
720 #==============================================================================
720 #==============================================================================
721 # SCM FILTERS available via h.
721 # SCM FILTERS available via h.
722 #==============================================================================
722 #==============================================================================
723 from rhodecode.lib.vcs.utils import author_name, author_email
723 from rhodecode.lib.vcs.utils import author_name, author_email
724 from rhodecode.lib.utils2 import credentials_filter, age as _age
724 from rhodecode.lib.utils2 import credentials_filter, age as _age
725 from rhodecode.model.db import User, ChangesetStatus
725 from rhodecode.model.db import User, ChangesetStatus
726
726
727 age = _age
727 age = _age
728 capitalize = lambda x: x.capitalize()
728 capitalize = lambda x: x.capitalize()
729 email = author_email
729 email = author_email
730 short_id = lambda x: x[:12]
730 short_id = lambda x: x[:12]
731 hide_credentials = lambda x: ''.join(credentials_filter(x))
731 hide_credentials = lambda x: ''.join(credentials_filter(x))
732
732
733
733
734 def age_component(datetime_iso, value=None, time_is_local=False):
734 def age_component(datetime_iso, value=None, time_is_local=False):
735 title = value or format_date(datetime_iso)
735 title = value or format_date(datetime_iso)
736 tzinfo = '+00:00'
736 tzinfo = '+00:00'
737
737
738 # detect if we have a timezone info, otherwise, add it
738 # detect if we have a timezone info, otherwise, add it
739 if isinstance(datetime_iso, datetime) and not datetime_iso.tzinfo:
739 if isinstance(datetime_iso, datetime) and not datetime_iso.tzinfo:
740 if time_is_local:
740 if time_is_local:
741 tzinfo = time.strftime("+%H:%M",
741 tzinfo = time.strftime("+%H:%M",
742 time.gmtime(
742 time.gmtime(
743 (datetime.now() - datetime.utcnow()).seconds + 1
743 (datetime.now() - datetime.utcnow()).seconds + 1
744 )
744 )
745 )
745 )
746
746
747 return literal(
747 return literal(
748 '<time class="timeago tooltip" '
748 '<time class="timeago tooltip" '
749 'title="{1}{2}" datetime="{0}{2}">{1}</time>'.format(
749 'title="{1}{2}" datetime="{0}{2}">{1}</time>'.format(
750 datetime_iso, title, tzinfo))
750 datetime_iso, title, tzinfo))
751
751
752
752
753 def _shorten_commit_id(commit_id):
753 def _shorten_commit_id(commit_id):
754 from rhodecode import CONFIG
754 from rhodecode import CONFIG
755 def_len = safe_int(CONFIG.get('rhodecode_show_sha_length', 12))
755 def_len = safe_int(CONFIG.get('rhodecode_show_sha_length', 12))
756 return commit_id[:def_len]
756 return commit_id[:def_len]
757
757
758
758
759 def show_id(commit):
759 def show_id(commit):
760 """
760 """
761 Configurable function that shows the commit ID;
761 Configurable function that shows the commit ID;
762 by default it's r123:fffeeefffeee
762 by default it's r123:fffeeefffeee
763
763
764 :param commit: commit instance
764 :param commit: commit instance
765 """
765 """
766 from rhodecode import CONFIG
766 from rhodecode import CONFIG
767 show_idx = str2bool(CONFIG.get('rhodecode_show_revision_number', True))
767 show_idx = str2bool(CONFIG.get('rhodecode_show_revision_number', True))
768
768
769 raw_id = _shorten_commit_id(commit.raw_id)
769 raw_id = _shorten_commit_id(commit.raw_id)
770 if show_idx:
770 if show_idx:
771 return 'r%s:%s' % (commit.idx, raw_id)
771 return 'r%s:%s' % (commit.idx, raw_id)
772 else:
772 else:
773 return '%s' % (raw_id, )
773 return '%s' % (raw_id, )
774
774
775
775
776 def format_date(date):
776 def format_date(date):
777 """
777 """
778 use a standardized formatting for dates used in RhodeCode
778 use a standardized formatting for dates used in RhodeCode
779
779
780 :param date: date/datetime object
780 :param date: date/datetime object
781 :return: formatted date
781 :return: formatted date
782 """
782 """
783
783
784 if date:
784 if date:
785 _fmt = "%a, %d %b %Y %H:%M:%S"
785 _fmt = "%a, %d %b %Y %H:%M:%S"
786 return safe_unicode(date.strftime(_fmt))
786 return safe_unicode(date.strftime(_fmt))
787
787
788 return u""
788 return u""
789
789
790
790
791 class _RepoChecker(object):
791 class _RepoChecker(object):
792
792
793 def __init__(self, backend_alias):
793 def __init__(self, backend_alias):
794 self._backend_alias = backend_alias
794 self._backend_alias = backend_alias
795
795
796 def __call__(self, repository):
796 def __call__(self, repository):
797 if hasattr(repository, 'alias'):
797 if hasattr(repository, 'alias'):
798 _type = repository.alias
798 _type = repository.alias
799 elif hasattr(repository, 'repo_type'):
799 elif hasattr(repository, 'repo_type'):
800 _type = repository.repo_type
800 _type = repository.repo_type
801 else:
801 else:
802 _type = repository
802 _type = repository
803 return _type == self._backend_alias
803 return _type == self._backend_alias
804
804
805 is_git = _RepoChecker('git')
805 is_git = _RepoChecker('git')
806 is_hg = _RepoChecker('hg')
806 is_hg = _RepoChecker('hg')
807 is_svn = _RepoChecker('svn')
807 is_svn = _RepoChecker('svn')
808
808
809
809
810 def get_repo_type_by_name(repo_name):
810 def get_repo_type_by_name(repo_name):
811 repo = Repository.get_by_repo_name(repo_name)
811 repo = Repository.get_by_repo_name(repo_name)
812 return repo.repo_type
812 return repo.repo_type
813
813
814
814
815 def is_svn_without_proxy(repository):
815 def is_svn_without_proxy(repository):
816 if is_svn(repository):
816 if is_svn(repository):
817 from rhodecode.model.settings import VcsSettingsModel
817 from rhodecode.model.settings import VcsSettingsModel
818 conf = VcsSettingsModel().get_ui_settings_as_config_obj()
818 conf = VcsSettingsModel().get_ui_settings_as_config_obj()
819 return not str2bool(conf.get('vcs_svn_proxy', 'http_requests_enabled'))
819 return not str2bool(conf.get('vcs_svn_proxy', 'http_requests_enabled'))
820 return False
820 return False
821
821
822
822
823 def discover_user(author):
823 def discover_user(author):
824 """
824 """
825 Tries to discover a RhodeCode User based on the author string. The author string
825 Tries to discover a RhodeCode User based on the author string. The author string
826 is typically `FirstName LastName <email@address.com>`
826 is typically `FirstName LastName <email@address.com>`
827 """
827 """
828
828
829 # if author is already an instance use it for extraction
829 # if author is already an instance use it for extraction
830 if isinstance(author, User):
830 if isinstance(author, User):
831 return author
831 return author
832
832
833 # Valid email in the attribute passed, see if they're in the system
833 # Valid email in the attribute passed, see if they're in the system
834 _email = author_email(author)
834 _email = author_email(author)
835 if _email != '':
835 if _email != '':
836 user = User.get_by_email(_email, case_insensitive=True, cache=True)
836 user = User.get_by_email(_email, case_insensitive=True, cache=True)
837 if user is not None:
837 if user is not None:
838 return user
838 return user
839
839
840 # Maybe it's a username? We try to extract it and fetch the user by username
840 # Maybe it's a username? We try to extract it and fetch the user by username
841 _author = author_name(author)
841 _author = author_name(author)
842 user = User.get_by_username(_author, case_insensitive=True, cache=True)
842 user = User.get_by_username(_author, case_insensitive=True, cache=True)
843 if user is not None:
843 if user is not None:
844 return user
844 return user
845
845
846 return None
846 return None
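
# Resolution order sketch: an already-resolved User instance is returned
# untouched, then an email match is tried, then a username match, eg:
#
#   discover_user('Jane Doe <jane@example.com>')  # User with that email, or
#                                                 # user named 'Jane Doe', or None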
847
847
848
848
849 def email_or_none(author):
849 def email_or_none(author):
850 # extract email from the commit string
850 # extract email from the commit string
851 _email = author_email(author)
851 _email = author_email(author)
852
852
853 # If we have an email, use it, otherwise
853 # If we have an email, use it, otherwise
854 # see if it contains a username we can get an email from
854 # see if it contains a username we can get an email from
855 if _email != '':
855 if _email != '':
856 return _email
856 return _email
857 else:
857 else:
858 user = User.get_by_username(
858 user = User.get_by_username(
859 author_name(author), case_insensitive=True, cache=True)
859 author_name(author), case_insensitive=True, cache=True)
860
860
861 if user is not None:
861 if user is not None:
862 return user.email
862 return user.email
863
863
864 # No valid email, not a valid user in the system, none!
864 # No valid email, not a valid user in the system, none!
865 return None
865 return None
866
866
867
867
868 def link_to_user(author, length=0, **kwargs):
868 def link_to_user(author, length=0, **kwargs):
869 user = discover_user(author)
869 user = discover_user(author)
870 # user can be None, but if we have it already it means we can re-use it
870 # user can be None, but if we have it already it means we can re-use it
871 # in the person() function, so we save 1 intensive-query
871 # in the person() function, so we save 1 intensive-query
872 if user:
872 if user:
873 author = user
873 author = user
874
874
875 display_person = person(author, 'username_or_name_or_email')
875 display_person = person(author, 'username_or_name_or_email')
876 if length:
876 if length:
877 display_person = shorter(display_person, length)
877 display_person = shorter(display_person, length)
878
878
879 if user:
879 if user:
880 return link_to(
880 return link_to(
881 escape(display_person),
881 escape(display_person),
882 route_path('user_profile', username=user.username),
882 route_path('user_profile', username=user.username),
883 **kwargs)
883 **kwargs)
884 else:
884 else:
885 return escape(display_person)
885 return escape(display_person)
886
886
887
887
888 def person(author, show_attr="username_and_name"):
888 def person(author, show_attr="username_and_name"):
889 user = discover_user(author)
889 user = discover_user(author)
890 if user:
890 if user:
891 return getattr(user, show_attr)
891 return getattr(user, show_attr)
892 else:
892 else:
893 _author = author_name(author)
893 _author = author_name(author)
894 _email = email(author)
894 _email = email(author)
895 return _author or _email
895 return _author or _email
896
896
897
897
898 def author_string(email):
898 def author_string(email):
899 if email:
899 if email:
900 user = User.get_by_email(email, case_insensitive=True, cache=True)
900 user = User.get_by_email(email, case_insensitive=True, cache=True)
901 if user:
901 if user:
902 if user.first_name or user.last_name:
902 if user.first_name or user.last_name:
903 return '%s %s &lt;%s&gt;' % (
903 return '%s %s &lt;%s&gt;' % (
904 user.first_name, user.last_name, email)
904 user.first_name, user.last_name, email)
905 else:
905 else:
906 return email
906 return email
907 else:
907 else:
908 return email
908 return email
909 else:
909 else:
910 return None
910 return None
911
911
912
912
913 def person_by_id(id_, show_attr="username_and_name"):
913 def person_by_id(id_, show_attr="username_and_name"):
914 # attr to return from fetched user
914 # attr to return from fetched user
915 person_getter = lambda usr: getattr(usr, show_attr)
915 person_getter = lambda usr: getattr(usr, show_attr)
916
916
917 # maybe it's an ID?
917 # maybe it's an ID?
918 if str(id_).isdigit() or isinstance(id_, int):
918 if str(id_).isdigit() or isinstance(id_, int):
919 id_ = int(id_)
919 id_ = int(id_)
920 user = User.get(id_)
920 user = User.get(id_)
921 if user is not None:
921 if user is not None:
922 return person_getter(user)
922 return person_getter(user)
923 return id_
923 return id_
924
924
925
925
926 def gravatar_with_user(author, show_disabled=False):
926 def gravatar_with_user(request, author, show_disabled=False):
927 from rhodecode.lib.utils import PartialRenderer
927 _render = request.get_partial_renderer('base/base.mako')
928 _render = PartialRenderer('base/base.mako')
929 return _render('gravatar_with_user', author, show_disabled=show_disabled)
928 return _render('gravatar_with_user', author, show_disabled=show_disabled)
930
929
931
930
932 def desc_stylize(value):
931 def desc_stylize(value):
933 """
932 """
934 converts tags found in the value into their html equivalent
933 converts tags found in the value into their html equivalent
935
934
936 :param value:
935 :param value:
937 """
936 """
938 if not value:
937 if not value:
939 return ''
938 return ''
940
939
941 value = re.sub(r'\[see\ \=\>\ *([a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\]',
940 value = re.sub(r'\[see\ \=\>\ *([a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\]',
942 '<div class="metatag" tag="see">see =&gt; \\1 </div>', value)
941 '<div class="metatag" tag="see">see =&gt; \\1 </div>', value)
943 value = re.sub(r'\[license\ \=\>\ *([a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\]',
942 value = re.sub(r'\[license\ \=\>\ *([a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\]',
944 '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/\\1">\\1</a></div>', value)
943 '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/\\1">\\1</a></div>', value)
945 value = re.sub(r'\[(requires|recommends|conflicts|base)\ \=\>\ *([a-zA-Z0-9\-\/]*)\]',
944 value = re.sub(r'\[(requires|recommends|conflicts|base)\ \=\>\ *([a-zA-Z0-9\-\/]*)\]',
946 '<div class="metatag" tag="\\1">\\1 =&gt; <a href="/\\2">\\2</a></div>', value)
945 '<div class="metatag" tag="\\1">\\1 =&gt; <a href="/\\2">\\2</a></div>', value)
947 value = re.sub(r'\[(lang|language)\ \=\>\ *([a-zA-Z\-\/\#\+]*)\]',
946 value = re.sub(r'\[(lang|language)\ \=\>\ *([a-zA-Z\-\/\#\+]*)\]',
948 '<div class="metatag" tag="lang">\\2</div>', value)
947 '<div class="metatag" tag="lang">\\2</div>', value)
949 value = re.sub(r'\[([a-z]+)\]',
948 value = re.sub(r'\[([a-z]+)\]',
950 '<div class="metatag" tag="\\1">\\1</div>', value)
949 '<div class="metatag" tag="\\1">\\1</div>', value)
951
950
952 return value
951 return value
953
952
954
953
955 def escaped_stylize(value):
954 def escaped_stylize(value):
956 """
955 """
957 converts tags from the value into their html equivalent, but escapes the value first
956 converts tags from the value into their html equivalent, but escapes the value first
958 """
957 """
959 if not value:
958 if not value:
960 return ''
959 return ''
961
960
962 # Using the default webhelpers escape method, but we have to force it into a
961 # Using the default webhelpers escape method, but we have to force it into a
963 # plain unicode string instead of a markup tag so it can be used in regex expressions
962 # plain unicode string instead of a markup tag so it can be used in regex expressions
964 value = unicode(escape(safe_unicode(value)))
963 value = unicode(escape(safe_unicode(value)))
965
964
966 value = re.sub(r'\[see\ \=\&gt;\ *([a-zA-Z0-9\/\=\?\&amp;\ \:\/\.\-]*)\]',
965 value = re.sub(r'\[see\ \=\&gt;\ *([a-zA-Z0-9\/\=\?\&amp;\ \:\/\.\-]*)\]',
967 '<div class="metatag" tag="see">see =&gt; \\1 </div>', value)
966 '<div class="metatag" tag="see">see =&gt; \\1 </div>', value)
968 value = re.sub(r'\[license\ \=\&gt;\ *([a-zA-Z0-9\/\=\?\&amp;\ \:\/\.\-]*)\]',
967 value = re.sub(r'\[license\ \=\&gt;\ *([a-zA-Z0-9\/\=\?\&amp;\ \:\/\.\-]*)\]',
969 '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/\\1">\\1</a></div>', value)
968 '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/\\1">\\1</a></div>', value)
970 value = re.sub(r'\[(requires|recommends|conflicts|base)\ \=\&gt;\ *([a-zA-Z0-9\-\/]*)\]',
969 value = re.sub(r'\[(requires|recommends|conflicts|base)\ \=\&gt;\ *([a-zA-Z0-9\-\/]*)\]',
971 '<div class="metatag" tag="\\1">\\1 =&gt; <a href="/\\2">\\2</a></div>', value)
970 '<div class="metatag" tag="\\1">\\1 =&gt; <a href="/\\2">\\2</a></div>', value)
972 value = re.sub(r'\[(lang|language)\ \=\&gt;\ *([a-zA-Z\-\/\#\+]*)\]',
971 value = re.sub(r'\[(lang|language)\ \=\&gt;\ *([a-zA-Z\-\/\#\+]*)\]',
973 '<div class="metatag" tag="lang">\\2</div>', value)
972 '<div class="metatag" tag="lang">\\2</div>', value)
974 value = re.sub(r'\[([a-z]+)\]',
973 value = re.sub(r'\[([a-z]+)\]',
975 '<div class="metatag" tag="\\1">\\1</div>', value)
974 '<div class="metatag" tag="\\1">\\1</div>', value)
976
975
977 return value
976 return value
978
977
979
978
980 def bool2icon(value):
979 def bool2icon(value):
981 """
980 """
982 Returns boolean value of a given value, represented as html element with
981 Returns boolean value of a given value, represented as html element with
983 classes that will represent icons
982 classes that will represent icons
984
983
985 :param value: given value to convert to html node
984 :param value: given value to convert to html node
986 """
985 """
987
986
988 if value: # does bool conversion
987 if value: # does bool conversion
989 return HTML.tag('i', class_="icon-true")
988 return HTML.tag('i', class_="icon-true")
990 else: # not true as bool
989 else: # not true as bool
991 return HTML.tag('i', class_="icon-false")
990 return HTML.tag('i', class_="icon-false")
992
991
993
992
994 #==============================================================================
993 #==============================================================================
995 # PERMS
994 # PERMS
996 #==============================================================================
995 #==============================================================================
997 from rhodecode.lib.auth import HasPermissionAny, HasPermissionAll, \
996 from rhodecode.lib.auth import HasPermissionAny, HasPermissionAll, \
998 HasRepoPermissionAny, HasRepoPermissionAll, HasRepoGroupPermissionAll, \
997 HasRepoPermissionAny, HasRepoPermissionAll, HasRepoGroupPermissionAll, \
999 HasRepoGroupPermissionAny, HasRepoPermissionAnyApi, get_csrf_token, \
998 HasRepoGroupPermissionAny, HasRepoPermissionAnyApi, get_csrf_token, \
1000 csrf_token_key
999 csrf_token_key
1001
1000
1002
1001
1003 #==============================================================================
1002 #==============================================================================
1004 # GRAVATAR URL
1003 # GRAVATAR URL
1005 #==============================================================================
1004 #==============================================================================
1006 class InitialsGravatar(object):
1005 class InitialsGravatar(object):
1007 def __init__(self, email_address, first_name, last_name, size=30,
1006 def __init__(self, email_address, first_name, last_name, size=30,
1008 background=None, text_color='#fff'):
1007 background=None, text_color='#fff'):
1009 self.size = size
1008 self.size = size
1010 self.first_name = first_name
1009 self.first_name = first_name
1011 self.last_name = last_name
1010 self.last_name = last_name
1012 self.email_address = email_address
1011 self.email_address = email_address
1013 self.background = background or self.str2color(email_address)
1012 self.background = background or self.str2color(email_address)
1014 self.text_color = text_color
1013 self.text_color = text_color
1015
1014
1016 def get_color_bank(self):
1015 def get_color_bank(self):
1017 """
1016 """
1018 returns a predefined list of colors that gravatars can use.
1017 returns a predefined list of colors that gravatars can use.
1019 Those are randomized distinct colors that guarantee readability and
1018 Those are randomized distinct colors that guarantee readability and
1020 uniqueness.
1019 uniqueness.
1021
1020
1022 generated with: http://phrogz.net/css/distinct-colors.html
1021 generated with: http://phrogz.net/css/distinct-colors.html
1023 """
1022 """
1024 return [
1023 return [
1025 '#bf3030', '#a67f53', '#00ff00', '#5989b3', '#392040', '#d90000',
1024 '#bf3030', '#a67f53', '#00ff00', '#5989b3', '#392040', '#d90000',
1026 '#402910', '#204020', '#79baf2', '#a700b3', '#bf6060', '#7f5320',
1025 '#402910', '#204020', '#79baf2', '#a700b3', '#bf6060', '#7f5320',
1027 '#008000', '#003059', '#ee00ff', '#ff0000', '#8c4b00', '#007300',
1026 '#008000', '#003059', '#ee00ff', '#ff0000', '#8c4b00', '#007300',
1028 '#005fb3', '#de73e6', '#ff4040', '#ffaa00', '#3df255', '#203140',
1027 '#005fb3', '#de73e6', '#ff4040', '#ffaa00', '#3df255', '#203140',
1029 '#47004d', '#591616', '#664400', '#59b365', '#0d2133', '#83008c',
1028 '#47004d', '#591616', '#664400', '#59b365', '#0d2133', '#83008c',
1030 '#592d2d', '#bf9f60', '#73e682', '#1d3f73', '#73006b', '#402020',
1029 '#592d2d', '#bf9f60', '#73e682', '#1d3f73', '#73006b', '#402020',
1031 '#b2862d', '#397341', '#597db3', '#e600d6', '#a60000', '#736039',
1030 '#b2862d', '#397341', '#597db3', '#e600d6', '#a60000', '#736039',
1032 '#00b318', '#79aaf2', '#330d30', '#ff8080', '#403010', '#16591f',
1031 '#00b318', '#79aaf2', '#330d30', '#ff8080', '#403010', '#16591f',
1033 '#002459', '#8c4688', '#e50000', '#ffbf40', '#00732e', '#102340',
1032 '#002459', '#8c4688', '#e50000', '#ffbf40', '#00732e', '#102340',
1034 '#bf60ac', '#8c4646', '#cc8800', '#00a642', '#1d3473', '#b32d98',
1033 '#bf60ac', '#8c4646', '#cc8800', '#00a642', '#1d3473', '#b32d98',
1035 '#660e00', '#ffd580', '#80ffb2', '#7391e6', '#733967', '#d97b6c',
1034 '#660e00', '#ffd580', '#80ffb2', '#7391e6', '#733967', '#d97b6c',
1036 '#8c5e00', '#59b389', '#3967e6', '#590047', '#73281d', '#665200',
1035 '#8c5e00', '#59b389', '#3967e6', '#590047', '#73281d', '#665200',
1037 '#00e67a', '#2d50b3', '#8c2377', '#734139', '#b2982d', '#16593a',
1036 '#00e67a', '#2d50b3', '#8c2377', '#734139', '#b2982d', '#16593a',
1038 '#001859', '#ff00aa', '#a65e53', '#ffcc00', '#0d3321', '#2d3959',
1037 '#001859', '#ff00aa', '#a65e53', '#ffcc00', '#0d3321', '#2d3959',
1039 '#731d56', '#401610', '#4c3d00', '#468c6c', '#002ca6', '#d936a3',
1038 '#731d56', '#401610', '#4c3d00', '#468c6c', '#002ca6', '#d936a3',
1040 '#d94c36', '#403920', '#36d9a3', '#0d1733', '#592d4a', '#993626',
1039 '#d94c36', '#403920', '#36d9a3', '#0d1733', '#592d4a', '#993626',
1041 '#cca300', '#00734d', '#46598c', '#8c005e', '#7f1100', '#8c7000',
1040 '#cca300', '#00734d', '#46598c', '#8c005e', '#7f1100', '#8c7000',
1042 '#00a66f', '#7382e6', '#b32d74', '#d9896c', '#ffe680', '#1d7362',
1041 '#00a66f', '#7382e6', '#b32d74', '#d9896c', '#ffe680', '#1d7362',
1043 '#364cd9', '#73003d', '#d93a00', '#998a4d', '#59b3a1', '#5965b3',
1042 '#364cd9', '#73003d', '#d93a00', '#998a4d', '#59b3a1', '#5965b3',
1044 '#e5007a', '#73341d', '#665f00', '#00b38f', '#0018b3', '#59163a',
1043 '#e5007a', '#73341d', '#665f00', '#00b38f', '#0018b3', '#59163a',
1045 '#b2502d', '#bfb960', '#00ffcc', '#23318c', '#a6537f', '#734939',
1044 '#b2502d', '#bfb960', '#00ffcc', '#23318c', '#a6537f', '#734939',
1046 '#b2a700', '#104036', '#3d3df2', '#402031', '#e56739', '#736f39',
1045 '#b2a700', '#104036', '#3d3df2', '#402031', '#e56739', '#736f39',
1047 '#79f2ea', '#000059', '#401029', '#4c1400', '#ffee00', '#005953',
1046 '#79f2ea', '#000059', '#401029', '#4c1400', '#ffee00', '#005953',
1048 '#101040', '#990052', '#402820', '#403d10', '#00ffee', '#0000d9',
1047 '#101040', '#990052', '#402820', '#403d10', '#00ffee', '#0000d9',
1049 '#ff80c4', '#a66953', '#eeff00', '#00ccbe', '#8080ff', '#e673a1',
1048 '#ff80c4', '#a66953', '#eeff00', '#00ccbe', '#8080ff', '#e673a1',
1050 '#a62c00', '#474d00', '#1a3331', '#46468c', '#733950', '#662900',
1049 '#a62c00', '#474d00', '#1a3331', '#46468c', '#733950', '#662900',
1051 '#858c23', '#238c85', '#0f0073', '#b20047', '#d9986c', '#becc00',
1050 '#858c23', '#238c85', '#0f0073', '#b20047', '#d9986c', '#becc00',
1052 '#396f73', '#281d73', '#ff0066', '#ff6600', '#dee673', '#59adb3',
1051 '#396f73', '#281d73', '#ff0066', '#ff6600', '#dee673', '#59adb3',
1053 '#6559b3', '#590024', '#b2622d', '#98b32d', '#36ced9', '#332d59',
1052 '#6559b3', '#590024', '#b2622d', '#98b32d', '#36ced9', '#332d59',
1054 '#40001a', '#733f1d', '#526600', '#005359', '#242040', '#bf6079',
1053 '#40001a', '#733f1d', '#526600', '#005359', '#242040', '#bf6079',
1055 '#735039', '#cef23d', '#007780', '#5630bf', '#66001b', '#b24700',
1054 '#735039', '#cef23d', '#007780', '#5630bf', '#66001b', '#b24700',
1056 '#acbf60', '#1d6273', '#25008c', '#731d34', '#a67453', '#50592d',
1055 '#acbf60', '#1d6273', '#25008c', '#731d34', '#a67453', '#50592d',
1057 '#00ccff', '#6600ff', '#ff0044', '#4c1f00', '#8a994d', '#79daf2',
1056 '#00ccff', '#6600ff', '#ff0044', '#4c1f00', '#8a994d', '#79daf2',
1058 '#a173e6', '#d93662', '#402310', '#aaff00', '#2d98b3', '#8c40ff',
1057 '#a173e6', '#d93662', '#402310', '#aaff00', '#2d98b3', '#8c40ff',
1059 '#592d39', '#ff8c40', '#354020', '#103640', '#1a0040', '#331a20',
1058 '#592d39', '#ff8c40', '#354020', '#103640', '#1a0040', '#331a20',
1060 '#331400', '#334d00', '#1d5673', '#583973', '#7f0022', '#4c3626',
1059 '#331400', '#334d00', '#1d5673', '#583973', '#7f0022', '#4c3626',
1061 '#88cc00', '#36a3d9', '#3d0073', '#d9364c', '#33241a', '#698c23',
1060 '#88cc00', '#36a3d9', '#3d0073', '#d9364c', '#33241a', '#698c23',
1062 '#5995b3', '#300059', '#e57382', '#7f3300', '#366600', '#00aaff',
1061 '#5995b3', '#300059', '#e57382', '#7f3300', '#366600', '#00aaff',
1063 '#3a1659', '#733941', '#663600', '#74b32d', '#003c59', '#7f53a6',
1062 '#3a1659', '#733941', '#663600', '#74b32d', '#003c59', '#7f53a6',
1064 '#73000f', '#ff8800', '#baf279', '#79caf2', '#291040', '#a6293a',
1063 '#73000f', '#ff8800', '#baf279', '#79caf2', '#291040', '#a6293a',
1065 '#b2742d', '#587339', '#0077b3', '#632699', '#400009', '#d9a66c',
1064 '#b2742d', '#587339', '#0077b3', '#632699', '#400009', '#d9a66c',
1066 '#294010', '#2d4a59', '#aa00ff', '#4c131b', '#b25f00', '#5ce600',
1065 '#294010', '#2d4a59', '#aa00ff', '#4c131b', '#b25f00', '#5ce600',
1067 '#267399', '#a336d9', '#990014', '#664e33', '#86bf60', '#0088ff',
1066 '#267399', '#a336d9', '#990014', '#664e33', '#86bf60', '#0088ff',
1068 '#7700b3', '#593a16', '#073300', '#1d4b73', '#ac60bf', '#e59539',
1067 '#7700b3', '#593a16', '#073300', '#1d4b73', '#ac60bf', '#e59539',
1069 '#4f8c46', '#368dd9', '#5c0073'
1068 '#4f8c46', '#368dd9', '#5c0073'
1070 ]
1069 ]
1071
1070
1072 def rgb_to_hex_color(self, rgb_tuple):
1071 def rgb_to_hex_color(self, rgb_tuple):
1073 """
1072 """
1074 Converts the passed rgb_tuple to a hex color.
1073 Converts the passed rgb_tuple to a hex color.
1075
1074
1076 :param rgb_tuple: tuple of 3 ints representing an rgb color
1075 :param rgb_tuple: tuple of 3 ints representing an rgb color
1077 """
1076 """
1078 return '#' + ("".join(map(chr, rgb_tuple)).encode('hex'))
1077 return '#' + ("".join(map(chr, rgb_tuple)).encode('hex'))
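
    # For example, the first color-bank entry round-trips cleanly:
    # rgb_to_hex_color((191, 48, 48)) == '#bf3030' (Python 2 hex-encoding of the bytes)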
1079
1078
1080 def email_to_int_list(self, email_str):
1079 def email_to_int_list(self, email_str):
1081 """
1080 """
1081 Get every byte of the hex digest value of the email and turn it into an integer.
1080 Get every byte of the hex digest value of the email and turn it into an integer.
1082 Each value is always between 0-255
1081 Each value is always between 0-255
1084 """
1083 """
1085 digest = md5_safe(email_str.lower())
1084 digest = md5_safe(email_str.lower())
1086 return [int(digest[i * 2:i * 2 + 2], 16) for i in range(16)]
1085 return [int(digest[i * 2:i * 2 + 2], 16) for i in range(16)]
1087
1086
1088 def pick_color_bank_index(self, email_str, color_bank):
1087 def pick_color_bank_index(self, email_str, color_bank):
1089 return self.email_to_int_list(email_str)[0] % len(color_bank)
1088 return self.email_to_int_list(email_str)[0] % len(color_bank)
1090
1089
1091 def str2color(self, email_str):
1090 def str2color(self, email_str):
1092 """
1091 """
1093 Tries to map an email to a color using a stable algorithm
1092 Tries to map an email to a color using a stable algorithm
1094
1093
1095 :param email_str:
1094 :param email_str:
1096 """
1095 """
1097 color_bank = self.get_color_bank()
1096 color_bank = self.get_color_bank()
1098 # pick position (modulo its length so we always find it in the
1097 # pick position (modulo its length so we always find it in the
1099 # bank even if it's smaller than 256 values)
1098 # bank even if it's smaller than 256 values)
1100 pos = self.pick_color_bank_index(email_str, color_bank)
1099 pos = self.pick_color_bank_index(email_str, color_bank)
1101 return color_bank[pos]
1100 return color_bank[pos]
1102
1101
1103 def normalize_email(self, email_address):
1102 def normalize_email(self, email_address):
1104 import unicodedata
1103 import unicodedata
1105 # default host used to fill in the fake/missing email
1104 # default host used to fill in the fake/missing email
1106 default_host = u'localhost'
1105 default_host = u'localhost'
1107
1106
1108 if not email_address:
1107 if not email_address:
1109 email_address = u'%s@%s' % (User.DEFAULT_USER, default_host)
1108 email_address = u'%s@%s' % (User.DEFAULT_USER, default_host)
1110
1109
1111 email_address = safe_unicode(email_address)
1110 email_address = safe_unicode(email_address)
1112
1111
1113 if u'@' not in email_address:
1112 if u'@' not in email_address:
1114 email_address = u'%s@%s' % (email_address, default_host)
1113 email_address = u'%s@%s' % (email_address, default_host)
1115
1114
1116 if email_address.endswith(u'@'):
1115 if email_address.endswith(u'@'):
1117 email_address = u'%s%s' % (email_address, default_host)
1116 email_address = u'%s%s' % (email_address, default_host)
1118
1117
1119 email_address = unicodedata.normalize('NFKD', email_address)\
1118 email_address = unicodedata.normalize('NFKD', email_address)\
1120 .encode('ascii', 'ignore')
1119 .encode('ascii', 'ignore')
1121 return email_address
1120 return email_address
1122
1121
1123 def get_initials(self):
1122 def get_initials(self):
1124 """
1123 """
1125 Returns 2 letter initials calculated based on the input.
1124 Returns 2 letter initials calculated based on the input.
1126 The algorithm first picks the given email address, and takes the first letter
1125 The algorithm first picks the given email address, and takes the first letter
1127 of the part before @, and then the first letter of the server name. In case
1126 of the part before @, and then the first letter of the server name. In case
1128 the part before @ is in the format `somestring.somestring2`, it replaces
1127 the part before @ is in the format `somestring.somestring2`, it replaces
1129 the server letter with the first letter of somestring2
1128 the server letter with the first letter of somestring2
1130
1129
1131 In case function was initialized with both first and lastname, this
1130 In case function was initialized with both first and lastname, this
1132 overrides the extraction from email by first letter of the first and
1131 overrides the extraction from email by first letter of the first and
1133 last name. We add special logic to that functionality: in case the full name
1132 last name. We add special logic to that functionality: in case the full name
1134 is compound, like Guido Von Rossum, we use the last part of the last name
1133 is compound, like Guido Von Rossum, we use the last part of the last name
1135 (Von Rossum) picking `R`.
1134 (Von Rossum) picking `R`.
1136
1135
1137 The function also normalizes non-ascii characters to their ascii
1136 The function also normalizes non-ascii characters to their ascii
1138 representation, eg Ą => A
1137 representation, eg Ą => A
1139 """
1138 """
1140 import unicodedata
1139 import unicodedata
1141 # replace non-ascii to ascii
1140 # replace non-ascii to ascii
1142 first_name = unicodedata.normalize(
1141 first_name = unicodedata.normalize(
1143 'NFKD', safe_unicode(self.first_name)).encode('ascii', 'ignore')
1142 'NFKD', safe_unicode(self.first_name)).encode('ascii', 'ignore')
1144 last_name = unicodedata.normalize(
1143 last_name = unicodedata.normalize(
1145 'NFKD', safe_unicode(self.last_name)).encode('ascii', 'ignore')
1144 'NFKD', safe_unicode(self.last_name)).encode('ascii', 'ignore')
1146
1145
1147 # do NFKD encoding, and also make sure email has proper format
1146 # do NFKD encoding, and also make sure email has proper format
1148 email_address = self.normalize_email(self.email_address)
1147 email_address = self.normalize_email(self.email_address)
1149
1148
1150 # first push the email initials
1149 # first push the email initials
1151 prefix, server = email_address.split('@', 1)
1150 prefix, server = email_address.split('@', 1)
1152
1151
1153 # check if prefix is maybe a 'first_name.last_name' syntax
1152 # check if prefix is maybe a 'first_name.last_name' syntax
1154 _dot_split = prefix.rsplit('.', 1)
1153 _dot_split = prefix.rsplit('.', 1)
1155 if len(_dot_split) == 2:
1154 if len(_dot_split) == 2:
1156 initials = [_dot_split[0][0], _dot_split[1][0]]
1155 initials = [_dot_split[0][0], _dot_split[1][0]]
1157 else:
1156 else:
1158 initials = [prefix[0], server[0]]
1157 initials = [prefix[0], server[0]]
1159
1158
1160 # then try to replace either first_name or last_name
1159 # then try to replace either first_name or last_name
1161 fn_letter = (first_name or " ")[0].strip()
1160 fn_letter = (first_name or " ")[0].strip()
1162 ln_letter = (last_name.split(' ', 1)[-1] or " ")[0].strip()
1161 ln_letter = (last_name.split(' ', 1)[-1] or " ")[0].strip()
1163
1162
1164 if fn_letter:
1163 if fn_letter:
1165 initials[0] = fn_letter
1164 initials[0] = fn_letter
1166
1165
1167 if ln_letter:
1166 if ln_letter:
1168 initials[1] = ln_letter
1167 initials[1] = ln_letter
1169
1168
1170 return ''.join(initials).upper()
1169 return ''.join(initials).upper()
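
    # Illustrative outcomes of the rules above (addresses are hypothetical):
    #
    #   'john.doe@example.com' with no names set      -> 'JD'
    #   'admin@rhodecode.org' with no names set       -> 'AR'
    #   first_name='Guido', last_name='Von Rossum'    -> 'GR'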
1171
1170
1172 def get_img_data_by_type(self, font_family, img_type):
1171 def get_img_data_by_type(self, font_family, img_type):
1173 default_user = """
1172 default_user = """
1174 <svg xmlns="http://www.w3.org/2000/svg"
1173 <svg xmlns="http://www.w3.org/2000/svg"
1175 version="1.1" x="0px" y="0px" width="{size}" height="{size}"
1174 version="1.1" x="0px" y="0px" width="{size}" height="{size}"
1176 viewBox="-15 -10 439.165 429.164"
1175 viewBox="-15 -10 439.165 429.164"
1177
1176
1178 xml:space="preserve"
1177 xml:space="preserve"
1179 style="background:{background};" >
1178 style="background:{background};" >
1180
1179
1181 <path d="M204.583,216.671c50.664,0,91.74-48.075,
1180 <path d="M204.583,216.671c50.664,0,91.74-48.075,
1182 91.74-107.378c0-82.237-41.074-107.377-91.74-107.377
1181 91.74-107.378c0-82.237-41.074-107.377-91.74-107.377
1183 c-50.668,0-91.74,25.14-91.74,107.377C112.844,
1182 c-50.668,0-91.74,25.14-91.74,107.377C112.844,
1184 168.596,153.916,216.671,
1183 168.596,153.916,216.671,
1185 204.583,216.671z" fill="{text_color}"/>
1184 204.583,216.671z" fill="{text_color}"/>
1186 <path d="M407.164,374.717L360.88,
1185 <path d="M407.164,374.717L360.88,
1187 270.454c-2.117-4.771-5.836-8.728-10.465-11.138l-71.83-37.392
1186 270.454c-2.117-4.771-5.836-8.728-10.465-11.138l-71.83-37.392
1188 c-1.584-0.823-3.502-0.663-4.926,0.415c-20.316,
1187 c-1.584-0.823-3.502-0.663-4.926,0.415c-20.316,
1189 15.366-44.203,23.488-69.076,23.488c-24.877,
1188 15.366-44.203,23.488-69.076,23.488c-24.877,
1190 0-48.762-8.122-69.078-23.488
1189 0-48.762-8.122-69.078-23.488
1191 c-1.428-1.078-3.346-1.238-4.93-0.415L58.75,
1190 c-1.428-1.078-3.346-1.238-4.93-0.415L58.75,
1192 259.316c-4.631,2.41-8.346,6.365-10.465,11.138L2.001,374.717
1191 259.316c-4.631,2.41-8.346,6.365-10.465,11.138L2.001,374.717
1193 c-3.191,7.188-2.537,15.412,1.75,22.005c4.285,
1192 c-3.191,7.188-2.537,15.412,1.75,22.005c4.285,
1194 6.592,11.537,10.526,19.4,10.526h362.861c7.863,0,15.117-3.936,
1193 6.592,11.537,10.526,19.4,10.526h362.861c7.863,0,15.117-3.936,
1195 19.402-10.527 C409.699,390.129,
1194 19.402-10.527 C409.699,390.129,
1196 410.355,381.902,407.164,374.717z" fill="{text_color}"/>
1195 410.355,381.902,407.164,374.717z" fill="{text_color}"/>
1197 </svg>""".format(
1196 </svg>""".format(
1198 size=self.size,
1197 size=self.size,
1199 background='#979797', # @grey4
1198 background='#979797', # @grey4
1200 text_color=self.text_color,
1199 text_color=self.text_color,
1201 font_family=font_family)
1200 font_family=font_family)
1202
1201
1203 return {
1202 return {
1204 "default_user": default_user
1203 "default_user": default_user
1205 }[img_type]
1204 }[img_type]
1206
1205
1207 def get_img_data(self, svg_type=None):
1206 def get_img_data(self, svg_type=None):
1208 """
1207 """
1209 generates the svg metadata for image
1208 generates the svg metadata for image
1210 """
1209 """
1211
1210
1212 font_family = ','.join([
1211 font_family = ','.join([
1213 'proximanovaregular',
1212 'proximanovaregular',
1214 'Proxima Nova Regular',
1213 'Proxima Nova Regular',
1215 'Proxima Nova',
1214 'Proxima Nova',
1216 'Arial',
1215 'Arial',
1217 'Lucida Grande',
1216 'Lucida Grande',
1218 'sans-serif'
1217 'sans-serif'
1219 ])
1218 ])
1220 if svg_type:
1219 if svg_type:
1221 return self.get_img_data_by_type(font_family, svg_type)
1220 return self.get_img_data_by_type(font_family, svg_type)
1222
1221
1223 initials = self.get_initials()
1222 initials = self.get_initials()
1224 img_data = """
1223 img_data = """
1225 <svg xmlns="http://www.w3.org/2000/svg" pointer-events="none"
1224 <svg xmlns="http://www.w3.org/2000/svg" pointer-events="none"
1226 width="{size}" height="{size}"
1225 width="{size}" height="{size}"
1227 style="width: 100%; height: 100%; background-color: {background}"
1226 style="width: 100%; height: 100%; background-color: {background}"
1228 viewBox="0 0 {size} {size}">
1227 viewBox="0 0 {size} {size}">
1229 <text text-anchor="middle" y="50%" x="50%" dy="0.35em"
1228 <text text-anchor="middle" y="50%" x="50%" dy="0.35em"
1230 pointer-events="auto" fill="{text_color}"
1229 pointer-events="auto" fill="{text_color}"
1231 font-family="{font_family}"
1230 font-family="{font_family}"
1232 style="font-weight: 400; font-size: {f_size}px;">{text}
1231 style="font-weight: 400; font-size: {f_size}px;">{text}
1233 </text>
1232 </text>
1234 </svg>""".format(
1233 </svg>""".format(
1235 size=self.size,
1234 size=self.size,
1236 f_size=self.size/1.85, # scale the text inside the box nicely
1235 f_size=self.size/1.85, # scale the text inside the box nicely
1237 background=self.background,
1236 background=self.background,
1238 text_color=self.text_color,
1237 text_color=self.text_color,
1239 text=initials.upper(),
1238 text=initials.upper(),
1240 font_family=font_family)
1239 font_family=font_family)
1241
1240
1242 return img_data
1241 return img_data
1243
1242
1244 def generate_svg(self, svg_type=None):
1243 def generate_svg(self, svg_type=None):
1245 img_data = self.get_img_data(svg_type)
1244 img_data = self.get_img_data(svg_type)
1246 return "data:image/svg+xml;base64,%s" % img_data.encode('base64')
1245 return "data:image/svg+xml;base64,%s" % img_data.encode('base64')
1247
1246
1248
1247
1249 def initials_gravatar(email_address, first_name, last_name, size=30):
1248 def initials_gravatar(email_address, first_name, last_name, size=30):
1250 svg_type = None
1249 svg_type = None
1251 if email_address == User.DEFAULT_USER_EMAIL:
1250 if email_address == User.DEFAULT_USER_EMAIL:
1252 svg_type = 'default_user'
1251 svg_type = 'default_user'
1253 klass = InitialsGravatar(email_address, first_name, last_name, size)
1252 klass = InitialsGravatar(email_address, first_name, last_name, size)
1254 return klass.generate_svg(svg_type=svg_type)
1253 return klass.generate_svg(svg_type=svg_type)
1255
1254
1256
1255
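A minimal usage sketch (not part of the original module), assuming made-up email and name values: `initials_gravatar` returns a base64-encoded SVG data URI, so it can be embedded directly in an <img> tag when no gravatar is available.

def _example_initials_avatar_tag():
    # produces something like 'data:image/svg+xml;base64,...' rendering the
    # user's initials on a deterministic background colour
    data_uri = initials_gravatar('jane@example.com', 'Jane', 'Doe', size=32)
    return '<img src="%s" width="32" height="32" alt="JD"/>' % data_uri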
1257 def gravatar_url(email_address, size=30, request=None):
1256 def gravatar_url(email_address, size=30, request=None):
1258 request = request or get_current_request()
1257 request = request or get_current_request()
1259 if request and hasattr(request, 'call_context'):
1258 if request and hasattr(request, 'call_context'):
1260 _use_gravatar = request.call_context.visual.use_gravatar
1259 _use_gravatar = request.call_context.visual.use_gravatar
1261 _gravatar_url = request.call_context.visual.gravatar_url
1260 _gravatar_url = request.call_context.visual.gravatar_url
1262 else:
1261 else:
1263 # doh, we need to re-import those to mock it later
1262 # doh, we need to re-import those to mock it later
1264 from pylons import tmpl_context as c
1263 from pylons import tmpl_context as c
1265
1264
1266 _use_gravatar = c.visual.use_gravatar
1265 _use_gravatar = c.visual.use_gravatar
1267 _gravatar_url = c.visual.gravatar_url
1266 _gravatar_url = c.visual.gravatar_url
1268
1267
1269 _gravatar_url = _gravatar_url or User.DEFAULT_GRAVATAR_URL
1268 _gravatar_url = _gravatar_url or User.DEFAULT_GRAVATAR_URL
1270
1269
1271 email_address = email_address or User.DEFAULT_USER_EMAIL
1270 email_address = email_address or User.DEFAULT_USER_EMAIL
1272 if isinstance(email_address, unicode):
1271 if isinstance(email_address, unicode):
1273 # hashlib crashes on unicode items
1272 # hashlib crashes on unicode items
1274 email_address = safe_str(email_address)
1273 email_address = safe_str(email_address)
1275
1274
1276 # empty email or default user
1275 # empty email or default user
1277 if not email_address or email_address == User.DEFAULT_USER_EMAIL:
1276 if not email_address or email_address == User.DEFAULT_USER_EMAIL:
1278 return initials_gravatar(User.DEFAULT_USER_EMAIL, '', '', size=size)
1277 return initials_gravatar(User.DEFAULT_USER_EMAIL, '', '', size=size)
1279
1278
1280 if _use_gravatar:
1279 if _use_gravatar:
1281 # TODO: Disuse pyramid thread locals. Think about another solution to
1280 # TODO: Disuse pyramid thread locals. Think about another solution to
1282 # get the host and schema here.
1281 # get the host and schema here.
1283 request = get_current_request()
1282 request = get_current_request()
1284 tmpl = safe_str(_gravatar_url)
1283 tmpl = safe_str(_gravatar_url)
1285 tmpl = tmpl.replace('{email}', email_address)\
1284 tmpl = tmpl.replace('{email}', email_address)\
1286 .replace('{md5email}', md5_safe(email_address.lower())) \
1285 .replace('{md5email}', md5_safe(email_address.lower())) \
1287 .replace('{netloc}', request.host)\
1286 .replace('{netloc}', request.host)\
1288 .replace('{scheme}', request.scheme)\
1287 .replace('{scheme}', request.scheme)\
1289 .replace('{size}', safe_str(size))
1288 .replace('{size}', safe_str(size))
1290 return tmpl
1289 return tmpl
1291 else:
1290 else:
1292 return initials_gravatar(email_address, '', '', size=size)
1291 return initials_gravatar(email_address, '', '', size=size)
1293
1292
1294
1293
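A sketch of the template substitution performed above (not part of the original module). The template string here is hypothetical, but the placeholders are exactly the ones `gravatar_url` replaces, and `md5_safe` is the module's own helper used above.

def _example_gravatar_template():
    # {email}, {md5email}, {netloc}, {scheme} and {size} are the supported keys;
    # this sketch only substitutes the two that need no request context
    tmpl = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
    return tmpl.replace('{md5email}', md5_safe('jane@example.com')) \
               .replace('{size}', '30')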
1295 class Page(_Page):
1294 class Page(_Page):
1296 """
1295 """
1297 Custom pager to match rendering style with paginator
1296 Custom pager to match rendering style with paginator
1298 """
1297 """
1299
1298
1300 def _get_pos(self, cur_page, max_page, items):
1299 def _get_pos(self, cur_page, max_page, items):
1301 edge = (items / 2) + 1
1300 edge = (items / 2) + 1
1302 if (cur_page <= edge):
1301 if (cur_page <= edge):
1303 radius = max(items / 2, items - cur_page)
1302 radius = max(items / 2, items - cur_page)
1304 elif (max_page - cur_page) < edge:
1303 elif (max_page - cur_page) < edge:
1305 radius = (items - 1) - (max_page - cur_page)
1304 radius = (items - 1) - (max_page - cur_page)
1306 else:
1305 else:
1307 radius = items / 2
1306 radius = items / 2
1308
1307
1309 left = max(1, (cur_page - (radius)))
1308 left = max(1, (cur_page - (radius)))
1310 right = min(max_page, cur_page + (radius))
1309 right = min(max_page, cur_page + (radius))
1311 return left, cur_page, right
1310 return left, cur_page, right
1312
1311
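# Worked example (annotation, not original code): _get_pos(7, 12, 5) gives
# edge = 5/2 + 1 = 3; since 7 > 3 and 12 - 7 is not < 3, radius = 5/2 = 2,
# so the window is (left=5, cur=7, right=9) -- matching the
# '1 .. 5 6 [7] 8 9 .. 12' example used in _range() below.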
1313 def _range(self, regexp_match):
1312 def _range(self, regexp_match):
1314 """
1313 """
1315 Return range of linked pages (e.g. '1 2 [3] 4 5 6 7 8').
1314 Return range of linked pages (e.g. '1 2 [3] 4 5 6 7 8').
1316
1315
1317 Arguments:
1316 Arguments:
1318
1317
1319 regexp_match
1318 regexp_match
1320 A "re" (regular expressions) match object containing the
1319 A "re" (regular expressions) match object containing the
1321 radius of linked pages around the current page in
1320 radius of linked pages around the current page in
1322 regexp_match.group(1) as a string
1321 regexp_match.group(1) as a string
1323
1322
1324 This function is supposed to be called as a callable in
1323 This function is supposed to be called as a callable in
1325 re.sub.
1324 re.sub.
1326
1325
1327 """
1326 """
1328 radius = int(regexp_match.group(1))
1327 radius = int(regexp_match.group(1))
1329
1328
1330 # Compute the first and last page number within the radius
1329 # Compute the first and last page number within the radius
1331 # e.g. '1 .. 5 6 [7] 8 9 .. 12'
1330 # e.g. '1 .. 5 6 [7] 8 9 .. 12'
1332 # -> leftmost_page = 5
1331 # -> leftmost_page = 5
1333 # -> rightmost_page = 9
1332 # -> rightmost_page = 9
1334 leftmost_page, _cur, rightmost_page = self._get_pos(self.page,
1333 leftmost_page, _cur, rightmost_page = self._get_pos(self.page,
1335 self.last_page,
1334 self.last_page,
1336 (radius * 2) + 1)
1335 (radius * 2) + 1)
1337 nav_items = []
1336 nav_items = []
1338
1337
1339 # Create a link to the first page (unless we are on the first page
1338 # Create a link to the first page (unless we are on the first page
1340 # or there would be no need to insert '..' spacers)
1339 # or there would be no need to insert '..' spacers)
1341 if self.page != self.first_page and self.first_page < leftmost_page:
1340 if self.page != self.first_page and self.first_page < leftmost_page:
1342 nav_items.append(self._pagerlink(self.first_page, self.first_page))
1341 nav_items.append(self._pagerlink(self.first_page, self.first_page))
1343
1342
1344 # Insert dots if there are pages between the first page
1343 # Insert dots if there are pages between the first page
1345 # and the currently displayed page range
1344 # and the currently displayed page range
1346 if leftmost_page - self.first_page > 1:
1345 if leftmost_page - self.first_page > 1:
1347 # Wrap in a SPAN tag if nolink_attr is set
1346 # Wrap in a SPAN tag if nolink_attr is set
1348 text = '..'
1347 text = '..'
1349 if self.dotdot_attr:
1348 if self.dotdot_attr:
1350 text = HTML.span(c=text, **self.dotdot_attr)
1349 text = HTML.span(c=text, **self.dotdot_attr)
1351 nav_items.append(text)
1350 nav_items.append(text)
1352
1351
1353 for thispage in xrange(leftmost_page, rightmost_page + 1):
1352 for thispage in xrange(leftmost_page, rightmost_page + 1):
1354 # Highlight the current page number and do not use a link
1353 # Highlight the current page number and do not use a link
1355 if thispage == self.page:
1354 if thispage == self.page:
1356 text = '%s' % (thispage,)
1355 text = '%s' % (thispage,)
1357 # Wrap in a SPAN tag if nolink_attr is set
1356 # Wrap in a SPAN tag if nolink_attr is set
1358 if self.curpage_attr:
1357 if self.curpage_attr:
1359 text = HTML.span(c=text, **self.curpage_attr)
1358 text = HTML.span(c=text, **self.curpage_attr)
1360 nav_items.append(text)
1359 nav_items.append(text)
1361 # Otherwise create just a link to that page
1360 # Otherwise create just a link to that page
1362 else:
1361 else:
1363 text = '%s' % (thispage,)
1362 text = '%s' % (thispage,)
1364 nav_items.append(self._pagerlink(thispage, text))
1363 nav_items.append(self._pagerlink(thispage, text))
1365
1364
1366 # Insert dots if there are pages between the displayed
1365 # Insert dots if there are pages between the displayed
1367 # page numbers and the end of the page range
1366 # page numbers and the end of the page range
1368 if self.last_page - rightmost_page > 1:
1367 if self.last_page - rightmost_page > 1:
1369 text = '..'
1368 text = '..'
1370 # Wrap in a SPAN tag if nolink_attr is set
1369 # Wrap in a SPAN tag if nolink_attr is set
1371 if self.dotdot_attr:
1370 if self.dotdot_attr:
1372 text = HTML.span(c=text, **self.dotdot_attr)
1371 text = HTML.span(c=text, **self.dotdot_attr)
1373 nav_items.append(text)
1372 nav_items.append(text)
1374
1373
1375 # Create a link to the very last page (unless we are on the last
1374 # Create a link to the very last page (unless we are on the last
1376 # page or there would be no need to insert '..' spacers)
1375 # page or there would be no need to insert '..' spacers)
1377 if self.page != self.last_page and rightmost_page < self.last_page:
1376 if self.page != self.last_page and rightmost_page < self.last_page:
1378 nav_items.append(self._pagerlink(self.last_page, self.last_page))
1377 nav_items.append(self._pagerlink(self.last_page, self.last_page))
1379
1378
1380 ## prerender links
1379 ## prerender links
1381 #_page_link = url.current()
1380 #_page_link = url.current()
1382 #nav_items.append(literal('<link rel="prerender" href="%s?page=%s">' % (_page_link, str(int(self.page)+1))))
1381 #nav_items.append(literal('<link rel="prerender" href="%s?page=%s">' % (_page_link, str(int(self.page)+1))))
1383 #nav_items.append(literal('<link rel="prefetch" href="%s?page=%s">' % (_page_link, str(int(self.page)+1))))
1382 #nav_items.append(literal('<link rel="prefetch" href="%s?page=%s">' % (_page_link, str(int(self.page)+1))))
1384 return self.separator.join(nav_items)
1383 return self.separator.join(nav_items)
1385
1384
1386 def pager(self, format='~2~', page_param='page', partial_param='partial',
1385 def pager(self, format='~2~', page_param='page', partial_param='partial',
1387 show_if_single_page=False, separator=' ', onclick=None,
1386 show_if_single_page=False, separator=' ', onclick=None,
1388 symbol_first='<<', symbol_last='>>',
1387 symbol_first='<<', symbol_last='>>',
1389 symbol_previous='<', symbol_next='>',
1388 symbol_previous='<', symbol_next='>',
1390 link_attr={'class': 'pager_link', 'rel': 'prerender'},
1389 link_attr={'class': 'pager_link', 'rel': 'prerender'},
1391 curpage_attr={'class': 'pager_curpage'},
1390 curpage_attr={'class': 'pager_curpage'},
1392 dotdot_attr={'class': 'pager_dotdot'}, **kwargs):
1391 dotdot_attr={'class': 'pager_dotdot'}, **kwargs):
1393
1392
1394 self.curpage_attr = curpage_attr
1393 self.curpage_attr = curpage_attr
1395 self.separator = separator
1394 self.separator = separator
1396 self.pager_kwargs = kwargs
1395 self.pager_kwargs = kwargs
1397 self.page_param = page_param
1396 self.page_param = page_param
1398 self.partial_param = partial_param
1397 self.partial_param = partial_param
1399 self.onclick = onclick
1398 self.onclick = onclick
1400 self.link_attr = link_attr
1399 self.link_attr = link_attr
1401 self.dotdot_attr = dotdot_attr
1400 self.dotdot_attr = dotdot_attr
1402
1401
1403 # Don't show navigator if there is no more than one page
1402 # Don't show navigator if there is no more than one page
1404 if self.page_count == 0 or (self.page_count == 1 and not show_if_single_page):
1403 if self.page_count == 0 or (self.page_count == 1 and not show_if_single_page):
1405 return ''
1404 return ''
1406
1405
1407 from string import Template
1406 from string import Template
1408 # Replace ~...~ in token format by range of pages
1407 # Replace ~...~ in token format by range of pages
1409 result = re.sub(r'~(\d+)~', self._range, format)
1408 result = re.sub(r'~(\d+)~', self._range, format)
1410
1409
1411 # Interpolate '%' variables
1410 # Interpolate '%' variables
1412 result = Template(result).safe_substitute({
1411 result = Template(result).safe_substitute({
1413 'first_page': self.first_page,
1412 'first_page': self.first_page,
1414 'last_page': self.last_page,
1413 'last_page': self.last_page,
1415 'page': self.page,
1414 'page': self.page,
1416 'page_count': self.page_count,
1415 'page_count': self.page_count,
1417 'items_per_page': self.items_per_page,
1416 'items_per_page': self.items_per_page,
1418 'first_item': self.first_item,
1417 'first_item': self.first_item,
1419 'last_item': self.last_item,
1418 'last_item': self.last_item,
1420 'item_count': self.item_count,
1419 'item_count': self.item_count,
1421 'link_first': self.page > self.first_page and \
1420 'link_first': self.page > self.first_page and \
1422 self._pagerlink(self.first_page, symbol_first) or '',
1421 self._pagerlink(self.first_page, symbol_first) or '',
1423 'link_last': self.page < self.last_page and \
1422 'link_last': self.page < self.last_page and \
1424 self._pagerlink(self.last_page, symbol_last) or '',
1423 self._pagerlink(self.last_page, symbol_last) or '',
1425 'link_previous': self.previous_page and \
1424 'link_previous': self.previous_page and \
1426 self._pagerlink(self.previous_page, symbol_previous) \
1425 self._pagerlink(self.previous_page, symbol_previous) \
1427 or HTML.span(symbol_previous, class_="pg-previous disabled"),
1426 or HTML.span(symbol_previous, class_="pg-previous disabled"),
1428 'link_next': self.next_page and \
1427 'link_next': self.next_page and \
1429 self._pagerlink(self.next_page, symbol_next) \
1428 self._pagerlink(self.next_page, symbol_next) \
1430 or HTML.span(symbol_next, class_="pg-next disabled")
1429 or HTML.span(symbol_next, class_="pg-next disabled")
1431 })
1430 })
1432
1431
1433 return literal(result)
1432 return literal(result)
1434
1433
1435
1434
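A hypothetical rendering call for the pager above (not part of the original module); the collection and page numbers are made up, and '~2~' expands to a radius-2 window of page links around the current page.

def _example_pager_markup(collection):
    # needs an active request so the individual page links can be generated
    page = Page(collection, page=7, items_per_page=20)
    # $link_previous / $link_next are filled in by safe_substitute() in pager()
    return page.pager('$link_previous ~2~ $link_next')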
1436 #==============================================================================
1435 #==============================================================================
1437 # REPO PAGER, PAGER FOR REPOSITORY
1436 # REPO PAGER, PAGER FOR REPOSITORY
1438 #==============================================================================
1437 #==============================================================================
1439 class RepoPage(Page):
1438 class RepoPage(Page):
1440
1439
1441 def __init__(self, collection, page=1, items_per_page=20,
1440 def __init__(self, collection, page=1, items_per_page=20,
1442 item_count=None, url=None, **kwargs):
1441 item_count=None, url=None, **kwargs):
1443
1442
1444 """Create a "RepoPage" instance. special pager for paging
1443 """Create a "RepoPage" instance. special pager for paging
1445 repository
1444 repository
1446 """
1445 """
1447 self._url_generator = url
1446 self._url_generator = url
1448
1447
1449 # Save the kwargs class-wide so they can be used in the pager() method
1448 # Save the kwargs class-wide so they can be used in the pager() method
1450 self.kwargs = kwargs
1449 self.kwargs = kwargs
1451
1450
1452 # Save a reference to the collection
1451 # Save a reference to the collection
1453 self.original_collection = collection
1452 self.original_collection = collection
1454
1453
1455 self.collection = collection
1454 self.collection = collection
1456
1455
1457 # The self.page is the number of the current page.
1456 # The self.page is the number of the current page.
1458 # The first page has the number 1!
1457 # The first page has the number 1!
1459 try:
1458 try:
1460 self.page = int(page) # make it int() if we get it as a string
1459 self.page = int(page) # make it int() if we get it as a string
1461 except (ValueError, TypeError):
1460 except (ValueError, TypeError):
1462 self.page = 1
1461 self.page = 1
1463
1462
1464 self.items_per_page = items_per_page
1463 self.items_per_page = items_per_page
1465
1464
1466 # Unless the user tells us how many items the collections has
1465 # Unless the user tells us how many items the collections has
1467 # we calculate that ourselves.
1466 # we calculate that ourselves.
1468 if item_count is not None:
1467 if item_count is not None:
1469 self.item_count = item_count
1468 self.item_count = item_count
1470 else:
1469 else:
1471 self.item_count = len(self.collection)
1470 self.item_count = len(self.collection)
1472
1471
1473 # Compute the number of the first and last available page
1472 # Compute the number of the first and last available page
1474 if self.item_count > 0:
1473 if self.item_count > 0:
1475 self.first_page = 1
1474 self.first_page = 1
1476 self.page_count = int(math.ceil(float(self.item_count) /
1475 self.page_count = int(math.ceil(float(self.item_count) /
1477 self.items_per_page))
1476 self.items_per_page))
1478 self.last_page = self.first_page + self.page_count - 1
1477 self.last_page = self.first_page + self.page_count - 1
1479
1478
1480 # Make sure that the requested page number is in the range of
1479 # Make sure that the requested page number is in the range of
1481 # valid pages
1480 # valid pages
1482 if self.page > self.last_page:
1481 if self.page > self.last_page:
1483 self.page = self.last_page
1482 self.page = self.last_page
1484 elif self.page < self.first_page:
1483 elif self.page < self.first_page:
1485 self.page = self.first_page
1484 self.page = self.first_page
1486
1485
1487 # Note: the number of items on this page can be less than
1486 # Note: the number of items on this page can be less than
1488 # items_per_page if the last page is not full
1487 # items_per_page if the last page is not full
1489 self.first_item = max(0, (self.item_count) - (self.page *
1488 self.first_item = max(0, (self.item_count) - (self.page *
1490 items_per_page))
1489 items_per_page))
1491 self.last_item = ((self.item_count - 1) - items_per_page *
1490 self.last_item = ((self.item_count - 1) - items_per_page *
1492 (self.page - 1))
1491 (self.page - 1))
1493
1492
1494 self.items = list(self.collection[self.first_item:self.last_item + 1])
1493 self.items = list(self.collection[self.first_item:self.last_item + 1])
1495
1494
1496 # Links to previous and next page
1495 # Links to previous and next page
1497 if self.page > self.first_page:
1496 if self.page > self.first_page:
1498 self.previous_page = self.page - 1
1497 self.previous_page = self.page - 1
1499 else:
1498 else:
1500 self.previous_page = None
1499 self.previous_page = None
1501
1500
1502 if self.page < self.last_page:
1501 if self.page < self.last_page:
1503 self.next_page = self.page + 1
1502 self.next_page = self.page + 1
1504 else:
1503 else:
1505 self.next_page = None
1504 self.next_page = None
1506
1505
1507 # No items available
1506 # No items available
1508 else:
1507 else:
1509 self.first_page = None
1508 self.first_page = None
1510 self.page_count = 0
1509 self.page_count = 0
1511 self.last_page = None
1510 self.last_page = None
1512 self.first_item = None
1511 self.first_item = None
1513 self.last_item = None
1512 self.last_item = None
1514 self.previous_page = None
1513 self.previous_page = None
1515 self.next_page = None
1514 self.next_page = None
1516 self.items = []
1515 self.items = []
1517
1516
1518 # This is a subclass of the 'list' type. Initialise the list now.
1517 # This is a subclass of the 'list' type. Initialise the list now.
1519 list.__init__(self, reversed(self.items))
1518 list.__init__(self, reversed(self.items))
1520
1519
1521
1520
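A worked example of the slicing above, using made-up numbers: with item_count=45, items_per_page=20 and page=1, first_item = max(0, 45 - 1*20) = 25 and last_item = (45 - 1) - 20*0 = 44, so the page holds collection[25:45] displayed in reverse order; page=3 (the last page) gets first_item = 0 and last_item = 4, i.e. collection[0:5] reversed, only 5 items.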
1522 def breadcrumb_repo_link(repo):
1521 def breadcrumb_repo_link(repo):
1523 """
1522 """
1524 Makes a breadcrumbs path link to repo
1523 Makes a breadcrumbs path link to repo
1525
1524
1526 ex::
1525 ex::
1527 group >> subgroup >> repo
1526 group >> subgroup >> repo
1528
1527
1529 :param repo: a Repository instance
1528 :param repo: a Repository instance
1530 """
1529 """
1531
1530
1532 path = [
1531 path = [
1533 link_to(group.name, route_path('repo_group_home', repo_group_name=group.group_name))
1532 link_to(group.name, route_path('repo_group_home', repo_group_name=group.group_name))
1534 for group in repo.groups_with_parents
1533 for group in repo.groups_with_parents
1535 ] + [
1534 ] + [
1536 link_to(repo.just_name, route_path('repo_summary', repo_name=repo.repo_name))
1535 link_to(repo.just_name, route_path('repo_summary', repo_name=repo.repo_name))
1537 ]
1536 ]
1538
1537
1539 return literal(' &raquo; '.join(path))
1538 return literal(' &raquo; '.join(path))
1540
1539
1541
1540
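An illustrative result for a hypothetical repository 'group/subgroup/repo': breadcrumb_repo_link() joins one link per parent group plus the repository link with ' &raquo; ' separators, roughly:

    <a href="/group">group</a> &raquo; <a href="/group/subgroup">subgroup</a> &raquo; <a href="/group/subgroup/repo">repo</a>

(the actual hrefs come from route_path('repo_group_home') and route_path('repo_summary'), so they depend on the configured routes).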
1542 def format_byte_size_binary(file_size):
1541 def format_byte_size_binary(file_size):
1543 """
1542 """
1544 Formats file/folder sizes using binary (base-1024) units.
1543 Formats file/folder sizes using binary (base-1024) units.
1545 """
1544 """
1546 if file_size is None:
1545 if file_size is None:
1547 file_size = 0
1546 file_size = 0
1548
1547
1549 formatted_size = format_byte_size(file_size, binary=True)
1548 formatted_size = format_byte_size(file_size, binary=True)
1550 return formatted_size
1549 return formatted_size
1551
1550
1552
1551
1553 def urlify_text(text_, safe=True):
1552 def urlify_text(text_, safe=True):
1554 """
1553 """
1555 Extract URLs from text and make HTML links out of them
1554 Extract URLs from text and make HTML links out of them
1556
1555
1557 :param text_:
1556 :param text_:
1558 """
1557 """
1559
1558
1560 url_pat = re.compile(r'''(http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@#.&+]'''
1559 url_pat = re.compile(r'''(http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@#.&+]'''
1561 '''|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)''')
1560 '''|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)''')
1562
1561
1563 def url_func(match_obj):
1562 def url_func(match_obj):
1564 url_full = match_obj.groups()[0]
1563 url_full = match_obj.groups()[0]
1565 return '<a href="%(url)s">%(url)s</a>' % ({'url': url_full})
1564 return '<a href="%(url)s">%(url)s</a>' % ({'url': url_full})
1566 _newtext = url_pat.sub(url_func, text_)
1565 _newtext = url_pat.sub(url_func, text_)
1567 if safe:
1566 if safe:
1568 return literal(_newtext)
1567 return literal(_newtext)
1569 return _newtext
1568 return _newtext
1570
1569
1571
1570
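A behaviour sketch for `urlify_text` (not part of the original module); the message is made up.

def _example_urlify_text():
    # only the http(s) URL gets wrapped in an anchor; with safe=True (the
    # default) the same string would additionally be wrapped in literal()
    html = urlify_text('docs at http://example.com/guide', safe=False)
    # -> 'docs at <a href="http://example.com/guide">http://example.com/guide</a>'
    return html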
1572 def urlify_commits(text_, repository):
1571 def urlify_commits(text_, repository):
1573 """
1572 """
1574 Extract commit ids from text and make link from them
1573 Extract commit ids from text and make link from them
1575
1574
1576 :param text_:
1575 :param text_:
1577 :param repository: repo name to build the URL with
1576 :param repository: repo name to build the URL with
1578 """
1577 """
1579 from pylons import url # doh, we need to re-import url to mock it later
1578 from pylons import url # doh, we need to re-import url to mock it later
1580 URL_PAT = re.compile(r'(^|\s)([0-9a-fA-F]{12,40})($|\s)')
1579 URL_PAT = re.compile(r'(^|\s)([0-9a-fA-F]{12,40})($|\s)')
1581
1580
1582 def url_func(match_obj):
1581 def url_func(match_obj):
1583 commit_id = match_obj.groups()[1]
1582 commit_id = match_obj.groups()[1]
1584 pref = match_obj.groups()[0]
1583 pref = match_obj.groups()[0]
1585 suf = match_obj.groups()[2]
1584 suf = match_obj.groups()[2]
1586
1585
1587 tmpl = (
1586 tmpl = (
1588 '%(pref)s<a class="%(cls)s" href="%(url)s">'
1587 '%(pref)s<a class="%(cls)s" href="%(url)s">'
1589 '%(commit_id)s</a>%(suf)s'
1588 '%(commit_id)s</a>%(suf)s'
1590 )
1589 )
1591 return tmpl % {
1590 return tmpl % {
1592 'pref': pref,
1591 'pref': pref,
1593 'cls': 'revision-link',
1592 'cls': 'revision-link',
1594 'url': url('changeset_home', repo_name=repository,
1593 'url': url('changeset_home', repo_name=repository,
1595 revision=commit_id, qualified=True),
1594 revision=commit_id, qualified=True),
1596 'commit_id': commit_id,
1595 'commit_id': commit_id,
1597 'suf': suf
1596 'suf': suf
1598 }
1597 }
1599
1598
1600 newtext = URL_PAT.sub(url_func, text_)
1599 newtext = URL_PAT.sub(url_func, text_)
1601
1600
1602 return newtext
1601 return newtext
1603
1602
1604
1603
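An illustrative call with a made-up repository name and commit id; urlify_commits() only links bare 12-40 character hex strings delimited by whitespace or string boundaries:

    urlify_commits('fixes in deadbeefcafe123456', 'some-repo')
    # -> 'fixes in <a class="revision-link" href="...">deadbeefcafe123456</a>'

(the href is produced by the pylons url('changeset_home', ...) call, so an active request is required to render it).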
1605 def _process_url_func(match_obj, repo_name, uid, entry,
1604 def _process_url_func(match_obj, repo_name, uid, entry,
1606 return_raw_data=False, link_format='html'):
1605 return_raw_data=False, link_format='html'):
1607 pref = ''
1606 pref = ''
1608 if match_obj.group().startswith(' '):
1607 if match_obj.group().startswith(' '):
1609 pref = ' '
1608 pref = ' '
1610
1609
1611 issue_id = ''.join(match_obj.groups())
1610 issue_id = ''.join(match_obj.groups())
1612
1611
1613 if link_format == 'html':
1612 if link_format == 'html':
1614 tmpl = (
1613 tmpl = (
1615 '%(pref)s<a class="%(cls)s" href="%(url)s">'
1614 '%(pref)s<a class="%(cls)s" href="%(url)s">'
1616 '%(issue-prefix)s%(id-repr)s'
1615 '%(issue-prefix)s%(id-repr)s'
1617 '</a>')
1616 '</a>')
1618 elif link_format == 'rst':
1617 elif link_format == 'rst':
1619 tmpl = '`%(issue-prefix)s%(id-repr)s <%(url)s>`_'
1618 tmpl = '`%(issue-prefix)s%(id-repr)s <%(url)s>`_'
1620 elif link_format == 'markdown':
1619 elif link_format == 'markdown':
1621 tmpl = '[%(issue-prefix)s%(id-repr)s](%(url)s)'
1620 tmpl = '[%(issue-prefix)s%(id-repr)s](%(url)s)'
1622 else:
1621 else:
1623 raise ValueError('Bad link_format:{}'.format(link_format))
1622 raise ValueError('Bad link_format:{}'.format(link_format))
1624
1623
1625 (repo_name_cleaned,
1624 (repo_name_cleaned,
1626 parent_group_name) = RepoGroupModel().\
1625 parent_group_name) = RepoGroupModel().\
1627 _get_group_name_and_parent(repo_name)
1626 _get_group_name_and_parent(repo_name)
1628
1627
1629 # variables replacement
1628 # variables replacement
1630 named_vars = {
1629 named_vars = {
1631 'id': issue_id,
1630 'id': issue_id,
1632 'repo': repo_name,
1631 'repo': repo_name,
1633 'repo_name': repo_name_cleaned,
1632 'repo_name': repo_name_cleaned,
1634 'group_name': parent_group_name
1633 'group_name': parent_group_name
1635 }
1634 }
1636 # named regex variables
1635 # named regex variables
1637 named_vars.update(match_obj.groupdict())
1636 named_vars.update(match_obj.groupdict())
1638 _url = string.Template(entry['url']).safe_substitute(**named_vars)
1637 _url = string.Template(entry['url']).safe_substitute(**named_vars)
1639
1638
1640 data = {
1639 data = {
1641 'pref': pref,
1640 'pref': pref,
1642 'cls': 'issue-tracker-link',
1641 'cls': 'issue-tracker-link',
1643 'url': _url,
1642 'url': _url,
1644 'id-repr': issue_id,
1643 'id-repr': issue_id,
1645 'issue-prefix': entry['pref'],
1644 'issue-prefix': entry['pref'],
1646 'serv': entry['url'],
1645 'serv': entry['url'],
1647 }
1646 }
1648 if return_raw_data:
1647 if return_raw_data:
1649 return {
1648 return {
1650 'id': issue_id,
1649 'id': issue_id,
1651 'url': _url
1650 'url': _url
1652 }
1651 }
1653 return tmpl % data
1652 return tmpl % data
1654
1653
1655
1654
1656 def process_patterns(text_string, repo_name, link_format='html'):
1655 def process_patterns(text_string, repo_name, link_format='html'):
1657 allowed_formats = ['html', 'rst', 'markdown']
1656 allowed_formats = ['html', 'rst', 'markdown']
1658 if link_format not in allowed_formats:
1657 if link_format not in allowed_formats:
1659 raise ValueError('Link format can be only one of:{} got {}'.format(
1658 raise ValueError('Link format can be only one of:{} got {}'.format(
1660 allowed_formats, link_format))
1659 allowed_formats, link_format))
1661
1660
1662 repo = None
1661 repo = None
1663 if repo_name:
1662 if repo_name:
1664 # Retrieve the repo object here so an invalid repo_name does not explode
1663 # Retrieve the repo object here so an invalid repo_name does not explode
1665 # inside IssueTrackerSettingsModel; the (possibly invalid) name is still passed further down
1664 # inside IssueTrackerSettingsModel; the (possibly invalid) name is still passed further down
1666 repo = Repository.get_by_repo_name(repo_name, cache=True)
1665 repo = Repository.get_by_repo_name(repo_name, cache=True)
1667
1666
1668 settings_model = IssueTrackerSettingsModel(repo=repo)
1667 settings_model = IssueTrackerSettingsModel(repo=repo)
1669 active_entries = settings_model.get_settings(cache=True)
1668 active_entries = settings_model.get_settings(cache=True)
1670
1669
1671 issues_data = []
1670 issues_data = []
1672 newtext = text_string
1671 newtext = text_string
1673
1672
1674 for uid, entry in active_entries.items():
1673 for uid, entry in active_entries.items():
1675 log.debug('found issue tracker entry with uid %s' % (uid,))
1674 log.debug('found issue tracker entry with uid %s' % (uid,))
1676
1675
1677 if not (entry['pat'] and entry['url']):
1676 if not (entry['pat'] and entry['url']):
1678 log.debug('skipping due to missing data')
1677 log.debug('skipping due to missing data')
1679 continue
1678 continue
1680
1679
1681 log.debug('issue tracker entry: uid: `%s` PAT:%s URL:%s PREFIX:%s'
1680 log.debug('issue tracker entry: uid: `%s` PAT:%s URL:%s PREFIX:%s'
1682 % (uid, entry['pat'], entry['url'], entry['pref']))
1681 % (uid, entry['pat'], entry['url'], entry['pref']))
1683
1682
1684 try:
1683 try:
1685 pattern = re.compile(r'%s' % entry['pat'])
1684 pattern = re.compile(r'%s' % entry['pat'])
1686 except re.error:
1685 except re.error:
1687 log.exception(
1686 log.exception(
1688 'issue tracker pattern: `%s` failed to compile',
1687 'issue tracker pattern: `%s` failed to compile',
1689 entry['pat'])
1688 entry['pat'])
1690 continue
1689 continue
1691
1690
1692 data_func = partial(
1691 data_func = partial(
1693 _process_url_func, repo_name=repo_name, entry=entry, uid=uid,
1692 _process_url_func, repo_name=repo_name, entry=entry, uid=uid,
1694 return_raw_data=True)
1693 return_raw_data=True)
1695
1694
1696 for match_obj in pattern.finditer(text_string):
1695 for match_obj in pattern.finditer(text_string):
1697 issues_data.append(data_func(match_obj))
1696 issues_data.append(data_func(match_obj))
1698
1697
1699 url_func = partial(
1698 url_func = partial(
1700 _process_url_func, repo_name=repo_name, entry=entry, uid=uid,
1699 _process_url_func, repo_name=repo_name, entry=entry, uid=uid,
1701 link_format=link_format)
1700 link_format=link_format)
1702
1701
1703 newtext = pattern.sub(url_func, newtext)
1702 newtext = pattern.sub(url_func, newtext)
1704 log.debug('processed prefix:uid `%s`' % (uid,))
1703 log.debug('processed prefix:uid `%s`' % (uid,))
1705
1704
1706 return newtext, issues_data
1705 return newtext, issues_data
1707
1706
1708
1707
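A hypothetical issue-tracker entry of the shape consumed by process_patterns() above (the pattern, URL and prefix are made up); 'url' is a string.Template in which ${id}, ${repo}, ${repo_name}, ${group_name} and any named regex groups are substituted by _process_url_func().

_EXAMPLE_ISSUE_TRACKER_ENTRY = {
    'pat': r'#(\d+)',                                        # regex matched against the text
    'url': 'https://issues.example.com/${repo_name}/${id}',  # link template
    'pref': '#',                                             # prefix rendered before the id
}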
1709 def urlify_commit_message(commit_text, repository=None):
1708 def urlify_commit_message(commit_text, repository=None):
1710 """
1709 """
1711 Parses given text message and makes proper links.
1710 Parses given text message and makes proper links.
1712 issue references are linked to the configured issue tracker, and commit ids become commit links
1711 issue references are linked to the configured issue tracker, and commit ids become commit links
1713
1712
1714 :param commit_text:
1713 :param commit_text:
1715 :param repository:
1714 :param repository:
1716 """
1715 """
1717 from pylons import url # doh, we need to re-import url to mock it later
1716 from pylons import url # doh, we need to re-import url to mock it later
1718
1717
1719 def escaper(string):
1718 def escaper(string):
1720 return string.replace('<', '&lt;').replace('>', '&gt;')
1719 return string.replace('<', '&lt;').replace('>', '&gt;')
1721
1720
1722 newtext = escaper(commit_text)
1721 newtext = escaper(commit_text)
1723
1722
1724 # extract http/https links and make them real urls
1723 # extract http/https links and make them real urls
1725 newtext = urlify_text(newtext, safe=False)
1724 newtext = urlify_text(newtext, safe=False)
1726
1725
1727 # urlify commits - extract commit ids and make link out of them, if we have
1726 # urlify commits - extract commit ids and make link out of them, if we have
1728 # the scope of repository present.
1727 # the scope of repository present.
1729 if repository:
1728 if repository:
1730 newtext = urlify_commits(newtext, repository)
1729 newtext = urlify_commits(newtext, repository)
1731
1730
1732 # process issue tracker patterns
1731 # process issue tracker patterns
1733 newtext, issues = process_patterns(newtext, repository or '')
1732 newtext, issues = process_patterns(newtext, repository or '')
1734
1733
1735 return literal(newtext)
1734 return literal(newtext)
1736
1735
1737
1736
1738 def render_binary(repo_name, file_obj):
1737 def render_binary(repo_name, file_obj):
1739 """
1738 """
1740 Choose how to render a binary file
1739 Choose how to render a binary file
1741 """
1740 """
1742 filename = file_obj.name
1741 filename = file_obj.name
1743
1742
1744 # images
1743 # images
1745 for ext in ['*.png', '*.jpg', '*.ico', '*.gif']:
1744 for ext in ['*.png', '*.jpg', '*.ico', '*.gif']:
1746 if fnmatch.fnmatch(filename, pat=ext):
1745 if fnmatch.fnmatch(filename, pat=ext):
1747 alt = filename
1746 alt = filename
1748 src = route_path(
1747 src = route_path(
1749 'repo_file_raw', repo_name=repo_name,
1748 'repo_file_raw', repo_name=repo_name,
1750 commit_id=file_obj.commit.raw_id, f_path=file_obj.path)
1749 commit_id=file_obj.commit.raw_id, f_path=file_obj.path)
1751 return literal('<img class="rendered-binary" alt="{}" src="{}">'.format(alt, src))
1750 return literal('<img class="rendered-binary" alt="{}" src="{}">'.format(alt, src))
1752
1751
1753
1752
1754 def renderer_from_filename(filename, exclude=None):
1753 def renderer_from_filename(filename, exclude=None):
1755 """
1754 """
1756 choose a renderer based on filename, this works only for text based files
1755 choose a renderer based on filename, this works only for text based files
1757 """
1756 """
1758
1757
1759 # ipython
1758 # ipython
1760 for ext in ['*.ipynb']:
1759 for ext in ['*.ipynb']:
1761 if fnmatch.fnmatch(filename, pat=ext):
1760 if fnmatch.fnmatch(filename, pat=ext):
1762 return 'jupyter'
1761 return 'jupyter'
1763
1762
1764 is_markup = MarkupRenderer.renderer_from_filename(filename, exclude=exclude)
1763 is_markup = MarkupRenderer.renderer_from_filename(filename, exclude=exclude)
1765 if is_markup:
1764 if is_markup:
1766 return is_markup
1765 return is_markup
1767 return None
1766 return None
1768
1767
1769
1768
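For example, renderer_from_filename('analysis.ipynb') returns 'jupyter' via the fnmatch check above, while other filenames are delegated to MarkupRenderer.renderer_from_filename(), which presumably maps e.g. '*.md' to its markdown renderer; anything unrecognised yields None so the raw file source is shown instead.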
1770 def render(source, renderer='rst', mentions=False, relative_url=None,
1769 def render(source, renderer='rst', mentions=False, relative_url=None,
1771 repo_name=None):
1770 repo_name=None):
1772
1771
1773 def maybe_convert_relative_links(html_source):
1772 def maybe_convert_relative_links(html_source):
1774 if relative_url:
1773 if relative_url:
1775 return relative_links(html_source, relative_url)
1774 return relative_links(html_source, relative_url)
1776 return html_source
1775 return html_source
1777
1776
1778 if renderer == 'rst':
1777 if renderer == 'rst':
1779 if repo_name:
1778 if repo_name:
1780 # process patterns on comments if we pass in repo name
1779 # process patterns on comments if we pass in repo name
1781 source, issues = process_patterns(
1780 source, issues = process_patterns(
1782 source, repo_name, link_format='rst')
1781 source, repo_name, link_format='rst')
1783
1782
1784 return literal(
1783 return literal(
1785 '<div class="rst-block">%s</div>' %
1784 '<div class="rst-block">%s</div>' %
1786 maybe_convert_relative_links(
1785 maybe_convert_relative_links(
1787 MarkupRenderer.rst(source, mentions=mentions)))
1786 MarkupRenderer.rst(source, mentions=mentions)))
1788 elif renderer == 'markdown':
1787 elif renderer == 'markdown':
1789 if repo_name:
1788 if repo_name:
1790 # process patterns on comments if we pass in repo name
1789 # process patterns on comments if we pass in repo name
1791 source, issues = process_patterns(
1790 source, issues = process_patterns(
1792 source, repo_name, link_format='markdown')
1791 source, repo_name, link_format='markdown')
1793
1792
1794 return literal(
1793 return literal(
1795 '<div class="markdown-block">%s</div>' %
1794 '<div class="markdown-block">%s</div>' %
1796 maybe_convert_relative_links(
1795 maybe_convert_relative_links(
1797 MarkupRenderer.markdown(source, flavored=True,
1796 MarkupRenderer.markdown(source, flavored=True,
1798 mentions=mentions)))
1797 mentions=mentions)))
1799 elif renderer == 'jupyter':
1798 elif renderer == 'jupyter':
1800 return literal(
1799 return literal(
1801 '<div class="ipynb">%s</div>' %
1800 '<div class="ipynb">%s</div>' %
1802 maybe_convert_relative_links(
1801 maybe_convert_relative_links(
1803 MarkupRenderer.jupyter(source)))
1802 MarkupRenderer.jupyter(source)))
1804
1803
1805 # None means just show the file-source
1804 # None means just show the file-source
1806 return None
1805 return None
1807
1806
1808
1807
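A usage sketch for render() (not part of the original module); the comment text and repository name are made up, and passing repo_name enables the issue-tracker pattern processing shown above, so an active application/DB context is assumed.

def _example_render_markdown_comment():
    # returns a literal() wrapped in a <div class="markdown-block">
    return render('see #42 for **details**', renderer='markdown',
                  mentions=True, repo_name='some-repo')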
1809 def commit_status(repo, commit_id):
1808 def commit_status(repo, commit_id):
1810 return ChangesetStatusModel().get_status(repo, commit_id)
1809 return ChangesetStatusModel().get_status(repo, commit_id)
1811
1810
1812
1811
1813 def commit_status_lbl(commit_status):
1812 def commit_status_lbl(commit_status):
1814 return dict(ChangesetStatus.STATUSES).get(commit_status)
1813 return dict(ChangesetStatus.STATUSES).get(commit_status)
1815
1814
1816
1815
1817 def commit_time(repo_name, commit_id):
1816 def commit_time(repo_name, commit_id):
1818 repo = Repository.get_by_repo_name(repo_name)
1817 repo = Repository.get_by_repo_name(repo_name)
1819 commit = repo.get_commit(commit_id=commit_id)
1818 commit = repo.get_commit(commit_id=commit_id)
1820 return commit.date
1819 return commit.date
1821
1820
1822
1821
1823 def get_permission_name(key):
1822 def get_permission_name(key):
1824 return dict(Permission.PERMS).get(key)
1823 return dict(Permission.PERMS).get(key)
1825
1824
1826
1825
1827 def journal_filter_help(request):
1826 def journal_filter_help(request):
1828 _ = request.translate
1827 _ = request.translate
1829
1828
1830 return _(
1829 return _(
1831 'Example filter terms:\n' +
1830 'Example filter terms:\n' +
1832 ' repository:vcs\n' +
1831 ' repository:vcs\n' +
1833 ' username:marcin\n' +
1832 ' username:marcin\n' +
1834 ' username:(NOT marcin)\n' +
1833 ' username:(NOT marcin)\n' +
1835 ' action:*push*\n' +
1834 ' action:*push*\n' +
1836 ' ip:127.0.0.1\n' +
1835 ' ip:127.0.0.1\n' +
1837 ' date:20120101\n' +
1836 ' date:20120101\n' +
1838 ' date:[20120101100000 TO 20120102]\n' +
1837 ' date:[20120101100000 TO 20120102]\n' +
1839 '\n' +
1838 '\n' +
1840 'Generate wildcards using \'*\' character:\n' +
1839 'Generate wildcards using \'*\' character:\n' +
1841 ' "repository:vcs*" - search everything starting with \'vcs\'\n' +
1840 ' "repository:vcs*" - search everything starting with \'vcs\'\n' +
1842 ' "repository:*vcs*" - search for repository containing \'vcs\'\n' +
1841 ' "repository:*vcs*" - search for repository containing \'vcs\'\n' +
1843 '\n' +
1842 '\n' +
1844 'Optional AND / OR operators in queries\n' +
1843 'Optional AND / OR operators in queries\n' +
1845 ' "repository:vcs OR repository:test"\n' +
1844 ' "repository:vcs OR repository:test"\n' +
1846 ' "username:test AND repository:test*"\n'
1845 ' "username:test AND repository:test*"\n'
1847 )
1846 )
1848
1847
1849
1848
1850 def search_filter_help(searcher, request):
1849 def search_filter_help(searcher, request):
1851 _ = request.translate
1850 _ = request.translate
1852
1851
1853 terms = ''
1852 terms = ''
1854 return _(
1853 return _(
1855 'Example filter terms for `{searcher}` search:\n' +
1854 'Example filter terms for `{searcher}` search:\n' +
1856 '{terms}\n' +
1855 '{terms}\n' +
1857 'Generate wildcards using \'*\' character:\n' +
1856 'Generate wildcards using \'*\' character:\n' +
1858 ' "repo_name:vcs*" - search everything starting with \'vcs\'\n' +
1857 ' "repo_name:vcs*" - search everything starting with \'vcs\'\n' +
1859 ' "repo_name:*vcs*" - search for repository containing \'vcs\'\n' +
1858 ' "repo_name:*vcs*" - search for repository containing \'vcs\'\n' +
1860 '\n' +
1859 '\n' +
1861 'Optional AND / OR operators in queries\n' +
1860 'Optional AND / OR operators in queries\n' +
1862 ' "repo_name:vcs OR repo_name:test"\n' +
1861 ' "repo_name:vcs OR repo_name:test"\n' +
1863 ' "owner:test AND repo_name:test*"\n' +
1862 ' "owner:test AND repo_name:test*"\n' +
1864 'More: {search_doc}'
1863 'More: {search_doc}'
1865 ).format(searcher=searcher.name,
1864 ).format(searcher=searcher.name,
1866 terms=terms, search_doc=searcher.query_lang_doc)
1865 terms=terms, search_doc=searcher.query_lang_doc)
1867
1866
1868
1867
1869 def not_mapped_error(repo_name):
1868 def not_mapped_error(repo_name):
1870 from rhodecode.translation import _
1869 from rhodecode.translation import _
1871 flash(_('%s repository is not mapped to the database; perhaps'
1870 flash(_('%s repository is not mapped to the database; perhaps'
1872 ' it was created or renamed on the filesystem.'
1871 ' it was created or renamed on the filesystem.'
1873 ' Please run the application again'
1872 ' Please run the application again'
1874 ' in order to rescan repositories.') % repo_name, category='error')
1873 ' in order to rescan repositories.') % repo_name, category='error')
1875
1874
1876
1875
1877 def ip_range(ip_addr):
1876 def ip_range(ip_addr):
1878 from rhodecode.model.db import UserIpMap
1877 from rhodecode.model.db import UserIpMap
1879 s, e = UserIpMap._get_ip_range(ip_addr)
1878 s, e = UserIpMap._get_ip_range(ip_addr)
1880 return '%s - %s' % (s, e)
1879 return '%s - %s' % (s, e)
1881
1880
1882
1881
1883 def form(url, method='post', needs_csrf_token=True, **attrs):
1882 def form(url, method='post', needs_csrf_token=True, **attrs):
1884 """Wrapper around webhelpers.tags.form to prevent CSRF attacks."""
1883 """Wrapper around webhelpers.tags.form to prevent CSRF attacks."""
1885 if method.lower() != 'get' and needs_csrf_token:
1884 if method.lower() != 'get' and needs_csrf_token:
1886 raise Exception(
1885 raise Exception(
1887 'Forms to POST/PUT/DELETE endpoints should have (in general) a ' +
1886 'Forms to POST/PUT/DELETE endpoints should have (in general) a ' +
1888 'CSRF token. If the endpoint does not require such token you can ' +
1887 'CSRF token. If the endpoint does not require such token you can ' +
1889 'explicitly set the parameter needs_csrf_token to false.')
1888 'explicitly set the parameter needs_csrf_token to false.')
1890
1889
1891 return wh_form(url, method=method, **attrs)
1890 return wh_form(url, method=method, **attrs)
1892
1891
1893
1892
1894 def secure_form(url, method="POST", multipart=False, **attrs):
1893 def secure_form(url, method="POST", multipart=False, **attrs):
1895 """Start a form tag that points the action to an url. This
1894 """Start a form tag that points the action to an url. This
1896 form tag will also include the hidden field containing
1895 form tag will also include the hidden field containing
1897 the auth token.
1896 the auth token.
1898
1897
1899 The url options should be given either as a string, or as a
1898 The url options should be given either as a string, or as a
1900 ``url()`` function. The method for the form defaults to POST.
1899 ``url()`` function. The method for the form defaults to POST.
1901
1900
1902 Options:
1901 Options:
1903
1902
1904 ``multipart``
1903 ``multipart``
1905 If set to True, the enctype is set to "multipart/form-data".
1904 If set to True, the enctype is set to "multipart/form-data".
1906 ``method``
1905 ``method``
1907 The method to use when submitting the form, usually either
1906 The method to use when submitting the form, usually either
1908 "GET" or "POST". If "PUT", "DELETE", or another verb is used, a
1907 "GET" or "POST". If "PUT", "DELETE", or another verb is used, a
1909 hidden input with name _method is added to simulate the verb
1908 hidden input with name _method is added to simulate the verb
1910 over POST.
1909 over POST.
1911
1910
1912 """
1911 """
1913 from webhelpers.pylonslib.secure_form import insecure_form
1912 from webhelpers.pylonslib.secure_form import insecure_form
1914 form = insecure_form(url, method, multipart, **attrs)
1913 form = insecure_form(url, method, multipart, **attrs)
1915
1914
1916 session = None
1915 session = None
1917 # TODO(marcink): after pyramid migration require request variable ALWAYS
1916 # TODO(marcink): after pyramid migration require request variable ALWAYS
1918 if 'request' in attrs:
1917 if 'request' in attrs:
1919 session = attrs['request'].session
1918 session = attrs['request'].session
1920
1919
1921 token = literal(
1920 token = literal(
1922 '<input type="hidden" id="{}" name="{}" value="{}">'.format(
1921 '<input type="hidden" id="{}" name="{}" value="{}">'.format(
1923 csrf_token_key, csrf_token_key, get_csrf_token(session)))
1922 csrf_token_key, csrf_token_key, get_csrf_token(session)))
1924
1923
1925 return literal("%s\n%s" % (form, token))
1924 return literal("%s\n%s" % (form, token))
1926
1925
1927
1926
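A hypothetical template-level call (the route name is made up); passing request= lets secure_form() read the CSRF token from the session and append it as a hidden input.

def _example_secure_form(request):
    return secure_form(route_path('my_account_password'),
                       method='POST', request=request)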
1928 def dropdownmenu(name, selected, options, enable_filter=False, **attrs):
1927 def dropdownmenu(name, selected, options, enable_filter=False, **attrs):
1929 select_html = select(name, selected, options, **attrs)
1928 select_html = select(name, selected, options, **attrs)
1930 select2 = """
1929 select2 = """
1931 <script>
1930 <script>
1932 $(document).ready(function() {
1931 $(document).ready(function() {
1933 $('#%s').select2({
1932 $('#%s').select2({
1934 containerCssClass: 'drop-menu',
1933 containerCssClass: 'drop-menu',
1935 dropdownCssClass: 'drop-menu-dropdown',
1934 dropdownCssClass: 'drop-menu-dropdown',
1936 dropdownAutoWidth: true%s
1935 dropdownAutoWidth: true%s
1937 });
1936 });
1938 });
1937 });
1939 </script>
1938 </script>
1940 """
1939 """
1941 filter_option = """,
1940 filter_option = """,
1942 minimumResultsForSearch: -1
1941 minimumResultsForSearch: -1
1943 """
1942 """
1944 input_id = attrs.get('id') or name
1943 input_id = attrs.get('id') or name
1945 filter_enabled = "" if enable_filter else filter_option
1944 filter_enabled = "" if enable_filter else filter_option
1946 select_script = literal(select2 % (input_id, filter_enabled))
1945 select_script = literal(select2 % (input_id, filter_enabled))
1947
1946
1948 return literal(select_html+select_script)
1947 return literal(select_html+select_script)
1949
1948
1950
1949
1951 def get_visual_attr(tmpl_context_var, attr_name):
1950 def get_visual_attr(tmpl_context_var, attr_name):
1952 """
1951 """
1953 A safe way to get a variable from visual variable of template context
1952 A safe way to get a variable from visual variable of template context
1954
1953
1955 :param tmpl_context_var: instance of tmpl_context, usually present as `c`
1954 :param tmpl_context_var: instance of tmpl_context, usually present as `c`
1956 :param attr_name: name of the attribute we fetch from the c.visual
1955 :param attr_name: name of the attribute we fetch from the c.visual
1957 """
1956 """
1958 visual = getattr(tmpl_context_var, 'visual', None)
1957 visual = getattr(tmpl_context_var, 'visual', None)
1959 if not visual:
1958 if not visual:
1960 return
1959 return
1961 else:
1960 else:
1962 return getattr(visual, attr_name, None)
1961 return getattr(visual, attr_name, None)
1963
1962
1964
1963
1965 def get_last_path_part(file_node):
1964 def get_last_path_part(file_node):
1966 if not file_node.path:
1965 if not file_node.path:
1967 return u''
1966 return u''
1968
1967
1969 path = safe_unicode(file_node.path.split('/')[-1])
1968 path = safe_unicode(file_node.path.split('/')[-1])
1970 return u'../' + path
1969 return u'../' + path
1971
1970
1972
1971
1973 def route_url(*args, **kwargs):
1972 def route_url(*args, **kwargs):
1974 """
1973 """
1975 Wrapper around pyramids `route_url` (fully qualified url) function.
1974 Wrapper around pyramids `route_url` (fully qualified url) function.
1976 It is used to generate URLs from within pylons views or templates.
1975 It is used to generate URLs from within pylons views or templates.
1977 This will be removed when the pyramid migration is finished.
1976 This will be removed when the pyramid migration is finished.
1978 """
1977 """
1979 req = get_current_request()
1978 req = get_current_request()
1980 return req.route_url(*args, **kwargs)
1979 return req.route_url(*args, **kwargs)
1981
1980
1982
1981
1983 def route_path(*args, **kwargs):
1982 def route_path(*args, **kwargs):
1984 """
1983 """
1985 Wrapper around pyramids `route_path` function. It is used to generate
1984 Wrapper around pyramids `route_path` function. It is used to generate
1986 URLs from within pylons views or templates. This will be removed when
1985 URLs from within pylons views or templates. This will be removed when
1987 the pyramid migration is finished.
1986 the pyramid migration is finished.
1988 """
1987 """
1989 req = get_current_request()
1988 req = get_current_request()
1990 return req.route_path(*args, **kwargs)
1989 return req.route_path(*args, **kwargs)
1991
1990
1992
1991
1993 def route_path_or_none(*args, **kwargs):
1992 def route_path_or_none(*args, **kwargs):
1994 try:
1993 try:
1995 return route_path(*args, **kwargs)
1994 return route_path(*args, **kwargs)
1996 except KeyError:
1995 except KeyError:
1997 return None
1996 return None
1998
1997
1999
1998
2000 def static_url(*args, **kwds):
1999 def static_url(*args, **kwds):
2001 """
2000 """
2002 Wrapper around pyramids `static_url` function. It is used to generate
2001 Wrapper around pyramids `static_url` function. It is used to generate
2003 static asset URLs from within pylons views or templates. This will be
2002 static asset URLs from within pylons views or templates. This will be
2004 removed when the pyramid migration is finished.
2003 removed when the pyramid migration is finished.
2005 """
2004 """
2006 req = get_current_request()
2005 req = get_current_request()
2007 return req.static_url(*args, **kwds)
2006 return req.static_url(*args, **kwds)
2008
2007
2009
2008
2010 def resource_path(*args, **kwds):
2009 def resource_path(*args, **kwds):
2011 """
2010 """
2012 Wrapper around pyramids `resource_path` function. It is used to generate
2011 Wrapper around pyramids `resource_path` function. It is used to generate
2013 resource URLs from within pylons views or templates. This will be removed
2012 resource URLs from within pylons views or templates. This will be removed
2014 when the pyramid migration is finished.
2013 when the pyramid migration is finished.
2015 """
2014 """
2016 req = get_current_request()
2015 req = get_current_request()
2017 return req.resource_path(*args, **kwds)
2016 return req.resource_path(*args, **kwds)
2018
2017
2019
2018
2020 def api_call_example(method, args):
2019 def api_call_example(method, args):
2021 """
2020 """
2022 Generates an API call example via CURL
2021 Generates an API call example via CURL
2023 """
2022 """
2024 args_json = json.dumps(OrderedDict([
2023 args_json = json.dumps(OrderedDict([
2025 ('id', 1),
2024 ('id', 1),
2026 ('auth_token', 'SECRET'),
2025 ('auth_token', 'SECRET'),
2027 ('method', method),
2026 ('method', method),
2028 ('args', args)
2027 ('args', args)
2029 ]))
2028 ]))
2030 return literal(
2029 return literal(
2031 "curl {api_url} -X POST -H 'content-type:text/plain' --data-binary '{data}'"
2030 "curl {api_url} -X POST -H 'content-type:text/plain' --data-binary '{data}'"
2032 "<br/><br/>SECRET can be found in <a href=\"{token_url}\">auth-tokens</a> page, "
2031 "<br/><br/>SECRET can be found in <a href=\"{token_url}\">auth-tokens</a> page, "
2033 "and needs to be of `api calls` role."
2032 "and needs to be of `api calls` role."
2034 .format(
2033 .format(
2035 api_url=route_url('apiv2'),
2034 api_url=route_url('apiv2'),
2036 token_url=route_url('my_account_auth_tokens'),
2035 token_url=route_url('my_account_auth_tokens'),
2037 data=args_json))
2036 data=args_json))
2038
2037
2039
2038
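For example, api_call_example('get_repo', {'repoid': 'some-repo'}) renders a curl command whose --data-binary payload is the JSON body built above:

    {"id": 1, "auth_token": "SECRET", "method": "get_repo", "args": {"repoid": "some-repo"}}

('SECRET' is the literal placeholder the helper emits; a real auth token of the `api calls` role has to be substituted by the caller).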
2040 def notification_description(notification, request):
2039 def notification_description(notification, request):
2041 """
2040 """
2042 Generate notification human readable description based on notification type
2041 Generate notification human readable description based on notification type
2043 """
2042 """
2044 from rhodecode.model.notification import NotificationModel
2043 from rhodecode.model.notification import NotificationModel
2045 return NotificationModel().make_description(
2044 return NotificationModel().make_description(
2046 notification, translate=request.translate)
2045 notification, translate=request.translate)
@@ -1,908 +1,909 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Scm model for RhodeCode
22 Scm model for RhodeCode
23 """
23 """
24
24
25 import os.path
25 import os.path
26 import re
26 import re
27 import sys
27 import sys
28 import traceback
28 import traceback
29 import logging
29 import logging
30 import cStringIO
30 import cStringIO
31 import pkg_resources
31 import pkg_resources
32
32
33 from pylons.i18n.translation import _
33 from pylons.i18n.translation import _
34 from sqlalchemy import func
34 from sqlalchemy import func
35 from zope.cachedescriptors.property import Lazy as LazyProperty
35 from zope.cachedescriptors.property import Lazy as LazyProperty
36
36
37 import rhodecode
37 import rhodecode
38 from rhodecode.lib.vcs import get_backend
38 from rhodecode.lib.vcs import get_backend
39 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
39 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
40 from rhodecode.lib.vcs.nodes import FileNode
40 from rhodecode.lib.vcs.nodes import FileNode
41 from rhodecode.lib.vcs.backends.base import EmptyCommit
41 from rhodecode.lib.vcs.backends.base import EmptyCommit
42 from rhodecode.lib import helpers as h
42 from rhodecode.lib import helpers as h
43
43
44 from rhodecode.lib.auth import (
44 from rhodecode.lib.auth import (
45 HasRepoPermissionAny, HasRepoGroupPermissionAny,
45 HasRepoPermissionAny, HasRepoGroupPermissionAny,
46 HasUserGroupPermissionAny)
46 HasUserGroupPermissionAny)
47 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
47 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
48 from rhodecode.lib import hooks_utils, caches
48 from rhodecode.lib import hooks_utils, caches
49 from rhodecode.lib.utils import (
49 from rhodecode.lib.utils import (
50 get_filesystem_repos, make_db_config)
50 get_filesystem_repos, make_db_config)
51 from rhodecode.lib.utils2 import (safe_str, safe_unicode)
51 from rhodecode.lib.utils2 import (safe_str, safe_unicode)
52 from rhodecode.lib.system_info import get_system_info
52 from rhodecode.lib.system_info import get_system_info
53 from rhodecode.model import BaseModel
53 from rhodecode.model import BaseModel
54 from rhodecode.model.db import (
54 from rhodecode.model.db import (
55 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
55 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
56 PullRequest)
56 PullRequest)
57 from rhodecode.model.settings import VcsSettingsModel
57 from rhodecode.model.settings import VcsSettingsModel
58
58
59 log = logging.getLogger(__name__)
59 log = logging.getLogger(__name__)
60
60
61
61
62 class UserTemp(object):
62 class UserTemp(object):
63 def __init__(self, user_id):
63 def __init__(self, user_id):
64 self.user_id = user_id
64 self.user_id = user_id
65
65
66 def __repr__(self):
66 def __repr__(self):
67 return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id)
67 return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id)
68
68
69
69
70 class RepoTemp(object):
70 class RepoTemp(object):
71 def __init__(self, repo_id):
71 def __init__(self, repo_id):
72 self.repo_id = repo_id
72 self.repo_id = repo_id
73
73
74 def __repr__(self):
74 def __repr__(self):
75 return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id)
75 return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id)
76
76
77
77
78 class SimpleCachedRepoList(object):
78 class SimpleCachedRepoList(object):
79 """
79 """
80 Lighter version of iteration of repos without the scm initialisation,
80 Lighter version of iteration of repos without the scm initialisation,
81 and with cache usage
81 and with cache usage
82 """
82 """
83 def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
83 def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
84 self.db_repo_list = db_repo_list
84 self.db_repo_list = db_repo_list
85 self.repos_path = repos_path
85 self.repos_path = repos_path
86 self.order_by = order_by
86 self.order_by = order_by
87 self.reversed = (order_by or '').startswith('-')
87 self.reversed = (order_by or '').startswith('-')
88 if not perm_set:
88 if not perm_set:
89 perm_set = ['repository.read', 'repository.write',
89 perm_set = ['repository.read', 'repository.write',
90 'repository.admin']
90 'repository.admin']
91 self.perm_set = perm_set
91 self.perm_set = perm_set
92
92
93 def __len__(self):
93 def __len__(self):
94 return len(self.db_repo_list)
94 return len(self.db_repo_list)
95
95
96 def __repr__(self):
96 def __repr__(self):
97 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
97 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
98
98
99 def __iter__(self):
99 def __iter__(self):
100 for dbr in self.db_repo_list:
100 for dbr in self.db_repo_list:
101 # check permission at this level
101 # check permission at this level
102 has_perm = HasRepoPermissionAny(*self.perm_set)(
102 has_perm = HasRepoPermissionAny(*self.perm_set)(
103 dbr.repo_name, 'SimpleCachedRepoList check')
103 dbr.repo_name, 'SimpleCachedRepoList check')
104 if not has_perm:
104 if not has_perm:
105 continue
105 continue
106
106
107 tmp_d = {
107 tmp_d = {
108 'name': dbr.repo_name,
108 'name': dbr.repo_name,
109 'dbrepo': dbr.get_dict(),
109 'dbrepo': dbr.get_dict(),
110 'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {}
110 'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {}
111 }
111 }
112 yield tmp_d
112 yield tmp_d
113
113
114
114
115 class _PermCheckIterator(object):
115 class _PermCheckIterator(object):
116
116
117 def __init__(
117 def __init__(
118 self, obj_list, obj_attr, perm_set, perm_checker,
118 self, obj_list, obj_attr, perm_set, perm_checker,
119 extra_kwargs=None):
119 extra_kwargs=None):
120 """
120 """
121 Creates an iterator from the given list of objects, additionally
121 Creates an iterator from the given list of objects, additionally
122 checking permissions for them against the perm_set var
122 checking permissions for them against the perm_set var
123
123
124 :param obj_list: list of db objects
124 :param obj_list: list of db objects
125 :param obj_attr: attribute of object to pass into perm_checker
125 :param obj_attr: attribute of object to pass into perm_checker
126 :param perm_set: list of permissions to check
126 :param perm_set: list of permissions to check
127 :param perm_checker: callable to check permissions against
127 :param perm_checker: callable to check permissions against
128 """
128 """
129 self.obj_list = obj_list
129 self.obj_list = obj_list
130 self.obj_attr = obj_attr
130 self.obj_attr = obj_attr
131 self.perm_set = perm_set
131 self.perm_set = perm_set
132 self.perm_checker = perm_checker
132 self.perm_checker = perm_checker
133 self.extra_kwargs = extra_kwargs or {}
133 self.extra_kwargs = extra_kwargs or {}
134
134
135 def __len__(self):
135 def __len__(self):
136 return len(self.obj_list)
136 return len(self.obj_list)
137
137
138 def __repr__(self):
138 def __repr__(self):
139 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
139 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
140
140
141 def __iter__(self):
141 def __iter__(self):
142 checker = self.perm_checker(*self.perm_set)
142 checker = self.perm_checker(*self.perm_set)
143 for db_obj in self.obj_list:
143 for db_obj in self.obj_list:
144 # check permission at this level
144 # check permission at this level
145 name = getattr(db_obj, self.obj_attr, None)
145 name = getattr(db_obj, self.obj_attr, None)
146 if not checker(name, self.__class__.__name__, **self.extra_kwargs):
146 if not checker(name, self.__class__.__name__, **self.extra_kwargs):
147 continue
147 continue
148
148
149 yield db_obj
149 yield db_obj
150
150
151
151
152 class RepoList(_PermCheckIterator):
152 class RepoList(_PermCheckIterator):
153
153
154 def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
154 def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
155 if not perm_set:
155 if not perm_set:
156 perm_set = [
156 perm_set = [
157 'repository.read', 'repository.write', 'repository.admin']
157 'repository.read', 'repository.write', 'repository.admin']
158
158
159 super(RepoList, self).__init__(
159 super(RepoList, self).__init__(
160 obj_list=db_repo_list,
160 obj_list=db_repo_list,
161 obj_attr='repo_name', perm_set=perm_set,
161 obj_attr='repo_name', perm_set=perm_set,
162 perm_checker=HasRepoPermissionAny,
162 perm_checker=HasRepoPermissionAny,
163 extra_kwargs=extra_kwargs)
163 extra_kwargs=extra_kwargs)
164
164
165
165
166 class RepoGroupList(_PermCheckIterator):
166 class RepoGroupList(_PermCheckIterator):
167
167
168 def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
168 def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
169 if not perm_set:
169 if not perm_set:
170 perm_set = ['group.read', 'group.write', 'group.admin']
170 perm_set = ['group.read', 'group.write', 'group.admin']
171
171
172 super(RepoGroupList, self).__init__(
172 super(RepoGroupList, self).__init__(
173 obj_list=db_repo_group_list,
173 obj_list=db_repo_group_list,
174 obj_attr='group_name', perm_set=perm_set,
174 obj_attr='group_name', perm_set=perm_set,
175 perm_checker=HasRepoGroupPermissionAny,
175 perm_checker=HasRepoGroupPermissionAny,
176 extra_kwargs=extra_kwargs)
176 extra_kwargs=extra_kwargs)
177
177
178
178
179 class UserGroupList(_PermCheckIterator):
179 class UserGroupList(_PermCheckIterator):
180
180
181 def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
181 def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
182 if not perm_set:
182 if not perm_set:
183 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
183 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
184
184
185 super(UserGroupList, self).__init__(
185 super(UserGroupList, self).__init__(
186 obj_list=db_user_group_list,
186 obj_list=db_user_group_list,
187 obj_attr='users_group_name', perm_set=perm_set,
187 obj_attr='users_group_name', perm_set=perm_set,
188 perm_checker=HasUserGroupPermissionAny,
188 perm_checker=HasUserGroupPermissionAny,
189 extra_kwargs=extra_kwargs)
189 extra_kwargs=extra_kwargs)
190
190
191
191
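RepoList, RepoGroupList and UserGroupList above only differ in which name attribute and permission checker they bind; the actual filtering happens lazily in _PermCheckIterator.__iter__. A minimal standalone sketch of that pattern, with a stand-in checker instead of the real RhodeCode auth callables:

    # Illustrative sketch only; `allowed` stands in for HasRepoPermissionAny & co.
    class PermFilteredList(object):
        def __init__(self, objects, name_attr, checker):
            self.objects = objects
            self.name_attr = name_attr
            self.checker = checker            # callable(name) -> bool

        def __iter__(self):
            for obj in self.objects:
                name = getattr(obj, self.name_attr, None)
                if self.checker(name):        # yield only readable objects
                    yield obj

    class FakeRepo(object):
        def __init__(self, repo_name):
            self.repo_name = repo_name

    allowed = lambda name: name != 'secret/repo'
    repos = [FakeRepo('public/repo'), FakeRepo('secret/repo')]
    visible = [r.repo_name for r in PermFilteredList(repos, 'repo_name', allowed)]
    # visible == ['public/repo']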
192 class ScmModel(BaseModel):
192 class ScmModel(BaseModel):
193 """
193 """
194 Generic Scm Model
194 Generic Scm Model
195 """
195 """
196
196
197 @LazyProperty
197 @LazyProperty
198 def repos_path(self):
198 def repos_path(self):
199 """
199 """
200 Gets the repositories root path from database
200 Gets the repositories root path from database
201 """
201 """
202
202
203 settings_model = VcsSettingsModel(sa=self.sa)
203 settings_model = VcsSettingsModel(sa=self.sa)
204 return settings_model.get_repos_location()
204 return settings_model.get_repos_location()
205
205
206 def repo_scan(self, repos_path=None):
206 def repo_scan(self, repos_path=None):
207 """
207 """
208 Listing of repositories in the given path. This path should not be a
208 Listing of repositories in the given path. This path should not be a
209 repository itself. Returns a dictionary of repository objects
209 repository itself. Returns a dictionary of repository objects
210
210
211 :param repos_path: path to directory containing repositories
211 :param repos_path: path to directory containing repositories
212 """
212 """
213
213
214 if repos_path is None:
214 if repos_path is None:
215 repos_path = self.repos_path
215 repos_path = self.repos_path
216
216
217 log.info('scanning for repositories in %s', repos_path)
217 log.info('scanning for repositories in %s', repos_path)
218
218
219 config = make_db_config()
219 config = make_db_config()
220 config.set('extensions', 'largefiles', '')
220 config.set('extensions', 'largefiles', '')
221 repos = {}
221 repos = {}
222
222
223 for name, path in get_filesystem_repos(repos_path, recursive=True):
223 for name, path in get_filesystem_repos(repos_path, recursive=True):
224 # name needs to be decomposed and put back together using the /
224 # name needs to be decomposed and put back together using the /
225 # since this is the internal storage separator for rhodecode
225 # since this is the internal storage separator for rhodecode
226 name = Repository.normalize_repo_name(name)
226 name = Repository.normalize_repo_name(name)
227
227
228 try:
228 try:
229 if name in repos:
229 if name in repos:
230 raise RepositoryError('Duplicate repository name %s '
230 raise RepositoryError('Duplicate repository name %s '
231 'found in %s' % (name, path))
231 'found in %s' % (name, path))
232 elif path[0] in rhodecode.BACKENDS:
232 elif path[0] in rhodecode.BACKENDS:
233 klass = get_backend(path[0])
233 klass = get_backend(path[0])
234 repos[name] = klass(path[1], config=config)
234 repos[name] = klass(path[1], config=config)
235 except OSError:
235 except OSError:
236 continue
236 continue
237 log.debug('found %s paths with repositories', len(repos))
237 log.debug('found %s paths with repositories', len(repos))
238 return repos
238 return repos
239
239
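For orientation, a possible call site for repo_scan; the path is made up, and the returned dict maps normalized repo names to instantiated backend objects:

    # Hypothetical usage; '/srv/repos' is an assumed repositories root.
    scm = ScmModel()
    discovered = scm.repo_scan(repos_path='/srv/repos')
    for repo_name, backend in discovered.items():
        log.info('found %s (%s backend)', repo_name, backend.alias)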
240 def get_repos(self, all_repos=None, sort_key=None):
240 def get_repos(self, all_repos=None, sort_key=None):
241 """
241 """
242 Get all repositories from db and for each repo create its
242 Get all repositories from db and for each repo create its
243 backend instance and fill that backend with information from the database
243 backend instance and fill that backend with information from the database
244
244
245 :param all_repos: list of repository names as strings
245 :param all_repos: list of repository names as strings
246 give a specific list of repositories; useful for filtering
246 give a specific list of repositories; useful for filtering
247
247
248 :param sort_key: initial sorting of repositories
248 :param sort_key: initial sorting of repositories
249 """
249 """
250 if all_repos is None:
250 if all_repos is None:
251 all_repos = self.sa.query(Repository)\
251 all_repos = self.sa.query(Repository)\
252 .filter(Repository.group_id == None)\
252 .filter(Repository.group_id == None)\
253 .order_by(func.lower(Repository.repo_name)).all()
253 .order_by(func.lower(Repository.repo_name)).all()
254 repo_iter = SimpleCachedRepoList(
254 repo_iter = SimpleCachedRepoList(
255 all_repos, repos_path=self.repos_path, order_by=sort_key)
255 all_repos, repos_path=self.repos_path, order_by=sort_key)
256 return repo_iter
256 return repo_iter
257
257
258 def get_repo_groups(self, all_groups=None):
258 def get_repo_groups(self, all_groups=None):
259 if all_groups is None:
259 if all_groups is None:
260 all_groups = RepoGroup.query()\
260 all_groups = RepoGroup.query()\
261 .filter(RepoGroup.group_parent_id == None).all()
261 .filter(RepoGroup.group_parent_id == None).all()
262 return [x for x in RepoGroupList(all_groups)]
262 return [x for x in RepoGroupList(all_groups)]
263
263
264 def mark_for_invalidation(self, repo_name, delete=False):
264 def mark_for_invalidation(self, repo_name, delete=False):
265 """
265 """
266 Mark caches of this repo invalid in the database. `delete` flag
266 Mark caches of this repo invalid in the database. `delete` flag
267 removes the cache entries
267 removes the cache entries
268
268
269 :param repo_name: the repo_name for which caches should be marked
269 :param repo_name: the repo_name for which caches should be marked
270 invalid, or deleted
270 invalid, or deleted
271 :param delete: delete the entry keys instead of setting a bool
271 :param delete: delete the entry keys instead of setting a bool
272 flag on them
272 flag on them
273 """
273 """
274 CacheKey.set_invalidate(repo_name, delete=delete)
274 CacheKey.set_invalidate(repo_name, delete=delete)
275 repo = Repository.get_by_repo_name(repo_name)
275 repo = Repository.get_by_repo_name(repo_name)
276
276
277 if repo:
277 if repo:
278 config = repo._config
278 config = repo._config
279 config.set('extensions', 'largefiles', '')
279 config.set('extensions', 'largefiles', '')
280 repo.update_commit_cache(config=config, cs_cache=None)
280 repo.update_commit_cache(config=config, cs_cache=None)
281 caches.clear_repo_caches(repo_name)
281 caches.clear_repo_caches(repo_name)
282
282
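A typical call, for example after new commits landed in a repository (the repo name is illustrative); delete=True drops the cache keys instead of just flagging them:

    # Flag cached data as stale so it gets recomputed on next access.
    ScmModel().mark_for_invalidation('group/my-repo', delete=False)
    # Or drop the cache entries entirely:
    ScmModel().mark_for_invalidation('group/my-repo', delete=True)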
283 def toggle_following_repo(self, follow_repo_id, user_id):
283 def toggle_following_repo(self, follow_repo_id, user_id):
284
284
285 f = self.sa.query(UserFollowing)\
285 f = self.sa.query(UserFollowing)\
286 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
286 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
287 .filter(UserFollowing.user_id == user_id).scalar()
287 .filter(UserFollowing.user_id == user_id).scalar()
288
288
289 if f is not None:
289 if f is not None:
290 try:
290 try:
291 self.sa.delete(f)
291 self.sa.delete(f)
292 return
292 return
293 except Exception:
293 except Exception:
294 log.error(traceback.format_exc())
294 log.error(traceback.format_exc())
295 raise
295 raise
296
296
297 try:
297 try:
298 f = UserFollowing()
298 f = UserFollowing()
299 f.user_id = user_id
299 f.user_id = user_id
300 f.follows_repo_id = follow_repo_id
300 f.follows_repo_id = follow_repo_id
301 self.sa.add(f)
301 self.sa.add(f)
302 except Exception:
302 except Exception:
303 log.error(traceback.format_exc())
303 log.error(traceback.format_exc())
304 raise
304 raise
305
305
306 def toggle_following_user(self, follow_user_id, user_id):
306 def toggle_following_user(self, follow_user_id, user_id):
307 f = self.sa.query(UserFollowing)\
307 f = self.sa.query(UserFollowing)\
308 .filter(UserFollowing.follows_user_id == follow_user_id)\
308 .filter(UserFollowing.follows_user_id == follow_user_id)\
309 .filter(UserFollowing.user_id == user_id).scalar()
309 .filter(UserFollowing.user_id == user_id).scalar()
310
310
311 if f is not None:
311 if f is not None:
312 try:
312 try:
313 self.sa.delete(f)
313 self.sa.delete(f)
314 return
314 return
315 except Exception:
315 except Exception:
316 log.error(traceback.format_exc())
316 log.error(traceback.format_exc())
317 raise
317 raise
318
318
319 try:
319 try:
320 f = UserFollowing()
320 f = UserFollowing()
321 f.user_id = user_id
321 f.user_id = user_id
322 f.follows_user_id = follow_user_id
322 f.follows_user_id = follow_user_id
323 self.sa.add(f)
323 self.sa.add(f)
324 except Exception:
324 except Exception:
325 log.error(traceback.format_exc())
325 log.error(traceback.format_exc())
326 raise
326 raise
327
327
328 def is_following_repo(self, repo_name, user_id, cache=False):
328 def is_following_repo(self, repo_name, user_id, cache=False):
329 r = self.sa.query(Repository)\
329 r = self.sa.query(Repository)\
330 .filter(Repository.repo_name == repo_name).scalar()
330 .filter(Repository.repo_name == repo_name).scalar()
331
331
332 f = self.sa.query(UserFollowing)\
332 f = self.sa.query(UserFollowing)\
333 .filter(UserFollowing.follows_repository == r)\
333 .filter(UserFollowing.follows_repository == r)\
334 .filter(UserFollowing.user_id == user_id).scalar()
334 .filter(UserFollowing.user_id == user_id).scalar()
335
335
336 return f is not None
336 return f is not None
337
337
338 def is_following_user(self, username, user_id, cache=False):
338 def is_following_user(self, username, user_id, cache=False):
339 u = User.get_by_username(username)
339 u = User.get_by_username(username)
340
340
341 f = self.sa.query(UserFollowing)\
341 f = self.sa.query(UserFollowing)\
342 .filter(UserFollowing.follows_user == u)\
342 .filter(UserFollowing.follows_user == u)\
343 .filter(UserFollowing.user_id == user_id).scalar()
343 .filter(UserFollowing.user_id == user_id).scalar()
344
344
345 return f is not None
345 return f is not None
346
346
347 def get_followers(self, repo):
347 def get_followers(self, repo):
348 repo = self._get_repo(repo)
348 repo = self._get_repo(repo)
349
349
350 return self.sa.query(UserFollowing)\
350 return self.sa.query(UserFollowing)\
351 .filter(UserFollowing.follows_repository == repo).count()
351 .filter(UserFollowing.follows_repository == repo).count()
352
352
353 def get_forks(self, repo):
353 def get_forks(self, repo):
354 repo = self._get_repo(repo)
354 repo = self._get_repo(repo)
355 return self.sa.query(Repository)\
355 return self.sa.query(Repository)\
356 .filter(Repository.fork == repo).count()
356 .filter(Repository.fork == repo).count()
357
357
358 def get_pull_requests(self, repo):
358 def get_pull_requests(self, repo):
359 repo = self._get_repo(repo)
359 repo = self._get_repo(repo)
360 return self.sa.query(PullRequest)\
360 return self.sa.query(PullRequest)\
361 .filter(PullRequest.target_repo == repo)\
361 .filter(PullRequest.target_repo == repo)\
362 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
362 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
363
363
364 def mark_as_fork(self, repo, fork, user):
364 def mark_as_fork(self, repo, fork, user):
365 repo = self._get_repo(repo)
365 repo = self._get_repo(repo)
366 fork = self._get_repo(fork)
366 fork = self._get_repo(fork)
367 if fork and repo.repo_id == fork.repo_id:
367 if fork and repo.repo_id == fork.repo_id:
368 raise Exception("Cannot set repository as fork of itself")
368 raise Exception("Cannot set repository as fork of itself")
369
369
370 if fork and repo.repo_type != fork.repo_type:
370 if fork and repo.repo_type != fork.repo_type:
371 raise RepositoryError(
371 raise RepositoryError(
372 "Cannot set repository as fork of repository with other type")
372 "Cannot set repository as fork of repository with other type")
373
373
374 repo.fork = fork
374 repo.fork = fork
375 self.sa.add(repo)
375 self.sa.add(repo)
376 return repo
376 return repo
377
377
378 def pull_changes(self, repo, username):
378 def pull_changes(self, repo, username):
379 dbrepo = self._get_repo(repo)
379 dbrepo = self._get_repo(repo)
380 clone_uri = dbrepo.clone_uri
380 clone_uri = dbrepo.clone_uri
381 if not clone_uri:
381 if not clone_uri:
382 raise Exception("This repository doesn't have a clone uri")
382 raise Exception("This repository doesn't have a clone uri")
383
383
384 repo = dbrepo.scm_instance(cache=False)
384 repo = dbrepo.scm_instance(cache=False)
385 # TODO: marcink fix this and re-enable since we need common logic
385 # TODO: marcink fix this and re-enable since we need common logic
386 # for hg/git remove hooks so we don't trigger them on fetching
386 # for hg/git remove hooks so we don't trigger them on fetching
387 # commits from remote
387 # commits from remote
388 repo.config.clear_section('hooks')
388 repo.config.clear_section('hooks')
389
389
390 repo_name = dbrepo.repo_name
390 repo_name = dbrepo.repo_name
391 try:
391 try:
392 # TODO: we need to make sure those operations call proper hooks !
392 # TODO: we need to make sure those operations call proper hooks !
393 repo.pull(clone_uri)
393 repo.pull(clone_uri)
394
394
395 self.mark_for_invalidation(repo_name)
395 self.mark_for_invalidation(repo_name)
396 except Exception:
396 except Exception:
397 log.error(traceback.format_exc())
397 log.error(traceback.format_exc())
398 raise
398 raise
399
399
400 def commit_change(self, repo, repo_name, commit, user, author, message,
400 def commit_change(self, repo, repo_name, commit, user, author, message,
401 content, f_path):
401 content, f_path):
402 """
402 """
403 Commits changes
403 Commits changes
404
404
405 :param repo: SCM instance
405 :param repo: SCM instance
406
406
407 """
407 """
408 user = self._get_user(user)
408 user = self._get_user(user)
409
409
410 # decoding here ensures that we have properly encoded values;
410 # decoding here ensures that we have properly encoded values;
411 # in any other case this will throw exceptions and deny the commit
411 # in any other case this will throw exceptions and deny the commit
412 content = safe_str(content)
412 content = safe_str(content)
413 path = safe_str(f_path)
413 path = safe_str(f_path)
414 # message and author need to be unicode
414 # message and author need to be unicode
415 # the proper backend should then translate that into the required type
415 # the proper backend should then translate that into the required type
416 message = safe_unicode(message)
416 message = safe_unicode(message)
417 author = safe_unicode(author)
417 author = safe_unicode(author)
418 imc = repo.in_memory_commit
418 imc = repo.in_memory_commit
419 imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
419 imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
420 try:
420 try:
421 # TODO: handle pre-push action !
421 # TODO: handle pre-push action !
422 tip = imc.commit(
422 tip = imc.commit(
423 message=message, author=author, parents=[commit],
423 message=message, author=author, parents=[commit],
424 branch=commit.branch)
424 branch=commit.branch)
425 except Exception as e:
425 except Exception as e:
426 log.error(traceback.format_exc())
426 log.error(traceback.format_exc())
427 raise IMCCommitError(str(e))
427 raise IMCCommitError(str(e))
428 finally:
428 finally:
429 # always clear caches, if commit fails we want fresh object also
429 # always clear caches, if commit fails we want fresh object also
430 self.mark_for_invalidation(repo_name)
430 self.mark_for_invalidation(repo_name)
431
431
432 # We trigger the post-push action
432 # We trigger the post-push action
433 hooks_utils.trigger_post_push_hook(
433 hooks_utils.trigger_post_push_hook(
434 username=user.username, action='push_local', repo_name=repo_name,
434 username=user.username, action='push_local', repo_name=repo_name,
435 repo_alias=repo.alias, commit_ids=[tip.raw_id])
435 repo_alias=repo.alias, commit_ids=[tip.raw_id])
436 return tip
436 return tip
437
437
438 def _sanitize_path(self, f_path):
438 def _sanitize_path(self, f_path):
439 if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
439 if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
440 raise NonRelativePathError('%s is not a relative path' % f_path)
440 raise NonRelativePathError('%s is not a relative path' % f_path)
441 if f_path:
441 if f_path:
442 f_path = os.path.normpath(f_path)
442 f_path = os.path.normpath(f_path)
443 return f_path
443 return f_path
444
444
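The rule enforced by _sanitize_path can be exercised in isolation; a small self-contained sketch of the same check (reject absolute paths, './' prefixes and parent-directory escapes, then normalize):

    import os

    def sanitize_relative_path(f_path):
        # mirrors the guard above: no leading '/', no './' prefix, no '../' segment
        if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
            raise ValueError('%s is not a relative path' % f_path)
        return os.path.normpath(f_path) if f_path else f_path

    sanitize_relative_path('docs/index.rst')    # -> 'docs/index.rst'
    # sanitize_relative_path('../etc/passwd')   # -> raises ValueError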
445 def get_dirnode_metadata(self, commit, dir_node):
445 def get_dirnode_metadata(self, request, commit, dir_node):
446 if not dir_node.is_dir():
446 if not dir_node.is_dir():
447 return []
447 return []
448
448
449 data = []
449 data = []
450 for node in dir_node:
450 for node in dir_node:
451 if not node.is_file():
451 if not node.is_file():
452 # skip anything that is not a file node
452 # skip anything that is not a file node
453 continue
453 continue
454
454
455 last_commit = node.last_commit
455 last_commit = node.last_commit
456 last_commit_date = last_commit.date
456 last_commit_date = last_commit.date
457 data.append({
457 data.append({
458 'name': node.name,
458 'name': node.name,
459 'size': h.format_byte_size_binary(node.size),
459 'size': h.format_byte_size_binary(node.size),
460 'modified_at': h.format_date(last_commit_date),
460 'modified_at': h.format_date(last_commit_date),
461 'modified_ts': last_commit_date.isoformat(),
461 'modified_ts': last_commit_date.isoformat(),
462 'revision': last_commit.revision,
462 'revision': last_commit.revision,
463 'short_id': last_commit.short_id,
463 'short_id': last_commit.short_id,
464 'message': h.escape(last_commit.message),
464 'message': h.escape(last_commit.message),
465 'author': h.escape(last_commit.author),
465 'author': h.escape(last_commit.author),
466 'user_profile': h.gravatar_with_user(last_commit.author),
466 'user_profile': h.gravatar_with_user(
467 request, last_commit.author),
467 })
468 })
468
469
469 return data
470 return data
470
471
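The functional change in this method is the new request argument: with the old global PartialRenderer gone, helpers such as h.gravatar_with_user render against an explicitly passed Pyramid request instead of thread-local state, so callers now have to thread the request through. A hedged sketch of a call site (the function name is made up, and commit/node resolution is assumed to happen elsewhere):

    # Hypothetical caller; `commit` and `dir_node` are assumed to be resolved
    # elsewhere (e.g. in a files view) -- only the request threading matters here.
    def render_dir_listing(request, commit, dir_node):
        metadata = ScmModel().get_dirnode_metadata(request, commit, dir_node)
        return {'metadata': metadata}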
471 def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
472 def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
472 extended_info=False, content=False, max_file_bytes=None):
473 extended_info=False, content=False, max_file_bytes=None):
473 """
474 """
474 recursively walk the root dir and return a set of all paths found there,
475 recursively walk the root dir and return a set of all paths found there,
475 based on the repository walk function
476 based on the repository walk function
476
477
477 :param repo_name: name of repository
478 :param repo_name: name of repository
478 :param commit_id: commit id for which to list nodes
479 :param commit_id: commit id for which to list nodes
479 :param root_path: root path to list
480 :param root_path: root path to list
480 :param flat: return as a list, if False returns a dict with description
481 :param flat: return as a list, if False returns a dict with description
481 :param max_file_bytes: will not return file contents over this limit
482 :param max_file_bytes: will not return file contents over this limit
482
483
483 """
484 """
484 _files = list()
485 _files = list()
485 _dirs = list()
486 _dirs = list()
486 try:
487 try:
487 _repo = self._get_repo(repo_name)
488 _repo = self._get_repo(repo_name)
488 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
489 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
489 root_path = root_path.lstrip('/')
490 root_path = root_path.lstrip('/')
490 for __, dirs, files in commit.walk(root_path):
491 for __, dirs, files in commit.walk(root_path):
491 for f in files:
492 for f in files:
492 _content = None
493 _content = None
493 _data = f.unicode_path
494 _data = f.unicode_path
494 over_size_limit = (max_file_bytes is not None
495 over_size_limit = (max_file_bytes is not None
495 and f.size > max_file_bytes)
496 and f.size > max_file_bytes)
496
497
497 if not flat:
498 if not flat:
498 _data = {
499 _data = {
499 "name": h.escape(f.unicode_path),
500 "name": h.escape(f.unicode_path),
500 "type": "file",
501 "type": "file",
501 }
502 }
502 if extended_info:
503 if extended_info:
503 _data.update({
504 _data.update({
504 "md5": f.md5,
505 "md5": f.md5,
505 "binary": f.is_binary,
506 "binary": f.is_binary,
506 "size": f.size,
507 "size": f.size,
507 "extension": f.extension,
508 "extension": f.extension,
508 "mimetype": f.mimetype,
509 "mimetype": f.mimetype,
509 "lines": f.lines()[0]
510 "lines": f.lines()[0]
510 })
511 })
511
512
512 if content:
513 if content:
513 full_content = None
514 full_content = None
514 if not f.is_binary and not over_size_limit:
515 if not f.is_binary and not over_size_limit:
515 full_content = safe_str(f.content)
516 full_content = safe_str(f.content)
516
517
517 _data.update({
518 _data.update({
518 "content": full_content,
519 "content": full_content,
519 })
520 })
520 _files.append(_data)
521 _files.append(_data)
521 for d in dirs:
522 for d in dirs:
522 _data = d.unicode_path
523 _data = d.unicode_path
523 if not flat:
524 if not flat:
524 _data = {
525 _data = {
525 "name": h.escape(d.unicode_path),
526 "name": h.escape(d.unicode_path),
526 "type": "dir",
527 "type": "dir",
527 }
528 }
528 if extended_info:
529 if extended_info:
529 _data.update({
530 _data.update({
530 "md5": None,
531 "md5": None,
531 "binary": None,
532 "binary": None,
532 "size": None,
533 "size": None,
533 "extension": None,
534 "extension": None,
534 })
535 })
535 if content:
536 if content:
536 _data.update({
537 _data.update({
537 "content": None
538 "content": None
538 })
539 })
539 _dirs.append(_data)
540 _dirs.append(_data)
540 except RepositoryError:
541 except RepositoryError:
541 log.debug("Exception in get_nodes", exc_info=True)
542 log.debug("Exception in get_nodes", exc_info=True)
542 raise
543 raise
543
544
544 return _dirs, _files
545 return _dirs, _files
545
546
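A possible invocation of the non-flat, extended form (repo name and commit id are placeholders); each returned entry is then a dict instead of a bare path:

    # Hypothetical call; 'group/my-repo' and the commit id are placeholders.
    dirs, files = ScmModel().get_nodes(
        'group/my-repo', commit_id='deadbeef', root_path='/',
        flat=False, extended_info=True, content=False,
        max_file_bytes=1024 * 1024)
    # files -> [{'name': 'setup.py', 'type': 'file', 'md5': '...', 'binary': False,
    #            'size': 1010, 'extension': 'py', 'mimetype': 'text/x-python',
    #            'lines': 40}, ...]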
546 def create_nodes(self, user, repo, message, nodes, parent_commit=None,
547 def create_nodes(self, user, repo, message, nodes, parent_commit=None,
547 author=None, trigger_push_hook=True):
548 author=None, trigger_push_hook=True):
548 """
549 """
549 Commits given multiple nodes into repo
550 Commits given multiple nodes into repo
550
551
551 :param user: RhodeCode User object or user_id, the committer
552 :param user: RhodeCode User object or user_id, the committer
552 :param repo: RhodeCode Repository object
553 :param repo: RhodeCode Repository object
553 :param message: commit message
554 :param message: commit message
554 :param nodes: mapping {filename:{'content':content},...}
555 :param nodes: mapping {filename:{'content':content},...}
555 :param parent_commit: parent commit, can be empty, in which case it's
556 :param parent_commit: parent commit, can be empty, in which case it's
556 the initial commit
557 the initial commit
557 :param author: author of the commit, can be different from the committer,
558 :param author: author of the commit, can be different from the committer,
558 only for git
559 only for git
559 :param trigger_push_hook: trigger push hooks
560 :param trigger_push_hook: trigger push hooks
560
561
561 :returns: new committed commit
562 :returns: new committed commit
562 """
563 """
563
564
564 user = self._get_user(user)
565 user = self._get_user(user)
565 scm_instance = repo.scm_instance(cache=False)
566 scm_instance = repo.scm_instance(cache=False)
566
567
567 processed_nodes = []
568 processed_nodes = []
568 for f_path in nodes:
569 for f_path in nodes:
569 f_path = self._sanitize_path(f_path)
570 f_path = self._sanitize_path(f_path)
570 content = nodes[f_path]['content']
571 content = nodes[f_path]['content']
571 f_path = safe_str(f_path)
572 f_path = safe_str(f_path)
572 # decoding here ensures that we have properly encoded values;
573 # decoding here ensures that we have properly encoded values;
573 # in any other case this will throw exceptions and deny the commit
574 # in any other case this will throw exceptions and deny the commit
574 if isinstance(content, (basestring,)):
575 if isinstance(content, (basestring,)):
575 content = safe_str(content)
576 content = safe_str(content)
576 elif isinstance(content, (file, cStringIO.OutputType,)):
577 elif isinstance(content, (file, cStringIO.OutputType,)):
577 content = content.read()
578 content = content.read()
578 else:
579 else:
579 raise Exception('Content is of unrecognized type %s' % (
580 raise Exception('Content is of unrecognized type %s' % (
580 type(content)
581 type(content)
581 ))
582 ))
582 processed_nodes.append((f_path, content))
583 processed_nodes.append((f_path, content))
583
584
584 message = safe_unicode(message)
585 message = safe_unicode(message)
585 commiter = user.full_contact
586 commiter = user.full_contact
586 author = safe_unicode(author) if author else commiter
587 author = safe_unicode(author) if author else commiter
587
588
588 imc = scm_instance.in_memory_commit
589 imc = scm_instance.in_memory_commit
589
590
590 if not parent_commit:
591 if not parent_commit:
591 parent_commit = EmptyCommit(alias=scm_instance.alias)
592 parent_commit = EmptyCommit(alias=scm_instance.alias)
592
593
593 if isinstance(parent_commit, EmptyCommit):
594 if isinstance(parent_commit, EmptyCommit):
594 # EmptyCommit means we're editing an empty repository
595 # EmptyCommit means we're editing an empty repository
595 parents = None
596 parents = None
596 else:
597 else:
597 parents = [parent_commit]
598 parents = [parent_commit]
598 # add multiple nodes
599 # add multiple nodes
599 for path, content in processed_nodes:
600 for path, content in processed_nodes:
600 imc.add(FileNode(path, content=content))
601 imc.add(FileNode(path, content=content))
601 # TODO: handle pre push scenario
602 # TODO: handle pre push scenario
602 tip = imc.commit(message=message,
603 tip = imc.commit(message=message,
603 author=author,
604 author=author,
604 parents=parents,
605 parents=parents,
605 branch=parent_commit.branch)
606 branch=parent_commit.branch)
606
607
607 self.mark_for_invalidation(repo.repo_name)
608 self.mark_for_invalidation(repo.repo_name)
608 if trigger_push_hook:
609 if trigger_push_hook:
609 hooks_utils.trigger_post_push_hook(
610 hooks_utils.trigger_post_push_hook(
610 username=user.username, action='push_local',
611 username=user.username, action='push_local',
611 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
612 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
612 commit_ids=[tip.raw_id])
613 commit_ids=[tip.raw_id])
613 return tip
614 return tip
614
615
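The nodes argument is a mapping of file path to a dict with a 'content' key, as the docstring above notes; a sketch of a call (admin_user and db_repo stand for a User and Repository row fetched elsewhere):

    # Illustrative only; `admin_user` and `db_repo` are assumed to exist.
    tip = ScmModel().create_nodes(
        user=admin_user, repo=db_repo,
        message=u'Add project skeleton',
        nodes={
            'README.rst': {'content': 'Hello\n'},
            'docs/index.rst': {'content': 'Docs live here\n'},
        },
        parent_commit=None,   # None means committing on top of an EmptyCommit
        trigger_push_hook=True)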
615 def update_nodes(self, user, repo, message, nodes, parent_commit=None,
616 def update_nodes(self, user, repo, message, nodes, parent_commit=None,
616 author=None, trigger_push_hook=True):
617 author=None, trigger_push_hook=True):
617 user = self._get_user(user)
618 user = self._get_user(user)
618 scm_instance = repo.scm_instance(cache=False)
619 scm_instance = repo.scm_instance(cache=False)
619
620
620 message = safe_unicode(message)
621 message = safe_unicode(message)
621 commiter = user.full_contact
622 commiter = user.full_contact
622 author = safe_unicode(author) if author else commiter
623 author = safe_unicode(author) if author else commiter
623
624
624 imc = scm_instance.in_memory_commit
625 imc = scm_instance.in_memory_commit
625
626
626 if not parent_commit:
627 if not parent_commit:
627 parent_commit = EmptyCommit(alias=scm_instance.alias)
628 parent_commit = EmptyCommit(alias=scm_instance.alias)
628
629
629 if isinstance(parent_commit, EmptyCommit):
630 if isinstance(parent_commit, EmptyCommit):
630 # EmptyCommit means we're editing an empty repository
631 # EmptyCommit means we're editing an empty repository
631 parents = None
632 parents = None
632 else:
633 else:
633 parents = [parent_commit]
634 parents = [parent_commit]
634
635
635 # add multiple nodes
636 # add multiple nodes
636 for _filename, data in nodes.items():
637 for _filename, data in nodes.items():
637 # new filename, can be renamed from the old one, also sanitize
638 # new filename, can be renamed from the old one, also sanitize
638 # the path against any hacks using relative paths like ../../ etc.
639 # the path against any hacks using relative paths like ../../ etc.
639 filename = self._sanitize_path(data['filename'])
640 filename = self._sanitize_path(data['filename'])
640 old_filename = self._sanitize_path(_filename)
641 old_filename = self._sanitize_path(_filename)
641 content = data['content']
642 content = data['content']
642
643
643 filenode = FileNode(old_filename, content=content)
644 filenode = FileNode(old_filename, content=content)
644 op = data['op']
645 op = data['op']
645 if op == 'add':
646 if op == 'add':
646 imc.add(filenode)
647 imc.add(filenode)
647 elif op == 'del':
648 elif op == 'del':
648 imc.remove(filenode)
649 imc.remove(filenode)
649 elif op == 'mod':
650 elif op == 'mod':
650 if filename != old_filename:
651 if filename != old_filename:
651 # TODO: handle renames more efficiently, needs vcs lib
652 # TODO: handle renames more efficiently, needs vcs lib
652 # changes
653 # changes
653 imc.remove(filenode)
654 imc.remove(filenode)
654 imc.add(FileNode(filename, content=content))
655 imc.add(FileNode(filename, content=content))
655 else:
656 else:
656 imc.change(filenode)
657 imc.change(filenode)
657
658
658 try:
659 try:
659 # TODO: handle pre push scenario
660 # TODO: handle pre push scenario
660 # commit changes
661 # commit changes
661 tip = imc.commit(message=message,
662 tip = imc.commit(message=message,
662 author=author,
663 author=author,
663 parents=parents,
664 parents=parents,
664 branch=parent_commit.branch)
665 branch=parent_commit.branch)
665 except NodeNotChangedError:
666 except NodeNotChangedError:
666 raise
667 raise
667 except Exception as e:
668 except Exception as e:
668 log.exception("Unexpected exception during call to imc.commit")
669 log.exception("Unexpected exception during call to imc.commit")
669 raise IMCCommitError(str(e))
670 raise IMCCommitError(str(e))
670 finally:
671 finally:
671 # always clear caches, if commit fails we want fresh object also
672 # always clear caches, if commit fails we want fresh object also
672 self.mark_for_invalidation(repo.repo_name)
673 self.mark_for_invalidation(repo.repo_name)
673
674
674 if trigger_push_hook:
675 if trigger_push_hook:
675 hooks_utils.trigger_post_push_hook(
676 hooks_utils.trigger_post_push_hook(
676 username=user.username, action='push_local',
677 username=user.username, action='push_local',
677 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
678 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
678 commit_ids=[tip.raw_id])
679 commit_ids=[tip.raw_id])
679
680
680 def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
681 def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
681 author=None, trigger_push_hook=True):
682 author=None, trigger_push_hook=True):
682 """
683 """
683 Deletes the given multiple nodes from `repo`
684 Deletes the given multiple nodes from `repo`
684
685
685 :param user: RhodeCode User object or user_id, the committer
686 :param user: RhodeCode User object or user_id, the committer
686 :param repo: RhodeCode Repository object
687 :param repo: RhodeCode Repository object
687 :param message: commit message
688 :param message: commit message
688 :param nodes: mapping {filename:{'content':content},...}
689 :param nodes: mapping {filename:{'content':content},...}
689 :param parent_commit: parent commit, can be empty, in which case it's the
690 :param parent_commit: parent commit, can be empty, in which case it's the
690 initial commit
691 initial commit
691 :param author: author of the commit, can be different from the committer,
692 :param author: author of the commit, can be different from the committer,
692 only for git
693 only for git
693 :param trigger_push_hook: trigger push hooks
694 :param trigger_push_hook: trigger push hooks
694
695
695 :returns: new commit after deletion
696 :returns: new commit after deletion
696 """
697 """
697
698
698 user = self._get_user(user)
699 user = self._get_user(user)
699 scm_instance = repo.scm_instance(cache=False)
700 scm_instance = repo.scm_instance(cache=False)
700
701
701 processed_nodes = []
702 processed_nodes = []
702 for f_path in nodes:
703 for f_path in nodes:
703 f_path = self._sanitize_path(f_path)
704 f_path = self._sanitize_path(f_path)
704 # content can be empty, but for compatibility it allows the same dict
705 # content can be empty, but for compatibility it allows the same dict
705 # structure as add_nodes
706 # structure as add_nodes
706 content = nodes[f_path].get('content')
707 content = nodes[f_path].get('content')
707 processed_nodes.append((f_path, content))
708 processed_nodes.append((f_path, content))
708
709
709 message = safe_unicode(message)
710 message = safe_unicode(message)
710 commiter = user.full_contact
711 commiter = user.full_contact
711 author = safe_unicode(author) if author else commiter
712 author = safe_unicode(author) if author else commiter
712
713
713 imc = scm_instance.in_memory_commit
714 imc = scm_instance.in_memory_commit
714
715
715 if not parent_commit:
716 if not parent_commit:
716 parent_commit = EmptyCommit(alias=scm_instance.alias)
717 parent_commit = EmptyCommit(alias=scm_instance.alias)
717
718
718 if isinstance(parent_commit, EmptyCommit):
719 if isinstance(parent_commit, EmptyCommit):
719 # EmptyCommit means we're editing an empty repository
720 # EmptyCommit means we're editing an empty repository
720 parents = None
721 parents = None
721 else:
722 else:
722 parents = [parent_commit]
723 parents = [parent_commit]
723 # remove multiple nodes
724 # remove multiple nodes
724 for path, content in processed_nodes:
725 for path, content in processed_nodes:
725 imc.remove(FileNode(path, content=content))
726 imc.remove(FileNode(path, content=content))
726
727
727 # TODO: handle pre push scenario
728 # TODO: handle pre push scenario
728 tip = imc.commit(message=message,
729 tip = imc.commit(message=message,
729 author=author,
730 author=author,
730 parents=parents,
731 parents=parents,
731 branch=parent_commit.branch)
732 branch=parent_commit.branch)
732
733
733 self.mark_for_invalidation(repo.repo_name)
734 self.mark_for_invalidation(repo.repo_name)
734 if trigger_push_hook:
735 if trigger_push_hook:
735 hooks_utils.trigger_post_push_hook(
736 hooks_utils.trigger_post_push_hook(
736 username=user.username, action='push_local',
737 username=user.username, action='push_local',
737 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
738 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
738 commit_ids=[tip.raw_id])
739 commit_ids=[tip.raw_id])
739 return tip
740 return tip
740
741
741 def strip(self, repo, commit_id, branch):
742 def strip(self, repo, commit_id, branch):
742 scm_instance = repo.scm_instance(cache=False)
743 scm_instance = repo.scm_instance(cache=False)
743 scm_instance.config.clear_section('hooks')
744 scm_instance.config.clear_section('hooks')
744 scm_instance.strip(commit_id, branch)
745 scm_instance.strip(commit_id, branch)
745 self.mark_for_invalidation(repo.repo_name)
746 self.mark_for_invalidation(repo.repo_name)
746
747
747 def get_unread_journal(self):
748 def get_unread_journal(self):
748 return self.sa.query(UserLog).count()
749 return self.sa.query(UserLog).count()
749
750
750 def get_repo_landing_revs(self, repo=None):
751 def get_repo_landing_revs(self, repo=None):
751 """
752 """
752 Generates select options with tags, branches and bookmarks (for hg only),
753 Generates select options with tags, branches and bookmarks (for hg only),
753 grouped by type
754 grouped by type
754
755
755 :param repo:
756 :param repo:
756 """
757 """
757
758
758 repo = self._get_repo(repo)
759 repo = self._get_repo(repo)
759
760
760 hist_l = [
761 hist_l = [
761 ['rev:tip', _('latest tip')]
762 ['rev:tip', _('latest tip')]
762 ]
763 ]
763 choices = [
764 choices = [
764 'rev:tip'
765 'rev:tip'
765 ]
766 ]
766
767
767 if not repo:
768 if not repo:
768 return choices, hist_l
769 return choices, hist_l
769
770
770 repo = repo.scm_instance()
771 repo = repo.scm_instance()
771
772
772 branches_group = (
773 branches_group = (
773 [(u'branch:%s' % safe_unicode(b), safe_unicode(b))
774 [(u'branch:%s' % safe_unicode(b), safe_unicode(b))
774 for b in repo.branches],
775 for b in repo.branches],
775 _("Branches"))
776 _("Branches"))
776 hist_l.append(branches_group)
777 hist_l.append(branches_group)
777 choices.extend([x[0] for x in branches_group[0]])
778 choices.extend([x[0] for x in branches_group[0]])
778
779
779 if repo.alias == 'hg':
780 if repo.alias == 'hg':
780 bookmarks_group = (
781 bookmarks_group = (
781 [(u'book:%s' % safe_unicode(b), safe_unicode(b))
782 [(u'book:%s' % safe_unicode(b), safe_unicode(b))
782 for b in repo.bookmarks],
783 for b in repo.bookmarks],
783 _("Bookmarks"))
784 _("Bookmarks"))
784 hist_l.append(bookmarks_group)
785 hist_l.append(bookmarks_group)
785 choices.extend([x[0] for x in bookmarks_group[0]])
786 choices.extend([x[0] for x in bookmarks_group[0]])
786
787
787 tags_group = (
788 tags_group = (
788 [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
789 [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
789 for t in repo.tags],
790 for t in repo.tags],
790 _("Tags"))
791 _("Tags"))
791 hist_l.append(tags_group)
792 hist_l.append(tags_group)
792 choices.extend([x[0] for x in tags_group[0]])
793 choices.extend([x[0] for x in tags_group[0]])
793
794
794 return choices, hist_l
795 return choices, hist_l
795
796
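For a Mercurial repository, the returned (choices, hist_l) pair looks roughly like the following; the branch, bookmark and tag names are made up:

    # choices, hist_l = ScmModel().get_repo_landing_revs(db_repo)
    # choices -> ['rev:tip', u'branch:default', u'book:feature-x', u'tag:v1.0.0']
    # hist_l  -> [['rev:tip', u'latest tip'],
    #             ([(u'branch:default', u'default')], u'Branches'),
    #             ([(u'book:feature-x', u'feature-x')], u'Bookmarks'),
    #             ([(u'tag:v1.0.0', u'v1.0.0')], u'Tags')]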
796 def install_git_hook(self, repo, force_create=False):
797 def install_git_hook(self, repo, force_create=False):
797 """
798 """
798 Creates a rhodecode hook inside a git repository
799 Creates a rhodecode hook inside a git repository
799
800
800 :param repo: Instance of VCS repo
801 :param repo: Instance of VCS repo
801 :param force_create: Create even if a hook with the same name exists
802 :param force_create: Create even if a hook with the same name exists
802 """
803 """
803
804
804 loc = os.path.join(repo.path, 'hooks')
805 loc = os.path.join(repo.path, 'hooks')
805 if not repo.bare:
806 if not repo.bare:
806 loc = os.path.join(repo.path, '.git', 'hooks')
807 loc = os.path.join(repo.path, '.git', 'hooks')
807 if not os.path.isdir(loc):
808 if not os.path.isdir(loc):
808 os.makedirs(loc, mode=0777)
809 os.makedirs(loc, mode=0777)
809
810
810 tmpl_post = pkg_resources.resource_string(
811 tmpl_post = pkg_resources.resource_string(
811 'rhodecode', '/'.join(
812 'rhodecode', '/'.join(
812 ('config', 'hook_templates', 'git_post_receive.py.tmpl')))
813 ('config', 'hook_templates', 'git_post_receive.py.tmpl')))
813 tmpl_pre = pkg_resources.resource_string(
814 tmpl_pre = pkg_resources.resource_string(
814 'rhodecode', '/'.join(
815 'rhodecode', '/'.join(
815 ('config', 'hook_templates', 'git_pre_receive.py.tmpl')))
816 ('config', 'hook_templates', 'git_pre_receive.py.tmpl')))
816
817
817 for h_type, tmpl in [('pre', tmpl_pre), ('post', tmpl_post)]:
818 for h_type, tmpl in [('pre', tmpl_pre), ('post', tmpl_post)]:
818 _hook_file = os.path.join(loc, '%s-receive' % h_type)
819 _hook_file = os.path.join(loc, '%s-receive' % h_type)
819 log.debug('Installing git hook in repo %s', repo)
820 log.debug('Installing git hook in repo %s', repo)
820 _rhodecode_hook = _check_rhodecode_hook(_hook_file)
821 _rhodecode_hook = _check_rhodecode_hook(_hook_file)
821
822
822 if _rhodecode_hook or force_create:
823 if _rhodecode_hook or force_create:
823 log.debug('writing %s hook file !', h_type)
824 log.debug('writing %s hook file !', h_type)
824 try:
825 try:
825 with open(_hook_file, 'wb') as f:
826 with open(_hook_file, 'wb') as f:
826 tmpl = tmpl.replace('_TMPL_', rhodecode.__version__)
827 tmpl = tmpl.replace('_TMPL_', rhodecode.__version__)
827 tmpl = tmpl.replace('_ENV_', sys.executable)
828 tmpl = tmpl.replace('_ENV_', sys.executable)
828 f.write(tmpl)
829 f.write(tmpl)
829 os.chmod(_hook_file, 0755)
830 os.chmod(_hook_file, 0755)
830 except IOError:
831 except IOError:
831 log.exception('error writing hook file %s', _hook_file)
832 log.exception('error writing hook file %s', _hook_file)
832 else:
833 else:
833 log.debug('skipping writing hook file')
834 log.debug('skipping writing hook file')
834
835
835 def install_svn_hooks(self, repo, force_create=False):
836 def install_svn_hooks(self, repo, force_create=False):
836 """
837 """
837 Creates rhodecode hooks inside an svn repository
838 Creates rhodecode hooks inside an svn repository
838
839
839 :param repo: Instance of VCS repo
840 :param repo: Instance of VCS repo
840 :param force_create: Create even if a hook with the same name exists
841 :param force_create: Create even if a hook with the same name exists
841 """
842 """
842 hooks_path = os.path.join(repo.path, 'hooks')
843 hooks_path = os.path.join(repo.path, 'hooks')
843 if not os.path.isdir(hooks_path):
844 if not os.path.isdir(hooks_path):
844 os.makedirs(hooks_path)
845 os.makedirs(hooks_path)
845 post_commit_tmpl = pkg_resources.resource_string(
846 post_commit_tmpl = pkg_resources.resource_string(
846 'rhodecode', '/'.join(
847 'rhodecode', '/'.join(
847 ('config', 'hook_templates', 'svn_post_commit_hook.py.tmpl')))
848 ('config', 'hook_templates', 'svn_post_commit_hook.py.tmpl')))
848 pre_commit_template = pkg_resources.resource_string(
849 pre_commit_template = pkg_resources.resource_string(
849 'rhodecode', '/'.join(
850 'rhodecode', '/'.join(
850 ('config', 'hook_templates', 'svn_pre_commit_hook.py.tmpl')))
851 ('config', 'hook_templates', 'svn_pre_commit_hook.py.tmpl')))
851 templates = {
852 templates = {
852 'post-commit': post_commit_tmpl,
853 'post-commit': post_commit_tmpl,
853 'pre-commit': pre_commit_template
854 'pre-commit': pre_commit_template
854 }
855 }
855 for filename in templates:
856 for filename in templates:
856 _hook_file = os.path.join(hooks_path, filename)
857 _hook_file = os.path.join(hooks_path, filename)
857 _rhodecode_hook = _check_rhodecode_hook(_hook_file)
858 _rhodecode_hook = _check_rhodecode_hook(_hook_file)
858 if _rhodecode_hook or force_create:
859 if _rhodecode_hook or force_create:
859 log.debug('writing %s hook file !', filename)
860 log.debug('writing %s hook file !', filename)
860 template = templates[filename]
861 template = templates[filename]
861 try:
862 try:
862 with open(_hook_file, 'wb') as f:
863 with open(_hook_file, 'wb') as f:
863 template = template.replace(
864 template = template.replace(
864 '_TMPL_', rhodecode.__version__)
865 '_TMPL_', rhodecode.__version__)
865 template = template.replace('_ENV_', sys.executable)
866 template = template.replace('_ENV_', sys.executable)
866 f.write(template)
867 f.write(template)
867 os.chmod(_hook_file, 0755)
868 os.chmod(_hook_file, 0755)
868 except IOError:
869 except IOError:
869 log.exception('error writing hook file %s', filename)
870 log.exception('error writing hook file %s', filename)
870 else:
871 else:
871 log.debug('skipping writing hook file')
872 log.debug('skipping writing hook file')
872
873
873 def install_hooks(self, repo, repo_type):
874 def install_hooks(self, repo, repo_type):
874 if repo_type == 'git':
875 if repo_type == 'git':
875 self.install_git_hook(repo)
876 self.install_git_hook(repo)
876 elif repo_type == 'svn':
877 elif repo_type == 'svn':
877 self.install_svn_hooks(repo)
878 self.install_svn_hooks(repo)
878
879
879 def get_server_info(self, environ=None):
880 def get_server_info(self, environ=None):
880 server_info = get_system_info(environ)
881 server_info = get_system_info(environ)
881 return server_info
882 return server_info
882
883
883
884
884 def _check_rhodecode_hook(hook_path):
885 def _check_rhodecode_hook(hook_path):
885 """
886 """
886 Check if the hook was created by RhodeCode
887 Check if the hook was created by RhodeCode
887 """
888 """
888 if not os.path.exists(hook_path):
889 if not os.path.exists(hook_path):
889 return True
890 return True
890
891
891 log.debug('hook exists, checking if it is from rhodecode')
892 log.debug('hook exists, checking if it is from rhodecode')
892 hook_content = _read_hook(hook_path)
893 hook_content = _read_hook(hook_path)
893 matches = re.search(r'(?:RC_HOOK_VER)\s*=\s*(.*)', hook_content)
894 matches = re.search(r'(?:RC_HOOK_VER)\s*=\s*(.*)', hook_content)
894 if matches:
895 if matches:
895 try:
896 try:
896 version = matches.groups()[0]
897 version = matches.groups()[0]
897 log.debug('got %s, it is rhodecode', version)
898 log.debug('got %s, it is rhodecode', version)
898 return True
899 return True
899 except Exception:
900 except Exception:
900 log.exception("Exception while reading the hook version.")
901 log.exception("Exception while reading the hook version.")
901
902
902 return False
903 return False
903
904
904
905
905 def _read_hook(hook_path):
906 def _read_hook(hook_path):
906 with open(hook_path, 'rb') as f:
907 with open(hook_path, 'rb') as f:
907 content = f.read()
908 content = f.read()
908 return content
909 return content
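The marker these helpers look for is a plain RC_HOOK_VER assignment that the generated hook templates embed; a self-contained sketch of the same detection (the hook path is made up):

    import os
    import re

    def looks_like_rhodecode_hook(hook_path):
        # a missing hook counts as "safe to (re)create", as in _check_rhodecode_hook
        if not os.path.exists(hook_path):
            return True
        with open(hook_path) as f:
            content = f.read()
        return re.search(r'RC_HOOK_VER\s*=\s*(.*)', content) is not None

    # looks_like_rhodecode_hook('/srv/repos/my-repo/.git/hooks/post-receive')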
@@ -1,92 +1,92 b''
1 <%def name="render_line(line_num, tokens,
1 <%def name="render_line(line_num, tokens,
2 annotation=None,
2 annotation=None,
3 bgcolor=None, show_annotation=None)">
3 bgcolor=None, show_annotation=None)">
4 <%
4 <%
5 from rhodecode.lib.codeblocks import render_tokenstream
5 from rhodecode.lib.codeblocks import render_tokenstream
6 # avoid module lookup for performance
6 # avoid module lookup for performance
7 html_escape = h.html_escape
7 html_escape = h.html_escape
8 tooltip = h.tooltip
8 tooltip = h.tooltip
9 %>
9 %>
10 <tr class="cb-line cb-line-fresh ${'cb-annotate' if show_annotation else ''}"
10 <tr class="cb-line cb-line-fresh ${'cb-annotate' if show_annotation else ''}"
11 %if annotation:
11 %if annotation:
12 data-revision="${annotation.revision}"
12 data-revision="${annotation.revision}"
13 %endif
13 %endif
14 >
14 >
15
15
16 % if annotation:
16 % if annotation:
17 % if show_annotation:
17 % if show_annotation:
18 <td class="cb-annotate-info tooltip"
18 <td class="cb-annotate-info tooltip"
19 title="Author: ${tooltip(annotation.author) | entity}<br>Date: ${annotation.date}<br>Message: ${annotation.message | entity}"
19 title="Author: ${tooltip(annotation.author) | entity}<br>Date: ${annotation.date}<br>Message: ${annotation.message | entity}"
20 >
20 >
21 ${h.gravatar_with_user(annotation.author, 16) | n}
21 ${h.gravatar_with_user(request, annotation.author, 16) | n}
22 <div class="cb-annotate-message truncate-wrap">${h.chop_at_smart(annotation.message, '\n', suffix_if_chopped='...')}</div>
23 </td>
24 <td class="cb-annotate-message-spacer">
25 <a class="tooltip" href="#show-previous-annotation" onclick="return annotationController.previousAnnotation('${annotation.raw_id}', '${c.f_path}', ${line_num})" title="${tooltip(_('view annotation from before this change'))}">
26 <i class="icon-left"></i>
27 </a>
28 </td>
29 <td
30 class="cb-annotate-revision"
31 data-revision="${annotation.revision}"
32 onclick="$('[data-revision=${annotation.revision}]').toggleClass('cb-line-fresh')"
33 style="background: ${bgcolor}">
34 <a class="cb-annotate" href="${h.url('changeset_home',repo_name=c.repo_name,revision=annotation.raw_id)}">
35 r${annotation.revision}
36 </a>
37 </td>
38 % else:
39 <td></td>
40 <td class="cb-annotate-message-spacer"></td>
41 <td
42 class="cb-annotate-revision"
43 data-revision="${annotation.revision}"
44 onclick="$('[data-revision=${annotation.revision}]').toggleClass('cb-line-fresh')"
45 style="background: ${bgcolor}">
46 </td>
47 % endif
48 % else:
49 <td colspan="3"></td>
50 % endif
51
52
53 <td class="cb-lineno" id="L${line_num}">
54 <a data-line-no="${line_num}" href="#L${line_num}"></a>
55 </td>
56 <td class="cb-content cb-content-fresh"
57 %if bgcolor:
58 style="background: ${bgcolor}"
59 %endif
60 >
61 ## newline at end is necessary for highlight to work when line is empty
62 ## and for copy pasting code to work as expected
63 <span class="cb-code">${render_tokenstream(tokens)|n}${'\n'}</span>
64 </td>
65 </tr>
66 </%def>
67
68 <%def name="render_annotation_lines(annotation, lines, color_hasher)">
69 % for line_num, tokens in lines:
70 ${render_line(line_num, tokens,
71 bgcolor=color_hasher(annotation and annotation.raw_id or ''),
72 annotation=annotation, show_annotation=loop.first
73 )}
74 % endfor
75 <script>
76 var AnnotationController = function() {
77 var self = this;
78
79 this.previousAnnotation = function(commitId, fPath, lineNo) {
80 var params = {
81 'repo_name': templateContext.repo_name,
82 'commit_id': commitId,
83 'f_path': fPath,
84 'line_anchor': lineNo
85 };
86 window.location = pyroutes.url('repo_files:annotated_previous', params);
87 return false;
88 };
89 };
90 var annotationController = new AnnotationController();
91 </script>
92 </%def>
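The functional change in the annotation template above is the `h.gravatar_with_user` call gaining `request` as its first argument, so the avatar snippet is built from per-request state instead of the removed non-request renderer. As a rough illustration only, here is a minimal, self-contained Python sketch of a request-aware helper of that shape; the helper names, the "Name <email>" parsing rule, and the markup are assumptions made for this sketch, not RhodeCode's actual implementation.

# Hedged sketch: a request-aware gravatar helper in the spirit of the updated
# call sites above. Illustrative only, not RhodeCode's real h.gravatar_with_user.
import hashlib
import re


def _author_email(author):
    # Assumption: author strings look like "Full Name <user@example.com>".
    match = re.search(r'<([^>]+)>', author or '')
    return match.group(1) if match else ''


def gravatar_url(email, size=16):
    # Standard Gravatar scheme: MD5 of the trimmed, lower-cased address.
    digest = hashlib.md5(email.strip().lower().encode('utf-8')).hexdigest()
    return 'https://secure.gravatar.com/avatar/%s?s=%d' % (digest, size)


def gravatar_with_user(request, author, size=16):
    # `request` is taken explicitly, matching the new call signature; a real
    # helper could read per-request settings from it, while this sketch only
    # accepts it for signature parity.
    email = _author_email(author)
    src = gravatar_url(email, size) if email else ''
    return ('<span class="user">'
            '<img class="gravatar" src="%s" width="%d" height="%d" alt=""/> %s'
            '</span>' % (src, size, size, author))

In the Mako templates above, `request` comes straight from the rendering context, which is why the call sites only needed the extra argument.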
@@ -1,82 +1,82 b''
1 <div id="file-tree-wrapper" class="browser-body ${'full-load' if c.full_load else ''}">
2 <table class="code-browser rctable">
3 <thead>
4 <tr>
5 <th>${_('Name')}</th>
6 <th>${_('Size')}</th>
7 <th>${_('Modified')}</th>
8 <th>${_('Last Commit')}</th>
9 <th>${_('Author')}</th>
10 </tr>
11 </thead>
12
13 <tbody id="tbody">
14 %if c.file.parent:
15 <tr class="parity0">
16 <td class="td-componentname">
17 <a href="${h.route_path('repo_files',repo_name=c.repo_name,commit_id=c.commit.raw_id,f_path=c.file.parent.path)}" class="pjax-link">
18 <i class="icon-directory"></i>..
19 </a>
20 </td>
21 <td></td>
22 <td></td>
23 <td></td>
24 <td></td>
25 </tr>
26 %endif
27 %for cnt,node in enumerate(c.file):
28 <tr class="parity${cnt%2}">
29 <td class="td-componentname">
30 % if node.is_submodule():
31 <span class="submodule-dir">
32 % if node.url.startswith('http://') or node.url.startswith('https://'):
33 <a href="${node.url}">
34 <i class="icon-directory browser-dir"></i>${node.name}
35 </a>
36 % else:
37 <i class="icon-directory browser-dir"></i>${node.name}
38 % endif
39 </span>
40 % else:
41 <a href="${h.route_path('repo_files',repo_name=c.repo_name,commit_id=c.commit.raw_id,f_path=h.safe_unicode(node.path))}" class="pjax-link">
42 <i class="${'icon-file-text browser-file' if node.is_file() else 'icon-directory browser-dir'}"></i>${node.name}
43 </a>
44 % endif
45 </td>
46 %if node.is_file():
47 <td class="td-size" data-attr-name="size">
48 % if c.full_load:
49 <span data-size="${node.size}">${h.format_byte_size_binary(node.size)}</span>
50 % else:
51 ${_('Loading ...')}
52 % endif
53 </td>
54 <td class="td-time" data-attr-name="modified_at">
55 % if c.full_load:
56 <span data-date="${node.last_commit.date}">${h.age_component(node.last_commit.date)}</span>
57 % endif
58 </td>
59 <td class="td-hash" data-attr-name="commit_id">
60 % if c.full_load:
61 <div class="tooltip" title="${h.tooltip(node.last_commit.message)}">
62 <pre data-commit-id="${node.last_commit.raw_id}">r${node.last_commit.revision}:${node.last_commit.short_id}</pre>
63 </div>
64 % endif
65 </td>
66 <td class="td-user" data-attr-name="author">
67 % if c.full_load:
- 68 <span data-author="${node.last_commit.author}" title="${h.tooltip(node.last_commit.author)}">${h.gravatar_with_user(node.last_commit.author)|n}</span>
+ 68 <span data-author="${node.last_commit.author}" title="${h.tooltip(node.last_commit.author)}">${h.gravatar_with_user(request, node.last_commit.author)|n}</span>
69 % endif
70 </td>
71 %else:
72 <td></td>
73 <td></td>
74 <td></td>
75 <td></td>
76 %endif
77 </tr>
78 %endfor
79 </tbody>
80 <tbody id="tbody_filtered"></tbody>
81 </table>
82 </div>
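The size column in the tree above is populated by `h.format_byte_size_binary(node.size)`, i.e. file sizes rendered with binary (base-1024) units. Below is a small, hypothetical sketch of that kind of formatter, shown only to illustrate the technique; RhodeCode's actual helper may use different rounding or labels.

# Hedged sketch of a binary (base-1024) byte-size formatter; illustrative
# only, not RhodeCode's implementation.
def format_byte_size_binary(num_bytes):
    # Walk up the IEC units, dividing by 1024 until the value fits.
    value = float(num_bytes)
    for unit in ('B', 'KiB', 'MiB', 'GiB', 'TiB'):
        if abs(value) < 1024.0 or unit == 'TiB':
            return '%.1f %s' % (value, unit)
        value /= 1024.0


print(format_byte_size_binary(2048))           # 2.0 KiB
print(format_byte_size_binary(5 * 1024 ** 3))  # 5.0 GiB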