##// END OF EJS Templates
FOLD: into unicode changes
super-admin -
r4959:00826968 default
parent child Browse files
Show More

The requested changes are too big and content was truncated. Show full diff

@@ -1,2530 +1,2533 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 authentication and permission libraries
22 authentication and permission libraries
23 """
23 """
24
24
25 import os
25 import os
26
26
27 import colander
27 import colander
28 import time
28 import time
29 import collections
29 import collections
30 import fnmatch
30 import fnmatch
31 import itertools
31 import itertools
32 import logging
32 import logging
33 import random
33 import random
34 import traceback
34 import traceback
35 from functools import wraps
35 from functools import wraps
36
36
37 import ipaddress
37 import ipaddress
38
38
39 from pyramid.httpexceptions import HTTPForbidden, HTTPFound, HTTPNotFound
39 from pyramid.httpexceptions import HTTPForbidden, HTTPFound, HTTPNotFound
40 from sqlalchemy.orm.exc import ObjectDeletedError
40 from sqlalchemy.orm.exc import ObjectDeletedError
41 from sqlalchemy.orm import joinedload
41 from sqlalchemy.orm import joinedload
42 from zope.cachedescriptors.property import Lazy as LazyProperty
42 from zope.cachedescriptors.property import Lazy as LazyProperty
43
43
44 import rhodecode
44 import rhodecode
45 from rhodecode.model import meta
45 from rhodecode.model import meta
46 from rhodecode.model.meta import Session
46 from rhodecode.model.meta import Session
47 from rhodecode.model.user import UserModel
47 from rhodecode.model.user import UserModel
48 from rhodecode.model.db import (
48 from rhodecode.model.db import (
49 false, User, Repository, Permission, UserToPerm, UserGroupToPerm, UserGroupMember,
49 false, User, Repository, Permission, UserToPerm, UserGroupToPerm, UserGroupMember,
50 UserIpMap, UserApiKeys, RepoGroup, UserGroup, UserNotice)
50 UserIpMap, UserApiKeys, RepoGroup, UserGroup, UserNotice)
51 from rhodecode.lib import rc_cache
51 from rhodecode.lib import rc_cache
52 from rhodecode.lib.utils import (
52 from rhodecode.lib.utils import (
53 get_repo_slug, get_repo_group_slug, get_user_group_slug)
53 get_repo_slug, get_repo_group_slug, get_user_group_slug)
54 from rhodecode.lib.type_utils import aslist
54 from rhodecode.lib.type_utils import aslist
55 from rhodecode.lib.hash_utils import sha1, sha256, md5
55 from rhodecode.lib.hash_utils import sha1, sha256, md5
56 from rhodecode.lib.str_utils import ascii_bytes, safe_str, safe_int, safe_bytes
56 from rhodecode.lib.str_utils import ascii_bytes, safe_str, safe_int, safe_bytes
57 from rhodecode.lib.caching_query import FromCache
57 from rhodecode.lib.caching_query import FromCache
58
58
59
59
60 if rhodecode.is_unix:
60 if rhodecode.is_unix:
61 import bcrypt
61 import bcrypt
62
62
63 log = logging.getLogger(__name__)
63 log = logging.getLogger(__name__)
64
64
65 csrf_token_key = "csrf_token"
65 csrf_token_key = "csrf_token"
66
66
67
67
class PasswordGenerator(object):
    """
    Simple helper for generating random passwords drawn from a chosen
    set of characters.

    usage::
        passwd_gen = PasswordGenerator()
        # generate an 8-letter password containing only big and small
        # letters of the alphabet
        passwd_gen.gen_password(8, passwd_gen.ALPHABETS_BIG_SMALL)
    """
    ALPHABETS_NUM = r'''1234567890'''
    ALPHABETS_SMALL = r'''qwertyuiopasdfghjklzxcvbnm'''
    ALPHABETS_BIG = r'''QWERTYUIOPASDFGHJKLZXCVBNM'''
    ALPHABETS_SPECIAL = r'''`-=[]\;',./~!@#$%^&*()_+{}|:"<>?'''
    ALPHABETS_FULL = ALPHABETS_BIG + ALPHABETS_SMALL \
        + ALPHABETS_NUM + ALPHABETS_SPECIAL
    ALPHABETS_ALPHANUM = ALPHABETS_BIG + ALPHABETS_SMALL + ALPHABETS_NUM
    ALPHABETS_BIG_SMALL = ALPHABETS_BIG + ALPHABETS_SMALL
    ALPHABETS_ALPHANUM_BIG = ALPHABETS_BIG + ALPHABETS_NUM
    ALPHABETS_ALPHANUM_SMALL = ALPHABETS_SMALL + ALPHABETS_NUM

    def __init__(self, passwd=''):
        # holds the most recently generated password
        self.passwd = passwd

    def gen_password(self, length, type_=None):
        """
        Generate and return a random password of `length` characters
        taken from the `type_` alphabet (full set when not given).

        NOTE(review): uses `random`, not `secrets` -- matches the
        original behavior; confirm before relying on this for
        security-sensitive tokens.
        """
        alphabet = self.ALPHABETS_FULL if type_ is None else type_
        chosen = [random.choice(alphabet) for _ in range(length)]
        self.passwd = ''.join(chosen)
        return self.passwd
97
97
98
98
class _RhodeCodeCryptoBase(object):
    # prefix that identifies hashes produced by a given backend
    ENC_PREF = None

    def hash_create(self, str_):
        """
        Hash the given string.

        :param str_: password to hash
        """
        raise NotImplementedError

    def hash_check_with_upgrade(self, password, hashed):
        """
        Return a ``(matches, new_hash)`` tuple.  ``matches`` states
        whether ``password`` corresponds to ``hashed``; ``new_hash`` is a
        replacement hash when the password should be migrated to a newer
        cipher -- always ``None`` in this base implementation.
        """
        return self.hash_check(password, hashed), None

    def hash_check(self, password, hashed):
        """
        Check ``password`` against its hashed value.

        :param password: password
        :param hashed: password in hashed form
        """
        raise NotImplementedError

    def _assert_bytes(self, value):
        """
        Guard against passing the wrong string type: non-ascii passwords
        can cause hard to detect issues, so the type is validated at
        runtime to surface such mistakes early.
        """
        if isinstance(value, str):
            return
        raise TypeError(
            "Bytestring required as input, got %r." % (value, ))
138
138
139
139
class _RhodeCodeCryptoBCrypt(_RhodeCodeCryptoBase):
    ENC_PREF = ('$2a$10', '$2b$10')

    def hash_create(self, str_):
        """Hash `str_` with bcrypt (work factor 10)."""
        self._assert_bytes(str_)
        return bcrypt.hashpw(str_, bcrypt.gensalt(10))

    def hash_check_with_upgrade(self, password, hashed):
        """
        Return a ``(matches, new_hash)`` tuple, where ``new_hash`` is a
        fresh bcrypt hash when the password should be migrated.

        Upgrade logic:
        - if ``password`` matches the bcrypt hash, the stored hash is
          already current -- proceed without changes
        - otherwise retry the comparison as sha256; a match there means
          the stored hash uses the legacy cipher, so signal a re-hash
        """
        new_hash = None

        # regular bcrypt check first
        matches = self.hash_check(password, hashed)

        # fall back to the legacy sha256 scheme
        # (equivalent to _RhodeCodeCryptoSha256().hash_check())
        if not matches and _RhodeCodeCryptoSha256().hash_check(password, hashed):
            new_hash = self.hash_create(password)  # migrate to bcrypt
            matches = True

        return matches, new_hash

    def hash_check(self, password, hashed):
        """
        Check ``password`` against its hashed value.

        :param password: password
        :param hashed: password in hashed form
        """
        self._assert_bytes(password)
        try:
            return bcrypt.hashpw(password, hashed) == hashed
        except ValueError as e:
            # most likely an invalid salt; treat it as a wrong password
            # instead of crashing
            log.debug('Failed to check password hash using bcrypt %s',
                      safe_str(e))

        return False
194
194
195
195
class _RhodeCodeCryptoSha256(_RhodeCodeCryptoBase):
    ENC_PREF = '_'

    def hash_create(self, str_):
        """Hash `str_` with sha256 (legacy scheme)."""
        self._assert_bytes(str_)
        return sha256(str_)

    def hash_check(self, password, hashed):
        """
        Check ``password`` against its hashed value.

        :param password: password
        :param hashed: password in hashed form
        """
        self._assert_bytes(password)
        return hashed == sha256(password)
212
212
213
213
class _RhodeCodeCryptoTest(_RhodeCodeCryptoBase):
    # cheap sha1 backend, only selected while running the test suite
    ENC_PREF = '_'

    def hash_create(self, str_):
        """Hash `str_` with sha1 (test-only, fast)."""
        self._assert_bytes(str_)
        return sha1(str_)

    def hash_check(self, password, hashed):
        """
        Check ``password`` against its hashed value.

        :param password: password
        :param hashed: password in hashed form
        """
        self._assert_bytes(password)
        return hashed == sha1(password)
230
230
231
231
def crypto_backend():
    """
    Return the matching crypto backend instance.

    While the test suite runs, the cheap sha1-based backend is picked,
    because bcrypt is deliberately expensive to calculate.
    """
    if rhodecode.is_test:
        return _RhodeCodeCryptoTest()
    return _RhodeCodeCryptoBCrypt()
245
245
246
246
def get_crypt_password(password):
    """
    Create the hash of `password` with the active crypto backend.

    :param password: The cleartext password.
    :type password: unicode
    """
    return crypto_backend().hash_create(safe_str(password))
256
256
257
257
def check_password(password, hashed):
    """
    Check if the value in `password` matches the hash in `hashed`.

    :param password: The cleartext password.
    :type password: unicode

    :param hashed: The expected hashed version of the password.
    :type hashed: The hash has to be passed in in text representation.
    """
    return crypto_backend().hash_check(safe_str(password), hashed)
270
270
271
271
def generate_auth_token(data, salt=None):
    """
    Generates API KEY (sha1 digest) from given string.

    :param data: seed string for the token
    :param salt: optional salt; a random 16 bytes when not given
    """

    if salt is None:
        salt = os.urandom(16)
    # BUG FIX: `os.urandom` returns bytes, so `data + salt` raised
    # TypeError for str input; coerce `data` to bytes before hashing
    return sha1(safe_bytes(data) + salt)
280
280
281
281
def get_came_from(request):
    """
    Build path+query_string of `request`, sanitized by removing any
    `auth_token` parameter before it is used as a redirection target.
    """
    _req = request

    came_from = _req.path
    if 'auth_token' in _req.GET:
        # never leak the auth token through the redirect target
        _req.GET.pop('auth_token')

    query_string = _req.query_string
    if query_string:
        came_from = '{}?{}'.format(came_from, query_string)

    return came_from
297
297
298
298
class CookieStoreWrapper(object):
    """
    Thin accessor over a cookie store that may be either a plain dict or
    an `AuthUser` instance (read via its ``__dict__``).
    """

    def __init__(self, cookie_store):
        self.cookie_store = cookie_store

    def __repr__(self):
        return 'CookieStore<%s>' % (self.cookie_store)

    def get(self, key, other=None):
        store = self.cookie_store
        if isinstance(store, dict):
            return store.get(key, other)
        if isinstance(store, AuthUser):
            return store.__dict__.get(key, other)
312
312
313
313
def _cached_perms_data(user_id, scope, user_is_admin,
                       user_inherit_default_permissions, explicit, algo,
                       calculate_super_admin):
    """
    Compute the full permission structure for `user_id` by delegating to
    `PermissionCalculator`.
    """
    calculator = PermissionCalculator(
        user_id, scope, user_is_admin, user_inherit_default_permissions,
        explicit, algo, calculate_super_admin)
    return calculator.calculate()
322
322
323
323
class PermOrigin(object):
    """String labels describing where a permission entry originated."""

    SUPER_ADMIN = 'superadmin'
    ARCHIVED = 'archived'

    # repository-level origins
    REPO_USER = 'user:%s'
    REPO_USERGROUP = 'usergroup:%s'
    REPO_OWNER = 'repo.owner'
    REPO_DEFAULT = 'repo.default'
    REPO_DEFAULT_NO_INHERIT = 'repo.default.no.inherit'
    REPO_PRIVATE = 'repo.private'

    # repository-group-level origins
    REPOGROUP_USER = 'user:%s'
    REPOGROUP_USERGROUP = 'usergroup:%s'
    REPOGROUP_OWNER = 'group.owner'
    REPOGROUP_DEFAULT = 'group.default'
    REPOGROUP_DEFAULT_NO_INHERIT = 'group.default.no.inherit'

    # user-group-level origins
    USERGROUP_USER = 'user:%s'
    USERGROUP_USERGROUP = 'usergroup:%s'
    USERGROUP_OWNER = 'usergroup.owner'
    USERGROUP_DEFAULT = 'usergroup.default'
    USERGROUP_DEFAULT_NO_INHERIT = 'usergroup.default.no.inherit'
346
346
347
347
class PermOriginDict(dict):
    """
    dict subclass that tracks, per key, the history of
    ``(perm, origin, obj_id)`` assignments while exposing only the bare
    permission through normal item access.

    `__setitem__` expects a tuple(perm, origin, obj_id)
    `__getitem__` will return only the perm
    `.perm_origin_stack` will return the stack of tuples set per key

    >>> perms = PermOriginDict()
    >>> perms['resource'] = 'read', 'default', 1
    >>> perms['resource']
    'read'
    >>> perms['resource'] = 'write', 'admin', 2
    >>> perms['resource']
    'write'
    >>> perms.perm_origin_stack
    {'resource': [('read', 'default', 1), ('write', 'admin', 2)]}
    """

    def __init__(self, *args, **kw):
        dict.__init__(self, *args, **kw)
        self.perm_origin_stack = collections.OrderedDict()

    def __setitem__(self, key, perm_origin_obj_id):
        if isinstance(perm_origin_obj_id, tuple):
            # record provenance before storing the bare permission
            perm, origin, obj_id = perm_origin_obj_id
            self.perm_origin_stack.setdefault(key, []).append(
                (perm, origin, obj_id))
        else:
            # plain value (most likely set via pickle) - no provenance
            perm = perm_origin_obj_id
        dict.__setitem__(self, key, perm)
381
381
382
382
class BranchPermOriginDict(dict):
    """
    Dedicated branch permissions dict, with tracking of patterns and
    origins.  Each value assigned is a ``(pattern_perm_dict, origin)``
    pair; only the pattern->perm dict is exposed through item access.

    >>> perms = BranchPermOriginDict()
    >>> perms['resource'] = {'*pattern': 'read'}, 'default'
    >>> perms['resource']
    {'*pattern': 'read'}
    >>> perms['resource'] = {'*pattern': 'write'}, 'admin'
    >>> perms['resource']
    {'*pattern': 'write'}
    >>> perms.perm_origin_stack
    {'resource': {'*pattern': [('read', 'default'), ('write', 'admin')]}}
    """

    def __init__(self, *args, **kw):
        dict.__init__(self, *args, **kw)
        self.perm_origin_stack = collections.OrderedDict()

    def __setitem__(self, key, pattern_perm_origin):
        if isinstance(pattern_perm_origin, tuple):
            (pattern_perm, origin) = pattern_perm_origin
            # record provenance for every pattern of the incoming dict
            key_stack = self.perm_origin_stack.setdefault(key, {})
            for pattern, perm in pattern_perm.items():
                key_stack.setdefault(pattern, []).append((perm, origin))
        else:
            # plain value (most likely set via pickle) - no provenance
            pattern_perm = pattern_perm_origin
        dict.__setitem__(self, key, pattern_perm)
415
415
416
416
417 class PermissionCalculator(object):
417 class PermissionCalculator(object):
418
418
    def __init__(
            self, user_id, scope, user_is_admin,
            user_inherit_default_permissions, explicit, algo,
            calculate_super_admin_as_user=False):
        """
        Collect all inputs needed for one permission-calculation run.

        :param user_id: id of the user the permissions are computed for
        :param scope: optional dict narrowing the calculation to a single
            ``repo_id`` / ``repo_group_id`` / ``user_group_id``
        :param user_is_admin: whether the user is a super-admin
        :param user_inherit_default_permissions: inherit permissions of
            the default user in addition to explicit ones
        :param explicit: presumably limits the merge to explicitly set
            permissions -- TODO confirm against the calculation steps
        :param algo: permission-merge algorithm identifier, consumed by
            calculation steps outside this chunk -- verify against callers
        :param calculate_super_admin_as_user: treat a super-admin like a
            regular user instead of taking the admin fast path in
            ``calculate()``
        """

        self.user_id = user_id
        self.user_is_admin = user_is_admin
        self.inherit_default_permissions = user_inherit_default_permissions
        self.explicit = explicit
        self.algo = algo
        self.calculate_super_admin_as_user = calculate_super_admin_as_user

        # an unset scope narrows nothing; each scope id falls back to None
        scope = scope or {}
        self.scope_repo_id = scope.get('repo_id')
        self.scope_repo_group_id = scope.get('repo_group_id')
        self.scope_user_group_id = scope.get('user_group_id')

        # id of the `default` user, whose permissions seed the defaults below
        self.default_user_id = User.get_default_user(cache=True).user_id

        # result containers: perm dicts that also track permission origins
        self.permissions_repositories = PermOriginDict()
        self.permissions_repository_groups = PermOriginDict()
        self.permissions_user_groups = PermOriginDict()
        self.permissions_repository_branches = BranchPermOriginDict()
        self.permissions_global = set()

        # default-user permissions, optionally narrowed to the scope ids
        self.default_repo_perms = Permission.get_default_repo_perms(
            self.default_user_id, self.scope_repo_id)
        self.default_repo_groups_perms = Permission.get_default_group_perms(
            self.default_user_id, self.scope_repo_group_id)
        self.default_user_group_perms = \
            Permission.get_default_user_group_perms(
                self.default_user_id, self.scope_user_group_id)

        # default branch perms
        self.default_branch_repo_perms = \
            Permission.get_default_repo_branch_perms(
                self.default_user_id, self.scope_repo_id)
456
456
457 def calculate(self):
457 def calculate(self):
458 if self.user_is_admin and not self.calculate_super_admin_as_user:
458 if self.user_is_admin and not self.calculate_super_admin_as_user:
459 return self._calculate_super_admin_permissions()
459 return self._calculate_super_admin_permissions()
460
460
461 self._calculate_global_default_permissions()
461 self._calculate_global_default_permissions()
462 self._calculate_global_permissions()
462 self._calculate_global_permissions()
463 self._calculate_default_permissions()
463 self._calculate_default_permissions()
464 self._calculate_repository_permissions()
464 self._calculate_repository_permissions()
465 self._calculate_repository_branch_permissions()
465 self._calculate_repository_branch_permissions()
466 self._calculate_repository_group_permissions()
466 self._calculate_repository_group_permissions()
467 self._calculate_user_group_permissions()
467 self._calculate_user_group_permissions()
468 return self._permission_structure()
468 return self._permission_structure()
469
469
    def _calculate_super_admin_permissions(self):
        """
        Compute the permission structure for a super-admin user.

        Grants the global admin flags and admin-level entries for every
        repository, repository group and user group known from the default
        permissions, with one exception: archived repositories are capped
        at read, even for super-admins. Branch rules are still calculated,
        because a super-admin may have explicit branch rule permissions.

        :return: result of ``self._permission_structure()``
        """
        self.permissions_global.add('hg.admin')
        self.permissions_global.add('hg.create.write_on_repogroup.true')

        # repositories: admin on everything listed in the default repo perms
        for perm in self.default_repo_perms:
            r_k = perm.UserRepoToPerm.repository.repo_name
            obj_id = perm.UserRepoToPerm.repository.repo_id
            archived = perm.UserRepoToPerm.repository.archived
            p = 'repository.admin'
            self.permissions_repositories[r_k] = p, PermOrigin.SUPER_ADMIN, obj_id
            # special case for archived repositories, which we block still even for
            # super admins: the later assignment overrides the admin entry above
            if archived:
                p = 'repository.read'
                self.permissions_repositories[r_k] = p, PermOrigin.ARCHIVED, obj_id

        # repository groups: unconditional admin
        for perm in self.default_repo_groups_perms:
            rg_k = perm.UserRepoGroupToPerm.group.group_name
            obj_id = perm.UserRepoGroupToPerm.group.group_id
            p = 'group.admin'
            self.permissions_repository_groups[rg_k] = p, PermOrigin.SUPER_ADMIN, obj_id

        # user groups: unconditional admin
        for perm in self.default_user_group_perms:
            u_k = perm.UserUserGroupToPerm.user_group.users_group_name
            obj_id = perm.UserUserGroupToPerm.user_group.users_group_id
            p = 'usergroup.admin'
            self.permissions_user_groups[u_k] = p, PermOrigin.SUPER_ADMIN, obj_id

        # branch permissions
        # since super-admin also can have custom rule permissions
        # we *always* need to calculate those inherited from default, and also
        # explicit ones; inheritance from the default user is disabled here
        self._calculate_default_permissions_repository_branches(
            user_inherit_object_permissions=False)
        self._calculate_repository_branch_permissions()

        return self._permission_structure()
513
513
514 def _calculate_global_default_permissions(self):
514 def _calculate_global_default_permissions(self):
515 """
515 """
516 global permissions taken from the default user
516 global permissions taken from the default user
517 """
517 """
518 default_global_perms = UserToPerm.query()\
518 default_global_perms = UserToPerm.query()\
519 .filter(UserToPerm.user_id == self.default_user_id)\
519 .filter(UserToPerm.user_id == self.default_user_id)\
520 .options(joinedload(UserToPerm.permission))
520 .options(joinedload(UserToPerm.permission))
521
521
522 for perm in default_global_perms:
522 for perm in default_global_perms:
523 self.permissions_global.add(perm.permission.permission_name)
523 self.permissions_global.add(perm.permission.permission_name)
524
524
525 if self.user_is_admin:
525 if self.user_is_admin:
526 self.permissions_global.add('hg.admin')
526 self.permissions_global.add('hg.admin')
527 self.permissions_global.add('hg.create.write_on_repogroup.true')
527 self.permissions_global.add('hg.create.write_on_repogroup.true')
528
528
    def _calculate_global_permissions(self):
        """
        Set global system permissions with user permissions or permissions
        taken from the user groups of the current user.

        The permissions include repo creating, repo group creating, forking
        etc.

        Ordering matters: user-group permissions are applied first, then the
        user's own explicit permissions override them.
        """

        # now we read the defined permissions and overwrite what we have set
        # before those can be configured from groups or users explicitly.

        # In case we want to extend this list we should make sure
        # this is in sync with User.DEFAULT_USER_PERMISSIONS definitions
        from rhodecode.model.permission import PermissionModel

        # the set of "configurable" globals that explicit settings may replace
        _configurable = frozenset([
            PermissionModel.FORKING_DISABLED, PermissionModel.FORKING_ENABLED,
            'hg.create.none', 'hg.create.repository',
            'hg.usergroup.create.false', 'hg.usergroup.create.true',
            'hg.repogroup.create.false', 'hg.repogroup.create.true',
            'hg.create.write_on_repogroup.false', 'hg.create.write_on_repogroup.true',
            'hg.inherit_default_perms.false', 'hg.inherit_default_perms.true'
        ])

        # USER GROUPS comes first user group global permissions
        user_perms_from_users_groups = Session().query(UserGroupToPerm)\
            .options(joinedload(UserGroupToPerm.permission))\
            .join((UserGroupMember, UserGroupToPerm.users_group_id ==
                   UserGroupMember.users_group_id))\
            .filter(UserGroupMember.user_id == self.user_id)\
            .order_by(UserGroupToPerm.users_group_id)\
            .all()

        # need to group here by groups since user can be in more than
        # one group, so we get all groups. NOTE: itertools.groupby only works
        # because the query above is ordered by users_group_id.
        _explicit_grouped_perms = [
            [x, list(y)] for x, y in
            itertools.groupby(user_perms_from_users_groups,
                              lambda _x: _x.users_group)]

        for gr, perms in _explicit_grouped_perms:
            # since user can be in multiple groups iterate over them and
            # select the lowest permissions first (more explicit)
            # TODO(marcink): do this^^

            # group doesn't inherit default permissions so we actually set them
            if not gr.inherit_default_permissions:
                # NEED TO IGNORE all previously set configurable permissions
                # and replace them with explicitly set from this user
                # group permissions
                self.permissions_global = self.permissions_global.difference(
                    _configurable)
                for perm in perms:
                    self.permissions_global.add(perm.permission.permission_name)

        # user explicit global permissions; applied last so they win over
        # anything contributed by user groups above
        user_perms = Session().query(UserToPerm)\
            .options(joinedload(UserToPerm.permission))\
            .filter(UserToPerm.user_id == self.user_id).all()

        if not self.inherit_default_permissions:
            # NEED TO IGNORE all configurable permissions and
            # replace them with explicitly set from this user permissions
            self.permissions_global = self.permissions_global.difference(
                _configurable)
            for perm in user_perms:
                self.permissions_global.add(perm.permission.permission_name)
597
597
    def _calculate_default_permissions_repositories(self, user_inherit_object_permissions):
        """
        Fill per-repository defaults inherited from the `default` user.

        Precedence within each repository entry (later assignments win):
        default perm -> `.none` when inheritance is off -> `.none` for
        private repos not owned by this user -> admin for owner ->
        admin for super-admin -> read-cap for archived repositories.

        :param user_inherit_object_permissions: when False, object perms
            are reset to `repository.none` so only explicit perms apply
        """
        for perm in self.default_repo_perms:
            r_k = perm.UserRepoToPerm.repository.repo_name
            obj_id = perm.UserRepoToPerm.repository.repo_id
            archived = perm.UserRepoToPerm.repository.archived
            p = perm.Permission.permission_name
            o = PermOrigin.REPO_DEFAULT
            self.permissions_repositories[r_k] = p, o, obj_id

            # if we decide this user isn't inheriting permissions from
            # default user we set him to .none so only explicit
            # permissions work
            if not user_inherit_object_permissions:
                p = 'repository.none'
                o = PermOrigin.REPO_DEFAULT_NO_INHERIT
                self.permissions_repositories[r_k] = p, o, obj_id

            if perm.Repository.private and not (
                    perm.Repository.user_id == self.user_id):
                # disable defaults for private repos,
                p = 'repository.none'
                o = PermOrigin.REPO_PRIVATE
                self.permissions_repositories[r_k] = p, o, obj_id

            elif perm.Repository.user_id == self.user_id:
                # set admin if owner
                p = 'repository.admin'
                o = PermOrigin.REPO_OWNER
                self.permissions_repositories[r_k] = p, o, obj_id

            if self.user_is_admin:
                p = 'repository.admin'
                o = PermOrigin.SUPER_ADMIN
                self.permissions_repositories[r_k] = p, o, obj_id

            # finally in case of archived repositories, we downgrade higher
            # permissions to read
            # NOTE(review): this lookup presumably yields the plain permission
            # string (the container appears to be a special origin-tracking
            # dict, since tuples are assigned above) -- confirm against the
            # dict class used for self.permissions_repositories
            if archived:
                current_perm = self.permissions_repositories[r_k]
                if current_perm in ['repository.write', 'repository.admin']:
                    p = 'repository.read'
                    o = PermOrigin.ARCHIVED
                    self.permissions_repositories[r_k] = p, o, obj_id
641
641
    def _calculate_default_permissions_repository_branches(self, user_inherit_object_permissions):
        """
        Fill per-repository *branch rule* defaults from the `default` user.

        :param user_inherit_object_permissions: currently unused here,
            unlike the sibling ``_calculate_default_permissions_*`` methods
            -- NOTE(review): confirm this is intentional for branch rules
        """
        for perm in self.default_branch_repo_perms:

            r_k = perm.UserRepoToPerm.repository.repo_name
            p = perm.Permission.permission_name
            pattern = perm.UserToRepoBranchPermission.branch_pattern
            o = PermOrigin.REPO_USER % perm.UserRepoToPerm.user.username

            if not self.explicit:
                # merge with a previously registered perm for the same
                # repo/pattern; fall back to 'branch.none' when nothing is set
                cur_perm = self.permissions_repository_branches.get(r_k)
                if cur_perm:
                    cur_perm = cur_perm[pattern]
                cur_perm = cur_perm or 'branch.none'

                p = self._choose_permission(p, cur_perm)

            # NOTE(marcink): register all pattern/perm instances in this
            # special dict that aggregates entries
            self.permissions_repository_branches[r_k] = {pattern: p}, o
661
661
    def _calculate_default_permissions_repository_groups(self, user_inherit_object_permissions):
        """
        Fill per-repository-group defaults inherited from the `default` user.

        Precedence (later assignments win): default perm -> `.none` when
        inheritance is off -> admin for owner -> admin for super-admin.

        :param user_inherit_object_permissions: when False, object perms
            are reset to `group.none` so only explicit perms apply
        """
        for perm in self.default_repo_groups_perms:
            rg_k = perm.UserRepoGroupToPerm.group.group_name
            obj_id = perm.UserRepoGroupToPerm.group.group_id
            p = perm.Permission.permission_name
            o = PermOrigin.REPOGROUP_DEFAULT
            self.permissions_repository_groups[rg_k] = p, o, obj_id

            # if we decide this user isn't inheriting permissions from default
            # user we set him to .none so only explicit permissions work
            if not user_inherit_object_permissions:
                p = 'group.none'
                o = PermOrigin.REPOGROUP_DEFAULT_NO_INHERIT
                self.permissions_repository_groups[rg_k] = p, o, obj_id

            if perm.RepoGroup.user_id == self.user_id:
                # set admin if owner
                p = 'group.admin'
                o = PermOrigin.REPOGROUP_OWNER
                self.permissions_repository_groups[rg_k] = p, o, obj_id

            if self.user_is_admin:
                p = 'group.admin'
                o = PermOrigin.SUPER_ADMIN
                self.permissions_repository_groups[rg_k] = p, o, obj_id
687
687
    def _calculate_default_permissions_user_groups(self, user_inherit_object_permissions):
        """
        Fill per-user-group defaults inherited from the `default` user.

        Precedence (later assignments win): default perm -> `.none` when
        inheritance is off -> admin for owner -> admin for super-admin.

        :param user_inherit_object_permissions: when False, object perms
            are reset to `usergroup.none` so only explicit perms apply
        """
        for perm in self.default_user_group_perms:
            u_k = perm.UserUserGroupToPerm.user_group.users_group_name
            obj_id = perm.UserUserGroupToPerm.user_group.users_group_id
            p = perm.Permission.permission_name
            o = PermOrigin.USERGROUP_DEFAULT
            self.permissions_user_groups[u_k] = p, o, obj_id

            # if we decide this user isn't inheriting permissions from default
            # user we set him to .none so only explicit permissions work
            if not user_inherit_object_permissions:
                p = 'usergroup.none'
                o = PermOrigin.USERGROUP_DEFAULT_NO_INHERIT
                self.permissions_user_groups[u_k] = p, o, obj_id

            if perm.UserGroup.user_id == self.user_id:
                # set admin if owner
                p = 'usergroup.admin'
                o = PermOrigin.USERGROUP_OWNER
                self.permissions_user_groups[u_k] = p, o, obj_id

            if self.user_is_admin:
                p = 'usergroup.admin'
                o = PermOrigin.SUPER_ADMIN
                self.permissions_user_groups[u_k] = p, o, obj_id
713
713
714 def _calculate_default_permissions(self):
714 def _calculate_default_permissions(self):
715 """
715 """
716 Set default user permissions for repositories, repository branches,
716 Set default user permissions for repositories, repository branches,
717 repository groups, user groups taken from the default user.
717 repository groups, user groups taken from the default user.
718
718
719 Calculate inheritance of object permissions based on what we have now
719 Calculate inheritance of object permissions based on what we have now
720 in GLOBAL permissions. We check if .false is in GLOBAL since this is
720 in GLOBAL permissions. We check if .false is in GLOBAL since this is
721 explicitly set. Inherit is the opposite of .false being there.
721 explicitly set. Inherit is the opposite of .false being there.
722
722
723 .. note::
723 .. note::
724
724
725 the syntax is little bit odd but what we need to check here is
725 the syntax is little bit odd but what we need to check here is
726 the opposite of .false permission being in the list so even for
726 the opposite of .false permission being in the list so even for
727 inconsistent state when both .true/.false is there
727 inconsistent state when both .true/.false is there
728 .false is more important
728 .false is more important
729
729
730 """
730 """
731 user_inherit_object_permissions = not ('hg.inherit_default_perms.false'
731 user_inherit_object_permissions = not ('hg.inherit_default_perms.false'
732 in self.permissions_global)
732 in self.permissions_global)
733
733
734 # default permissions inherited from `default` user permissions
734 # default permissions inherited from `default` user permissions
735 self._calculate_default_permissions_repositories(
735 self._calculate_default_permissions_repositories(
736 user_inherit_object_permissions)
736 user_inherit_object_permissions)
737
737
738 self._calculate_default_permissions_repository_branches(
738 self._calculate_default_permissions_repository_branches(
739 user_inherit_object_permissions)
739 user_inherit_object_permissions)
740
740
741 self._calculate_default_permissions_repository_groups(
741 self._calculate_default_permissions_repository_groups(
742 user_inherit_object_permissions)
742 user_inherit_object_permissions)
743
743
744 self._calculate_default_permissions_user_groups(
744 self._calculate_default_permissions_user_groups(
745 user_inherit_object_permissions)
745 user_inherit_object_permissions)
746
746
    def _calculate_repository_permissions(self):
        """
        Repository access permissions for the current user.

        Check if the user is part of user groups for this repository and
        fill in the permission from it. `_choose_permission` decides of which
        permission should be selected based on selected method.

        Two passes: user-group-derived permissions first, then the user's
        explicit per-repository permissions which override them. Owner and
        super-admin always end up with admin; archived repos are capped
        at read.
        """

        # user group for repositories permissions
        user_repo_perms_from_user_group = Permission\
            .get_default_repo_perms_from_user_group(
                self.user_id, self.scope_repo_id)

        # counts how many user groups contributed a perm per repo, so we
        # only merge (via _choose_permission) from the second hit onwards
        multiple_counter = collections.defaultdict(int)
        for perm in user_repo_perms_from_user_group:
            r_k = perm.UserGroupRepoToPerm.repository.repo_name
            obj_id = perm.UserGroupRepoToPerm.repository.repo_id
            multiple_counter[r_k] += 1
            p = perm.Permission.permission_name
            o = PermOrigin.REPO_USERGROUP % perm.UserGroupRepoToPerm\
                .users_group.users_group_name

            if multiple_counter[r_k] > 1:
                cur_perm = self.permissions_repositories[r_k]
                p = self._choose_permission(p, cur_perm)

            self.permissions_repositories[r_k] = p, o, obj_id

            if perm.Repository.user_id == self.user_id:
                # set admin if owner
                p = 'repository.admin'
                o = PermOrigin.REPO_OWNER
                self.permissions_repositories[r_k] = p, o, obj_id

            if self.user_is_admin:
                p = 'repository.admin'
                o = PermOrigin.SUPER_ADMIN
                self.permissions_repositories[r_k] = p, o, obj_id

        # user explicit permissions for repositories, overrides any specified
        # by the group permission
        user_repo_perms = Permission.get_default_repo_perms(
            self.user_id, self.scope_repo_id)
        for perm in user_repo_perms:
            r_k = perm.UserRepoToPerm.repository.repo_name
            obj_id = perm.UserRepoToPerm.repository.repo_id
            archived = perm.UserRepoToPerm.repository.archived
            p = perm.Permission.permission_name
            o = PermOrigin.REPO_USER % perm.UserRepoToPerm.user.username

            if not self.explicit:
                # non-explicit mode: merge with whatever is already set
                cur_perm = self.permissions_repositories.get(
                    r_k, 'repository.none')
                p = self._choose_permission(p, cur_perm)

            self.permissions_repositories[r_k] = p, o, obj_id

            if perm.Repository.user_id == self.user_id:
                # set admin if owner
                p = 'repository.admin'
                o = PermOrigin.REPO_OWNER
                self.permissions_repositories[r_k] = p, o, obj_id

            if self.user_is_admin:
                p = 'repository.admin'
                o = PermOrigin.SUPER_ADMIN
                self.permissions_repositories[r_k] = p, o, obj_id

            # finally in case of archived repositories, we downgrade higher
            # permissions to read
            # NOTE(review): the lookup below presumably returns the plain
            # permission string (tuples are assigned but the container looks
            # like an origin-tracking dict) -- confirm against its class
            if archived:
                current_perm = self.permissions_repositories[r_k]
                if current_perm in ['repository.write', 'repository.admin']:
                    p = 'repository.read'
                    o = PermOrigin.ARCHIVED
                    self.permissions_repositories[r_k] = p, o, obj_id
824
824
    def _calculate_repository_branch_permissions(self):
        """
        Branch rule permissions for the current user.

        Mirrors `_calculate_repository_permissions`: user-group-derived
        branch rules first, then the user's explicit branch rules which
        override them. Entries are registered per repo as
        ``{branch_pattern: permission}, origin``.
        """
        # user group for repositories permissions
        user_repo_branch_perms_from_user_group = Permission\
            .get_default_repo_branch_perms_from_user_group(
                self.user_id, self.scope_repo_id)

        # merge (via _choose_permission) only from the second user-group
        # contribution per repository onwards
        multiple_counter = collections.defaultdict(int)
        for perm in user_repo_branch_perms_from_user_group:
            r_k = perm.UserGroupRepoToPerm.repository.repo_name
            p = perm.Permission.permission_name
            pattern = perm.UserGroupToRepoBranchPermission.branch_pattern
            o = PermOrigin.REPO_USERGROUP % perm.UserGroupRepoToPerm\
                .users_group.users_group_name

            multiple_counter[r_k] += 1
            if multiple_counter[r_k] > 1:
                cur_perm = self.permissions_repository_branches[r_k][pattern]
                p = self._choose_permission(p, cur_perm)

            self.permissions_repository_branches[r_k] = {pattern: p}, o

        # user explicit branch permissions for repositories, overrides
        # any specified by the group permission
        user_repo_branch_perms = Permission.get_default_repo_branch_perms(
            self.user_id, self.scope_repo_id)

        for perm in user_repo_branch_perms:

            r_k = perm.UserRepoToPerm.repository.repo_name
            p = perm.Permission.permission_name
            pattern = perm.UserToRepoBranchPermission.branch_pattern
            o = PermOrigin.REPO_USER % perm.UserRepoToPerm.user.username

            if not self.explicit:
                # merge with a previously registered perm for the same
                # repo/pattern; fall back to 'branch.none' when nothing is set
                cur_perm = self.permissions_repository_branches.get(r_k)
                if cur_perm:
                    cur_perm = cur_perm[pattern]
                cur_perm = cur_perm or 'branch.none'
                p = self._choose_permission(p, cur_perm)

            # NOTE(marcink): register all pattern/perm instances in this
            # special dict that aggregates entries
            self.permissions_repository_branches[r_k] = {pattern: p}, o
868
868
869 def _calculate_repository_group_permissions(self):
869 def _calculate_repository_group_permissions(self):
870 """
870 """
871 Repository group permissions for the current user.
871 Repository group permissions for the current user.
872
872
873 Check if the user is part of user groups for repository groups and
873 Check if the user is part of user groups for repository groups and
874 fill in the permissions from it. `_choose_permission` decides of which
874 fill in the permissions from it. `_choose_permission` decides of which
875 permission should be selected based on selected method.
875 permission should be selected based on selected method.
876 """
876 """
877 # user group for repo groups permissions
877 # user group for repo groups permissions
878 user_repo_group_perms_from_user_group = Permission\
878 user_repo_group_perms_from_user_group = Permission\
879 .get_default_group_perms_from_user_group(
879 .get_default_group_perms_from_user_group(
880 self.user_id, self.scope_repo_group_id)
880 self.user_id, self.scope_repo_group_id)
881
881
882 multiple_counter = collections.defaultdict(int)
882 multiple_counter = collections.defaultdict(int)
883 for perm in user_repo_group_perms_from_user_group:
883 for perm in user_repo_group_perms_from_user_group:
884 rg_k = perm.UserGroupRepoGroupToPerm.group.group_name
884 rg_k = perm.UserGroupRepoGroupToPerm.group.group_name
885 obj_id = perm.UserGroupRepoGroupToPerm.group.group_id
885 obj_id = perm.UserGroupRepoGroupToPerm.group.group_id
886 multiple_counter[rg_k] += 1
886 multiple_counter[rg_k] += 1
887 o = PermOrigin.REPOGROUP_USERGROUP % perm.UserGroupRepoGroupToPerm\
887 o = PermOrigin.REPOGROUP_USERGROUP % perm.UserGroupRepoGroupToPerm\
888 .users_group.users_group_name
888 .users_group.users_group_name
889 p = perm.Permission.permission_name
889 p = perm.Permission.permission_name
890
890
891 if multiple_counter[rg_k] > 1:
891 if multiple_counter[rg_k] > 1:
892 cur_perm = self.permissions_repository_groups[rg_k]
892 cur_perm = self.permissions_repository_groups[rg_k]
893 p = self._choose_permission(p, cur_perm)
893 p = self._choose_permission(p, cur_perm)
894 self.permissions_repository_groups[rg_k] = p, o, obj_id
894 self.permissions_repository_groups[rg_k] = p, o, obj_id
895
895
896 if perm.RepoGroup.user_id == self.user_id:
896 if perm.RepoGroup.user_id == self.user_id:
897 # set admin if owner, even for member of other user group
897 # set admin if owner, even for member of other user group
898 p = 'group.admin'
898 p = 'group.admin'
899 o = PermOrigin.REPOGROUP_OWNER
899 o = PermOrigin.REPOGROUP_OWNER
900 self.permissions_repository_groups[rg_k] = p, o, obj_id
900 self.permissions_repository_groups[rg_k] = p, o, obj_id
901
901
902 if self.user_is_admin:
902 if self.user_is_admin:
903 p = 'group.admin'
903 p = 'group.admin'
904 o = PermOrigin.SUPER_ADMIN
904 o = PermOrigin.SUPER_ADMIN
905 self.permissions_repository_groups[rg_k] = p, o, obj_id
905 self.permissions_repository_groups[rg_k] = p, o, obj_id
906
906
907 # user explicit permissions for repository groups
907 # user explicit permissions for repository groups
908 user_repo_groups_perms = Permission.get_default_group_perms(
908 user_repo_groups_perms = Permission.get_default_group_perms(
909 self.user_id, self.scope_repo_group_id)
909 self.user_id, self.scope_repo_group_id)
910 for perm in user_repo_groups_perms:
910 for perm in user_repo_groups_perms:
911 rg_k = perm.UserRepoGroupToPerm.group.group_name
911 rg_k = perm.UserRepoGroupToPerm.group.group_name
912 obj_id = perm.UserRepoGroupToPerm.group.group_id
912 obj_id = perm.UserRepoGroupToPerm.group.group_id
913 o = PermOrigin.REPOGROUP_USER % perm.UserRepoGroupToPerm\
913 o = PermOrigin.REPOGROUP_USER % perm.UserRepoGroupToPerm\
914 .user.username
914 .user.username
915 p = perm.Permission.permission_name
915 p = perm.Permission.permission_name
916
916
917 if not self.explicit:
917 if not self.explicit:
918 cur_perm = self.permissions_repository_groups.get(rg_k, 'group.none')
918 cur_perm = self.permissions_repository_groups.get(rg_k, 'group.none')
919 p = self._choose_permission(p, cur_perm)
919 p = self._choose_permission(p, cur_perm)
920
920
921 self.permissions_repository_groups[rg_k] = p, o, obj_id
921 self.permissions_repository_groups[rg_k] = p, o, obj_id
922
922
923 if perm.RepoGroup.user_id == self.user_id:
923 if perm.RepoGroup.user_id == self.user_id:
924 # set admin if owner
924 # set admin if owner
925 p = 'group.admin'
925 p = 'group.admin'
926 o = PermOrigin.REPOGROUP_OWNER
926 o = PermOrigin.REPOGROUP_OWNER
927 self.permissions_repository_groups[rg_k] = p, o, obj_id
927 self.permissions_repository_groups[rg_k] = p, o, obj_id
928
928
929 if self.user_is_admin:
929 if self.user_is_admin:
930 p = 'group.admin'
930 p = 'group.admin'
931 o = PermOrigin.SUPER_ADMIN
931 o = PermOrigin.SUPER_ADMIN
932 self.permissions_repository_groups[rg_k] = p, o, obj_id
932 self.permissions_repository_groups[rg_k] = p, o, obj_id
933
933
934 def _calculate_user_group_permissions(self):
934 def _calculate_user_group_permissions(self):
935 """
935 """
936 User group permissions for the current user.
936 User group permissions for the current user.
937 """
937 """
938 # user group for user group permissions
938 # user group for user group permissions
939 user_group_from_user_group = Permission\
939 user_group_from_user_group = Permission\
940 .get_default_user_group_perms_from_user_group(
940 .get_default_user_group_perms_from_user_group(
941 self.user_id, self.scope_user_group_id)
941 self.user_id, self.scope_user_group_id)
942
942
943 multiple_counter = collections.defaultdict(int)
943 multiple_counter = collections.defaultdict(int)
944 for perm in user_group_from_user_group:
944 for perm in user_group_from_user_group:
945 ug_k = perm.UserGroupUserGroupToPerm.target_user_group.users_group_name
945 ug_k = perm.UserGroupUserGroupToPerm.target_user_group.users_group_name
946 obj_id = perm.UserGroupUserGroupToPerm.target_user_group.users_group_id
946 obj_id = perm.UserGroupUserGroupToPerm.target_user_group.users_group_id
947 multiple_counter[ug_k] += 1
947 multiple_counter[ug_k] += 1
948 o = PermOrigin.USERGROUP_USERGROUP % perm.UserGroupUserGroupToPerm\
948 o = PermOrigin.USERGROUP_USERGROUP % perm.UserGroupUserGroupToPerm\
949 .user_group.users_group_name
949 .user_group.users_group_name
950 p = perm.Permission.permission_name
950 p = perm.Permission.permission_name
951
951
952 if multiple_counter[ug_k] > 1:
952 if multiple_counter[ug_k] > 1:
953 cur_perm = self.permissions_user_groups[ug_k]
953 cur_perm = self.permissions_user_groups[ug_k]
954 p = self._choose_permission(p, cur_perm)
954 p = self._choose_permission(p, cur_perm)
955
955
956 self.permissions_user_groups[ug_k] = p, o, obj_id
956 self.permissions_user_groups[ug_k] = p, o, obj_id
957
957
958 if perm.UserGroup.user_id == self.user_id:
958 if perm.UserGroup.user_id == self.user_id:
959 # set admin if owner, even for member of other user group
959 # set admin if owner, even for member of other user group
960 p = 'usergroup.admin'
960 p = 'usergroup.admin'
961 o = PermOrigin.USERGROUP_OWNER
961 o = PermOrigin.USERGROUP_OWNER
962 self.permissions_user_groups[ug_k] = p, o, obj_id
962 self.permissions_user_groups[ug_k] = p, o, obj_id
963
963
964 if self.user_is_admin:
964 if self.user_is_admin:
965 p = 'usergroup.admin'
965 p = 'usergroup.admin'
966 o = PermOrigin.SUPER_ADMIN
966 o = PermOrigin.SUPER_ADMIN
967 self.permissions_user_groups[ug_k] = p, o, obj_id
967 self.permissions_user_groups[ug_k] = p, o, obj_id
968
968
969 # user explicit permission for user groups
969 # user explicit permission for user groups
970 user_user_groups_perms = Permission.get_default_user_group_perms(
970 user_user_groups_perms = Permission.get_default_user_group_perms(
971 self.user_id, self.scope_user_group_id)
971 self.user_id, self.scope_user_group_id)
972 for perm in user_user_groups_perms:
972 for perm in user_user_groups_perms:
973 ug_k = perm.UserUserGroupToPerm.user_group.users_group_name
973 ug_k = perm.UserUserGroupToPerm.user_group.users_group_name
974 obj_id = perm.UserUserGroupToPerm.user_group.users_group_id
974 obj_id = perm.UserUserGroupToPerm.user_group.users_group_id
975 o = PermOrigin.USERGROUP_USER % perm.UserUserGroupToPerm\
975 o = PermOrigin.USERGROUP_USER % perm.UserUserGroupToPerm\
976 .user.username
976 .user.username
977 p = perm.Permission.permission_name
977 p = perm.Permission.permission_name
978
978
979 if not self.explicit:
979 if not self.explicit:
980 cur_perm = self.permissions_user_groups.get(ug_k, 'usergroup.none')
980 cur_perm = self.permissions_user_groups.get(ug_k, 'usergroup.none')
981 p = self._choose_permission(p, cur_perm)
981 p = self._choose_permission(p, cur_perm)
982
982
983 self.permissions_user_groups[ug_k] = p, o, obj_id
983 self.permissions_user_groups[ug_k] = p, o, obj_id
984
984
985 if perm.UserGroup.user_id == self.user_id:
985 if perm.UserGroup.user_id == self.user_id:
986 # set admin if owner
986 # set admin if owner
987 p = 'usergroup.admin'
987 p = 'usergroup.admin'
988 o = PermOrigin.USERGROUP_OWNER
988 o = PermOrigin.USERGROUP_OWNER
989 self.permissions_user_groups[ug_k] = p, o, obj_id
989 self.permissions_user_groups[ug_k] = p, o, obj_id
990
990
991 if self.user_is_admin:
991 if self.user_is_admin:
992 p = 'usergroup.admin'
992 p = 'usergroup.admin'
993 o = PermOrigin.SUPER_ADMIN
993 o = PermOrigin.SUPER_ADMIN
994 self.permissions_user_groups[ug_k] = p, o, obj_id
994 self.permissions_user_groups[ug_k] = p, o, obj_id
995
995
996 def _choose_permission(self, new_perm, cur_perm):
996 def _choose_permission(self, new_perm, cur_perm):
997 new_perm_val = Permission.PERM_WEIGHTS[new_perm]
997 new_perm_val = Permission.PERM_WEIGHTS[new_perm]
998 cur_perm_val = Permission.PERM_WEIGHTS[cur_perm]
998 cur_perm_val = Permission.PERM_WEIGHTS[cur_perm]
999 if self.algo == 'higherwin':
999 if self.algo == 'higherwin':
1000 if new_perm_val > cur_perm_val:
1000 if new_perm_val > cur_perm_val:
1001 return new_perm
1001 return new_perm
1002 return cur_perm
1002 return cur_perm
1003 elif self.algo == 'lowerwin':
1003 elif self.algo == 'lowerwin':
1004 if new_perm_val < cur_perm_val:
1004 if new_perm_val < cur_perm_val:
1005 return new_perm
1005 return new_perm
1006 return cur_perm
1006 return cur_perm
1007
1007
1008 def _permission_structure(self):
1008 def _permission_structure(self):
1009 return {
1009 return {
1010 'global': self.permissions_global,
1010 'global': self.permissions_global,
1011 'repositories': self.permissions_repositories,
1011 'repositories': self.permissions_repositories,
1012 'repository_branches': self.permissions_repository_branches,
1012 'repository_branches': self.permissions_repository_branches,
1013 'repositories_groups': self.permissions_repository_groups,
1013 'repositories_groups': self.permissions_repository_groups,
1014 'user_groups': self.permissions_user_groups,
1014 'user_groups': self.permissions_user_groups,
1015 }
1015 }
1016
1016
1017
1017
def allowed_auth_token_access(view_name, auth_token, whitelist=None):
    """
    Tell whether ``view_name`` may be accessed using an auth token.

    The whitelist (read from the ``api_access_controllers_whitelist``
    config entry when not supplied) holds fnmatch patterns of view names,
    optionally suffixed with ``@<token>`` to restrict an entry to one
    specific auth token. Legacy pylons controller names are translated to
    their pyramid view equivalents before matching.
    """
    if not whitelist:
        from rhodecode import CONFIG
        whitelist = aslist(
            CONFIG.get('api_access_controllers_whitelist'), sep=',')
    # backward compat translation
    compat = {
        # old controller, new VIEW
        'ChangesetController:*': 'RepoCommitsView:*',
        'ChangesetController:changeset_patch': 'RepoCommitsView:repo_commit_patch',
        'ChangesetController:changeset_raw': 'RepoCommitsView:repo_commit_raw',
        'FilesController:raw': 'RepoCommitsView:repo_commit_raw',
        'FilesController:archivefile': 'RepoFilesView:repo_archivefile',
        'GistsController:*': 'GistView:*',
    }

    log.debug(
        'Allowed views for AUTH TOKEN access: %s', whitelist)

    matched = False
    for entry in whitelist:
        # translate from old Controllers to Pyramid Views
        entry = compat.get(entry, entry)

        token_ok = True
        if '@' in entry:
            # entry is pinned to one specific AuthToken
            entry, allowed_token = entry.split('@', 1)
            token_ok = auth_token == allowed_token

        if token_ok and fnmatch.fnmatch(view_name, entry):
            matched = True
            break

    if matched:
        log.debug('view: `%s` matches entry in whitelist: %s',
                  view_name, whitelist)
    else:
        msg = ('view: `%s` does *NOT* match any entry in whitelist: %s'
               % (view_name, whitelist))
        if auth_token:
            # if we use auth token key and don't have access it's a warning
            log.warning(msg)
        else:
            log.debug(msg)

    return matched
1070
1070
1071
1071
1072 class AuthUser(object):
1072 class AuthUser(object):
1073 """
1073 """
1074 A simple object that handles all attributes of user in RhodeCode
1074 A simple object that handles all attributes of user in RhodeCode
1075
1075
1076 It does lookup based on API key,given user, or user present in session
1076 It does lookup based on API key,given user, or user present in session
1077 Then it fills all required information for such user. It also checks if
1077 Then it fills all required information for such user. It also checks if
1078 anonymous access is enabled and if so, it returns default user as logged in
1078 anonymous access is enabled and if so, it returns default user as logged in
1079 """
1079 """
1080 GLOBAL_PERMS = [x[0] for x in Permission.PERMS]
1080 GLOBAL_PERMS = [x[0] for x in Permission.PERMS]
1081 repo_read_perms = ['repository.read', 'repository.admin', 'repository.write']
1081 repo_read_perms = ['repository.read', 'repository.admin', 'repository.write']
1082 repo_group_read_perms = ['group.read', 'group.write', 'group.admin']
1082 repo_group_read_perms = ['group.read', 'group.write', 'group.admin']
1083 user_group_read_perms = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
1083 user_group_read_perms = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
1084
1084
    def __init__(self, user_id=None, api_key=None, username=None, ip_addr=None):
        """
        Set neutral defaults for all user attributes, then load the real
        data via :meth:`propagate_data`, which looks the user up by (in
        priority order) ``user_id``, ``api_key`` or ``username`` and falls
        back to the default/anonymous user when none of those match.
        """
        self.user_id = user_id
        self._api_key = api_key

        self.api_key = None
        self.username = username
        self.ip_addr = ip_addr
        self.name = ''
        self.lastname = ''
        self.first_name = ''
        self.last_name = ''
        self.email = ''
        self.is_authenticated = False
        self.admin = False
        self.inherit_default_permissions = False
        self.password = ''

        self.anonymous_user = None  # propagated on propagate_data
        self.propagate_data()
        self._instance = None
        self._permissions_scoped_cache = {}  # used to bind scoped calculation
1107
1107
    @LazyProperty
    def permissions(self):
        """Full permission tree, computed lazily and cached per instance."""
        return self.get_perms(user=self, cache=None)
1111
1111
1112 @LazyProperty
1112 @LazyProperty
1113 def permissions_safe(self):
1113 def permissions_safe(self):
1114 """
1114 """
1115 Filtered permissions excluding not allowed repositories
1115 Filtered permissions excluding not allowed repositories
1116 """
1116 """
1117 perms = self.get_perms(user=self, cache=None)
1117 perms = self.get_perms(user=self, cache=None)
1118
1118
1119 perms['repositories'] = {
1119 perms['repositories'] = {
1120 k: v for k, v in perms['repositories'].items()
1120 k: v for k, v in perms['repositories'].items()
1121 if v != 'repository.none'}
1121 if v != 'repository.none'}
1122 perms['repositories_groups'] = {
1122 perms['repositories_groups'] = {
1123 k: v for k, v in perms['repositories_groups'].items()
1123 k: v for k, v in perms['repositories_groups'].items()
1124 if v != 'group.none'}
1124 if v != 'group.none'}
1125 perms['user_groups'] = {
1125 perms['user_groups'] = {
1126 k: v for k, v in perms['user_groups'].items()
1126 k: v for k, v in perms['user_groups'].items()
1127 if v != 'usergroup.none'}
1127 if v != 'usergroup.none'}
1128 perms['repository_branches'] = {
1128 perms['repository_branches'] = {
1129 k: v for k, v in perms['repository_branches'].items()
1129 k: v for k, v in perms['repository_branches'].items()
1130 if v != 'branch.none'}
1130 if v != 'branch.none'}
1131 return perms
1131 return perms
1132
1132
    @LazyProperty
    def permissions_full_details(self):
        """
        Permission tree computed with ``calculate_super_admin=True``, i.e.
        super-admin permissions are resolved the same way as for a regular
        user instead of taking the admin shortcut.
        """
        return self.get_perms(
            user=self, cache=None, calculate_super_admin=True)
1137
1137
1138 def permissions_with_scope(self, scope):
1138 def permissions_with_scope(self, scope):
1139 """
1139 """
1140 Call the get_perms function with scoped data. The scope in that function
1140 Call the get_perms function with scoped data. The scope in that function
1141 narrows the SQL calls to the given ID of objects resulting in fetching
1141 narrows the SQL calls to the given ID of objects resulting in fetching
1142 Just particular permission we want to obtain. If scope is an empty dict
1142 Just particular permission we want to obtain. If scope is an empty dict
1143 then it basically narrows the scope to GLOBAL permissions only.
1143 then it basically narrows the scope to GLOBAL permissions only.
1144
1144
1145 :param scope: dict
1145 :param scope: dict
1146 """
1146 """
1147 if 'repo_name' in scope:
1147 if 'repo_name' in scope:
1148 obj = Repository.get_by_repo_name(scope['repo_name'])
1148 obj = Repository.get_by_repo_name(scope['repo_name'])
1149 if obj:
1149 if obj:
1150 scope['repo_id'] = obj.repo_id
1150 scope['repo_id'] = obj.repo_id
1151 _scope = collections.OrderedDict()
1151 _scope = collections.OrderedDict()
1152 _scope['repo_id'] = -1
1152 _scope['repo_id'] = -1
1153 _scope['user_group_id'] = -1
1153 _scope['user_group_id'] = -1
1154 _scope['repo_group_id'] = -1
1154 _scope['repo_group_id'] = -1
1155
1155
1156 for k in sorted(scope.keys()):
1156 for k in sorted(scope.keys()):
1157 _scope[k] = scope[k]
1157 _scope[k] = scope[k]
1158
1158
1159 # store in cache to mimic how the @LazyProperty works,
1159 # store in cache to mimic how the @LazyProperty works,
1160 # the difference here is that we use the unique key calculated
1160 # the difference here is that we use the unique key calculated
1161 # from params and values
1161 # from params and values
1162 return self.get_perms(user=self, cache=None, scope=_scope)
1162 return self.get_perms(user=self, cache=None, scope=_scope)
1163
1163
    def get_instance(self):
        """Fetch the database ``User`` row backing this AuthUser."""
        return User.get(self.user_id)
1166
1166
    def propagate_data(self):
        """
        Fills in user data and propagates values to this instance. Maps fetched
        user attributes to this class instance attributes.

        Lookup priority: user_id, then api_key, then username. When no
        lookup succeeds, falls back to the default (anonymous) user if that
        account is active; otherwise the instance is deliberately left in a
        "corrupted" unauthenticated state.
        """
        log.debug('AuthUser: starting data propagation for new potential user')
        user_model = UserModel()
        anon_user = self.anonymous_user = User.get_default_user(cache=True)
        is_user_loaded = False

        # lookup by userid
        if self.user_id is not None and self.user_id != anon_user.user_id:
            log.debug('Trying Auth User lookup by USER ID: `%s`', self.user_id)
            is_user_loaded = user_model.fill_data(self, user_id=self.user_id)

        # try go get user by api key
        elif self._api_key and self._api_key != anon_user.api_key:
            log.debug('Trying Auth User lookup by API KEY: `...%s`', self._api_key[-4:])
            is_user_loaded = user_model.fill_data(self, api_key=self._api_key)

        # lookup by username
        elif self.username:
            log.debug('Trying Auth User lookup by USER NAME: `%s`', self.username)
            is_user_loaded = user_model.fill_data(self, username=self.username)
        else:
            log.debug('No data in %s that could been used to log in', self)

        if not is_user_loaded:
            log.debug(
                'Failed to load user. Fallback to default user %s', anon_user)
            # if we cannot authenticate user try anonymous
            if anon_user.active:
                log.debug('default user is active, using it as a session user')
                user_model.fill_data(self, user_id=anon_user.user_id)
                # then we set this user is logged in
                self.is_authenticated = True
            else:
                log.debug('default user is NOT active')
                # in case of disabled anonymous user we reset some of the
                # parameters so such user is "corrupted", skipping the fill_data
                for attr in ['user_id', 'username', 'admin', 'active']:
                    setattr(self, attr, None)
                self.is_authenticated = False

        if not self.username:
            self.username = 'None'

        log.debug('AuthUser: propagated user is now %s', self)
1215
1215
    def get_perms(self, user, scope=None, explicit=True, algo='higherwin',
                  calculate_super_admin=False, cache=None):
        """
        Fills user permission attribute with permissions taken from database
        works for permissions given for repositories, and for permissions that
        are granted to groups

        :param user: instance of User object from database
        :param explicit: In case there are permissions both for user and a group
            that user is part of, explicit flag will define if user will
            explicitly override permissions from group, if it's False it will
            make decision based on the algo
        :param algo: algorithm to decide what permission should be chosen if
            it's multiple defined, eg user in two different groups. It also
            decides if explicit flag is turned off how to specify the permission
            for case when user is in a group + have defined separate permission
        :param calculate_super_admin: calculate permissions for super-admin in the
            same way as for regular user without speedups
        :param cache: Use caching for calculation, None = let the cache backend decide
        """
        user_id = user.user_id
        user_is_admin = user.is_admin

        # inheritance of global permissions like create repo/fork repo etc
        user_inherit_default_permissions = user.inherit_default_permissions

        cache_seconds = safe_int(
            rhodecode.CONFIG.get('rc_cache.cache_perms.expiration_time'))

        if cache is None:
            # let the backend cache decide: cache only when a positive TTL
            # is configured
            cache_on = cache_seconds > 0
        else:
            cache_on = cache

        log.debug(
            'Computing PERMISSION tree for user %s scope `%s` '
            'with caching: %s[TTL: %ss]', user, scope, cache_on, cache_seconds or 0)

        cache_namespace_uid = 'cache_user_auth.{}'.format(user_id)
        region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)

        # cached closure; cache_name/cache_ver are part of the cache key so
        # bumping them invalidates previously stored trees
        @region.conditional_cache_on_arguments(namespace=cache_namespace_uid,
                                               condition=cache_on)
        def compute_perm_tree(cache_name, cache_ver,
                user_id, scope, user_is_admin, user_inherit_default_permissions,
                explicit, algo, calculate_super_admin):
            return _cached_perms_data(
                user_id, scope, user_is_admin, user_inherit_default_permissions,
                explicit, algo, calculate_super_admin)

        start = time.time()
        result = compute_perm_tree(
            'permissions', 'v1', user_id, scope, user_is_admin,
            user_inherit_default_permissions, explicit, algo,
            calculate_super_admin)

        # log only section sizes, not the (potentially huge) tree itself
        result_repr = []
        for k in result:
            result_repr.append((k, len(result[k])))
        total = time.time() - start
        log.debug('PERMISSION tree for user %s computed in %.4fs: %s',
                  user, total, result_repr)

        return result
1281
1281
    @property
    def is_default(self):
        """True when this is the default (anonymous) user account."""
        return self.username == User.DEFAULT_USER
1285
1285
    @property
    def is_admin(self):
        """True for super-admin accounts."""
        return self.admin
1289
1289
    @property
    def is_user_object(self):
        """True when this AuthUser is backed by an actual user id."""
        return self.user_id is not None
1293
1293
1294 @property
1294 @property
1295 def repositories_admin(self):
1295 def repositories_admin(self):
1296 """
1296 """
1297 Returns list of repositories you're an admin of
1297 Returns list of repositories you're an admin of
1298 """
1298 """
1299 return [
1299 return [
1300 x[0] for x in self.permissions['repositories'].items()
1300 x[0] for x in self.permissions['repositories'].items()
1301 if x[1] == 'repository.admin']
1301 if x[1] == 'repository.admin']
1302
1302
1303 @property
1303 @property
1304 def repository_groups_admin(self):
1304 def repository_groups_admin(self):
1305 """
1305 """
1306 Returns list of repository groups you're an admin of
1306 Returns list of repository groups you're an admin of
1307 """
1307 """
1308 return [
1308 return [
1309 x[0] for x in self.permissions['repositories_groups'].items()
1309 x[0] for x in self.permissions['repositories_groups'].items()
1310 if x[1] == 'group.admin']
1310 if x[1] == 'group.admin']
1311
1311
1312 @property
1312 @property
1313 def user_groups_admin(self):
1313 def user_groups_admin(self):
1314 """
1314 """
1315 Returns list of user groups you're an admin of
1315 Returns list of user groups you're an admin of
1316 """
1316 """
1317 return [
1317 return [
1318 x[0] for x in self.permissions['user_groups'].items()
1318 x[0] for x in self.permissions['user_groups'].items()
1319 if x[1] == 'usergroup.admin']
1319 if x[1] == 'usergroup.admin']
1320
1320
1321 def repo_acl_ids_from_stack(self, perms=None, prefix_filter=None, cache=False):
1321 def repo_acl_ids_from_stack(self, perms=None, prefix_filter=None, cache=False):
1322 if not perms:
1322 if not perms:
1323 perms = AuthUser.repo_read_perms
1323 perms = AuthUser.repo_read_perms
1324 allowed_ids = []
1324 allowed_ids = []
1325 for k, stack_data in self.permissions['repositories'].perm_origin_stack.items():
1325 for k, stack_data in self.permissions['repositories'].perm_origin_stack.items():
1326 perm, origin, obj_id = stack_data[-1] # last item is the current permission
1326 perm, origin, obj_id = stack_data[-1] # last item is the current permission
1327 if prefix_filter and not k.startswith(prefix_filter):
1327 if prefix_filter and not k.startswith(prefix_filter):
1328 continue
1328 continue
1329 if perm in perms:
1329 if perm in perms:
1330 allowed_ids.append(obj_id)
1330 allowed_ids.append(obj_id)
1331 return allowed_ids
1331 return allowed_ids
1332
1332
1333 def repo_acl_ids(self, perms=None, name_filter=None, cache=False):
1333 def repo_acl_ids(self, perms=None, name_filter=None, cache=False):
1334 """
1334 """
1335 Returns list of repository ids that user have access to based on given
1335 Returns list of repository ids that user have access to based on given
1336 perms. The cache flag should be only used in cases that are used for
1336 perms. The cache flag should be only used in cases that are used for
1337 display purposes, NOT IN ANY CASE for permission checks.
1337 display purposes, NOT IN ANY CASE for permission checks.
1338 """
1338 """
1339 from rhodecode.model.scm import RepoList
1339 from rhodecode.model.scm import RepoList
1340 if not perms:
1340 if not perms:
1341 perms = AuthUser.repo_read_perms
1341 perms = AuthUser.repo_read_perms
1342
1342
1343 if not isinstance(perms, list):
1343 if not isinstance(perms, list):
1344 raise ValueError('perms parameter must be a list got {} instead'.format(perms))
1344 raise ValueError('perms parameter must be a list got {} instead'.format(perms))
1345
1345
1346 def _cached_repo_acl(perm_def, _name_filter):
1346 def _cached_repo_acl(perm_def, _name_filter):
1347 qry = Repository.query()
1347 qry = Repository.query()
1348 if _name_filter:
1348 if _name_filter:
1349 ilike_expression = u'%{}%'.format(safe_unicode(_name_filter))
1349 ilike_expression = '%{}%'.format(_name_filter)
1350 qry = qry.filter(
1350 qry = qry.filter(
1351 Repository.repo_name.ilike(ilike_expression))
1351 Repository.repo_name.ilike(ilike_expression))
1352
1352
1353 return [x.repo_id for x in
1353 return [x.repo_id for x in
1354 RepoList(qry, perm_set=perm_def, extra_kwargs={'user': self})]
1354 RepoList(qry, perm_set=perm_def, extra_kwargs={'user': self})]
1355
1355
1356 log.debug('Computing REPO ACL IDS user %s', self)
1356 log.debug('Computing REPO ACL IDS user %s', self)
1357
1357
1358 cache_namespace_uid = 'cache_user_repo_acl_ids.{}'.format(self.user_id)
1358 cache_namespace_uid = 'cache_user_repo_acl_ids.{}'.format(self.user_id)
1359 region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)
1359 region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)
1360
1360
1361 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=cache)
1361 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=cache)
1362 def compute_repo_acl_ids(cache_ver, user_id, perm_def, _name_filter):
1362 def compute_repo_acl_ids(cache_ver, user_id, perm_def, _name_filter):
1363 return _cached_repo_acl(perm_def, _name_filter)
1363 return _cached_repo_acl(perm_def, _name_filter)
1364
1364
1365 start = time.time()
1365 start = time.time()
1366 result = compute_repo_acl_ids('v1', self.user_id, perms, name_filter)
1366 result = compute_repo_acl_ids('v1', self.user_id, perms, name_filter)
1367 total = time.time() - start
1367 total = time.time() - start
1368 log.debug('REPO ACL IDS for user %s computed in %.4fs', self, total)
1368 log.debug('REPO ACL IDS for user %s computed in %.4fs', self, total)
1369
1369
1370 return result
1370 return result
1371
1371
1372 def repo_group_acl_ids_from_stack(self, perms=None, prefix_filter=None, cache=False):
1372 def repo_group_acl_ids_from_stack(self, perms=None, prefix_filter=None, cache=False):
1373 if not perms:
1373 if not perms:
1374 perms = AuthUser.repo_group_read_perms
1374 perms = AuthUser.repo_group_read_perms
1375 allowed_ids = []
1375 allowed_ids = []
1376 for k, stack_data in self.permissions['repositories_groups'].perm_origin_stack.items():
1376 for k, stack_data in self.permissions['repositories_groups'].perm_origin_stack.items():
1377 perm, origin, obj_id = stack_data[-1] # last item is the current permission
1377 perm, origin, obj_id = stack_data[-1] # last item is the current permission
1378 if prefix_filter and not k.startswith(prefix_filter):
1378 if prefix_filter and not k.startswith(prefix_filter):
1379 continue
1379 continue
1380 if perm in perms:
1380 if perm in perms:
1381 allowed_ids.append(obj_id)
1381 allowed_ids.append(obj_id)
1382 return allowed_ids
1382 return allowed_ids
1383
1383
1384 def repo_group_acl_ids(self, perms=None, name_filter=None, cache=False):
1384 def repo_group_acl_ids(self, perms=None, name_filter=None, cache=False):
1385 """
1385 """
1386 Returns list of repository group ids that user have access to based on given
1386 Returns list of repository group ids that user have access to based on given
1387 perms. The cache flag should be only used in cases that are used for
1387 perms. The cache flag should be only used in cases that are used for
1388 display purposes, NOT IN ANY CASE for permission checks.
1388 display purposes, NOT IN ANY CASE for permission checks.
1389 """
1389 """
1390 from rhodecode.model.scm import RepoGroupList
1390 from rhodecode.model.scm import RepoGroupList
1391 if not perms:
1391 if not perms:
1392 perms = AuthUser.repo_group_read_perms
1392 perms = AuthUser.repo_group_read_perms
1393
1393
1394 if not isinstance(perms, list):
1394 if not isinstance(perms, list):
1395 raise ValueError('perms parameter must be a list got {} instead'.format(perms))
1395 raise ValueError('perms parameter must be a list got {} instead'.format(perms))
1396
1396
1397 def _cached_repo_group_acl(perm_def, _name_filter):
1397 def _cached_repo_group_acl(perm_def, _name_filter):
1398 qry = RepoGroup.query()
1398 qry = RepoGroup.query()
1399 if _name_filter:
1399 if _name_filter:
1400 ilike_expression = u'%{}%'.format(safe_unicode(_name_filter))
1400 ilike_expression = '%{}%'.format(_name_filter)
1401 qry = qry.filter(
1401 qry = qry.filter(
1402 RepoGroup.group_name.ilike(ilike_expression))
1402 RepoGroup.group_name.ilike(ilike_expression))
1403
1403
1404 return [x.group_id for x in
1404 return [x.group_id for x in
1405 RepoGroupList(qry, perm_set=perm_def, extra_kwargs={'user': self})]
1405 RepoGroupList(qry, perm_set=perm_def, extra_kwargs={'user': self})]
1406
1406
1407 log.debug('Computing REPO GROUP ACL IDS user %s', self)
1407 log.debug('Computing REPO GROUP ACL IDS user %s', self)
1408
1408
1409 cache_namespace_uid = 'cache_user_repo_group_acl_ids.{}'.format(self.user_id)
1409 cache_namespace_uid = 'cache_user_repo_group_acl_ids.{}'.format(self.user_id)
1410 region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)
1410 region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)
1411
1411
1412 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=cache)
1412 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=cache)
1413 def compute_repo_group_acl_ids(cache_ver, user_id, perm_def, _name_filter):
1413 def compute_repo_group_acl_ids(cache_ver, user_id, perm_def, _name_filter):
1414 return _cached_repo_group_acl(perm_def, _name_filter)
1414 return _cached_repo_group_acl(perm_def, _name_filter)
1415
1415
1416 start = time.time()
1416 start = time.time()
1417 result = compute_repo_group_acl_ids('v1', self.user_id, perms, name_filter)
1417 result = compute_repo_group_acl_ids('v1', self.user_id, perms, name_filter)
1418 total = time.time() - start
1418 total = time.time() - start
1419 log.debug('REPO GROUP ACL IDS for user %s computed in %.4fs', self, total)
1419 log.debug('REPO GROUP ACL IDS for user %s computed in %.4fs', self, total)
1420
1420
1421 return result
1421 return result
1422
1422
1423 def user_group_acl_ids_from_stack(self, perms=None, cache=False):
1423 def user_group_acl_ids_from_stack(self, perms=None, cache=False):
1424 if not perms:
1424 if not perms:
1425 perms = AuthUser.user_group_read_perms
1425 perms = AuthUser.user_group_read_perms
1426 allowed_ids = []
1426 allowed_ids = []
1427 for k, stack_data in self.permissions['user_groups'].perm_origin_stack.items():
1427 for k, stack_data in self.permissions['user_groups'].perm_origin_stack.items():
1428 perm, origin, obj_id = stack_data[-1] # last item is the current permission
1428 perm, origin, obj_id = stack_data[-1] # last item is the current permission
1429 if perm in perms:
1429 if perm in perms:
1430 allowed_ids.append(obj_id)
1430 allowed_ids.append(obj_id)
1431 return allowed_ids
1431 return allowed_ids
1432
1432
1433 def user_group_acl_ids(self, perms=None, name_filter=None, cache=False):
1433 def user_group_acl_ids(self, perms=None, name_filter=None, cache=False):
1434 """
1434 """
1435 Returns list of user group ids that user have access to based on given
1435 Returns list of user group ids that user have access to based on given
1436 perms. The cache flag should be only used in cases that are used for
1436 perms. The cache flag should be only used in cases that are used for
1437 display purposes, NOT IN ANY CASE for permission checks.
1437 display purposes, NOT IN ANY CASE for permission checks.
1438 """
1438 """
1439 from rhodecode.model.scm import UserGroupList
1439 from rhodecode.model.scm import UserGroupList
1440 if not perms:
1440 if not perms:
1441 perms = AuthUser.user_group_read_perms
1441 perms = AuthUser.user_group_read_perms
1442
1442
1443 if not isinstance(perms, list):
1443 if not isinstance(perms, list):
1444 raise ValueError('perms parameter must be a list got {} instead'.format(perms))
1444 raise ValueError('perms parameter must be a list got {} instead'.format(perms))
1445
1445
1446 def _cached_user_group_acl(perm_def, _name_filter):
1446 def _cached_user_group_acl(perm_def, _name_filter):
1447 qry = UserGroup.query()
1447 qry = UserGroup.query()
1448 if _name_filter:
1448 if _name_filter:
1449 ilike_expression = u'%{}%'.format(safe_unicode(_name_filter))
1449 ilike_expression = '%{}%'.format(_name_filter)
1450 qry = qry.filter(
1450 qry = qry.filter(
1451 UserGroup.users_group_name.ilike(ilike_expression))
1451 UserGroup.users_group_name.ilike(ilike_expression))
1452
1452
1453 return [x.users_group_id for x in
1453 return [x.users_group_id for x in
1454 UserGroupList(qry, perm_set=perm_def, extra_kwargs={'user': self})]
1454 UserGroupList(qry, perm_set=perm_def, extra_kwargs={'user': self})]
1455
1455
1456 log.debug('Computing USER GROUP ACL IDS user %s', self)
1456 log.debug('Computing USER GROUP ACL IDS user %s', self)
1457
1457
1458 cache_namespace_uid = 'cache_user_user_group_acl_ids.{}'.format(self.user_id)
1458 cache_namespace_uid = 'cache_user_user_group_acl_ids.{}'.format(self.user_id)
1459 region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)
1459 region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)
1460
1460
1461 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=cache)
1461 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=cache)
1462 def compute_user_group_acl_ids(cache_ver, user_id, perm_def, _name_filter):
1462 def compute_user_group_acl_ids(cache_ver, user_id, perm_def, _name_filter):
1463 return _cached_user_group_acl(perm_def, _name_filter)
1463 return _cached_user_group_acl(perm_def, _name_filter)
1464
1464
1465 start = time.time()
1465 start = time.time()
1466 result = compute_user_group_acl_ids('v1', self.user_id, perms, name_filter)
1466 result = compute_user_group_acl_ids('v1', self.user_id, perms, name_filter)
1467 total = time.time() - start
1467 total = time.time() - start
1468 log.debug('USER GROUP ACL IDS for user %s computed in %.4fs', self, total)
1468 log.debug('USER GROUP ACL IDS for user %s computed in %.4fs', self, total)
1469
1469
1470 return result
1470 return result
1471
1471
1472 @property
1472 @property
1473 def ip_allowed(self):
1473 def ip_allowed(self):
1474 """
1474 """
1475 Checks if ip_addr used in constructor is allowed from defined list of
1475 Checks if ip_addr used in constructor is allowed from defined list of
1476 allowed ip_addresses for user
1476 allowed ip_addresses for user
1477
1477
1478 :returns: boolean, True if ip is in allowed ip range
1478 :returns: boolean, True if ip is in allowed ip range
1479 """
1479 """
1480 # check IP
1480 # check IP
1481 inherit = self.inherit_default_permissions
1481 inherit = self.inherit_default_permissions
1482 return AuthUser.check_ip_allowed(self.user_id, self.ip_addr,
1482 return AuthUser.check_ip_allowed(self.user_id, self.ip_addr,
1483 inherit_from_default=inherit)
1483 inherit_from_default=inherit)
1484
1484
1485 @property
1485 @property
1486 def personal_repo_group(self):
1486 def personal_repo_group(self):
1487 return RepoGroup.get_user_personal_repo_group(self.user_id)
1487 return RepoGroup.get_user_personal_repo_group(self.user_id)
1488
1488
1489 @LazyProperty
1489 @LazyProperty
1490 def feed_token(self):
1490 def feed_token(self):
1491 return self.get_instance().feed_token
1491 return self.get_instance().feed_token
1492
1492
1493 @LazyProperty
1493 @LazyProperty
1494 def artifact_token(self):
1494 def artifact_token(self):
1495 return self.get_instance().artifact_token
1495 return self.get_instance().artifact_token
1496
1496
1497 @classmethod
1497 @classmethod
1498 def check_ip_allowed(cls, user_id, ip_addr, inherit_from_default):
1498 def check_ip_allowed(cls, user_id, ip_addr, inherit_from_default):
1499 allowed_ips = AuthUser.get_allowed_ips(
1499 allowed_ips = AuthUser.get_allowed_ips(
1500 user_id, cache=True, inherit_from_default=inherit_from_default)
1500 user_id, cache=True, inherit_from_default=inherit_from_default)
1501 if check_ip_access(source_ip=ip_addr, allowed_ips=allowed_ips):
1501 if check_ip_access(source_ip=ip_addr, allowed_ips=allowed_ips):
1502 log.debug('IP:%s for user %s is in range of %s',
1502 log.debug('IP:%s for user %s is in range of %s',
1503 ip_addr, user_id, allowed_ips)
1503 ip_addr, user_id, allowed_ips)
1504 return True
1504 return True
1505 else:
1505 else:
1506 log.info('Access for IP:%s forbidden for user %s, '
1506 log.info('Access for IP:%s forbidden for user %s, '
1507 'not in %s', ip_addr, user_id, allowed_ips,
1507 'not in %s', ip_addr, user_id, allowed_ips,
1508 extra={"ip": ip_addr, "user_id": user_id})
1508 extra={"ip": ip_addr, "user_id": user_id})
1509 return False
1509 return False
1510
1510
1511 def get_branch_permissions(self, repo_name, perms=None):
1511 def get_branch_permissions(self, repo_name, perms=None):
1512 perms = perms or self.permissions_with_scope({'repo_name': repo_name})
1512 perms = perms or self.permissions_with_scope({'repo_name': repo_name})
1513 branch_perms = perms.get('repository_branches', {})
1513 branch_perms = perms.get('repository_branches', {})
1514 if not branch_perms:
1514 if not branch_perms:
1515 return {}
1515 return {}
1516 repo_branch_perms = branch_perms.get(repo_name)
1516 repo_branch_perms = branch_perms.get(repo_name)
1517 return repo_branch_perms or {}
1517 return repo_branch_perms or {}
1518
1518
1519 def get_rule_and_branch_permission(self, repo_name, branch_name):
1519 def get_rule_and_branch_permission(self, repo_name, branch_name):
1520 """
1520 """
1521 Check if this AuthUser has defined any permissions for branches. If any of
1521 Check if this AuthUser has defined any permissions for branches. If any of
1522 the rules match in order, we return the matching permissions
1522 the rules match in order, we return the matching permissions
1523 """
1523 """
1524
1524
1525 rule = default_perm = ''
1525 rule = default_perm = ''
1526
1526
1527 repo_branch_perms = self.get_branch_permissions(repo_name=repo_name)
1527 repo_branch_perms = self.get_branch_permissions(repo_name=repo_name)
1528 if not repo_branch_perms:
1528 if not repo_branch_perms:
1529 return rule, default_perm
1529 return rule, default_perm
1530
1530
1531 # now calculate the permissions
1531 # now calculate the permissions
1532 for pattern, branch_perm in repo_branch_perms.items():
1532 for pattern, branch_perm in repo_branch_perms.items():
1533 if fnmatch.fnmatch(branch_name, pattern):
1533 if fnmatch.fnmatch(branch_name, pattern):
1534 rule = '`{}`=>{}'.format(pattern, branch_perm)
1534 rule = '`{}`=>{}'.format(pattern, branch_perm)
1535 return rule, branch_perm
1535 return rule, branch_perm
1536
1536
1537 return rule, default_perm
1537 return rule, default_perm
1538
1538
1539 def get_notice_messages(self):
1539 def get_notice_messages(self):
1540
1540
1541 notice_level = 'notice-error'
1541 notice_level = 'notice-error'
1542 notice_messages = []
1542 notice_messages = []
1543 if self.is_default:
1543 if self.is_default:
1544 return [], notice_level
1544 return [], notice_level
1545
1545
1546 notices = UserNotice.query()\
1546 notices = UserNotice.query()\
1547 .filter(UserNotice.user_id == self.user_id)\
1547 .filter(UserNotice.user_id == self.user_id)\
1548 .filter(UserNotice.notice_read == false())\
1548 .filter(UserNotice.notice_read == false())\
1549 .all()
1549 .all()
1550
1550
1551 try:
1551 try:
1552 for entry in notices:
1552 for entry in notices:
1553
1553
1554 msg = {
1554 msg = {
1555 'msg_id': entry.user_notice_id,
1555 'msg_id': entry.user_notice_id,
1556 'level': entry.notification_level,
1556 'level': entry.notification_level,
1557 'subject': entry.notice_subject,
1557 'subject': entry.notice_subject,
1558 'body': entry.notice_body,
1558 'body': entry.notice_body,
1559 }
1559 }
1560 notice_messages.append(msg)
1560 notice_messages.append(msg)
1561
1561
1562 log.debug('Got user %s %s messages', self, len(notice_messages))
1562 log.debug('Got user %s %s messages', self, len(notice_messages))
1563
1563
1564 levels = [x['level'] for x in notice_messages]
1564 levels = [x['level'] for x in notice_messages]
1565 notice_level = 'notice-error' if 'error' in levels else 'notice-warning'
1565 notice_level = 'notice-error' if 'error' in levels else 'notice-warning'
1566 except Exception:
1566 except Exception:
1567 pass
1567 pass
1568
1568
1569 return notice_messages, notice_level
1569 return notice_messages, notice_level
1570
1570
1571 def __repr__(self):
1571 def __repr__(self):
1572 return self.repr_user(self.user_id, self.username, self.ip_addr, self.is_authenticated)
1572 return self.repr_user(self.user_id, self.username, self.ip_addr, self.is_authenticated)
1573
1573
1574 def set_authenticated(self, authenticated=True):
1574 def set_authenticated(self, authenticated=True):
1575 if self.user_id != self.anonymous_user.user_id:
1575 if self.user_id != self.anonymous_user.user_id:
1576 self.is_authenticated = authenticated
1576 self.is_authenticated = authenticated
1577
1577
1578 def get_cookie_store(self):
1578 def get_cookie_store(self):
1579 return {
1579 return {
1580 'username': self.username,
1580 'username': self.username,
1581 'password': md5(safe_bytes(self.password or '')),
1581 'password': md5(safe_bytes(self.password or '')),
1582 'user_id': self.user_id,
1582 'user_id': self.user_id,
1583 'is_authenticated': self.is_authenticated
1583 'is_authenticated': self.is_authenticated
1584 }
1584 }
1585
1585
1586 @classmethod
1586 @classmethod
1587 def repr_user(cls, user_id=0, username='ANONYMOUS', ip='0.0.0.0', is_authenticated=False):
1587 def repr_user(cls, user_id=0, username='ANONYMOUS', ip='0.0.0.0', is_authenticated=False):
1588 tmpl = "<AuthUser('id:{}[{}] ip:{} auth:{}')>"
1588 tmpl = "<AuthUser('id:{}[{}] ip:{} auth:{}')>"
1589 return tmpl.format(user_id, username, ip, is_authenticated)
1589 return tmpl.format(user_id, username, ip, is_authenticated)
1590
1590
1591 @classmethod
1591 @classmethod
1592 def from_cookie_store(cls, cookie_store):
1592 def from_cookie_store(cls, cookie_store):
1593 """
1593 """
1594 Creates AuthUser from a cookie store
1594 Creates AuthUser from a cookie store
1595
1595
1596 :param cls:
1596 :param cls:
1597 :param cookie_store:
1597 :param cookie_store:
1598 """
1598 """
1599 user_id = cookie_store.get('user_id')
1599 user_id = cookie_store.get('user_id')
1600 username = cookie_store.get('username')
1600 username = cookie_store.get('username')
1601 api_key = cookie_store.get('api_key')
1601 api_key = cookie_store.get('api_key')
1602 return AuthUser(user_id, api_key, username)
1602 return AuthUser(user_id, api_key, username)
1603
1603
1604 @classmethod
1604 @classmethod
1605 def get_allowed_ips(cls, user_id, cache=False, inherit_from_default=False):
1605 def get_allowed_ips(cls, user_id, cache=False, inherit_from_default=False):
1606 _set = set()
1606 _set = set()
1607
1607
1608 if inherit_from_default:
1608 if inherit_from_default:
1609 def_user_id = User.get_default_user(cache=True).user_id
1609 def_user_id = User.get_default_user(cache=True).user_id
1610 default_ips = UserIpMap.query().filter(UserIpMap.user_id == def_user_id)
1610 default_ips = UserIpMap.query().filter(UserIpMap.user_id == def_user_id)
1611 if cache:
1611 if cache:
1612 default_ips = default_ips.options(
1612 default_ips = default_ips.options(
1613 FromCache("sql_cache_short", "get_user_ips_default"))
1613 FromCache("sql_cache_short", "get_user_ips_default"))
1614
1614
1615 # populate from default user
1615 # populate from default user
1616 for ip in default_ips:
1616 for ip in default_ips:
1617 try:
1617 try:
1618 _set.add(ip.ip_addr)
1618 _set.add(ip.ip_addr)
1619 except ObjectDeletedError:
1619 except ObjectDeletedError:
1620 # since we use heavy caching sometimes it happens that
1620 # since we use heavy caching sometimes it happens that
1621 # we get deleted objects here, we just skip them
1621 # we get deleted objects here, we just skip them
1622 pass
1622 pass
1623
1623
1624 # NOTE:(marcink) we don't want to load any rules for empty
1624 # NOTE:(marcink) we don't want to load any rules for empty
1625 # user_id which is the case of access of non logged users when anonymous
1625 # user_id which is the case of access of non logged users when anonymous
1626 # access is disabled
1626 # access is disabled
1627 user_ips = []
1627 user_ips = []
1628 if user_id:
1628 if user_id:
1629 user_ips = UserIpMap.query().filter(UserIpMap.user_id == user_id)
1629 user_ips = UserIpMap.query().filter(UserIpMap.user_id == user_id)
1630 if cache:
1630 if cache:
1631 user_ips = user_ips.options(
1631 user_ips = user_ips.options(
1632 FromCache("sql_cache_short", "get_user_ips_%s" % user_id))
1632 FromCache("sql_cache_short", "get_user_ips_%s" % user_id))
1633
1633
1634 for ip in user_ips:
1634 for ip in user_ips:
1635 try:
1635 try:
1636 _set.add(ip.ip_addr)
1636 _set.add(ip.ip_addr)
1637 except ObjectDeletedError:
1637 except ObjectDeletedError:
1638 # since we use heavy caching sometimes it happens that we get
1638 # since we use heavy caching sometimes it happens that we get
1639 # deleted objects here, we just skip them
1639 # deleted objects here, we just skip them
1640 pass
1640 pass
1641 return _set or {ip for ip in ['0.0.0.0/0', '::/0']}
1641 return _set or {ip for ip in ['0.0.0.0/0', '::/0']}
1642
1642
1643
1643
def set_available_permissions(settings):
    """
    This function will propagate pyramid settings with all available defined
    permission given in db. We don't want to check each time from db for new
    permissions since adding a new permission also requires application restart
    ie. to decorate new views with the newly created permission

    :param settings: current pyramid registry.settings
    """
    log.debug('auth: getting information about all available permissions')
    try:
        db_session = meta.Session
        permission_names = [
            perm.permission_name for perm in db_session.query(Permission).all()]
        settings.setdefault('available_permissions', permission_names)
        log.debug('auth: set available permissions')
    except Exception:
        log.exception('Failed to fetch permissions from the database.')
        raise
1664
1664
1665
1665
def get_csrf_token(session, force_new=False, save_if_missing=True):
    """
    Return the current authentication token, creating one if one doesn't
    already exist and the save_if_missing flag is present.

    :param session: pass in the pyramid session, else we use the global ones
    :param force_new: force to re-generate the token and store it in session
    :param save_if_missing: save the newly generated token if it's missing in
        session
    """
    # NOTE(marcink): probably should be replaced with below one from pyramid 1.9
    # from pyramid.csrf import get_csrf_token
    token_missing = csrf_token_key not in session
    if force_new or (token_missing and save_if_missing):
        # 128 random bits hashed down to a stable token string
        session[csrf_token_key] = sha1(ascii_bytes(str(random.getrandbits(128))))
        if hasattr(session, 'save'):
            session.save()
    return session.get(csrf_token_key)
1685
1685
1686
1686
def get_request(perm_class_instance):
    """Fetch the current pyramid request from the threadlocal registry."""
    from pyramid.threadlocal import get_current_request
    return get_current_request()
1691
1691
1692
1692
1693 # CHECK DECORATORS
1693 # CHECK DECORATORS
class CSRFRequired(object):
    """
    Decorator for authenticating a form

    This decorator uses an authorization token stored in the client's
    session for prevention of certain Cross-site request forgery (CSRF)
    attacks (See
    http://en.wikipedia.org/wiki/Cross-site_request_forgery for more
    information).

    For use with the ``secure_form`` helper functions.
    """
    def __init__(self, token=csrf_token_key, header='X-CSRF-Token', except_methods=None):
        """
        :param token: name of the POST field / session key holding the token
        :param header: request header checked as a fallback token source
        :param except_methods: HTTP methods exempt from the CSRF check
        """
        self.token = token
        self.header = header
        self.except_methods = except_methods or []

    def __call__(self, func):
        return get_cython_compat_decorator(self.__wrapper, func)

    def _get_csrf(self, _request):
        # POST body value wins over the header value
        return _request.POST.get(self.token, _request.headers.get(self.header))

    def check_csrf(self, _request, cur_token):
        """Return True when the request carries a token equal to *cur_token*."""
        supplied_token = self._get_csrf(_request)
        return supplied_token and supplied_token == cur_token

    def _get_request(self):
        return get_request(self)

    def __wrapper(self, func, *fargs, **fkwargs):
        request = self._get_request()

        # exempted methods (e.g. safe GETs) skip the check entirely
        if request.method in self.except_methods:
            return func(*fargs, **fkwargs)

        cur_token = get_csrf_token(request.session, save_if_missing=False)
        if self.check_csrf(request, cur_token):
            # consume the token so it doesn't leak into downstream handlers
            if request.POST.get(self.token):
                del request.POST[self.token]
            return func(*fargs, **fkwargs)
        else:
            reason = 'token-missing'
            supplied_token = self._get_csrf(request)
            if supplied_token and cur_token != supplied_token:
                # FIX: parenthesize before slicing; the original
                # `cur_token or ''[:6]` sliced the empty-string literal and
                # logged the full tokens instead of 6-char prefixes
                reason = 'token-mismatch [%s:%s]' % (
                    (cur_token or '')[:6], (supplied_token or '')[:6])

            csrf_message = \
                ("Cross-site request forgery detected, request denied. See "
                 "http://en.wikipedia.org/wiki/Cross-site_request_forgery for "
                 "more information.")
            # `warn` is a deprecated alias of `warning`; use lazy %-args so
            # formatting only happens when the record is emitted
            log.warning(
                'Cross-site request forgery detected, request %r DENIED: %s '
                'REMOTE_ADDR:%s, HEADERS:%s',
                request, reason, request.remote_addr, request.headers)

            raise HTTPForbidden(explanation=csrf_message)
1752
1752
1753
1753
1754 class LoginRequired(object):
1754 class LoginRequired(object):
1755 """
1755 """
1756 Must be logged in to execute this function else
1756 Must be logged in to execute this function else
1757 redirect to login page
1757 redirect to login page
1758
1758
1759 :param api_access: if enabled this checks only for valid auth token
1759 :param api_access: if enabled this checks only for valid auth token
1760 and grants access based on valid token
1760 and grants access based on valid token
1761 """
1761 """
1762 def __init__(self, auth_token_access=None):
1762 def __init__(self, auth_token_access=None):
1763 self.auth_token_access = auth_token_access
1763 self.auth_token_access = auth_token_access
1764 if self.auth_token_access:
1764 if self.auth_token_access:
1765 valid_type = set(auth_token_access).intersection(set(UserApiKeys.ROLES))
1765 valid_type = set(auth_token_access).intersection(set(UserApiKeys.ROLES))
1766 if not valid_type:
1766 if not valid_type:
1767 raise ValueError('auth_token_access must be on of {}, got {}'.format(
1767 raise ValueError('auth_token_access must be on of {}, got {}'.format(
1768 UserApiKeys.ROLES, auth_token_access))
1768 UserApiKeys.ROLES, auth_token_access))
1769
1769
1770 def __call__(self, func):
1770 def __call__(self, func):
1771 return get_cython_compat_decorator(self.__wrapper, func)
1771 return get_cython_compat_decorator(self.__wrapper, func)
1772
1772
1773 def _get_request(self):
1773 def _get_request(self):
1774 return get_request(self)
1774 return get_request(self)
1775
1775
    def __wrapper(self, func, *fargs, **fkwargs):
        """
        Run the actual access checks for the decorated view: first the IP
        allow-list, then either regular session authentication or (when
        enabled) auth-token authentication; redirects to the login page on
        failure.
        """
        # local import to avoid circular imports at module load time
        from rhodecode.lib import helpers as h
        # by convention the decorated view's class instance is the first arg
        cls = fargs[0]
        user = cls._rhodecode_user
        request = self._get_request()
        _ = request.translate

        # location label used in logs and as the auth-token whitelist entry
        loc = "%s:%s" % (cls.__class__.__name__, func.__name__)
        log.debug('Starting login restriction checks for user: %s', user)
        # check if our IP is allowed
        ip_access_valid = True
        if not user.ip_allowed:
            h.flash(h.literal(_('IP {} not allowed'.format(user.ip_addr))),
                    category='warning')
            ip_access_valid = False

        # we used stored token that is extract from GET or URL param (if any)
        _auth_token = request.user_auth_token

        # check if we used an AUTH_TOKEN and it's a valid one
        # defined white-list of controllers which API access will be enabled
        whitelist = None
        if self.auth_token_access:
            # since this location is allowed by @LoginRequired decorator it's our
            # only whitelist
            whitelist = [loc]
        auth_token_access_valid = allowed_auth_token_access(
            loc, whitelist=whitelist, auth_token=_auth_token)

        # explicit controller is enabled or API is in our whitelist
        if auth_token_access_valid:
            log.debug('Checking AUTH TOKEN access for %s', cls)
            db_user = user.get_instance()

            if db_user:
                if self.auth_token_access:
                    roles = self.auth_token_access
                else:
                    # default role when no explicit token roles were requested
                    roles = [UserApiKeys.ROLE_HTTP]
                log.debug('AUTH TOKEN: checking auth for user %s and roles %s',
                          db_user, roles)
                token_match = db_user.authenticate_by_token(
                    _auth_token, roles=roles)
            else:
                log.debug('Unable to fetch db instance for auth user: %s', user)
                token_match = False

            if _auth_token and token_match:
                auth_token_access_valid = True
                # only the token suffix is logged to avoid leaking secrets
                log.debug('AUTH TOKEN ****%s is VALID', _auth_token[-4:])
            else:
                auth_token_access_valid = False
                if not _auth_token:
                    log.debug("AUTH TOKEN *NOT* present in request")
                else:
                    log.warning("AUTH TOKEN ****%s *NOT* valid", _auth_token[-4:])

        log.debug('Checking if %s is authenticated @ %s', user.username, loc)
        # reason string used purely for log readability
        reason = 'RHODECODE_AUTH' if user.is_authenticated \
            else 'AUTH_TOKEN_AUTH'

        if ip_access_valid and (
                user.is_authenticated or auth_token_access_valid):
            log.info('user %s authenticating with:%s IS authenticated on func %s',
                     user, reason, loc)

            return func(*fargs, **fkwargs)
        else:
            log.warning(
                'user %s authenticating with:%s NOT authenticated on '
                'func: %s: IP_ACCESS:%s AUTH_TOKEN_ACCESS:%s',
                user, reason, loc, ip_access_valid, auth_token_access_valid)
            # we preserve the get PARAM
            came_from = get_came_from(request)

            log.debug('redirecting to login page with %s', came_from)
            raise HTTPFound(
                h.route_path('login', _query={'came_from': came_from}))
1854
1854
1855
1855
class NotAnonymous(object):
    """
    Requires an authenticated (non-default) user to execute the decorated
    function; anonymous visitors are redirected to the login page.
    """

    def __call__(self, func):
        return get_cython_compat_decorator(self.__wrapper, func)

    def _get_request(self):
        return get_request(self)

    def __wrapper(self, func, *fargs, **fkwargs):
        import rhodecode.lib.helpers as h
        cls = fargs[0]
        self.user = cls._rhodecode_user
        request = self._get_request()
        _ = request.translate
        log.debug('Checking if user is not anonymous @%s', cls)

        is_anonymous = self.user.username == User.DEFAULT_USER
        if not is_anonymous:
            # registered user, proceed with the original view
            return func(*fargs, **fkwargs)

        # anonymous: flash a warning and send to the login page,
        # preserving the originally requested location
        came_from = get_came_from(request)
        h.flash(_('You need to be a registered user to '
                  'perform this action'),
                category='warning')
        raise HTTPFound(
            h.route_path('login', _query={'came_from': came_from}))
1887
1887
1888
1888
class PermsDecorator(object):
    """
    Base class for controller permission decorators. The current user is
    pulled from the decorated view class itself, which stores it (as
    ``_rhodecode_user``) in the base controllers.
    """

    def __init__(self, *required_perms):
        self.required_perms = set(required_perms)

    def __call__(self, func):
        return get_cython_compat_decorator(self.__wrapper, func)

    def _get_request(self):
        return get_request(self)

    def __wrapper(self, func, *fargs, **fkwargs):
        import rhodecode.lib.helpers as h
        cls = fargs[0]
        _user = cls._rhodecode_user
        request = self._get_request()
        _ = request.translate

        log.debug('checking %s permissions %s for %s %s',
                  self.__class__.__name__, self.required_perms, cls, _user)

        if self.check_permissions(_user):
            log.debug('Permission granted for %s %s', cls, _user)
            return func(*fargs, **fkwargs)

        log.debug('Permission denied for %s %s', cls, _user)
        anonymous = _user.username == User.DEFAULT_USER

        if not anonymous:
            # redirect with 404 to prevent resource discovery
            raise HTTPNotFound()

        # anonymous users get sent to the login page instead
        came_from = get_came_from(self._get_request())
        h.flash(_('You need to be signed in to view this page'),
                category='warning')
        raise HTTPFound(
            h.route_path('login', _query={'came_from': came_from}))

    def check_permissions(self, user):
        """Dummy function for overriding"""
        raise NotImplementedError(
            'You have to write this function in child class')
1937
1937
1938
1938
class HasPermissionAllDecorator(PermsDecorator):
    """
    Grants access only when the user holds every one of the required
    global permissions.
    """

    def check_permissions(self, user):
        global_perms = user.permissions_with_scope({})['global']
        return self.required_perms.issubset(global_perms)
1950
1950
1951
1951
class HasPermissionAnyDecorator(PermsDecorator):
    """
    Grants access when the user holds at least one of the required global
    permissions.
    """

    def check_permissions(self, user):
        global_perms = user.permissions_with_scope({})['global']
        return bool(self.required_perms.intersection(global_perms))
1963
1963
1964
1964
class HasRepoPermissionAllDecorator(PermsDecorator):
    """
    Grants access only when the user holds every required permission on the
    repository extracted from the current request.
    """

    def _get_repo_name(self):
        # repository slug comes from the routing of the current request
        return get_repo_slug(self._get_request())

    def check_permissions(self, user):
        perms = user.permissions
        repo_name = self._get_repo_name()

        try:
            user_perms = {perms['repositories'][repo_name]}
        except KeyError:
            log.debug('cannot locate repo with name: `%s` in permissions defs',
                      repo_name)
            return False

        log.debug('checking `%s` permissions for repo `%s`',
                  user_perms, repo_name)
        return self.required_perms.issubset(user_perms)
1990
1990
1991
1991
class HasRepoPermissionAnyDecorator(PermsDecorator):
    """
    Grants access when the user holds at least one of the required
    permissions on the repository extracted from the current request.
    """

    def _get_repo_name(self):
        # repository slug comes from the routing of the current request
        return get_repo_slug(self._get_request())

    def check_permissions(self, user):
        perms = user.permissions
        repo_name = self._get_repo_name()

        try:
            user_perms = {perms['repositories'][repo_name]}
        except KeyError:
            log.debug(
                'cannot locate repo with name: `%s` in permissions defs',
                repo_name)
            return False

        log.debug('checking `%s` permissions for repo `%s`',
                  user_perms, repo_name)
        return bool(self.required_perms.intersection(user_perms))
2018
2018
2019
2019
class HasRepoGroupPermissionAllDecorator(PermsDecorator):
    """
    Grants access only when the user holds every required permission on the
    repository group extracted from the current request.
    """

    def _get_repo_group_name(self):
        # repo-group slug comes from the routing of the current request
        return get_repo_group_slug(self._get_request())

    def check_permissions(self, user):
        perms = user.permissions
        group_name = self._get_repo_group_name()
        try:
            user_perms = {perms['repositories_groups'][group_name]}
        except KeyError:
            log.debug(
                'cannot locate repo group with name: `%s` in permissions defs',
                group_name)
            return False

        log.debug('checking `%s` permissions for repo group `%s`',
                  user_perms, group_name)
        return self.required_perms.issubset(user_perms)
2046
2046
2047
2047
class HasRepoGroupPermissionAnyDecorator(PermsDecorator):
    """
    Grants access when the user holds at least one of the required
    permissions on the repository group extracted from the current request.
    """

    def _get_repo_group_name(self):
        # repo-group slug comes from the routing of the current request
        return get_repo_group_slug(self._get_request())

    def check_permissions(self, user):
        perms = user.permissions
        group_name = self._get_repo_group_name()

        try:
            user_perms = {perms['repositories_groups'][group_name]}
        except KeyError:
            log.debug(
                'cannot locate repo group with name: `%s` in permissions defs',
                group_name)
            return False

        log.debug('checking `%s` permissions for repo group `%s`',
                  user_perms, group_name)
        return bool(self.required_perms.intersection(user_perms))
2075
2075
2076
2076
class HasUserGroupPermissionAllDecorator(PermsDecorator):
    """
    Grants access only when the user holds every required permission on the
    user group extracted from the current request.
    """

    def _get_user_group_name(self):
        # user-group slug comes from the routing of the current request
        return get_user_group_slug(self._get_request())

    def check_permissions(self, user):
        perms = user.permissions
        group_name = self._get_user_group_name()
        try:
            user_perms = {perms['user_groups'][group_name]}
        except KeyError:
            return False

        return self.required_perms.issubset(user_perms)
2097
2097
2098
2098
class HasUserGroupPermissionAnyDecorator(PermsDecorator):
    """
    Grants access when the user holds at least one of the required
    permissions on the user group extracted from the current request.
    """

    def _get_user_group_name(self):
        # user-group slug comes from the routing of the current request
        return get_user_group_slug(self._get_request())

    def check_permissions(self, user):
        perms = user.permissions
        group_name = self._get_user_group_name()
        try:
            user_perms = {perms['user_groups'][group_name]}
        except KeyError:
            return False

        return bool(self.required_perms.intersection(user_perms))
2119
2119
2120
2120
2121 # CHECK FUNCTIONS
2121 # CHECK FUNCTIONS
class PermsFunction(object):
    """
    Base callable for the permission check functions below.

    Subclasses set one of the scope attributes (``repo_name``,
    ``repo_group_name``, ``user_group_name``) in their ``__call__`` and
    implement :meth:`check_permissions`.
    """

    def __init__(self, *perms):
        # set of permission strings required for this check
        self.required_perms = set(perms)
        # scope attributes, filled in by subclasses before checking
        self.repo_name = None
        self.repo_group_name = None
        self.user_group_name = None

    def __bool__(self):
        # instances must be *called*, not truth-tested; log a stack trace
        # to help locate the offending call site
        import inspect
        frame = inspect.currentframe()
        stack_trace = traceback.format_stack(frame)
        log.error('Checking bool value on a class instance of perm '
                  'function is not allowed: %s', ''.join(stack_trace))
        # rather than throwing errors, here we always return False so if by
        # accident someone checks truth for just an instance it will always end
        # up in returning False
        return False
    __nonzero__ = __bool__  # python2 spelling of __bool__

    def __call__(self, check_location='', user=None):
        """
        Perform the permission check.

        :param check_location: free-form label used only in log messages
        :param user: user to check; falls back to the global request user
        :return: True when permission is granted, else False
        """
        if not user:
            log.debug('Using user attribute from global request')
            request = self._get_request()
            user = request.user

        # init auth user if not already given
        if not isinstance(user, AuthUser):
            log.debug('Wrapping user %s into AuthUser', user)
            user = AuthUser(user.user_id)

        cls_name = self.__class__.__name__
        check_scope = self._get_check_scope(cls_name)
        check_location = check_location or 'unspecified location'

        log.debug('checking cls:%s %s usr:%s %s @ %s', cls_name,
                  self.required_perms, user, check_scope, check_location)
        if not user:
            log.warning('Empty user given for permission check')
            return False

        if self.check_permissions(user):
            log.debug('Permission to repo:`%s` GRANTED for user:`%s` @ %s',
                      check_scope, user, check_location)
            return True

        else:
            log.debug('Permission to repo:`%s` DENIED for user:`%s` @ %s',
                      check_scope, user, check_location)
            return False

    def _get_request(self):
        return get_request(self)

    def _get_check_scope(self, cls_name):
        # map the concrete subclass name to a human-readable scope label
        # used in log messages
        return {
            'HasPermissionAll': 'GLOBAL',
            'HasPermissionAny': 'GLOBAL',
            'HasRepoPermissionAll': 'repo:%s' % self.repo_name,
            'HasRepoPermissionAny': 'repo:%s' % self.repo_name,
            'HasRepoGroupPermissionAll': 'repo_group:%s' % self.repo_group_name,
            'HasRepoGroupPermissionAny': 'repo_group:%s' % self.repo_group_name,
            'HasUserGroupPermissionAll': 'user_group:%s' % self.user_group_name,
            'HasUserGroupPermissionAny': 'user_group:%s' % self.user_group_name,
        }.get(cls_name, '?:%s' % cls_name)

    def check_permissions(self, user):
        """Dummy function for overriding"""
        raise Exception('You have to write this function in child class')
2192
2192
2193
2193
class HasPermissionAll(PermsFunction):
    """True when the user holds every required global permission."""

    def check_permissions(self, user):
        global_perms = user.permissions_with_scope({}).get('global')
        return self.required_perms.issubset(global_perms)
2200
2200
2201
2201
class HasPermissionAny(PermsFunction):
    """True when the user holds at least one required global permission."""

    def check_permissions(self, user):
        global_perms = user.permissions_with_scope({}).get('global')
        return bool(self.required_perms.intersection(global_perms))
2208
2208
2209
2209
class HasRepoPermissionAll(PermsFunction):
    """True when the user holds every required permission on a repository."""

    def __call__(self, repo_name=None, check_location='', user=None):
        self.repo_name = repo_name
        return super(HasRepoPermissionAll, self).__call__(check_location, user)

    def _get_repo_name(self):
        # fall back to the repository slug of the current request
        if not self.repo_name:
            self.repo_name = get_repo_slug(self._get_request())
        return self.repo_name

    def check_permissions(self, user):
        self.repo_name = self._get_repo_name()
        try:
            user_perms = {user.permissions['repositories'][self.repo_name]}
        except KeyError:
            return False
        return self.required_perms.issubset(user_perms)
2231
2231
2232
2232
class HasRepoPermissionAny(PermsFunction):
    """True when the user holds any required permission on a repository."""

    def __call__(self, repo_name=None, check_location='', user=None):
        self.repo_name = repo_name
        return super(HasRepoPermissionAny, self).__call__(check_location, user)

    def _get_repo_name(self):
        # fall back to the repository slug of the current request
        if not self.repo_name:
            self.repo_name = get_repo_slug(self._get_request())
        return self.repo_name

    def check_permissions(self, user):
        self.repo_name = self._get_repo_name()
        try:
            user_perms = {user.permissions['repositories'][self.repo_name]}
        except KeyError:
            return False
        return bool(self.required_perms.intersection(user_perms))
2254
2254
2255
2255
class HasRepoGroupPermissionAny(PermsFunction):
    """True when the user holds any required permission on a repo group."""

    def __call__(self, group_name=None, check_location='', user=None):
        self.repo_group_name = group_name
        return super(HasRepoGroupPermissionAny, self).__call__(check_location, user)

    def check_permissions(self, user):
        try:
            user_perms = {user.permissions['repositories_groups'][self.repo_group_name]}
        except KeyError:
            return False
        return bool(self.required_perms.intersection(user_perms))
2270
2270
2271
2271
class HasRepoGroupPermissionAll(PermsFunction):
    """True when the user holds every required permission on a repo group."""

    def __call__(self, group_name=None, check_location='', user=None):
        self.repo_group_name = group_name
        return super(HasRepoGroupPermissionAll, self).__call__(check_location, user)

    def check_permissions(self, user):
        try:
            user_perms = {user.permissions['repositories_groups'][self.repo_group_name]}
        except KeyError:
            return False
        return self.required_perms.issubset(user_perms)
2286
2286
2287
2287
class HasUserGroupPermissionAny(PermsFunction):
    """Grant access when the user holds ANY of the required permissions
    on the given user group."""

    def __call__(self, user_group_name=None, check_location='', user=None):
        self.user_group_name = user_group_name
        return super(HasUserGroupPermissionAny, self).__call__(check_location, user)

    def check_permissions(self, user):
        try:
            granted = {user.permissions['user_groups'][self.user_group_name]}
        except KeyError:
            # unknown user group or no permission entry for it -> deny
            return False
        return bool(self.required_perms & granted)
2302
2302
2303
2303
class HasUserGroupPermissionAll(PermsFunction):
    """Grant access only when the user holds ALL of the required
    permissions on the given user group."""

    def __call__(self, user_group_name=None, check_location='', user=None):
        self.user_group_name = user_group_name
        return super(HasUserGroupPermissionAll, self).__call__(check_location, user)

    def check_permissions(self, user):
        try:
            granted = {user.permissions['user_groups'][self.user_group_name]}
        except KeyError:
            # unknown user group or no permission entry for it -> deny
            return False
        return self.required_perms <= granted
2318
2318
2319
2319
2320 # SPECIAL VERSION TO HANDLE MIDDLEWARE AUTH
2320 # SPECIAL VERSION TO HANDLE MIDDLEWARE AUTH
class HasPermissionAnyMiddleware(object):
    """Permission checker used on the VCS (middleware) protocol path.

    Grants access when the user holds ANY of the required repository
    permissions for the repository being accessed.
    """

    def __init__(self, *perms):
        self.required_perms = set(perms)

    def __call__(self, auth_user, repo_name):
        # NOTE(review): repo_name used to be forced through safe_unicode()
        # here because permission-dict keys are unicode; that coercion was
        # removed during the unicode migration — TODO confirm it is safe.
        log.debug(
            'Checking VCS protocol permissions %s for user:%s repo:`%s`',
            self.required_perms, auth_user, repo_name)

        granted = self.check_permissions(auth_user, repo_name)
        if granted:
            log.debug('Permission to repo:`%s` GRANTED for user:%s @ %s',
                      repo_name, auth_user, 'PermissionMiddleware')
        else:
            log.debug('Permission to repo:`%s` DENIED for user:%s @ %s',
                      repo_name, auth_user, 'PermissionMiddleware')
        return granted

    def check_permissions(self, user, repo_name):
        # scope the permission computation to just this repository
        perms = user.permissions_with_scope({'repo_name': repo_name})

        try:
            granted = {perms['repositories'][repo_name]}
        except Exception:
            log.exception('Error while accessing user permissions')
            return False

        return bool(self.required_perms & granted)
2355
2357
2356
2358
2357 # SPECIAL VERSION TO HANDLE API AUTH
2359 # SPECIAL VERSION TO HANDLE API AUTH
class _BaseApiPerm(object):
    """
    Base class for API permission checks.

    Subclasses implement :meth:`check_permissions`; this base handles
    argument normalization (resolving a DB user to an ``AuthUser``) and
    debug logging of the check and its outcome.
    """

    def __init__(self, *perms):
        # the set of permission names this checker requires
        self.required_perms = set(perms)

    def __call__(self, check_location=None, user=None, repo_name=None,
                 group_name=None, user_group_name=None):
        """
        Run the permission check; returns True when granted, False otherwise.

        :param check_location: free-text label of the call site, used only
            for logging
        :param user: ``AuthUser`` or a DB user object with ``user_id``
        :param repo_name: repository to check, if any
        :param group_name: repository group to check, if any
        :param user_group_name: user group to check, if any
        """
        cls_name = self.__class__.__name__
        # build a human-readable description of the scope for log messages
        check_scope = 'global:%s' % (self.required_perms,)
        if repo_name:
            check_scope += ', repo_name:%s' % (repo_name,)

        if group_name:
            check_scope += ', repo_group_name:%s' % (group_name,)

        if user_group_name:
            check_scope += ', user_group_name:%s' % (user_group_name,)

        log.debug('checking cls:%s %s %s @ %s',
                  cls_name, self.required_perms, check_scope, check_location)
        if not user:
            # no user -> nothing to grant permissions to
            log.debug('Empty User passed into arguments')
            return False

        # process user: normalize a plain DB user into an AuthUser so the
        # computed .permissions attribute is available
        if not isinstance(user, AuthUser):
            user = AuthUser(user.user_id)
        if not check_location:
            check_location = 'unspecified'
        if self.check_permissions(user.permissions, repo_name, group_name,
                                  user_group_name):
            log.debug('Permission to repo:`%s` GRANTED for user:`%s` @ %s',
                      check_scope, user, check_location)
            return True

        else:
            log.debug('Permission to repo:`%s` DENIED for user:`%s` @ %s',
                      check_scope, user, check_location)
            return False

    def check_permissions(self, perm_defs, repo_name=None, group_name=None,
                          user_group_name=None):
        """
        Implement in child class; should return True if permissions are ok,
        False otherwise.

        :param perm_defs: dict with permission definitions
        :param repo_name: repo name
        """
        raise NotImplementedError()
2407
2409
2408
2410
class HasPermissionAllApi(_BaseApiPerm):
    """API check: ALL required global permissions must be present."""

    def check_permissions(self, perm_defs, repo_name=None, group_name=None,
                          user_group_name=None):
        return bool(self.required_perms.issubset(perm_defs.get('global')))
2415
2417
2416
2418
class HasPermissionAnyApi(_BaseApiPerm):
    """API check: ANY of the required global permissions suffices."""

    def check_permissions(self, perm_defs, repo_name=None, group_name=None,
                          user_group_name=None):
        return bool(self.required_perms.intersection(perm_defs.get('global')))
2423
2425
2424
2426
class HasRepoPermissionAllApi(_BaseApiPerm):
    """API check: ALL required permissions on the given repository."""

    def check_permissions(self, perm_defs, repo_name=None, group_name=None,
                          user_group_name=None):
        try:
            granted = {perm_defs['repositories'][repo_name]}
        except KeyError:
            # repo missing from the permission map -> deny, but leave a trace
            log.warning(traceback.format_exc())
            return False
        return self.required_perms <= granted
2436
2438
2437
2439
class HasRepoPermissionAnyApi(_BaseApiPerm):
    """API check: ANY of the required permissions on the given repository."""

    def check_permissions(self, perm_defs, repo_name=None, group_name=None,
                          user_group_name=None):
        try:
            granted = {perm_defs['repositories'][repo_name]}
        except KeyError:
            # repo missing from the permission map -> deny, but leave a trace
            log.warning(traceback.format_exc())
            return False
        return bool(self.required_perms & granted)
2449
2451
2450
2452
class HasRepoGroupPermissionAnyApi(_BaseApiPerm):
    """API check: ANY of the required permissions on the repository group."""

    def check_permissions(self, perm_defs, repo_name=None, group_name=None,
                          user_group_name=None):
        try:
            granted = {perm_defs['repositories_groups'][group_name]}
        except KeyError:
            # group missing from the permission map -> deny, but leave a trace
            log.warning(traceback.format_exc())
            return False
        return bool(self.required_perms & granted)
2462
2464
2463
2465
class HasRepoGroupPermissionAllApi(_BaseApiPerm):
    """API check: ALL required permissions on the repository group."""

    def check_permissions(self, perm_defs, repo_name=None, group_name=None,
                          user_group_name=None):
        try:
            granted = {perm_defs['repositories_groups'][group_name]}
        except KeyError:
            # group missing from the permission map -> deny, but leave a trace
            log.warning(traceback.format_exc())
            return False
        return self.required_perms <= granted
2475
2477
2476
2478
class HasUserGroupPermissionAnyApi(_BaseApiPerm):
    """API check: ANY of the required permissions on the user group."""

    def check_permissions(self, perm_defs, repo_name=None, group_name=None,
                          user_group_name=None):
        try:
            granted = {perm_defs['user_groups'][user_group_name]}
        except KeyError:
            # user group missing from the permission map -> deny, with a trace
            log.warning(traceback.format_exc())
            return False
        return bool(self.required_perms & granted)
2488
2490
2489
2491
def check_ip_access(source_ip, allowed_ips=None):
    """
    Checks if source_ip is a subnet of any of allowed_ips.

    :param source_ip:
    :param allowed_ips: list of allowed ips together with mask
    """
    log.debug('checking if ip:%s is subnet of %s', source_ip, allowed_ips)
    source_addr = ipaddress.ip_address(source_ip)

    if not isinstance(allowed_ips, (tuple, list, set)):
        # no (valid) allow-list -> deny
        return False

    for allowed in allowed_ips:
        # NOTE(review): entries used to be passed through safe_unicode()
        # before parsing; dropped during the unicode migration — TODO verify.
        try:
            network = ipaddress.ip_network(allowed, strict=False)
            if source_addr in network:
                log.debug('IP %s is network %s', source_addr, network)
                return True
        # for any case we cannot determine the IP, don't crash just
        # skip it and log as error; we still want to answer "forbidden"
        # when a bad IP/mask entry is present
        except Exception:
            log.error(traceback.format_exc())
            continue
    return False
2514
2517
2515
2518
def get_cython_compat_decorator(wrapper, func):
    """
    Creates a cython compatible decorator. The previously used
    decorator.decorator() function seems to be incompatible with cython.

    :param wrapper: __wrapper method of the decorator class
    :param func: decorated function
    """
    @wraps(func)
    def _compat_wrapper(*args, **kwargs):
        # delegate every call through the supplied wrapper
        return wrapper(func, *args, **kwargs)

    # expose the wrapped callable, mirroring functools conventions
    _compat_wrapper.__wrapped__ = func
    return _compat_wrapper
2529
2532
2530
2533
@@ -1,611 +1,611 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 The base Controller API
22 The base Controller API
23 Provides the BaseController class for subclassing. And usage in different
23 Provides the BaseController class for subclassing. And usage in different
24 controllers
24 controllers
25 """
25 """
26
26
27 import logging
27 import logging
28 import socket
28 import socket
29
29
30 import markupsafe
30 import markupsafe
31 import ipaddress
31 import ipaddress
32
32
33 from paste.auth.basic import AuthBasicAuthenticator
33 from paste.auth.basic import AuthBasicAuthenticator
34 from paste.httpexceptions import HTTPUnauthorized, HTTPForbidden, get_exception
34 from paste.httpexceptions import HTTPUnauthorized, HTTPForbidden, get_exception
35 from paste.httpheaders import WWW_AUTHENTICATE, AUTHORIZATION
35 from paste.httpheaders import WWW_AUTHENTICATE, AUTHORIZATION
36
36
37 import rhodecode
37 import rhodecode
38 from rhodecode.authentication.base import VCS_TYPE
38 from rhodecode.authentication.base import VCS_TYPE
39 from rhodecode.lib import auth, utils2
39 from rhodecode.lib import auth, utils2
40 from rhodecode.lib import helpers as h
40 from rhodecode.lib import helpers as h
41 from rhodecode.lib.auth import AuthUser, CookieStoreWrapper
41 from rhodecode.lib.auth import AuthUser, CookieStoreWrapper
42 from rhodecode.lib.exceptions import UserCreationError
42 from rhodecode.lib.exceptions import UserCreationError
43 from rhodecode.lib.utils import (password_changed, get_enabled_hook_classes)
43 from rhodecode.lib.utils import (password_changed, get_enabled_hook_classes)
44 from rhodecode.lib.utils2 import (
44 from rhodecode.lib.utils2 import (
45 str2bool, safe_unicode, AttributeDict, safe_int, sha1, aslist, safe_str)
45 str2bool, safe_unicode, AttributeDict, safe_int, sha1, aslist, safe_str)
46 from rhodecode.model.db import Repository, User, ChangesetComment, UserBookmark
46 from rhodecode.model.db import Repository, User, ChangesetComment, UserBookmark
47 from rhodecode.model.notification import NotificationModel
47 from rhodecode.model.notification import NotificationModel
48 from rhodecode.model.settings import VcsSettingsModel, SettingsModel
48 from rhodecode.model.settings import VcsSettingsModel, SettingsModel
49
49
50 log = logging.getLogger(__name__)
50 log = logging.getLogger(__name__)
51
51
52
52
53 def _filter_proxy(ip):
53 def _filter_proxy(ip):
54 """
54 """
55 Passed in IP addresses in HEADERS can be in a special format of multiple
55 Passed in IP addresses in HEADERS can be in a special format of multiple
56 ips. Those comma separated IPs are passed from various proxies in the
56 ips. Those comma separated IPs are passed from various proxies in the
57 chain of request processing. The left-most being the original client.
57 chain of request processing. The left-most being the original client.
58 We only care about the first IP which came from the org. client.
58 We only care about the first IP which came from the org. client.
59
59
60 :param ip: ip string from headers
60 :param ip: ip string from headers
61 """
61 """
62 if ',' in ip:
62 if ',' in ip:
63 _ips = ip.split(',')
63 _ips = ip.split(',')
64 _first_ip = _ips[0].strip()
64 _first_ip = _ips[0].strip()
65 log.debug('Got multiple IPs %s, using %s', ','.join(_ips), _first_ip)
65 log.debug('Got multiple IPs %s, using %s', ','.join(_ips), _first_ip)
66 return _first_ip
66 return _first_ip
67 return ip
67 return ip
68
68
69
69
70 def _filter_port(ip):
70 def _filter_port(ip):
71 """
71 """
72 Removes a port from ip, there are 4 main cases to handle here.
72 Removes a port from ip, there are 4 main cases to handle here.
73 - ipv4 eg. 127.0.0.1
73 - ipv4 eg. 127.0.0.1
74 - ipv6 eg. ::1
74 - ipv6 eg. ::1
75 - ipv4+port eg. 127.0.0.1:8080
75 - ipv4+port eg. 127.0.0.1:8080
76 - ipv6+port eg. [::1]:8080
76 - ipv6+port eg. [::1]:8080
77
77
78 :param ip:
78 :param ip:
79 """
79 """
80 def is_ipv6(ip_addr):
80 def is_ipv6(ip_addr):
81 if hasattr(socket, 'inet_pton'):
81 if hasattr(socket, 'inet_pton'):
82 try:
82 try:
83 socket.inet_pton(socket.AF_INET6, ip_addr)
83 socket.inet_pton(socket.AF_INET6, ip_addr)
84 except socket.error:
84 except socket.error:
85 return False
85 return False
86 else:
86 else:
87 # fallback to ipaddress
87 # fallback to ipaddress
88 try:
88 try:
89 ipaddress.IPv6Address(safe_unicode(ip_addr))
89 ipaddress.IPv6Address(safe_str(ip_addr))
90 except Exception:
90 except Exception:
91 return False
91 return False
92 return True
92 return True
93
93
94 if ':' not in ip: # must be ipv4 pure ip
94 if ':' not in ip: # must be ipv4 pure ip
95 return ip
95 return ip
96
96
97 if '[' in ip and ']' in ip: # ipv6 with port
97 if '[' in ip and ']' in ip: # ipv6 with port
98 return ip.split(']')[0][1:].lower()
98 return ip.split(']')[0][1:].lower()
99
99
100 # must be ipv6 or ipv4 with port
100 # must be ipv6 or ipv4 with port
101 if is_ipv6(ip):
101 if is_ipv6(ip):
102 return ip
102 return ip
103 else:
103 else:
104 ip, _port = ip.split(':')[:2] # means ipv4+port
104 ip, _port = ip.split(':')[:2] # means ipv4+port
105 return ip
105 return ip
106
106
107
107
def get_ip_addr(environ):
    """Return the client IP from the WSGI environ.

    Proxy headers (X-Real-IP, then X-Forwarded-For) win over REMOTE_ADDR;
    the value is stripped of proxy chains and port suffixes.
    """
    def _clean(raw):
        return _filter_port(_filter_proxy(raw))

    for header in ('HTTP_X_REAL_IP', 'HTTP_X_FORWARDED_FOR'):
        candidate = environ.get(header)
        if candidate:
            return _clean(candidate)

    return _clean(environ.get('REMOTE_ADDR', '0.0.0.0'))
124
124
125
125
def get_server_ip_addr(environ, log_errors=True):
    """Resolve SERVER_NAME to an IP; fall back to the raw hostname.

    :param environ: WSGI environ
    :param log_errors: when False, resolution failures are silent
    """
    hostname = environ.get('SERVER_NAME')
    try:
        return socket.gethostbyname(hostname)
    except Exception as e:
        # in some cases this lookup is not possible, and we don't want to
        # make it an exception in logs
        if log_errors:
            log.exception('Could not retrieve server ip address: %s', e)
        return hostname
136
136
137
137
def get_server_port(environ):
    """Return the WSGI ``SERVER_PORT`` value, or ``None`` when unset."""
    port = environ.get('SERVER_PORT')
    return port
140
140
141
141
def get_access_path(environ):
    """Return ``PATH_INFO``, preferring the original (pre-dispatch) request
    stored by pylons when present."""
    org_req = environ.get('pylons.original_request')
    if org_req:
        return org_req.environ.get('PATH_INFO')
    return environ.get('PATH_INFO')
148
148
149
149
def get_user_agent(environ):
    """Return the client's ``User-Agent`` header from the WSGI environ
    (``None`` when absent)."""
    return environ.get('HTTP_USER_AGENT')
152
152
153
153
def vcs_operation_context(
        environ, repo_name, username, action, scm, check_locking=True,
        is_shadow_repo=False, check_branch_perms=False, detect_force_push=False):
    """
    Generate the context for a vcs operation, e.g. push or pull.

    This context is passed over the layers so that hooks triggered by the
    vcs operation know details like the user, the user's IP address etc.

    :param environ: WSGI environ of the incoming request
    :param repo_name: name of the repository being operated on
    :param username: acting user's username
    :param action: vcs action name, e.g. push or pull
    :param scm: scm backend identifier
    :param check_locking: Allows switching off the computation of the locking
        data. This serves mainly the need of the simplevcs middleware to be
        able to disable this for certain operations.
    :param is_shadow_repo: whether the target is a pull-request shadow repo
    :param check_branch_perms: propagate branch-permission checking to hooks
    :param detect_force_push: propagate force-push detection to hooks
    :returns: dict of context data consumed by the vcs hooks
    """
    # Tri-state value: False: unlock, None: nothing, True: lock
    make_lock = None
    locked_by = [None, None, None]
    is_anonymous = username == User.DEFAULT_USER
    user = User.get_by_username(username)
    # NOTE(review): assumes `username` resolves to an existing user --
    # user.user_id below would raise otherwise; confirm with callers.
    if not is_anonymous and check_locking:
        log.debug('Checking locking on repository "%s"', repo_name)
        repo = Repository.get_by_repo_name(repo_name)
        # locked_by identifies the current lock holder, if any
        make_lock, __, locked_by = repo.get_locking_state(
            action, user.user_id)
    user_id = user.user_id
    settings_model = VcsSettingsModel(repo=repo_name)
    ui_settings = settings_model.get_ui_settings()

    # NOTE(marcink): This should be also in sync with
    # rhodecode/apps/ssh_support/lib/backends/base.py:update_environment scm_data
    # the '/' ui key holds the root path of the repository store
    store = [x for x in ui_settings if x.key == '/']
    repo_store = ''
    if store:
        repo_store = store[0].value

    scm_data = {
        'ip': get_ip_addr(environ),
        'username': username,
        'user_id': user_id,
        'action': action,
        'repository': repo_name,
        'scm': scm,
        'config': rhodecode.CONFIG['__file__'],
        'repo_store': repo_store,
        'make_lock': make_lock,
        'locked_by': locked_by,
        'server_url': utils2.get_server_url(environ),
        'user_agent': get_user_agent(environ),
        'hooks': get_enabled_hook_classes(ui_settings),
        'is_shadow_repo': is_shadow_repo,
        'detect_force_push': detect_force_push,
        'check_branch_perms': check_branch_perms,
    }
    return scm_data
208
208
209
209
class BasicAuth(AuthBasicAuthenticator):
    """
    HTTP Basic authenticator used for VCS (git/hg/svn over http) requests.

    Extends the paste ``AuthBasicAuthenticator`` with:

    * RhodeCode auth-plugin based credential checking via ``authfunc``
    * an optional alternative HTTP response code (``auth_http_code``) that is
      returned instead of 401, but ONLY after at least one real
      authentication attempt was executed (``initial_call`` tracking)
    """

    def __init__(self, realm, authfunc, registry, auth_http_code=None,
                 initial_call_detection=False, acl_repo_name=None, rc_realm=''):
        self.realm = realm
        self.rc_realm = rc_realm
        self.initial_call = initial_call_detection
        self.authfunc = authfunc
        self.registry = registry
        self.acl_repo_name = acl_repo_name
        self._rc_auth_http_code = auth_http_code

    def _get_response_from_code(self, http_code):
        # resolve a configured HTTP code into a webob exception class,
        # falling back to 403 when the code is unknown/invalid
        try:
            return get_exception(safe_int(http_code))
        except Exception:
            log.exception('Failed to fetch response for code %s', http_code)
            return HTTPForbidden

    def get_rc_realm(self):
        # realm as configured in rhodecode settings, always as str
        return safe_str(self.rc_realm)

    def build_authentication(self):
        head = WWW_AUTHENTICATE.tuples('Basic realm="%s"' % self.realm)
        if self._rc_auth_http_code and not self.initial_call:
            # return alternative HTTP code if alternative http return code
            # is specified in RhodeCode config, but ONLY if it's not the
            # FIRST call
            custom_response_klass = self._get_response_from_code(
                self._rc_auth_http_code)
            return custom_response_klass(headers=head)
        return HTTPUnauthorized(headers=head)

    def authenticate(self, environ):
        # local imports: not guaranteed to be present in the module-level
        # import block of this file
        import base64
        import binascii

        authorization = AUTHORIZATION(environ)
        if not authorization:
            return self.build_authentication()
        try:
            authmeth, auth = authorization.split(' ', 1)
        except ValueError:
            # malformed header without a scheme/credentials separator
            return self.build_authentication()
        if 'basic' != authmeth.lower():
            return self.build_authentication()
        try:
            # NOTE: ``str.decode('base64')`` is a py2-only codec; use the
            # base64 module so this keeps working after the unicode changes
            auth = safe_str(base64.b64decode(auth.strip()))
        except (TypeError, binascii.Error):
            # invalid base64 payload in the credentials part
            return self.build_authentication()
        _parts = auth.split(':', 1)
        if len(_parts) == 2:
            username, password = _parts
            auth_data = self.authfunc(
                username, password, environ, VCS_TYPE,
                registry=self.registry, acl_repo_name=self.acl_repo_name)
            if auth_data:
                return {'username': username, 'auth_data': auth_data}
            if username and password:
                # we mark that we actually executed authentication once, at
                # that point we can use the alternative auth code
                self.initial_call = False

        return self.build_authentication()

    __call__ = authenticate
267
267
268
268
def calculate_version_hash(config):
    """
    Return a short (8 char) hash bound to both the session secret and the
    running RhodeCode version: unique per install+version, without leaking
    the version itself.
    """
    secret = config.get('beaker.session.secret', '')
    return sha1(secret + rhodecode.__version__)[:8]
273
273
274
274
def get_current_lang(request):
    """
    Return the active locale for *request*.

    First tries the legacy pylons-style ``translation.get_lang()``; if that
    fails for any reason, falls back to the pyramid request attributes
    (``_LOCALE_`` override, then ``locale_name``).

    :param request: current request object
    :returns: locale name string
    """
    # NOTE(marcink): remove after pyramid move
    try:
        return translation.get_lang()[0]
    except Exception:
        # narrowed from a bare ``except:`` so SystemExit/KeyboardInterrupt
        # are not swallowed; any translation failure still falls through
        pass

    return getattr(request, '_LOCALE_', request.locale_name)
283
283
284
284
def attach_context_attributes(context, request, user_id=None, is_api=None):
    """
    Attach variables into template context called `c`.

    Populates *context* with DB-stored visual settings, INI settings,
    per-user session attributes, the JS ``template_context`` dict, CSRF
    token, notification counts and the current auth user, then attaches the
    whole context to *request* via ``set_call_context``.

    :param context: the template context object (``c``) to populate
    :param request: current pyramid request
    :param user_id: when given, unread notifications and bookmarks are
        loaded for this user
    :param is_api: API-call flag; when truthy, pyramid session access and
        CSRF token generation are skipped
    """
    config = request.registry.settings

    rc_config = SettingsModel().get_all_settings(cache=True, from_request=False)
    context.rc_config = rc_config
    context.rhodecode_version = rhodecode.__version__
    context.rhodecode_edition = config.get('rhodecode.edition')
    context.rhodecode_edition_id = config.get('rhodecode.edition_id')
    # unique secret + version does not leak the version but keep consistency
    context.rhodecode_version_hash = calculate_version_hash(config)

    # Default language set for the incoming request
    context.language = get_current_lang(request)

    # Visual options
    context.visual = AttributeDict({})

    # DB stored Visual Items
    context.visual.show_public_icon = str2bool(
        rc_config.get('rhodecode_show_public_icon'))
    context.visual.show_private_icon = str2bool(
        rc_config.get('rhodecode_show_private_icon'))
    context.visual.stylify_metatags = str2bool(
        rc_config.get('rhodecode_stylify_metatags'))
    context.visual.dashboard_items = safe_int(
        rc_config.get('rhodecode_dashboard_items', 100))
    context.visual.admin_grid_items = safe_int(
        rc_config.get('rhodecode_admin_grid_items', 100))
    context.visual.show_revision_number = str2bool(
        rc_config.get('rhodecode_show_revision_number', True))
    context.visual.show_sha_length = safe_int(
        rc_config.get('rhodecode_show_sha_length', 100))
    context.visual.repository_fields = str2bool(
        rc_config.get('rhodecode_repository_fields'))
    context.visual.show_version = str2bool(
        rc_config.get('rhodecode_show_version'))
    context.visual.use_gravatar = str2bool(
        rc_config.get('rhodecode_use_gravatar'))
    context.visual.gravatar_url = rc_config.get('rhodecode_gravatar_url')
    context.visual.default_renderer = rc_config.get(
        'rhodecode_markup_renderer', 'rst')
    context.visual.comment_types = ChangesetComment.COMMENT_TYPES
    context.visual.rhodecode_support_url = \
        rc_config.get('rhodecode_support_url') or h.route_url('rhodecode_support')

    # hard-coded cap on number of files shown per changeset
    context.visual.affected_files_cut_off = 60

    context.pre_code = rc_config.get('rhodecode_pre_code')
    context.post_code = rc_config.get('rhodecode_post_code')
    context.rhodecode_name = rc_config.get('rhodecode_title')
    context.default_encodings = aslist(config.get('default_encoding'), sep=',')
    # if we have specified default_encoding in the request, it has more
    # priority
    if request.GET.get('default_encoding'):
        context.default_encodings.insert(0, request.GET.get('default_encoding'))
    context.clone_uri_tmpl = rc_config.get('rhodecode_clone_uri_tmpl')
    context.clone_uri_id_tmpl = rc_config.get('rhodecode_clone_uri_id_tmpl')
    context.clone_uri_ssh_tmpl = rc_config.get('rhodecode_clone_uri_ssh_tmpl')

    # INI stored
    context.labs_active = str2bool(
        config.get('labs_settings_active', 'false'))
    context.ssh_enabled = str2bool(
        config.get('ssh.generate_authorized_keyfile', 'false'))
    context.ssh_key_generator_enabled = str2bool(
        config.get('ssh.enable_ui_key_generator', 'true'))

    context.visual.allow_repo_location_change = str2bool(
        config.get('allow_repo_location_change', True))
    context.visual.allow_custom_hooks_settings = str2bool(
        config.get('allow_custom_hooks_settings', True))
    context.debug_style = str2bool(config.get('debug_style', False))

    context.rhodecode_instanceid = config.get('instance_id')

    context.visual.cut_off_limit_diff = safe_int(
        config.get('cut_off_limit_diff'))
    context.visual.cut_off_limit_file = safe_int(
        config.get('cut_off_limit_file'))

    context.license = AttributeDict({})
    context.license.hide_license_info = str2bool(
        config.get('license.hide_license_info', False))

    # AppEnlight
    context.appenlight_enabled = config.get('appenlight', False)
    context.appenlight_api_public_key = config.get(
        'appenlight.api_public_key', '')
    context.appenlight_server_url = config.get('appenlight.server_url', '')

    # whitelist the diffmode GET param; anything else maps to None
    diffmode = {
        "unified": "unified",
        "sideside": "sideside"
    }.get(request.GET.get('diffmode'))

    # NOTE(review): this looks inverted — one would expect the rpc_user
    # auto-detection to run when is_api is None (i.e. not explicitly set by
    # the caller), not when it IS set. Confirm against callers before
    # changing; current behavior: explicit is_api values are overridden,
    # and the default None stays None (falsy).
    if is_api is not None:
        is_api = hasattr(request, 'rpc_user')
    session_attrs = {
        # defaults
        "clone_url_format": "http",
        "diffmode": "sideside",
        "license_fingerprint": request.session.get('license_fingerprint')
    }

    if not is_api:
        # don't access pyramid session for API calls
        if diffmode and diffmode != request.session.get('rc_user_session_attr.diffmode'):
            request.session['rc_user_session_attr.diffmode'] = diffmode

        # session settings per user

        for k, v in request.session.items():
            pref = 'rc_user_session_attr.'
            if k and k.startswith(pref):
                # strip the prefix; session keys override the defaults above
                k = k[len(pref):]
                session_attrs[k] = v

    context.user_session_attrs = session_attrs

    # JS template context
    context.template_context = {
        'repo_name': None,
        'repo_type': None,
        'repo_landing_commit': None,
        'rhodecode_user': {
            'username': None,
            'email': None,
            'notification_status': False
        },
        'session_attrs': session_attrs,
        'visual': {
            'default_renderer': None
        },
        'commit_data': {
            'commit_id': None
        },
        'pull_request_data': {'pull_request_id': None},
        'timeago': {
            'refresh_time': 120 * 1000,
            'cutoff_limit': 1000 * 60 * 60 * 24 * 7
        },
        'pyramid_dispatch': {

        },
        'extra': {'plugins': {}}
    }
    # END CONFIG VARS
    if is_api:
        # API calls have no usable session, so no CSRF token
        csrf_token = None
    else:
        csrf_token = auth.get_csrf_token(session=request.session)

    context.csrf_token = csrf_token
    context.backends = rhodecode.BACKENDS.keys()

    unread_count = 0
    user_bookmark_list = []
    if user_id:
        unread_count = NotificationModel().get_unread_cnt_for_user(user_id)
        user_bookmark_list = UserBookmark.get_bookmarks_for_user(user_id)
    context.unread_notifications = unread_count
    context.bookmark_items = user_bookmark_list

    # web case
    if hasattr(request, 'user'):
        context.auth_user = request.user
        context.rhodecode_user = request.user

    # api case
    if hasattr(request, 'rpc_user'):
        context.auth_user = request.rpc_user
        context.rhodecode_user = request.rpc_user

    # attach the whole call context to the request
    request.set_call_context(context)
463
463
464
464
def get_auth_user(request):
    """
    Resolve the authenticated ``AuthUser`` for *request*.

    Authentication sources, in priority order:

    1. an auth token from GET params (``auth_token``, legacy ``api_key``),
       the ``X-Rc-Auth-Token`` header, or the route matchdict
    2. the ``rhodecode_user`` cookie data stored in the session

    :returns: tuple of ``(auth_user, auth_token)``; ``auth_token`` is an
        empty string when no token-based auth was used
    """
    environ = request.environ
    session = request.session

    ip_addr = get_ip_addr(environ)

    # make sure that we update permissions each time we call controller
    _auth_token = (
        # ?auth_token=XXX
        request.GET.get('auth_token', '')
        # ?api_key=XXX !LEGACY
        or request.GET.get('api_key', '')
        # or headers....
        or request.headers.get('X-Rc-Auth-Token', '')
    )
    if not _auth_token and request.matchdict:
        # last resort: token embedded in the route itself
        url_auth_token = request.matchdict.get('_auth_token')
        _auth_token = url_auth_token
        if _auth_token:
            # only the last 4 chars are logged, to avoid leaking the token
            log.debug('Using URL extracted auth token `...%s`', _auth_token[-4:])

    if _auth_token:
        # when using API_KEY we assume user exists, and
        # doesn't need auth based on cookies.
        auth_user = AuthUser(api_key=_auth_token, ip_addr=ip_addr)
        authenticated = False
    else:
        cookie_store = CookieStoreWrapper(session.get('rhodecode_user'))
        try:
            auth_user = AuthUser(user_id=cookie_store.get('user_id', None),
                                 ip_addr=ip_addr)
        except UserCreationError as e:
            h.flash(e, 'error')
            # container auth or other auth functions that create users
            # on the fly can throw this exception signaling that there's
            # issue with user creation, explanation should be provided
            # in Exception itself. We then create a simple blank
            # AuthUser
            auth_user = AuthUser(ip_addr=ip_addr)

        # in case someone changes a password for user it triggers session
        # flush and forces a re-login
        if password_changed(auth_user, session):
            session.invalidate()
            # re-read the (now empty) cookie store AFTER invalidation, so
            # the authenticated flag below reflects the flushed session
            cookie_store = CookieStoreWrapper(session.get('rhodecode_user'))
            auth_user = AuthUser(ip_addr=ip_addr)

        authenticated = cookie_store.get('is_authenticated')

    if not auth_user.is_authenticated and auth_user.is_user_object:
        # user is not authenticated and not empty
        auth_user.set_authenticated(authenticated)

    return auth_user, _auth_token
519
519
520
520
def h_filter(s):
    """
    Custom default filter for Mako templates.

    Mako normally uses ``markupsafe.escape``; this wrapper additionally
    renders ``None`` as an empty markup string instead of the literal
    text ``None``.
    """
    return markupsafe.Markup() if s is None else markupsafe.escape(s)
530
530
531
531
def add_events_routes(config):
    """
    Adds routing that can be used in events. Because some events are triggered
    outside of pyramid context, we need to bootstrap request with some
    routing registered
    """

    from rhodecode.apps._base import ADMIN_PREFIX

    # (route name, url pattern) pairs, registered in declaration order
    routes = [
        ('home', '/'),
        ('main_page_repos_data', '/_home_repos'),
        ('main_page_repo_groups_data', '/_home_repo_groups'),

        ('login', ADMIN_PREFIX + '/login'),
        ('logout', ADMIN_PREFIX + '/logout'),
        ('repo_summary', '/{repo_name}'),
        ('repo_summary_explicit', '/{repo_name}/summary'),
        ('repo_group_home', '/{repo_group_name}'),

        ('pullrequest_show', '/{repo_name}/pull-request/{pull_request_id}'),
        ('pull_requests_global', '/pull-request/{pull_request_id}'),

        ('repo_commit', '/{repo_name}/changeset/{commit_id}'),
        ('repo_files', '/{repo_name}/files/{commit_id}/{f_path}'),

        ('hovercard_user', '/_hovercard/user/{user_id}'),
        ('hovercard_user_group', '/_hovercard/user_group/{user_group_id}'),
        ('hovercard_pull_request', '/_hovercard/pull_request/{pull_request_id}'),
        ('hovercard_repo_commit', '/_hovercard/commit/{repo_name}/{commit_id}'),
    ]
    for route_name, route_pattern in routes:
        config.add_route(name=route_name, pattern=route_pattern)
572
572
573
573
def bootstrap_config(request, registry_name='RcTestRegistry'):
    """
    Create a minimal pyramid Configurator bound to *request*, suitable for
    testing and other non-web usage.
    """
    import pyramid.testing

    test_registry = pyramid.testing.Registry(registry_name)
    config = pyramid.testing.setUp(registry=test_registry, request=request)

    # allow pyramid lookup in testing: template + beaker/cache machinery
    for module_name in ('pyramid_mako',
                        'rhodecode.lib.rc_beaker',
                        'rhodecode.lib.rc_cache'):
        config.include(module_name)

    add_events_routes(config)

    return config
588
588
589
589
def bootstrap_request(**kwargs):
    """
    Returns a thin version of Request Object that is used in non-web context like testing/celery
    """

    import pyramid.testing
    from rhodecode.lib.request import ThinRequest as _ThinRequest

    # class-level defaults a real WSGI environ would normally provide;
    # popped from kwargs so the remainder can go to the request constructor
    app_url = kwargs.pop('application_url', 'http://example.com')
    req_host = kwargs.pop('host', 'example.com:80')
    req_domain = kwargs.pop('domain', 'example.com')

    class ThinRequest(_ThinRequest):
        application_url = app_url
        host = req_host
        domain = req_domain

    class ThinSession(pyramid.testing.DummySession):
        # persisting a dummy session is a no-op
        def save(*arg, **kw):
            pass

    request = ThinRequest(**kwargs)
    request.session = ThinSession()
    return request
611
611
@@ -1,2155 +1,2155 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Helper functions
22 Helper functions
23
23
24 Consists of functions to typically be used within templates, but also
24 Consists of functions to typically be used within templates, but also
25 available to Controllers. This module is available to both as 'h'.
25 available to Controllers. This module is available to both as 'h'.
26 """
26 """
27 import base64
27 import base64
28 import collections
28 import collections
29
29
30 import os
30 import os
31 import random
31 import random
32 import hashlib
32 import hashlib
33 from io import StringIO
33 from io import StringIO
34 import textwrap
34 import textwrap
35 import urllib.request, urllib.parse, urllib.error
35 import urllib.request, urllib.parse, urllib.error
36 import math
36 import math
37 import logging
37 import logging
38 import re
38 import re
39 import time
39 import time
40 import string
40 import string
41 import hashlib
41 import hashlib
42 import regex
42 import regex
43 from collections import OrderedDict
43 from collections import OrderedDict
44
44
45 import pygments
45 import pygments
46 import itertools
46 import itertools
47 import fnmatch
47 import fnmatch
48 import bleach
48 import bleach
49
49
50 from datetime import datetime
50 from datetime import datetime
51 from functools import partial
51 from functools import partial
52 from pygments.formatters.html import HtmlFormatter
52 from pygments.formatters.html import HtmlFormatter
53 from pygments.lexers import (
53 from pygments.lexers import (
54 get_lexer_by_name, get_lexer_for_filename, get_lexer_for_mimetype)
54 get_lexer_by_name, get_lexer_for_filename, get_lexer_for_mimetype)
55
55
56 from pyramid.threadlocal import get_current_request
56 from pyramid.threadlocal import get_current_request
57 from tempita import looper
57 from tempita import looper
58 from webhelpers2.html import literal, HTML, escape
58 from webhelpers2.html import literal, HTML, escape
59 from webhelpers2.html._autolink import _auto_link_urls
59 from webhelpers2.html._autolink import _auto_link_urls
60 from webhelpers2.html.tools import (
60 from webhelpers2.html.tools import (
61 button_to, highlight, js_obfuscate, strip_links, strip_tags)
61 button_to, highlight, js_obfuscate, strip_links, strip_tags)
62
62
63 from webhelpers2.text import (
63 from webhelpers2.text import (
64 chop_at, collapse, convert_accented_entities,
64 chop_at, collapse, convert_accented_entities,
65 convert_misc_entities, lchop, plural, rchop, remove_formatting,
65 convert_misc_entities, lchop, plural, rchop, remove_formatting,
66 replace_whitespace, urlify, truncate, wrap_paragraphs)
66 replace_whitespace, urlify, truncate, wrap_paragraphs)
67 from webhelpers2.date import time_ago_in_words
67 from webhelpers2.date import time_ago_in_words
68
68
69 from webhelpers2.html.tags import (
69 from webhelpers2.html.tags import (
70 _input, NotGiven, _make_safe_id_component as safeid,
70 _input, NotGiven, _make_safe_id_component as safeid,
71 form as insecure_form,
71 form as insecure_form,
72 auto_discovery_link, checkbox, end_form, file,
72 auto_discovery_link, checkbox, end_form, file,
73 hidden, image, javascript_link, link_to, link_to_if, link_to_unless, ol,
73 hidden, image, javascript_link, link_to, link_to_if, link_to_unless, ol,
74 select as raw_select, stylesheet_link, submit, text, password, textarea,
74 select as raw_select, stylesheet_link, submit, text, password, textarea,
75 ul, radio, Options)
75 ul, radio, Options)
76
76
77 from webhelpers2.number import format_byte_size
77 from webhelpers2.number import format_byte_size
78
78
79 from rhodecode.lib.action_parser import action_parser
79 from rhodecode.lib.action_parser import action_parser
80 from rhodecode.lib.pagination import Page, RepoPage, SqlPage
80 from rhodecode.lib.pagination import Page, RepoPage, SqlPage
81 from rhodecode.lib.ext_json import json
81 from rhodecode.lib.ext_json import json
82 from rhodecode.lib.utils import repo_name_slug, get_custom_lexer
82 from rhodecode.lib.utils import repo_name_slug, get_custom_lexer
83 from rhodecode.lib.utils2 import (
83 from rhodecode.lib.utils2 import (
84 str2bool, safe_unicode, safe_str,
84 str2bool, safe_unicode, safe_str,
85 get_commit_safe, datetime_to_time, time_to_datetime, time_to_utcdatetime,
85 get_commit_safe, datetime_to_time, time_to_datetime, time_to_utcdatetime,
86 AttributeDict, safe_int, md5, md5_safe, get_host_info)
86 AttributeDict, safe_int, md5, md5_safe, get_host_info)
87 from rhodecode.lib.markup_renderer import MarkupRenderer, relative_links
87 from rhodecode.lib.markup_renderer import MarkupRenderer, relative_links
88 from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError
88 from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError
89 from rhodecode.lib.vcs.backends.base import BaseChangeset, EmptyCommit
89 from rhodecode.lib.vcs.backends.base import BaseChangeset, EmptyCommit
90 from rhodecode.lib.vcs.conf.settings import ARCHIVE_SPECS
90 from rhodecode.lib.vcs.conf.settings import ARCHIVE_SPECS
91 from rhodecode.lib.index.search_utils import get_matching_line_offsets
91 from rhodecode.lib.index.search_utils import get_matching_line_offsets
92 from rhodecode.config.conf import DATE_FORMAT, DATETIME_FORMAT
92 from rhodecode.config.conf import DATE_FORMAT, DATETIME_FORMAT
93 from rhodecode.model.changeset_status import ChangesetStatusModel
93 from rhodecode.model.changeset_status import ChangesetStatusModel
94 from rhodecode.model.db import Permission, User, Repository, UserApiKeys, FileStore
94 from rhodecode.model.db import Permission, User, Repository, UserApiKeys, FileStore
95 from rhodecode.model.repo_group import RepoGroupModel
95 from rhodecode.model.repo_group import RepoGroupModel
96 from rhodecode.model.settings import IssueTrackerSettingsModel
96 from rhodecode.model.settings import IssueTrackerSettingsModel
97
97
98
98
99 log = logging.getLogger(__name__)
99 log = logging.getLogger(__name__)
100
100
101
101
102 DEFAULT_USER = User.DEFAULT_USER
102 DEFAULT_USER = User.DEFAULT_USER
103 DEFAULT_USER_EMAIL = User.DEFAULT_USER_EMAIL
103 DEFAULT_USER_EMAIL = User.DEFAULT_USER_EMAIL
104
104
105
105
106 def asset(path, ver=None, **kwargs):
106 def asset(path, ver=None, **kwargs):
107 """
107 """
108 Helper to generate a static asset file path for rhodecode assets
108 Helper to generate a static asset file path for rhodecode assets
109
109
110 eg. h.asset('images/image.png', ver='3923')
110 eg. h.asset('images/image.png', ver='3923')
111
111
112 :param path: path of asset
112 :param path: path of asset
113 :param ver: optional version query param to append as ?ver=
113 :param ver: optional version query param to append as ?ver=
114 """
114 """
115 request = get_current_request()
115 request = get_current_request()
116 query = {}
116 query = {}
117 query.update(kwargs)
117 query.update(kwargs)
118 if ver:
118 if ver:
119 query = {'ver': ver}
119 query = {'ver': ver}
120 return request.static_path(
120 return request.static_path(
121 'rhodecode:public/{}'.format(path), _query=query)
121 'rhodecode:public/{}'.format(path), _query=query)
122
122
123
123
124 default_html_escape_table = {
124 default_html_escape_table = {
125 ord('&'): u'&amp;',
125 ord('&'): u'&amp;',
126 ord('<'): u'&lt;',
126 ord('<'): u'&lt;',
127 ord('>'): u'&gt;',
127 ord('>'): u'&gt;',
128 ord('"'): u'&quot;',
128 ord('"'): u'&quot;',
129 ord("'"): u'&#39;',
129 ord("'"): u'&#39;',
130 }
130 }
131
131
132
132
133 def html_escape(text, html_escape_table=default_html_escape_table):
133 def html_escape(text, html_escape_table=default_html_escape_table):
134 """Produce entities within text."""
134 """Produce entities within text."""
135 return text.translate(html_escape_table)
135 return text.translate(html_escape_table)
136
136
137
137
138 def chop_at_smart(s, sub, inclusive=False, suffix_if_chopped=None):
138 def chop_at_smart(s, sub, inclusive=False, suffix_if_chopped=None):
139 """
139 """
140 Truncate string ``s`` at the first occurrence of ``sub``.
140 Truncate string ``s`` at the first occurrence of ``sub``.
141
141
142 If ``inclusive`` is true, truncate just after ``sub`` rather than at it.
142 If ``inclusive`` is true, truncate just after ``sub`` rather than at it.
143 """
143 """
144 suffix_if_chopped = suffix_if_chopped or ''
144 suffix_if_chopped = suffix_if_chopped or ''
145 pos = s.find(sub)
145 pos = s.find(sub)
146 if pos == -1:
146 if pos == -1:
147 return s
147 return s
148
148
149 if inclusive:
149 if inclusive:
150 pos += len(sub)
150 pos += len(sub)
151
151
152 chopped = s[:pos]
152 chopped = s[:pos]
153 left = s[pos:].strip()
153 left = s[pos:].strip()
154
154
155 if left and suffix_if_chopped:
155 if left and suffix_if_chopped:
156 chopped += suffix_if_chopped
156 chopped += suffix_if_chopped
157
157
158 return chopped
158 return chopped
159
159
160
160
161 def shorter(text, size=20, prefix=False):
161 def shorter(text, size=20, prefix=False):
162 postfix = '...'
162 postfix = '...'
163 if len(text) > size:
163 if len(text) > size:
164 if prefix:
164 if prefix:
165 # shorten in front
165 # shorten in front
166 return postfix + text[-(size - len(postfix)):]
166 return postfix + text[-(size - len(postfix)):]
167 else:
167 else:
168 return text[:size - len(postfix)] + postfix
168 return text[:size - len(postfix)] + postfix
169 return text
169 return text
170
170
171
171
172 def reset(name, value=None, id=NotGiven, type="reset", **attrs):
172 def reset(name, value=None, id=NotGiven, type="reset", **attrs):
173 """
173 """
174 Reset button
174 Reset button
175 """
175 """
176 return _input(type, name, value, id, attrs)
176 return _input(type, name, value, id, attrs)
177
177
178
178
179 def select(name, selected_values, options, id=NotGiven, **attrs):
179 def select(name, selected_values, options, id=NotGiven, **attrs):
180
180
181 if isinstance(options, (list, tuple)):
181 if isinstance(options, (list, tuple)):
182 options_iter = options
182 options_iter = options
183 # Handle old value,label lists ... where value also can be value,label lists
183 # Handle old value,label lists ... where value also can be value,label lists
184 options = Options()
184 options = Options()
185 for opt in options_iter:
185 for opt in options_iter:
186 if isinstance(opt, tuple) and len(opt) == 2:
186 if isinstance(opt, tuple) and len(opt) == 2:
187 value, label = opt
187 value, label = opt
188 elif isinstance(opt, str):
188 elif isinstance(opt, str):
189 value = label = opt
189 value = label = opt
190 else:
190 else:
191 raise ValueError('invalid select option type %r' % type(opt))
191 raise ValueError('invalid select option type %r' % type(opt))
192
192
193 if isinstance(value, (list, tuple)):
193 if isinstance(value, (list, tuple)):
194 option_group = options.add_optgroup(label)
194 option_group = options.add_optgroup(label)
195 for opt2 in value:
195 for opt2 in value:
196 if isinstance(opt2, tuple) and len(opt2) == 2:
196 if isinstance(opt2, tuple) and len(opt2) == 2:
197 group_value, group_label = opt2
197 group_value, group_label = opt2
198 elif isinstance(opt2, str):
198 elif isinstance(opt2, str):
199 group_value = group_label = opt2
199 group_value = group_label = opt2
200 else:
200 else:
201 raise ValueError('invalid select option type %r' % type(opt2))
201 raise ValueError('invalid select option type %r' % type(opt2))
202
202
203 option_group.add_option(group_label, group_value)
203 option_group.add_option(group_label, group_value)
204 else:
204 else:
205 options.add_option(label, value)
205 options.add_option(label, value)
206
206
207 return raw_select(name, selected_values, options, id=id, **attrs)
207 return raw_select(name, selected_values, options, id=id, **attrs)
208
208
209
209
210 def branding(name, length=40):
210 def branding(name, length=40):
211 return truncate(name, length, indicator="")
211 return truncate(name, length, indicator="")
212
212
213
213
214 def FID(raw_id, path):
214 def FID(raw_id, path):
215 """
215 """
216 Creates a unique ID for filenode based on it's hash of path and commit
216 Creates a unique ID for filenode based on it's hash of path and commit
217 it's safe to use in urls
217 it's safe to use in urls
218
218
219 :param raw_id:
219 :param raw_id:
220 :param path:
220 :param path:
221 """
221 """
222
222
223 return 'c-%s-%s' % (short_id(raw_id), md5_safe(path)[:12])
223 return 'c-%s-%s' % (short_id(raw_id), md5_safe(path)[:12])
224
224
225
225
226 class _GetError(object):
226 class _GetError(object):
227 """Get error from form_errors, and represent it as span wrapped error
227 """Get error from form_errors, and represent it as span wrapped error
228 message
228 message
229
229
230 :param field_name: field to fetch errors for
230 :param field_name: field to fetch errors for
231 :param form_errors: form errors dict
231 :param form_errors: form errors dict
232 """
232 """
233
233
234 def __call__(self, field_name, form_errors):
234 def __call__(self, field_name, form_errors):
235 tmpl = """<span class="error_msg">%s</span>"""
235 tmpl = """<span class="error_msg">%s</span>"""
236 if form_errors and field_name in form_errors:
236 if form_errors and field_name in form_errors:
237 return literal(tmpl % form_errors.get(field_name))
237 return literal(tmpl % form_errors.get(field_name))
238
238
239
239
240 get_error = _GetError()
240 get_error = _GetError()
241
241
242
242
243 class _ToolTip(object):
243 class _ToolTip(object):
244
244
245 def __call__(self, tooltip_title, trim_at=50):
245 def __call__(self, tooltip_title, trim_at=50):
246 """
246 """
247 Special function just to wrap our text into nice formatted
247 Special function just to wrap our text into nice formatted
248 autowrapped text
248 autowrapped text
249
249
250 :param tooltip_title:
250 :param tooltip_title:
251 """
251 """
252 tooltip_title = escape(tooltip_title)
252 tooltip_title = escape(tooltip_title)
253 tooltip_title = tooltip_title.replace('<', '&lt;').replace('>', '&gt;')
253 tooltip_title = tooltip_title.replace('<', '&lt;').replace('>', '&gt;')
254 return tooltip_title
254 return tooltip_title
255
255
256
256
257 tooltip = _ToolTip()
257 tooltip = _ToolTip()
258
258
259 files_icon = u'<i class="file-breadcrumb-copy tooltip icon-clipboard clipboard-action" data-clipboard-text="{}" title="Copy file path"></i>'
259 files_icon = u'<i class="file-breadcrumb-copy tooltip icon-clipboard clipboard-action" data-clipboard-text="{}" title="Copy file path"></i>'
260
260
261
261
262 def files_breadcrumbs(repo_name, repo_type, commit_id, file_path, landing_ref_name=None, at_ref=None,
262 def files_breadcrumbs(repo_name, repo_type, commit_id, file_path, landing_ref_name=None, at_ref=None,
263 limit_items=False, linkify_last_item=False, hide_last_item=False,
263 limit_items=False, linkify_last_item=False, hide_last_item=False,
264 copy_path_icon=True):
264 copy_path_icon=True):
265 if isinstance(file_path, str):
265 if isinstance(file_path, str):
266 file_path = safe_unicode(file_path)
266 file_path = safe_unicode(file_path)
267
267
268 if at_ref:
268 if at_ref:
269 route_qry = {'at': at_ref}
269 route_qry = {'at': at_ref}
270 default_landing_ref = at_ref or landing_ref_name or commit_id
270 default_landing_ref = at_ref or landing_ref_name or commit_id
271 else:
271 else:
272 route_qry = None
272 route_qry = None
273 default_landing_ref = commit_id
273 default_landing_ref = commit_id
274
274
275 # first segment is a `HOME` link to repo files root location
275 # first segment is a `HOME` link to repo files root location
276 root_name = literal(u'<i class="icon-home"></i>')
276 root_name = literal(u'<i class="icon-home"></i>')
277
277
278 url_segments = [
278 url_segments = [
279 link_to(
279 link_to(
280 root_name,
280 root_name,
281 repo_files_by_ref_url(
281 repo_files_by_ref_url(
282 repo_name,
282 repo_name,
283 repo_type,
283 repo_type,
284 f_path=None, # None here is a special case for SVN repos,
284 f_path=None, # None here is a special case for SVN repos,
285 # that won't prefix with a ref
285 # that won't prefix with a ref
286 ref_name=default_landing_ref,
286 ref_name=default_landing_ref,
287 commit_id=commit_id,
287 commit_id=commit_id,
288 query=route_qry
288 query=route_qry
289 )
289 )
290 )]
290 )]
291
291
292 path_segments = file_path.split('/')
292 path_segments = file_path.split('/')
293 last_cnt = len(path_segments) - 1
293 last_cnt = len(path_segments) - 1
294 for cnt, segment in enumerate(path_segments):
294 for cnt, segment in enumerate(path_segments):
295 if not segment:
295 if not segment:
296 continue
296 continue
297 segment_html = escape(segment)
297 segment_html = escape(segment)
298
298
299 last_item = cnt == last_cnt
299 last_item = cnt == last_cnt
300
300
301 if last_item and hide_last_item:
301 if last_item and hide_last_item:
302 # iterate over and hide last element
302 # iterate over and hide last element
303 continue
303 continue
304
304
305 if last_item and linkify_last_item is False:
305 if last_item and linkify_last_item is False:
306 # plain version
306 # plain version
307 url_segments.append(segment_html)
307 url_segments.append(segment_html)
308 else:
308 else:
309 url_segments.append(
309 url_segments.append(
310 link_to(
310 link_to(
311 segment_html,
311 segment_html,
312 repo_files_by_ref_url(
312 repo_files_by_ref_url(
313 repo_name,
313 repo_name,
314 repo_type,
314 repo_type,
315 f_path='/'.join(path_segments[:cnt + 1]),
315 f_path='/'.join(path_segments[:cnt + 1]),
316 ref_name=default_landing_ref,
316 ref_name=default_landing_ref,
317 commit_id=commit_id,
317 commit_id=commit_id,
318 query=route_qry
318 query=route_qry
319 ),
319 ),
320 ))
320 ))
321
321
322 limited_url_segments = url_segments[:1] + ['...'] + url_segments[-5:]
322 limited_url_segments = url_segments[:1] + ['...'] + url_segments[-5:]
323 if limit_items and len(limited_url_segments) < len(url_segments):
323 if limit_items and len(limited_url_segments) < len(url_segments):
324 url_segments = limited_url_segments
324 url_segments = limited_url_segments
325
325
326 full_path = file_path
326 full_path = file_path
327 if copy_path_icon:
327 if copy_path_icon:
328 icon = files_icon.format(escape(full_path))
328 icon = files_icon.format(escape(full_path))
329 else:
329 else:
330 icon = ''
330 icon = ''
331
331
332 if file_path == '':
332 if file_path == '':
333 return root_name
333 return root_name
334 else:
334 else:
335 return literal(' / '.join(url_segments) + icon)
335 return literal(' / '.join(url_segments) + icon)
336
336
337
337
338 def files_url_data(request):
338 def files_url_data(request):
339 import urllib.request, urllib.parse, urllib.error
339 import urllib.request, urllib.parse, urllib.error
340 matchdict = request.matchdict
340 matchdict = request.matchdict
341
341
342 if 'f_path' not in matchdict:
342 if 'f_path' not in matchdict:
343 matchdict['f_path'] = ''
343 matchdict['f_path'] = ''
344 else:
344 else:
345 matchdict['f_path'] = urllib.parse.quote(safe_str(matchdict['f_path']))
345 matchdict['f_path'] = urllib.parse.quote(safe_str(matchdict['f_path']))
346 if 'commit_id' not in matchdict:
346 if 'commit_id' not in matchdict:
347 matchdict['commit_id'] = 'tip'
347 matchdict['commit_id'] = 'tip'
348
348
349 return json.dumps(matchdict)
349 return json.dumps(matchdict)
350
350
351
351
352 def repo_files_by_ref_url(db_repo_name, db_repo_type, f_path, ref_name, commit_id, query=None, ):
352 def repo_files_by_ref_url(db_repo_name, db_repo_type, f_path, ref_name, commit_id, query=None, ):
353 _is_svn = is_svn(db_repo_type)
353 _is_svn = is_svn(db_repo_type)
354 final_f_path = f_path
354 final_f_path = f_path
355
355
356 if _is_svn:
356 if _is_svn:
357 """
357 """
358 For SVN the ref_name cannot be used as a commit_id, it needs to be prefixed with
358 For SVN the ref_name cannot be used as a commit_id, it needs to be prefixed with
359 actually commit_id followed by the ref_name. This should be done only in case
359 actually commit_id followed by the ref_name. This should be done only in case
360 This is a initial landing url, without additional paths.
360 This is a initial landing url, without additional paths.
361
361
362 like: /1000/tags/1.0.0/?at=tags/1.0.0
362 like: /1000/tags/1.0.0/?at=tags/1.0.0
363 """
363 """
364
364
365 if ref_name and ref_name != 'tip':
365 if ref_name and ref_name != 'tip':
366 # NOTE(marcink): for svn the ref_name is actually the stored path, so we prefix it
366 # NOTE(marcink): for svn the ref_name is actually the stored path, so we prefix it
367 # for SVN we only do this magic prefix if it's root, .eg landing revision
367 # for SVN we only do this magic prefix if it's root, .eg landing revision
368 # of files link. If we are in the tree we don't need this since we traverse the url
368 # of files link. If we are in the tree we don't need this since we traverse the url
369 # that has everything stored
369 # that has everything stored
370 if f_path in ['', '/']:
370 if f_path in ['', '/']:
371 final_f_path = '/'.join([ref_name, f_path])
371 final_f_path = '/'.join([ref_name, f_path])
372
372
373 # SVN always needs a commit_id explicitly, without a named REF
373 # SVN always needs a commit_id explicitly, without a named REF
374 default_commit_id = commit_id
374 default_commit_id = commit_id
375 else:
375 else:
376 """
376 """
377 For git and mercurial we construct a new URL using the names instead of commit_id
377 For git and mercurial we construct a new URL using the names instead of commit_id
378 like: /master/some_path?at=master
378 like: /master/some_path?at=master
379 """
379 """
380 # We currently do not support branches with slashes
380 # We currently do not support branches with slashes
381 if '/' in ref_name:
381 if '/' in ref_name:
382 default_commit_id = commit_id
382 default_commit_id = commit_id
383 else:
383 else:
384 default_commit_id = ref_name
384 default_commit_id = ref_name
385
385
386 # sometimes we pass f_path as None, to indicate explicit no prefix,
386 # sometimes we pass f_path as None, to indicate explicit no prefix,
387 # we translate it to string to not have None
387 # we translate it to string to not have None
388 final_f_path = final_f_path or ''
388 final_f_path = final_f_path or ''
389
389
390 files_url = route_path(
390 files_url = route_path(
391 'repo_files',
391 'repo_files',
392 repo_name=db_repo_name,
392 repo_name=db_repo_name,
393 commit_id=default_commit_id,
393 commit_id=default_commit_id,
394 f_path=final_f_path,
394 f_path=final_f_path,
395 _query=query
395 _query=query
396 )
396 )
397 return files_url
397 return files_url
398
398
399
399
400 def code_highlight(code, lexer, formatter, use_hl_filter=False):
400 def code_highlight(code, lexer, formatter, use_hl_filter=False):
401 """
401 """
402 Lex ``code`` with ``lexer`` and format it with the formatter ``formatter``.
402 Lex ``code`` with ``lexer`` and format it with the formatter ``formatter``.
403
403
404 If ``outfile`` is given and a valid file object (an object
404 If ``outfile`` is given and a valid file object (an object
405 with a ``write`` method), the result will be written to it, otherwise
405 with a ``write`` method), the result will be written to it, otherwise
406 it is returned as a string.
406 it is returned as a string.
407 """
407 """
408 if use_hl_filter:
408 if use_hl_filter:
409 # add HL filter
409 # add HL filter
410 from rhodecode.lib.index import search_utils
410 from rhodecode.lib.index import search_utils
411 lexer.add_filter(search_utils.ElasticSearchHLFilter())
411 lexer.add_filter(search_utils.ElasticSearchHLFilter())
412 return pygments.format(pygments.lex(code, lexer), formatter)
412 return pygments.format(pygments.lex(code, lexer), formatter)
413
413
414
414
415 class CodeHtmlFormatter(HtmlFormatter):
415 class CodeHtmlFormatter(HtmlFormatter):
416 """
416 """
417 My code Html Formatter for source codes
417 My code Html Formatter for source codes
418 """
418 """
419
419
420 def wrap(self, source, outfile):
420 def wrap(self, source, outfile):
421 return self._wrap_div(self._wrap_pre(self._wrap_code(source)))
421 return self._wrap_div(self._wrap_pre(self._wrap_code(source)))
422
422
423 def _wrap_code(self, source):
423 def _wrap_code(self, source):
424 for cnt, it in enumerate(source):
424 for cnt, it in enumerate(source):
425 i, t = it
425 i, t = it
426 t = '<div id="L%s">%s</div>' % (cnt + 1, t)
426 t = '<div id="L%s">%s</div>' % (cnt + 1, t)
427 yield i, t
427 yield i, t
428
428
429 def _wrap_tablelinenos(self, inner):
429 def _wrap_tablelinenos(self, inner):
430 dummyoutfile = StringIO.StringIO()
430 dummyoutfile = StringIO.StringIO()
431 lncount = 0
431 lncount = 0
432 for t, line in inner:
432 for t, line in inner:
433 if t:
433 if t:
434 lncount += 1
434 lncount += 1
435 dummyoutfile.write(line)
435 dummyoutfile.write(line)
436
436
437 fl = self.linenostart
437 fl = self.linenostart
438 mw = len(str(lncount + fl - 1))
438 mw = len(str(lncount + fl - 1))
439 sp = self.linenospecial
439 sp = self.linenospecial
440 st = self.linenostep
440 st = self.linenostep
441 la = self.lineanchors
441 la = self.lineanchors
442 aln = self.anchorlinenos
442 aln = self.anchorlinenos
443 nocls = self.noclasses
443 nocls = self.noclasses
444 if sp:
444 if sp:
445 lines = []
445 lines = []
446
446
447 for i in range(fl, fl + lncount):
447 for i in range(fl, fl + lncount):
448 if i % st == 0:
448 if i % st == 0:
449 if i % sp == 0:
449 if i % sp == 0:
450 if aln:
450 if aln:
451 lines.append('<a href="#%s%d" class="special">%*d</a>' %
451 lines.append('<a href="#%s%d" class="special">%*d</a>' %
452 (la, i, mw, i))
452 (la, i, mw, i))
453 else:
453 else:
454 lines.append('<span class="special">%*d</span>' % (mw, i))
454 lines.append('<span class="special">%*d</span>' % (mw, i))
455 else:
455 else:
456 if aln:
456 if aln:
457 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
457 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
458 else:
458 else:
459 lines.append('%*d' % (mw, i))
459 lines.append('%*d' % (mw, i))
460 else:
460 else:
461 lines.append('')
461 lines.append('')
462 ls = '\n'.join(lines)
462 ls = '\n'.join(lines)
463 else:
463 else:
464 lines = []
464 lines = []
465 for i in range(fl, fl + lncount):
465 for i in range(fl, fl + lncount):
466 if i % st == 0:
466 if i % st == 0:
467 if aln:
467 if aln:
468 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
468 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
469 else:
469 else:
470 lines.append('%*d' % (mw, i))
470 lines.append('%*d' % (mw, i))
471 else:
471 else:
472 lines.append('')
472 lines.append('')
473 ls = '\n'.join(lines)
473 ls = '\n'.join(lines)
474
474
475 # in case you wonder about the seemingly redundant <div> here: since the
475 # in case you wonder about the seemingly redundant <div> here: since the
476 # content in the other cell also is wrapped in a div, some browsers in
476 # content in the other cell also is wrapped in a div, some browsers in
477 # some configurations seem to mess up the formatting...
477 # some configurations seem to mess up the formatting...
478 if nocls:
478 if nocls:
479 yield 0, ('<table class="%stable">' % self.cssclass +
479 yield 0, ('<table class="%stable">' % self.cssclass +
480 '<tr><td><div class="linenodiv" '
480 '<tr><td><div class="linenodiv" '
481 'style="background-color: #f0f0f0; padding-right: 10px">'
481 'style="background-color: #f0f0f0; padding-right: 10px">'
482 '<pre style="line-height: 125%">' +
482 '<pre style="line-height: 125%">' +
483 ls + '</pre></div></td><td id="hlcode" class="code">')
483 ls + '</pre></div></td><td id="hlcode" class="code">')
484 else:
484 else:
485 yield 0, ('<table class="%stable">' % self.cssclass +
485 yield 0, ('<table class="%stable">' % self.cssclass +
486 '<tr><td class="linenos"><div class="linenodiv"><pre>' +
486 '<tr><td class="linenos"><div class="linenodiv"><pre>' +
487 ls + '</pre></div></td><td id="hlcode" class="code">')
487 ls + '</pre></div></td><td id="hlcode" class="code">')
488 yield 0, dummyoutfile.getvalue()
488 yield 0, dummyoutfile.getvalue()
489 yield 0, '</td></tr></table>'
489 yield 0, '</td></tr></table>'
490
490
491
491
class SearchContentCodeHtmlFormatter(CodeHtmlFormatter):
    """
    Pygments HTML formatter used for search results: renders only the
    matched line numbers as table rows, inserting a '...' gap row between
    non-adjacent hits, and optionally links each line number to ``url``.
    """

    def __init__(self, **kw):
        # only render these 1-based line numbers, if given
        self.only_lines = kw.pop('only_line_numbers', [])
        self.query_terms = kw.pop('query_terms', [])
        self.max_lines = kw.pop('max_lines', 5)
        self.line_context = kw.pop('line_context', 3)
        self.url = kw.pop('url', None)

        # NOTE(review): passing CodeHtmlFormatter (the parent) to super()
        # skips CodeHtmlFormatter.__init__ itself — presumably deliberate,
        # confirm before changing
        super(CodeHtmlFormatter, self).__init__(**kw)

    def _wrap_code(self, source):
        # wrap every emitted source chunk in its own <pre> element
        for style, text in source:
            yield style, '<pre>%s</pre>' % text

    def _wrap_tablelinenos(self, inner):
        yield 0, '<table class="code-highlight %stable">' % self.cssclass

        prev_rendered_line = 0
        line_no = 1

        for ttype, line in inner:
            # non-source chunks (markup scaffolding) pass straight through
            # and do not advance the line counter
            if not ttype:
                yield ttype, line
                continue

            if line_no in self.only_lines:
                # emit a gap marker between non-consecutive matched lines
                if prev_rendered_line + 1 != line_no:
                    yield 0, '<tr>'
                    yield 0, '<td class="line">...</td>'
                    yield 0, '<td id="hlcode" class="code"></td>'
                    yield 0, '</tr>'

                yield 0, '<tr>'
                if self.url:
                    yield 0, '<td class="line"><a href="%s#L%i">%i</a></td>' % (
                        self.url, line_no, line_no)
                else:
                    yield 0, '<td class="line"><a href="">%i</a></td>' % (
                        line_no)
                yield 0, '<td id="hlcode" class="code">' + line + '</td>'
                yield 0, '</tr>'

                prev_rendered_line = line_no

            line_no += 1

        yield 0, '</table>'
542
542
543
543
def hsv_to_rgb(h, s, v):
    """
    Convert an HSV color to RGB.

    :param h: hue, in [0, 1]
    :param s: saturation, in [0, 1]
    :param v: value, in [0, 1]
    :returns: (r, g, b) tuple of floats in [0, 1]
    """
    # stdlib colorsys implements the exact sector-based conversion that
    # was previously hand-rolled here (same branches, same float math)
    import colorsys
    return colorsys.hsv_to_rgb(h, s, v)
567
567
568
568
def unique_color_generator(n=10000, saturation=0.10, lightness=0.95):
    """
    Generator for getting n of evenly distributed colors using
    hsv color and golden ratio. It always returns the same order of colors.

    :param n: number of colors to generate
    :param saturation: saturation of returned colors
    :param lightness: lightness of returned colors
    :returns: yields ['R', 'G', 'B'] lists of decimal component strings
    """
    golden_ratio = 0.618033988749895
    hue = 0.22717784590367374

    for _ in range(n):
        hue = (hue + golden_ratio) % 1
        rgb = hsv_to_rgb(hue, saturation, lightness)
        # BUGFIX: yield a concrete list instead of a lazy py3 map object,
        # so callers (e.g. color_hasher's cache) can iterate it repeatedly
        yield [str(int(channel * 256)) for channel in rgb]
589
589
590
590
def color_hasher(n=10000, saturation=0.10, lightness=0.95):
    """
    Return a function mapping arbitrary hashable values to stable, unique
    css ``rgb(...)`` strings: the same argument always yields the same color.

    :param n: number of colors the underlying generator can produce
    :param saturation: saturation of returned colors
    :param lightness: lightness of returned colors
    :returns: function returning a css RGB string
    """
    color_dict = {}
    cgenerator = unique_color_generator(
        saturation=saturation, lightness=lightness)

    def get_color_string(thing):
        # cache the joined component string, not the raw value from the
        # generator: BUGFIX — the generator historically yielded lazy map
        # objects that were exhausted after a single join, so a second
        # lookup of the same key produced an empty 'rgb()'
        if thing not in color_dict:
            color_dict[thing] = ', '.join(next(cgenerator))
        return "rgb(%s)" % (color_dict[thing],)

    return get_color_string
622
622
623
623
def get_lexer_safe(mimetype=None, filepath=None):
    """
    Tries to return a relevant pygments lexer using mimetype/filepath name,
    defaulting to plain text if none could be found.

    :param mimetype: optional mimetype to look the lexer up by
    :param filepath: optional file name to look the lexer up by
    """
    lexer = None

    # BUGFIX: the lookups were in a single try block, so a ClassNotFound
    # from the mimetype lookup silently skipped the filename fallback
    if mimetype:
        try:
            lexer = get_lexer_for_mimetype(mimetype)
        except pygments.util.ClassNotFound:
            pass

    if not lexer and filepath:
        try:
            lexer = get_lexer_for_filename(filepath)
        except pygments.util.ClassNotFound:
            pass

    if not lexer:
        lexer = get_lexer_by_name('text')

    return lexer
642
642
643
643
def get_lexer_for_filenode(filenode):
    """Prefer a custom lexer registered for the file extension, falling
    back to the lexer the node itself resolves."""
    return get_custom_lexer(filenode.extension) or filenode.lexer
647
647
648
648
def pygmentize(filenode, **kwargs):
    """
    Highlight the content of *filenode* with pygments.

    :param filenode: file node providing ``content`` and lexer info
    :returns: literal() html markup
    """
    lexer = get_lexer_for_filenode(filenode)
    formatter = CodeHtmlFormatter(**kwargs)
    return literal(code_highlight(filenode.content, lexer, formatter))
658
658
659
659
def is_following_repo(repo_name, user_id):
    """Return whether the given user follows the named repository."""
    from rhodecode.model.scm import ScmModel
    scm_model = ScmModel()
    return scm_model.is_following_repo(repo_name, user_id)
663
663
664
664
class _Message(object):
    """A message returned by ``Flash.pop_messages()``.

    Converting the message to a string returns the message text. Instances
    also have the following attributes:

    * ``message``: the message text.
    * ``category``: the category specified when the message was created.
    * ``sub_data``: optional structured payload attached to the message.
    """

    def __init__(self, category, message, sub_data=None):
        self.category = category
        self.message = message
        self.sub_data = sub_data or {}

    def __str__(self):
        return self.message

    # py2-era alias: unicode() conversion returns the same text
    __unicode__ = __str__

    def __html__(self):
        # used by the template engine when embedding the message as markup
        return escape(safe_unicode(self.message))
687
687
688
688
class Flash(object):
    """
    Session-backed flash-message queue (wrapper over pyramid's
    ``session.flash``). Messages are queued per category and drained
    with :meth:`pop_messages`.
    """

    # List of allowed categories. If None, allow any category.
    categories = ["warning", "notice", "error", "success"]

    # Default category if none is specified.
    default_category = "notice"

    def __init__(self, session_key="flash", categories=None,
                 default_category=None):
        """
        Instantiate a ``Flash`` object.

        ``session_key`` is the key to save the messages under in the user's
        session.

        ``categories`` is an optional list which overrides the default list
        of categories.

        ``default_category`` overrides the default category used for messages
        when none is specified.

        :raises ValueError: if ``default_category`` is not in ``categories``.
        """
        self.session_key = session_key
        if categories is not None:
            self.categories = categories
        if default_category is not None:
            self.default_category = default_category
        if self.categories and self.default_category not in self.categories:
            raise ValueError(
                "unrecognized default category %r" % (self.default_category,))

    def pop_messages(self, session=None, request=None):
        """
        Return all accumulated messages and delete them from the session.

        The return value is a list of ``Message`` objects.
        """
        messages = []

        if not session:
            if not request:
                request = get_current_request()
            session = request.session

        # Pop the 'old' pylons flash messages. They are tuples of the form
        # (category, message)
        for cat, msg in session.pop(self.session_key, []):
            messages.append(_Message(cat, msg))

        # Pop the 'new' pyramid flash messages for each category as list
        # of strings.
        for cat in self.categories:
            for msg in session.pop_flash(queue=cat):
                sub_data = {}
                if hasattr(msg, 'rsplit'):
                    # string messages may carry JSON sub-data appended
                    # after a |DELIM| marker
                    flash_data = msg.rsplit('|DELIM|', 1)
                    org_message = flash_data[0]
                    if len(flash_data) > 1:
                        sub_data = json.loads(flash_data[1])
                else:
                    org_message = msg

                messages.append(_Message(cat, org_message, sub_data=sub_data))

        # Map messages from the default queue to the 'notice' category.
        for msg in session.pop_flash():
            messages.append(_Message('notice', msg))

        session.save()
        return messages

    def json_alerts(self, session=None, request=None):
        """Drain all messages and return them serialized as a JSON list."""
        payloads = []
        # BUGFIX: drain THIS instance's queues instead of the module-level
        # `flash` singleton, so a Flash configured with custom categories /
        # session_key reads its own messages
        messages = self.pop_messages(session=session, request=request) or []
        for message in messages:
            payloads.append({
                'message': {
                    'message': u'{}'.format(message.message),
                    'level': message.category,
                    'force': True,
                    'subdata': message.sub_data
                }
            })
        return json.dumps(payloads)

    def __call__(self, message, category=None, ignore_duplicate=True,
                 session=None, request=None):
        """Queue *message* under *category* in the (request) session."""
        if not session:
            if not request:
                request = get_current_request()
            session = request.session

        session.flash(
            message, queue=category, allow_duplicate=not ignore_duplicate)
783
783
784
784
# module-level singleton; views/templates emit messages via ``h.flash(...)``
flash = Flash()
786
786
787 #==============================================================================
787 #==============================================================================
788 # SCM FILTERS available via h.
788 # SCM FILTERS available via h.
789 #==============================================================================
789 #==============================================================================
790 from rhodecode.lib.vcs.utils import author_name, author_email
790 from rhodecode.lib.vcs.utils import author_name, author_email
791 from rhodecode.lib.utils2 import age, age_from_seconds
791 from rhodecode.lib.utils2 import age, age_from_seconds
792 from rhodecode.model.db import User, ChangesetStatus
792 from rhodecode.model.db import User, ChangesetStatus
793
793
794
794
795 email = author_email
795 email = author_email
796
796
797
797
def capitalize(raw_text):
    """Return *raw_text* with its first character capitalized and the
    remainder lower-cased (plain ``str.capitalize`` semantics)."""
    return raw_text.capitalize()
800
800
801
801
def short_id(long_id):
    """Return the 12-character abbreviation of a full commit hash."""
    return long_id[:12]
804
804
805
805
def hide_credentials(url):
    """Return *url* with any embedded authentication credentials masked."""
    from rhodecode.lib.utils2 import credentials_filter
    return credentials_filter(url)
809
809
810
810
811 import pytz
811 import pytz
812 import tzlocal
812 import tzlocal
813 local_timezone = tzlocal.get_localzone()
813 local_timezone = tzlocal.get_localzone()
814
814
815
815
def get_timezone(datetime_iso, time_is_local=False):
    """
    Return the UTC-offset suffix (``+HH:MM`` style) for *datetime_iso*.

    Naive local datetimes get the server timezone's offset (overridable via
    the RC_TIMEZONE environment variable); everything else is reported as
    UTC (``+00:00``).
    """
    tzinfo = '+00:00'

    # localize only naive datetime objects explicitly flagged as local time
    needs_localizing = (
        time_is_local
        and isinstance(datetime_iso, datetime)
        and not datetime_iso.tzinfo)
    if needs_localizing:
        force_timezone = os.environ.get('RC_TIMEZONE', '')
        if force_timezone:
            force_timezone = pytz.timezone(force_timezone)
        timezone = force_timezone or local_timezone
        offset = timezone.localize(datetime_iso).strftime('%z')
        tzinfo = '{}:{}'.format(offset[:-2], offset[-2:])
    return tzinfo
828
828
829
829
def age_component(datetime_iso, value=None, time_is_local=False, tooltip=True):
    """Render a ``<time class="timeago">`` element for *datetime_iso*,
    titled with *value* (or the formatted date) and its UTC offset."""
    title = value or format_date(datetime_iso)
    tzinfo = get_timezone(datetime_iso, time_is_local=time_is_local)

    css_class = ''
    tt_title = ''
    if tooltip:
        css_class = 'tooltip'
        tt_title = '{title}{tzinfo}'.format(title=title, tzinfo=tzinfo)

    markup = (
        '<time class="timeago {cls}" title="{tt_title}" '
        'datetime="{dt}{tzinfo}">{title}</time>'
    ).format(
        cls=css_class, tt_title=tt_title,
        title=title, dt=datetime_iso, tzinfo=tzinfo)
    return literal(markup)
840
840
841
841
def _shorten_commit_id(commit_id, commit_len=None):
    """Truncate *commit_id* to *commit_len* characters; when no length is
    given, use the per-request ``show_sha_length`` visual setting."""
    if commit_len is None:
        request = get_current_request()
        commit_len = request.call_context.visual.show_sha_length
    return commit_id[:commit_len]
847
847
848
848
def show_id(commit, show_idx=None, commit_len=None):
    """
    Configurable function that shows ID
    by default it's r123:fffeeefffeee

    :param commit: commit instance
    :param show_idx: prepend ``r<idx>:``; defaults to the per-request
        ``show_revision_number`` visual setting
    :param commit_len: sha abbreviation length override
    """
    if show_idx is None:
        request = get_current_request()
        show_idx = request.call_context.visual.show_revision_number

    raw_id = _shorten_commit_id(commit.raw_id, commit_len=commit_len)
    if show_idx:
        return 'r%s:%s' % (commit.idx, raw_id)
    return '%s' % (raw_id, )
865
865
866
866
def format_date(date):
    """
    Standardized date formatting used across RhodeCode.

    :param date: date/datetime object; falsy values yield an empty string
    :return: formatted date string
    """
    if not date:
        return u""
    return safe_unicode(date.strftime("%a, %d %b %Y %H:%M:%S"))
880
880
881
881
class _RepoChecker(object):
    """Callable predicate checking whether a repository (backend object,
    db row, or plain alias string) matches a given backend alias."""

    def __init__(self, backend_alias):
        self._backend_alias = backend_alias

    def __call__(self, repository):
        # accept vcs repo objects (alias), db Repository rows (repo_type),
        # or a bare alias string
        if hasattr(repository, 'alias'):
            repo_type = repository.alias
        elif hasattr(repository, 'repo_type'):
            repo_type = repository.repo_type
        else:
            repo_type = repository
        return repo_type == self._backend_alias
895
895
896
896
# ready-made backend predicates used as template helpers: is_git(repo), ...
is_git = _RepoChecker('git')
is_hg = _RepoChecker('hg')
is_svn = _RepoChecker('svn')
900
900
901
901
def get_repo_type_by_name(repo_name):
    """Return the ``repo_type`` of the named repository, or None when the
    repository does not exist."""
    repo = Repository.get_by_repo_name(repo_name)
    return repo.repo_type if repo else None
906
906
907
907
def is_svn_without_proxy(repository):
    """True when *repository* is SVN and the http-proxy subsystem is
    disabled in the vcs settings."""
    if not is_svn(repository):
        return False
    from rhodecode.model.settings import VcsSettingsModel
    conf = VcsSettingsModel().get_ui_settings_as_config_obj()
    return not str2bool(conf.get('vcs_svn_proxy', 'http_requests_enabled'))
914
914
915
915
def discover_user(author):
    """
    Try to resolve a RhodeCode User from an author string, which is
    typically ``FirstName LastName <email@address.com>``.

    :returns: the matching ``User`` or None
    """
    # already a User instance - nothing to discover
    if isinstance(author, User):
        return author

    # a valid email in the string? look the account up by email first
    _email = author_email(author)
    if _email != '':
        user = User.get_by_email(_email, case_insensitive=True, cache=True)
        if user is not None:
            return user

    # otherwise treat the name part as a username (None when no match)
    _author = author_name(author)
    return User.get_by_username(_author, case_insensitive=True, cache=True)
940
940
941
941
def email_or_none(author):
    """Return the best-guess email address for an author string, or None."""
    # an email embedded directly in the commit author string wins
    _email = author_email(author)
    if _email != '':
        return _email

    # otherwise try a registered user whose username matches the name part
    user = User.get_by_username(
        author_name(author), case_insensitive=True, cache=True)
    if user is not None:
        return user.email

    # No valid email, not a valid user in the system, none!
    return None
959
959
960
960
def link_to_user(author, length=0, **kwargs):
    """Render *author* as a profile link when it maps to a known user,
    otherwise as escaped plain text; *length* truncates the display name."""
    user = discover_user(author)
    # user can be None, but if we have it already it means we can re-use it
    # in the person() function, so we save 1 intensive-query
    if user:
        author = user

    display_person = person(author, 'username_or_name_or_email')
    if length:
        display_person = shorter(display_person, length)

    if not user or user.username == user.DEFAULT_USER:
        return escape(display_person)
    return link_to(
        escape(display_person),
        route_path('user_profile', username=user.username),
        **kwargs)
979
979
980
980
def link_to_group(users_group_name, **kwargs):
    """Render an escaped link to the given user group's profile page."""
    profile_url = route_path(
        'user_group_profile', user_group_name=users_group_name)
    return link_to(escape(users_group_name), profile_url, **kwargs)
986
986
987
987
def person(author, show_attr="username_and_name"):
    """Return *show_attr* of the user matching *author*, else fall back to
    the raw name (or email) extracted from the author string."""
    user = discover_user(author)
    if user:
        return getattr(user, show_attr)

    name_part = author_name(author)
    email_part = email(author)
    return name_part or email_part
996
996
997
997
def author_string(email):
    """Format an email as ``First Last &lt;email&gt;`` when it belongs to a
    named registered user, else return the email itself (None for falsy)."""
    if not email:
        return None

    user = User.get_by_email(email, case_insensitive=True, cache=True)
    if user and (user.first_name or user.last_name):
        return '%s %s &lt;%s&gt;' % (user.first_name, user.last_name, email)
    return email
1011
1011
1012
1012
def person_by_id(id_, show_attr="username_and_name"):
    """Resolve a numeric user id to *show_attr* of the user; non-numeric or
    unknown ids are returned unchanged (numeric ones as int)."""

    def person_getter(usr):
        # attr to return from fetched user
        return getattr(usr, show_attr)

    # maybe it's an ID ?
    if str(id_).isdigit() or isinstance(id_, int):
        user_id = int(id_)
        user = User.get(user_id)
        if user is not None:
            return person_getter(user)
        return user_id
    return id_
1024
1024
1025
1025
def gravatar_with_user(request, author, show_disabled=False, tooltip=False):
    """Render the ``gravatar_with_user`` def from base.mako for *author*."""
    _render = request.get_partial_renderer('rhodecode:templates/base/base.mako')
    return _render('gravatar_with_user', author, show_disabled=show_disabled, tooltip=tooltip)
1029
1029
1030
1030
# Mapping of meta-tag name -> (compiled pattern, replacement html), used by
# extract_metatags()/style_metatag(). Order matters: specific tags come
# first, the generic catch-all last. Patterns run against html-escaped
# text, hence the literal \&gt; / \&amp; sequences.
tags_paterns = OrderedDict((
    ('lang', (re.compile(r'\[(lang|language)\ \=\&gt;\ *([a-zA-Z\-\/\#\+\.]*)\]'),
              '<div class="metatag" tag="lang">\\2</div>')),

    ('see', (re.compile(r'\[see\ \=\&gt;\ *([a-zA-Z0-9\/\=\?\&amp;\ \:\/\.\-]*)\]'),
             '<div class="metatag" tag="see">see: \\1 </div>')),

    ('url', (re.compile(r'\[url\ \=\&gt;\ \[([a-zA-Z0-9\ \.\-\_]+)\]\((http://|https://|/)(.*?)\)\]'),
             '<div class="metatag" tag="url"> <a href="\\2\\3">\\1</a> </div>')),

    ('license', (re.compile(r'\[license\ \=\&gt;\ *([a-zA-Z0-9\/\=\?\&amp;\ \:\/\.\-]*)\]'),
                 '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/\\1">\\1</a></div>')),

    ('ref', (re.compile(r'\[(requires|recommends|conflicts|base)\ \=\&gt;\ *([a-zA-Z0-9\-\/]*)\]'),
             '<div class="metatag" tag="ref \\1">\\1: <a href="/\\2">\\2</a></div>')),

    ('state', (re.compile(r'\[(stable|featured|stale|dead|dev|deprecated)\]'),
               '<div class="metatag" tag="state \\1">\\1</div>')),

    # label in grey
    ('label', (re.compile(r'\[([a-z]+)\]'),
               '<div class="metatag" tag="label">\\1</div>')),

    # generic catch all in grey
    ('generic', (re.compile(r'\[([a-zA-Z0-9\.\-\_]+)\]'),
                 '<div class="metatag" tag="generic">\\1</div>')),
))
1058
1058
1059
1059
def extract_metatags(value):
    """
    Extract supported meta-tags from the given text value.

    :returns: (list of (tag_key, matched_text) tuples, value with the
        matched tags stripped out)
    """
    tags = []
    if not value:
        return tags, ''

    for key, (pat, _replace_html) in tags_paterns.items():
        tags.extend((key, match.group()) for match in pat.finditer(value))
        value = pat.sub('', value)

    return tags, value
1074
1074
1075
1075
def style_metatag(tag_type, value):
    """
    Convert meta-tags of *tag_type* inside *value* into their html
    equivalent; unknown tag types return the value unchanged.
    """
    if not value:
        return ''

    tag_data = tags_paterns.get(tag_type)
    if not tag_data:
        return value

    pat, replace_html = tag_data
    return pat.sub(replace_html, value)
1092
1092
1093
1093
def bool2icon(value, show_at_false=True):
    """
    Returns boolean value of a given value, represented as html element with
    classes that will represent icons

    :param value: given value to convert to html node
    """
    # truthy values render the "true" icon
    if value:
        return HTML.tag('i', class_="icon-true", title='True')

    # falsy values either render the "false" icon or an empty element
    if show_at_false:
        return HTML.tag('i', class_="icon-false", title='False')
    return HTML.tag('i')
1108
1108
1109
1109
def b64(inp):
    """
    Base64-encode ``inp`` and return the encoded bytes.

    Accepts either ``bytes`` or ``str``; a ``str`` input is UTF-8 encoded
    first, since py3 ``base64.b64encode`` only accepts bytes-like objects.

    :param inp: bytes or str payload to encode
    :returns: base64-encoded ``bytes``
    """
    if isinstance(inp, str):
        inp = inp.encode('utf-8')
    return base64.b64encode(inp)
1112
1112
1113 #==============================================================================
1113 #==============================================================================
1114 # PERMS
1114 # PERMS
1115 #==============================================================================
1115 #==============================================================================
1116 from rhodecode.lib.auth import (
1116 from rhodecode.lib.auth import (
1117 HasPermissionAny, HasPermissionAll,
1117 HasPermissionAny, HasPermissionAll,
1118 HasRepoPermissionAny, HasRepoPermissionAll, HasRepoGroupPermissionAll,
1118 HasRepoPermissionAny, HasRepoPermissionAll, HasRepoGroupPermissionAll,
1119 HasRepoGroupPermissionAny, HasRepoPermissionAnyApi, get_csrf_token,
1119 HasRepoGroupPermissionAny, HasRepoPermissionAnyApi, get_csrf_token,
1120 csrf_token_key, AuthUser)
1120 csrf_token_key, AuthUser)
1121
1121
1122
1122
1123 #==============================================================================
1123 #==============================================================================
1124 # GRAVATAR URL
1124 # GRAVATAR URL
1125 #==============================================================================
1125 #==============================================================================
class InitialsGravatar(object):
    """
    Generates an "initials" avatar: an SVG with the user's two-letter
    initials on a background color derived deterministically from the
    email address.
    """

    def __init__(self, email_address, first_name, last_name, size=30,
                 background=None, text_color='#fff'):
        self.size = size
        self.first_name = first_name
        self.last_name = last_name
        self.email_address = email_address
        # background falls back to a stable color picked from the email
        self.background = background or self.str2color(email_address)
        self.text_color = text_color

    def get_color_bank(self):
        """
        returns a predefined list of colors that gravatars can use.
        Those are randomized distinct colors that guarantee readability and
        uniqueness.

        generated with: http://phrogz.net/css/distinct-colors.html
        """
        return [
            '#bf3030', '#a67f53', '#00ff00', '#5989b3', '#392040', '#d90000',
            '#402910', '#204020', '#79baf2', '#a700b3', '#bf6060', '#7f5320',
            '#008000', '#003059', '#ee00ff', '#ff0000', '#8c4b00', '#007300',
            '#005fb3', '#de73e6', '#ff4040', '#ffaa00', '#3df255', '#203140',
            '#47004d', '#591616', '#664400', '#59b365', '#0d2133', '#83008c',
            '#592d2d', '#bf9f60', '#73e682', '#1d3f73', '#73006b', '#402020',
            '#b2862d', '#397341', '#597db3', '#e600d6', '#a60000', '#736039',
            '#00b318', '#79aaf2', '#330d30', '#ff8080', '#403010', '#16591f',
            '#002459', '#8c4688', '#e50000', '#ffbf40', '#00732e', '#102340',
            '#bf60ac', '#8c4646', '#cc8800', '#00a642', '#1d3473', '#b32d98',
            '#660e00', '#ffd580', '#80ffb2', '#7391e6', '#733967', '#d97b6c',
            '#8c5e00', '#59b389', '#3967e6', '#590047', '#73281d', '#665200',
            '#00e67a', '#2d50b3', '#8c2377', '#734139', '#b2982d', '#16593a',
            '#001859', '#ff00aa', '#a65e53', '#ffcc00', '#0d3321', '#2d3959',
            '#731d56', '#401610', '#4c3d00', '#468c6c', '#002ca6', '#d936a3',
            '#d94c36', '#403920', '#36d9a3', '#0d1733', '#592d4a', '#993626',
            '#cca300', '#00734d', '#46598c', '#8c005e', '#7f1100', '#8c7000',
            '#00a66f', '#7382e6', '#b32d74', '#d9896c', '#ffe680', '#1d7362',
            '#364cd9', '#73003d', '#d93a00', '#998a4d', '#59b3a1', '#5965b3',
            '#e5007a', '#73341d', '#665f00', '#00b38f', '#0018b3', '#59163a',
            '#b2502d', '#bfb960', '#00ffcc', '#23318c', '#a6537f', '#734939',
            '#b2a700', '#104036', '#3d3df2', '#402031', '#e56739', '#736f39',
            '#79f2ea', '#000059', '#401029', '#4c1400', '#ffee00', '#005953',
            '#101040', '#990052', '#402820', '#403d10', '#00ffee', '#0000d9',
            '#ff80c4', '#a66953', '#eeff00', '#00ccbe', '#8080ff', '#e673a1',
            '#a62c00', '#474d00', '#1a3331', '#46468c', '#733950', '#662900',
            '#858c23', '#238c85', '#0f0073', '#b20047', '#d9986c', '#becc00',
            '#396f73', '#281d73', '#ff0066', '#ff6600', '#dee673', '#59adb3',
            '#6559b3', '#590024', '#b2622d', '#98b32d', '#36ced9', '#332d59',
            '#40001a', '#733f1d', '#526600', '#005359', '#242040', '#bf6079',
            '#735039', '#cef23d', '#007780', '#5630bf', '#66001b', '#b24700',
            '#acbf60', '#1d6273', '#25008c', '#731d34', '#a67453', '#50592d',
            '#00ccff', '#6600ff', '#ff0044', '#4c1f00', '#8a994d', '#79daf2',
            '#a173e6', '#d93662', '#402310', '#aaff00', '#2d98b3', '#8c40ff',
            '#592d39', '#ff8c40', '#354020', '#103640', '#1a0040', '#331a20',
            '#331400', '#334d00', '#1d5673', '#583973', '#7f0022', '#4c3626',
            '#88cc00', '#36a3d9', '#3d0073', '#d9364c', '#33241a', '#698c23',
            '#5995b3', '#300059', '#e57382', '#7f3300', '#366600', '#00aaff',
            '#3a1659', '#733941', '#663600', '#74b32d', '#003c59', '#7f53a6',
            '#73000f', '#ff8800', '#baf279', '#79caf2', '#291040', '#a6293a',
            '#b2742d', '#587339', '#0077b3', '#632699', '#400009', '#d9a66c',
            '#294010', '#2d4a59', '#aa00ff', '#4c131b', '#b25f00', '#5ce600',
            '#267399', '#a336d9', '#990014', '#664e33', '#86bf60', '#0088ff',
            '#7700b3', '#593a16', '#073300', '#1d4b73', '#ac60bf', '#e59539',
            '#4f8c46', '#368dd9', '#5c0073'
        ]

    def rgb_to_hex_color(self, rgb_tuple):
        """
        Converts an rgb_tuple passed to an hex color.

        :param rgb_tuple: tuple with 3 ints represents rgb color space
        """
        # NOTE: the old ``"".join(map(chr, rgb_tuple)).encode('hex')`` was
        # py2-only ('hex' codec removed on str in py3); %02x is equivalent
        return '#' + ''.join(['%02x' % c for c in rgb_tuple])

    def email_to_int_list(self, email_str):
        """
        Get every byte of the hex digest value of email and turn it to integer.
        It's going to be always between 0-255
        """
        digest = md5_safe(email_str.lower())
        return [int(digest[i * 2:i * 2 + 2], 16) for i in range(16)]

    def pick_color_bank_index(self, email_str, color_bank):
        # first digest byte, wrapped to the bank size
        return self.email_to_int_list(email_str)[0] % len(color_bank)

    def str2color(self, email_str):
        """
        Tries to map in a stable algorithm an email to color

        :param email_str:
        """
        color_bank = self.get_color_bank()
        # pick position (module it's length so we always find it in the
        # bank even if it's smaller than 256 values
        pos = self.pick_color_bank_index(email_str, color_bank)
        return color_bank[pos]

    def normalize_email(self, email_address):
        """
        Normalize the email: fill in missing parts with a default host,
        then strip it down to its ascii representation (NFKD + ignore).
        """
        import unicodedata
        # default host used to fill in the fake/missing email
        default_host = 'localhost'

        if not email_address:
            email_address = '%s@%s' % (User.DEFAULT_USER, default_host)

        email_address = safe_unicode(email_address)

        if '@' not in email_address:
            email_address = '%s@%s' % (email_address, default_host)

        if email_address.endswith('@'):
            email_address = '%s%s' % (email_address, default_host)

        # decode back to str: callers (get_initials) split/index this value
        # with str literals, which would fail on a bytes result in py3
        email_address = unicodedata.normalize('NFKD', email_address)\
            .encode('ascii', 'ignore').decode('ascii')
        return email_address

    def get_initials(self):
        """
        Returns 2 letter initials calculated based on the input.
        The algorithm picks first given email address, and takes first letter
        of part before @, and then the first letter of server name. In case
        the part before @ is in a format of `somestring.somestring2` it replaces
        the server letter with first letter of somestring2

        In case function was initialized with both first and lastname, this
        overrides the extraction from email by first letter of the first and
        last name. We add special logic to that functionality, In case Full name
        is compound, like Guido Von Rossum, we use last part of the last name
        (Von Rossum) picking `R`.

        Function also normalizes the non-ascii characters to they ascii
        representation, eg Ą => A
        """
        import unicodedata
        # replace non-ascii to ascii; decode so we stay in str-land (py3)
        first_name = unicodedata.normalize(
            'NFKD', safe_unicode(self.first_name))\
            .encode('ascii', 'ignore').decode('ascii')
        last_name = unicodedata.normalize(
            'NFKD', safe_unicode(self.last_name))\
            .encode('ascii', 'ignore').decode('ascii')

        # do NFKD encoding, and also make sure email has proper format
        email_address = self.normalize_email(self.email_address)

        # first push the email initials
        prefix, server = email_address.split('@', 1)

        # check if prefix is maybe a 'first_name.last_name' syntax
        _dot_split = prefix.rsplit('.', 1)
        if len(_dot_split) == 2 and _dot_split[1]:
            initials = [_dot_split[0][0], _dot_split[1][0]]
        else:
            initials = [prefix[0], server[0]]

        # then try to replace either first_name or last_name
        fn_letter = (first_name or " ")[0].strip()
        ln_letter = (last_name.split(' ', 1)[-1] or " ")[0].strip()

        if fn_letter:
            initials[0] = fn_letter

        if ln_letter:
            initials[1] = ln_letter

        return ''.join(initials).upper()

    def get_img_data_by_type(self, font_family, img_type):
        # generic "anonymous user" silhouette svg
        default_user = """
        <svg xmlns="http://www.w3.org/2000/svg"
        version="1.1" x="0px" y="0px" width="{size}" height="{size}"
        viewBox="-15 -10 439.165 429.164"

        xml:space="preserve"
        style="background:{background};" >

        <path d="M204.583,216.671c50.664,0,91.74-48.075,
        91.74-107.378c0-82.237-41.074-107.377-91.74-107.377
        c-50.668,0-91.74,25.14-91.74,107.377C112.844,
        168.596,153.916,216.671,
        204.583,216.671z" fill="{text_color}"/>
        <path d="M407.164,374.717L360.88,
        270.454c-2.117-4.771-5.836-8.728-10.465-11.138l-71.83-37.392
        c-1.584-0.823-3.502-0.663-4.926,0.415c-20.316,
        15.366-44.203,23.488-69.076,23.488c-24.877,
        0-48.762-8.122-69.078-23.488
        c-1.428-1.078-3.346-1.238-4.93-0.415L58.75,
        259.316c-4.631,2.41-8.346,6.365-10.465,11.138L2.001,374.717
        c-3.191,7.188-2.537,15.412,1.75,22.005c4.285,
        6.592,11.537,10.526,19.4,10.526h362.861c7.863,0,15.117-3.936,
        19.402-10.527 C409.699,390.129,
        410.355,381.902,407.164,374.717z" fill="{text_color}"/>
        </svg>""".format(
            size=self.size,
            background='#979797',  # @grey4
            text_color=self.text_color,
            font_family=font_family)

        return {
            "default_user": default_user
        }[img_type]

    def get_img_data(self, svg_type=None):
        """
        generates the svg metadata for image
        """
        fonts = [
            '-apple-system',
            'BlinkMacSystemFont',
            'Segoe UI',
            'Roboto',
            'Oxygen-Sans',
            'Ubuntu',
            'Cantarell',
            'Helvetica Neue',
            'sans-serif'
        ]
        font_family = ','.join(fonts)
        if svg_type:
            return self.get_img_data_by_type(font_family, svg_type)

        initials = self.get_initials()
        img_data = """
        <svg xmlns="http://www.w3.org/2000/svg" pointer-events="none"
             width="{size}" height="{size}"
             style="width: 100%; height: 100%; background-color: {background}"
             viewBox="0 0 {size} {size}">
            <text text-anchor="middle" y="50%" x="50%" dy="0.35em"
                  pointer-events="auto" fill="{text_color}"
                  font-family="{font_family}"
                  style="font-weight: 400; font-size: {f_size}px;">{text}
            </text>
        </svg>""".format(
            size=self.size,
            f_size=self.size/2.05,  # scale the text inside the box nicely
            background=self.background,
            text_color=self.text_color,
            text=initials.upper(),
            font_family=font_family)

        return img_data

    def generate_svg(self, svg_type=None):
        """
        Return the avatar svg as a data-uri string.
        """
        img_data = self.get_img_data(svg_type)
        # b64encode requires bytes in py3; decode the result back to str
        # so the data-uri is a proper text string
        b64_img = base64.b64encode(img_data.encode('utf-8')).decode('ascii')
        return "data:image/svg+xml;base64,%s" % b64_img
1370
1370
1371
1371
def initials_gravatar(request, email_address, first_name, last_name, size=30, store_on_disk=False):
    """
    Return an avatar for the given identity.

    When ``store_on_disk`` is False, returns an inline ``data:`` uri with the
    generated initials svg. Otherwise the svg is persisted (once) in the file
    store and a ``download_file`` route path is returned.

    :param request: pyramid request, used for routing and storage settings
    :param email_address: identity email; default-user email gets the
        anonymous silhouette svg
    :param first_name: used for initials extraction
    :param last_name: used for initials extraction
    :param size: square avatar size in px
    :param store_on_disk: persist the svg in the file store instead of
        inlining it
    """
    svg_type = None
    if email_address == User.DEFAULT_USER_EMAIL:
        svg_type = 'default_user'

    klass = InitialsGravatar(email_address, first_name, last_name, size)

    if not store_on_disk:
        return klass.generate_svg(svg_type=svg_type)

    from rhodecode.apps.file_store import utils as store_utils
    from rhodecode.apps.file_store.exceptions import FileNotAllowedException, \
        FileOverSizeException
    from rhodecode.model.db import Session

    # stable key so the same identity always maps to the same stored file
    image_key = md5_safe(email_address.lower()
                         + first_name.lower() + last_name.lower())

    storage = store_utils.get_file_storage(request.registry.settings)
    filename = '{}.svg'.format(image_key)
    subdir = 'gravatars'
    # since final name has a counter, we apply the 0
    uid = storage.apply_counter(0, store_utils.uid_filename(filename, randomized=False))
    store_uid = os.path.join(subdir, uid)

    # already stored once -> just hand out the download path
    db_entry = FileStore.get_by_store_uid(store_uid)
    if db_entry:
        return request.route_path('download_file', fid=store_uid)

    img_data = klass.get_img_data(svg_type=svg_type)
    img_file = store_utils.bytes_to_file_obj(img_data)

    # FileNotAllowedException / FileOverSizeException intentionally propagate
    # to the caller (the old try/except here only re-raised)
    store_uid, metadata = storage.save_file(
        img_file, filename, directory=subdir,
        extensions=['.svg'], randomized_name=False)

    entry = FileStore.create(
        file_uid=store_uid, filename=metadata["filename"],
        file_hash=metadata["sha256"], file_size=metadata["size"],
        file_display_name=filename,
        file_description=u'user gravatar `{}`'.format(safe_unicode(filename)),
        hidden=True, check_acl=False, user_id=1
    )
    Session().add(entry)
    Session().commit()
    log.debug('Stored upload in DB as %s', entry)

    return request.route_path('download_file', fid=store_uid)
1428
1428
1429
1429
def gravatar_external(request, gravatar_url_tmpl, email_address, size=30):
    """
    Render an external gravatar url template by substituting the supported
    ``{placeholder}`` tokens with values from the request and email.
    """
    substitutions = (
        ('{email}', email_address),
        ('{md5email}', md5_safe(email_address.lower())),
        ('{netloc}', request.host),
        ('{scheme}', request.scheme),
        ('{size}', safe_str(size)),
    )
    rendered = safe_str(gravatar_url_tmpl)
    for placeholder, replacement in substitutions:
        rendered = rendered.replace(placeholder, replacement)
    return rendered
1437
1437
1438
1438
def gravatar_url(email_address, size=30, request=None):
    """
    Return an avatar url for the given email: either the configured external
    gravatar service, or a locally generated initials svg.
    """
    request = request or get_current_request()
    use_gravatar_service = request.call_context.visual.use_gravatar

    email_address = email_address or User.DEFAULT_USER_EMAIL
    if isinstance(email_address, str):
        # hashlib crashes on unicode items
        email_address = safe_str(email_address)

    # empty email or default user -> generated default avatar
    if not email_address or email_address == User.DEFAULT_USER_EMAIL:
        return initials_gravatar(request, User.DEFAULT_USER_EMAIL, '', '', size=size)

    if use_gravatar_service:
        url_template = (request.call_context.visual.gravatar_url
                        or User.DEFAULT_GRAVATAR_URL)
        return gravatar_external(request, url_template, email_address, size=size)

    return initials_gravatar(request, email_address, '', '', size=size)
1459
1459
1460
1460
def breadcrumb_repo_link(repo):
    """
    Makes a breadcrumbs path link to repo

    ex::
        group >> subgroup >> repo

    :param repo: a Repository instance
    """
    segments = []
    # parent groups first, left to right
    for group in repo.groups_with_parents:
        segments.append(link_to(
            group.name,
            route_path('repo_group_home', repo_group_name=group.group_name),
            title='last change:{}'.format(format_date(group.last_commit_change))))
    # the repository itself goes last
    segments.append(link_to(
        repo.just_name,
        route_path('repo_summary', repo_name=repo.repo_name),
        title='last change:{}'.format(format_date(repo.last_commit_change))))

    return literal(' &raquo; '.join(segments))
1481
1481
1482
1482
def breadcrumb_repo_group_link(repo_group):
    """
    Makes a breadcrumbs path link to repo

    ex::
        group >> subgroup

    :param repo_group: a Repository Group instance
    """
    segments = []
    # ancestors first, left to right
    for group in repo_group.parents:
        segments.append(link_to(
            group.name,
            route_path('repo_group_home', repo_group_name=group.group_name),
            title='last change:{}'.format(format_date(group.last_commit_change))))
    # the group itself goes last
    segments.append(link_to(
        repo_group.name,
        route_path('repo_group_home', repo_group_name=repo_group.group_name),
        title='last change:{}'.format(format_date(repo_group.last_commit_change))))

    return literal(' &raquo; '.join(segments))
1505
1505
1506
1506
def format_byte_size_binary(file_size):
    """
    Formats file/folder sizes to standard, using binary (IEC) units.

    A ``None`` size is treated as zero bytes.
    """
    size = 0 if file_size is None else file_size
    return format_byte_size(size, binary=True)
1516
1516
1517
1517
def urlify_text(text_, safe=True, **href_attrs):
    """
    Extract http/https urls from text and make HTML ``<a>`` links out of them.

    :param text_: text to scan for urls
    :param safe: when True wrap the result in ``literal()`` so the template
        engine does not escape the generated markup again
    :param href_attrs: extra attributes set on every generated anchor tag
    :return: ``literal`` (when safe) or plain string with urls linkified
    """

    # NOTE: both halves of the pattern are raw strings; the original second
    # half was a plain string, so `\(` / `\)` were invalid escape sequences
    # (DeprecationWarning since Python 3.6, SyntaxWarning/err later).
    url_pat = re.compile(r'''(http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@#.&+]'''
                         r'''|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)''')

    def url_func(match_obj):
        # anchor text is the full matched url
        url_full = match_obj.groups()[0]
        a_options = dict(href_attrs)
        a_options['href'] = url_full
        a_text = url_full
        return HTML.tag("a", a_text, **a_options)

    _new_text = url_pat.sub(url_func, text_)

    if safe:
        return literal(_new_text)
    return _new_text
1538
1538
1539
1539
def urlify_commits(text_, repo_name):
    """
    Extract commit ids from text and make link from them

    :param text_:
    :param repo_name: repo name to build the URL with
    """

    # 12-40 hex chars delimited by whitespace or string boundaries
    commit_pat = re.compile(r'(^|\s)([0-9a-fA-F]{12,40})($|\s)')

    link_tmpl = (
        '%(pref)s<a class="tooltip-hovercard %(cls)s" href="%(url)s" data-hovercard-alt="%(hovercard_alt)s" data-hovercard-url="%(hovercard_url)s">'
        '%(commit_id)s</a>%(suf)s'
    )

    def _commit_link(match_obj):
        pref, commit_id, suf = match_obj.groups()
        return link_tmpl % {
            'pref': pref,
            'cls': 'revision-link',
            'url': route_url(
                'repo_commit', repo_name=repo_name, commit_id=commit_id),
            'commit_id': commit_id,
            'suf': suf,
            'hovercard_alt': 'Commit: {}'.format(commit_id),
            'hovercard_url': route_url(
                'hovercard_repo_commit', repo_name=repo_name, commit_id=commit_id)
        }

    return commit_pat.sub(_commit_link, text_)
1574
1574
1575
1575
def _process_url_func(match_obj, repo_name, uid, entry,
                      return_raw_data=False, link_format='html'):
    """
    Turn a single issue-tracker pattern match into a link (or raw data).

    :param match_obj: regex match of the issue reference in the text
    :param repo_name: repository name, used for variable substitution
    :param uid: pattern entry uid (unused here, kept for partial() callers)
    :param entry: pattern entry dict with at least `url`, `desc`, `pref` keys
    :param return_raw_data: when True return a dict of {id, url} instead of
        the rendered link
    :param link_format: one of html / html+hovercard / rst / rst+hovercard /
        markdown / markdown+hovercard
    :raises ValueError: for an unknown link_format
    """
    # preserve a single leading space so substitution doesn't glue words
    pref = ''
    if match_obj.group().startswith(' '):
        pref = ' '

    issue_id = ''.join(match_obj.groups())

    # pick the output template based on the requested link format
    if link_format == 'html':
        tmpl = (
            '%(pref)s<a class="tooltip %(cls)s" href="%(url)s" title="%(title)s">'
            '%(issue-prefix)s%(id-repr)s'
            '</a>')
    elif link_format == 'html+hovercard':
        tmpl = (
            '%(pref)s<a class="tooltip-hovercard %(cls)s" href="%(url)s" data-hovercard-url="%(hovercard_url)s">'
            '%(issue-prefix)s%(id-repr)s'
            '</a>')
    elif link_format in ['rst', 'rst+hovercard']:
        tmpl = '`%(issue-prefix)s%(id-repr)s <%(url)s>`_'
    elif link_format in ['markdown', 'markdown+hovercard']:
        tmpl = '[%(pref)s%(issue-prefix)s%(id-repr)s](%(url)s)'
    else:
        raise ValueError('Bad link_format:{}'.format(link_format))

    (repo_name_cleaned,
     parent_group_name) = RepoGroupModel()._get_group_name_and_parent(repo_name)

    # variables replacement
    named_vars = {
        'id': issue_id,
        'repo': repo_name,
        'repo_name': repo_name_cleaned,
        'group_name': parent_group_name,
        # set dummy keys so we always have them
        'hostname': '',
        'netloc': '',
        'scheme': ''
    }

    request = get_current_request()
    if request:
        # exposes, hostname, netloc, scheme
        host_data = get_host_info(request)
        named_vars.update(host_data)

    # named regex variables
    named_vars.update(match_obj.groupdict())
    # safe_substitute leaves unknown ${...} placeholders intact
    _url = string.Template(entry['url']).safe_substitute(**named_vars)
    desc = string.Template(escape(entry['desc'])).safe_substitute(**named_vars)
    hovercard_url = string.Template(entry.get('hovercard_url', '')).safe_substitute(**named_vars)

    def quote_cleaner(input_str):
        """Remove quotes as it's HTML"""
        return input_str.replace('"', '')

    data = {
        'pref': pref,
        'cls': quote_cleaner('issue-tracker-link'),
        'url': quote_cleaner(_url),
        'id-repr': issue_id,
        'issue-prefix': entry['pref'],
        'serv': entry['url'],
        # title goes into an HTML attribute; strip any markup from desc
        'title': bleach.clean(desc, strip=True),
        'hovercard_url': hovercard_url
    }

    if return_raw_data:
        return {
            'id': issue_id,
            'url': _url
        }
    return tmpl % data
1649
1649
1650
1650
def get_active_pattern_entries(repo_name):
    """
    Return the active issue-tracker pattern entries for ``repo_name``.
    """
    # Resolve the repo first so an invalid repo_name does not explode inside
    # IssueTrackerSettingsModel; the (possibly None) repo is passed on instead.
    repo = Repository.get_by_repo_name(repo_name, cache=True) if repo_name else None

    settings_model = IssueTrackerSettingsModel(repo=repo)
    return settings_model.get_settings(cache=True)
1661
1661
1662
1662
# matches pull-request references like `!123` at the start of the text or
# after a space; group 1 captures the numeric pull-request id
pr_pattern_re = regex.compile(r'(?:(?:^!)|(?: !))(\d+)')

# link formats accepted by process_patterns()
allowed_link_formats = [
    'html', 'rst', 'markdown', 'html+hovercard', 'rst+hovercard', 'markdown+hovercard']

# module-level cache of compiled issue-tracker patterns, keyed by pattern text
compile_cache = {

}
1671
1671
1672
1672
def process_patterns(text_string, repo_name, link_format='html', active_entries=None):
    """
    Replace issue-tracker references (and `!NNN` pull-request references) in
    ``text_string`` with links rendered in the requested format.

    :param text_string: text to process
    :param repo_name: repo name used to resolve pattern entries and urls
    :param link_format: one of ``allowed_link_formats``
    :param active_entries: pre-fetched pattern entries; fetched when ``None``
    :return: tuple of (processed text, list of raw issue data dicts,
        list of pattern-compilation errors)
    :raises ValueError: for an unsupported ``link_format``
    """

    if link_format not in allowed_link_formats:
        raise ValueError('Link format can be only one of:{} got {}'.format(
            allowed_link_formats, link_format))
    issues_data = []
    errors = []
    new_text = text_string

    if active_entries is None:
        log.debug('Fetch active issue tracker patterns for repo: %s', repo_name)
        active_entries = get_active_pattern_entries(repo_name)

    log.debug('Got %s pattern entries to process', len(active_entries))

    for uid, entry in active_entries.items():

        if not (entry['pat'] and entry['url']):
            log.debug('skipping due to missing data')
            continue

        log.debug('issue tracker entry: uid: `%s` PAT:%s URL:%s PREFIX:%s',
                  uid, entry['pat'], entry['url'], entry['pref'])

        # prefer a pre-compiled pattern, then the module-level cache,
        # compiling (and caching) only as a last resort
        if entry.get('pat_compiled'):
            pattern = entry['pat_compiled']
        elif entry['pat'] in compile_cache:
            pattern = compile_cache[entry['pat']]
        else:
            try:
                pattern = regex.compile(r'%s' % entry['pat'])
            except regex.error as e:
                # user-defined patterns may be invalid: collect the error
                # instead of failing the whole rendering
                regex_err = ValueError('{}:{}'.format(entry['pat'], e))
                log.exception('issue tracker pattern: `%s` failed to compile', regex_err)
                errors.append(regex_err)
                continue
            compile_cache[entry['pat']] = pattern

        # raw issue data is collected from the ORIGINAL text, while the
        # replacement runs on the progressively-rewritten new_text
        data_func = partial(
            _process_url_func, repo_name=repo_name, entry=entry, uid=uid,
            return_raw_data=True)

        for match_obj in pattern.finditer(text_string):
            issues_data.append(data_func(match_obj))

        url_func = partial(
            _process_url_func, repo_name=repo_name, entry=entry, uid=uid,
            link_format=link_format)

        new_text = pattern.sub(url_func, new_text)
        log.debug('processed prefix:uid `%s`', uid)

    # finally use global replace, eg !123 -> pr-link, those will not catch
    # if already similar pattern exists
    server_url = '${scheme}://${netloc}'
    pr_entry = {
        'pref': '!',
        'url': server_url + '/_admin/pull-requests/${id}',
        'desc': 'Pull Request !${id}',
        'hovercard_url': server_url + '/_hovercard/pull_request/${id}'
    }
    pr_url_func = partial(
        _process_url_func, repo_name=repo_name, entry=pr_entry, uid=None,
        link_format=link_format+'+hovercard')
    new_text = pr_pattern_re.sub(pr_url_func, new_text)
    log.debug('processed !pr pattern')

    return new_text, issues_data, errors
1741
1741
1742
1742
def urlify_commit_message(commit_text, repository=None, active_pattern_entries=None,
                          issues_container_callback=None, error_container=None):
    """
    Parses given text message and makes proper links.
    issues are linked to given issue-server, and rest is a commit link
    """

    # escape HTML special characters first, then progressively linkify
    new_text = commit_text.replace('<', '&lt;').replace('>', '&gt;')

    # extract http/https links and make them real urls
    new_text = urlify_text(new_text, safe=False)

    # urlify commits - extract commit ids and make link out of them, if we have
    # the scope of repository present.
    if repository:
        new_text = urlify_commits(new_text, repository)

    # process issue tracker patterns
    new_text, issues, errors = process_patterns(
        new_text, repository or '', active_entries=active_pattern_entries)

    if issues_container_callback is not None:
        for issue in issues:
            issues_container_callback(issue)

    if error_container is not None:
        error_container.extend(errors)

    return literal(new_text)
1775
1775
1776
1776
def render_binary(repo_name, file_obj):
    """
    Choose how to render a binary file
    """

    # unicode
    filename = file_obj.name

    # images get an inline <img> pointing at the raw-file endpoint;
    # everything else implicitly yields None (no rendering)
    image_patterns = ['*.png', '*.jpeg', '*.jpg', '*.ico', '*.gif']
    if any(fnmatch.fnmatch(filename, pat=ext) for ext in image_patterns):
        src = route_path(
            'repo_file_raw', repo_name=repo_name,
            commit_id=file_obj.commit.raw_id,
            f_path=file_obj.path)

        return literal(
            '<img class="rendered-binary" alt="rendered-image" src="{}">'.format(src))
1795
1795
1796
1796
def renderer_from_filename(filename, exclude=None):
    """
    choose a renderer based on filename, this works only for text based files
    """

    # jupyter notebooks have a dedicated renderer
    if fnmatch.fnmatch(filename, pat='*.ipynb'):
        return 'jupyter'

    # otherwise defer to the generic markup-renderer detection;
    # a falsy result means "no renderer" and is normalized to None
    return MarkupRenderer.renderer_from_filename(filename, exclude=exclude) or None
1811
1811
1812
1812
def render(source, renderer='rst', mentions=False, relative_urls=None,
           repo_name=None, active_pattern_entries=None, issues_container_callback=None):
    """
    Render ``source`` markup into an HTML block based on ``renderer``.

    Supported renderers: ``plain``, ``rst``, ``markdown``, ``jupyter``.
    Any other value returns ``None``, which means "just show the raw file
    source". When ``repo_name`` is given, issue-tracker patterns are
    substituted in the source first (rst/markdown only).
    """

    def maybe_convert_relative_links(html_source):
        # rewrite relative links only when a base url was provided
        if relative_urls:
            return relative_links(html_source, relative_urls)
        return html_source

    if renderer == 'plain':
        return literal(
            MarkupRenderer.plain(source, leading_newline=False))

    elif renderer == 'rst':
        if repo_name:
            # process patterns on comments if we pass in repo name
            source, issues, errors = process_patterns(
                source, repo_name, link_format='rst',
                active_entries=active_pattern_entries)
            if issues_container_callback is not None:
                for issue in issues:
                    issues_container_callback(issue)

        return literal(
            '<div class="rst-block">%s</div>' %
            maybe_convert_relative_links(
                MarkupRenderer.rst(source, mentions=mentions)))

    elif renderer == 'markdown':
        if repo_name:
            # process patterns on comments if we pass in repo name
            source, issues, errors = process_patterns(
                source, repo_name, link_format='markdown',
                active_entries=active_pattern_entries)
            if issues_container_callback is not None:
                for issue in issues:
                    issues_container_callback(issue)

        return literal(
            '<div class="markdown-block">%s</div>' %
            maybe_convert_relative_links(
                MarkupRenderer.markdown(source, flavored=True,
                                        mentions=mentions)))

    elif renderer == 'jupyter':
        return literal(
            '<div class="ipynb">%s</div>' %
            maybe_convert_relative_links(
                MarkupRenderer.jupyter(source)))

    # None means just show the file-source
    return None
1865
1865
1866
1866
def commit_status(repo, commit_id):
    """Return the changeset status recorded for ``commit_id`` in ``repo``."""
    model = ChangesetStatusModel()
    return model.get_status(repo, commit_id)
1869
1869
1870
1870
def commit_status_lbl(commit_status):
    """Translate a commit status code into its label (None when unknown)."""
    status_map = dict(ChangesetStatus.STATUSES)
    return status_map.get(commit_status)
1873
1873
1874
1874
def commit_time(repo_name, commit_id):
    """Return the date of commit ``commit_id`` in the named repository."""
    repo = Repository.get_by_repo_name(repo_name)
    return repo.get_commit(commit_id=commit_id).date
1879
1879
1880
1880
def get_permission_name(key):
    """Resolve a permission key to its descriptive name (None if unknown)."""
    perms = dict(Permission.PERMS)
    return perms.get(key)
1883
1883
1884
1884
def journal_filter_help(request):
    """
    Build the translated help text describing the journal filter syntax,
    including the full list of known audit-log actions.
    """
    _ = request.translate
    from rhodecode.lib.audit_logger import ACTIONS
    # wrap the (long) comma-separated list of action names at 80 columns
    actions = '\n'.join(textwrap.wrap(', '.join(sorted(ACTIONS.keys())), 80))

    # the `{actions}` placeholder is filled AFTER translation, so the action
    # list itself is never passed through the translator
    return _(
        'Example filter terms:\n' +
        ' repository:vcs\n' +
        ' username:marcin\n' +
        ' username:(NOT marcin)\n' +
        ' action:*push*\n' +
        ' ip:127.0.0.1\n' +
        ' date:20120101\n' +
        ' date:[20120101100000 TO 20120102]\n' +
        '\n' +
        'Actions: {actions}\n' +
        '\n' +
        'Generate wildcards using \'*\' character:\n' +
        ' "repository:vcs*" - search everything starting with \'vcs\'\n' +
        ' "repository:*vcs*" - search for repository containing \'vcs\'\n' +
        '\n' +
        'Optional AND / OR operators in queries\n' +
        ' "repository:vcs OR repository:test"\n' +
        ' "username:test AND repository:test*"\n'
    ).format(actions=actions)
1910
1910
1911
1911
def not_mapped_error(repo_name):
    """Flash an error telling the user ``repo_name`` is not mapped in the db."""
    from rhodecode.translation import _
    msg = _('%s repository is not mapped to db perhaps'
            ' it was created or renamed from the filesystem'
            ' please run the application again'
            ' in order to rescan repositories') % repo_name
    flash(msg, category='error')
1918
1918
1919
1919
def ip_range(ip_addr):
    """Return a human readable ``start - end`` range for an ip value."""
    from rhodecode.model.db import UserIpMap
    start_ip, end_ip = UserIpMap._get_ip_range(ip_addr)
    return '%s - %s' % (start_ip, end_ip)
1924
1924
1925
1925
def form(url, method='post', needs_csrf_token=True, **attrs):
    """Wrapper around webhelpers.tags.form to prevent CSRF attacks."""
    # only GET forms may legitimately skip the CSRF token
    if needs_csrf_token and method.lower() != 'get':
        raise Exception(
            'Forms to POST/PUT/DELETE endpoints should have (in general) a ' +
            'CSRF token. If the endpoint does not require such token you can ' +
            'explicitly set the parameter needs_csrf_token to false.')

    return insecure_form(url, method=method, **attrs)
1935
1935
1936
1936
def secure_form(form_url, method="POST", multipart=False, **attrs):
    """Start a form tag that points the action to an url. This
    form tag will also include the hidden field containing
    the auth token.

    The url options should be given either as a string, or as a
    ``url()`` function. The method for the form defaults to POST.

    Options:

    ``multipart``
        If set to True, the enctype is set to "multipart/form-data".
    ``method``
        The method to use when submitting the form, usually either
        "GET" or "POST". If "PUT", "DELETE", or another verb is used, a
        hidden input with name _method is added to simulate the verb
        over POST.

    """

    if 'request' not in attrs:
        raise ValueError(
            'Calling this form requires request= to be passed as argument')
    # the request is only needed to reach the session; it must not leak
    # into the generated form attributes
    session = attrs.pop('request').session

    form_tag = insecure_form(form_url, method, multipart, **attrs)
    token_input = literal(
        '<input type="hidden" name="{}" value="{}">'.format(
            csrf_token_key, get_csrf_token(session)))

    return literal("%s\n%s" % (form_tag, token_input))
1970
1970
1971
1971
def dropdownmenu(name, selected, options, enable_filter=False, **attrs):
    """
    Render a ``<select>`` element plus the inline script that turns it into
    a select2 drop-down; filtering (search box) is disabled unless
    ``enable_filter`` is True.
    """
    # NOTE(review): `extra_classes` is popped from attrs only AFTER the
    # select() call below has consumed **attrs, so it is also emitted as a
    # literal HTML attribute on the <select> tag — confirm this is intended.
    select_html = select(name, selected, options, **attrs)

    select2 = """
    <script>
            $(document).ready(function() {
                $('#%s').select2({
                    containerCssClass: 'drop-menu %s',
                    dropdownCssClass: 'drop-menu-dropdown',
                    dropdownAutoWidth: true%s
                });
            });
    </script>
    """

    filter_option = """,
            minimumResultsForSearch: -1
    """
    input_id = attrs.get('id') or name
    extra_classes = ' '.join(attrs.pop('extra_classes', []))
    # counter-intuitively, an EMPTY string here leaves select2's search
    # enabled; minimumResultsForSearch: -1 disables it
    filter_enabled = "" if enable_filter else filter_option
    select_script = literal(select2 % (input_id, extra_classes, filter_enabled))

    return literal(select_html+select_script)
1996
1996
1997
1997
1998 def get_visual_attr(tmpl_context_var, attr_name):
1998 def get_visual_attr(tmpl_context_var, attr_name):
1999 """
1999 """
2000 A safe way to get a variable from visual variable of template context
2000 A safe way to get a variable from visual variable of template context
2001
2001
2002 :param tmpl_context_var: instance of tmpl_context, usually present as `c`
2002 :param tmpl_context_var: instance of tmpl_context, usually present as `c`
2003 :param attr_name: name of the attribute we fetch from the c.visual
2003 :param attr_name: name of the attribute we fetch from the c.visual
2004 """
2004 """
2005 visual = getattr(tmpl_context_var, 'visual', None)
2005 visual = getattr(tmpl_context_var, 'visual', None)
2006 if not visual:
2006 if not visual:
2007 return
2007 return
2008 else:
2008 else:
2009 return getattr(visual, attr_name, None)
2009 return getattr(visual, attr_name, None)
2010
2010
2011
2011
2012 def get_last_path_part(file_node):
2012 def get_last_path_part(file_node):
2013 if not file_node.path:
2013 if not file_node.path:
2014 return u'/'
2014 return u'/'
2015
2015
2016 path = safe_unicode(file_node.path.split('/')[-1])
2016 path = safe_unicode(file_node.path.split('/')[-1])
2017 return u'../' + path
2017 return u'../' + path
2018
2018
2019
2019
2020 def route_url(*args, **kwargs):
2020 def route_url(*args, **kwargs):
2021 """
2021 """
2022 Wrapper around pyramids `route_url` (fully qualified url) function.
2022 Wrapper around pyramids `route_url` (fully qualified url) function.
2023 """
2023 """
2024 req = get_current_request()
2024 req = get_current_request()
2025 return req.route_url(*args, **kwargs)
2025 return req.route_url(*args, **kwargs)
2026
2026
2027
2027
2028 def route_path(*args, **kwargs):
2028 def route_path(*args, **kwargs):
2029 """
2029 """
2030 Wrapper around pyramids `route_path` function.
2030 Wrapper around pyramids `route_path` function.
2031 """
2031 """
2032 req = get_current_request()
2032 req = get_current_request()
2033 return req.route_path(*args, **kwargs)
2033 return req.route_path(*args, **kwargs)
2034
2034
2035
2035
2036 def route_path_or_none(*args, **kwargs):
2036 def route_path_or_none(*args, **kwargs):
2037 try:
2037 try:
2038 return route_path(*args, **kwargs)
2038 return route_path(*args, **kwargs)
2039 except KeyError:
2039 except KeyError:
2040 return None
2040 return None
2041
2041
2042
2042
2043 def current_route_path(request, **kw):
2043 def current_route_path(request, **kw):
2044 new_args = request.GET.mixed()
2044 new_args = request.GET.mixed()
2045 new_args.update(kw)
2045 new_args.update(kw)
2046 return request.current_route_path(_query=new_args)
2046 return request.current_route_path(_query=new_args)
2047
2047
2048
2048
2049 def curl_api_example(method, args):
2049 def curl_api_example(method, args):
2050 args_json = json.dumps(OrderedDict([
2050 args_json = json.dumps(OrderedDict([
2051 ('id', 1),
2051 ('id', 1),
2052 ('auth_token', 'SECRET'),
2052 ('auth_token', 'SECRET'),
2053 ('method', method),
2053 ('method', method),
2054 ('args', args)
2054 ('args', args)
2055 ]))
2055 ]))
2056
2056
2057 return "curl {api_url} -X POST -H 'content-type:text/plain' --data-binary '{args_json}'".format(
2057 return "curl {api_url} -X POST -H 'content-type:text/plain' --data-binary '{args_json}'".format(
2058 api_url=route_url('apiv2'),
2058 api_url=route_url('apiv2'),
2059 args_json=args_json
2059 args_json=args_json
2060 )
2060 )
2061
2061
2062
2062
2063 def api_call_example(method, args):
2063 def api_call_example(method, args):
2064 """
2064 """
2065 Generates an API call example via CURL
2065 Generates an API call example via CURL
2066 """
2066 """
2067 curl_call = curl_api_example(method, args)
2067 curl_call = curl_api_example(method, args)
2068
2068
2069 return literal(
2069 return literal(
2070 curl_call +
2070 curl_call +
2071 "<br/><br/>SECRET can be found in <a href=\"{token_url}\">auth-tokens</a> page, "
2071 "<br/><br/>SECRET can be found in <a href=\"{token_url}\">auth-tokens</a> page, "
2072 "and needs to be of `api calls` role."
2072 "and needs to be of `api calls` role."
2073 .format(token_url=route_url('my_account_auth_tokens')))
2073 .format(token_url=route_url('my_account_auth_tokens')))
2074
2074
2075
2075
2076 def notification_description(notification, request):
2076 def notification_description(notification, request):
2077 """
2077 """
2078 Generate notification human readable description based on notification type
2078 Generate notification human readable description based on notification type
2079 """
2079 """
2080 from rhodecode.model.notification import NotificationModel
2080 from rhodecode.model.notification import NotificationModel
2081 return NotificationModel().make_description(
2081 return NotificationModel().make_description(
2082 notification, translate=request.translate)
2082 notification, translate=request.translate)
2083
2083
2084
2084
2085 def go_import_header(request, db_repo=None):
2085 def go_import_header(request, db_repo=None):
2086 """
2086 """
2087 Creates a header for go-import functionality in Go Lang
2087 Creates a header for go-import functionality in Go Lang
2088 """
2088 """
2089
2089
2090 if not db_repo:
2090 if not db_repo:
2091 return
2091 return
2092 if 'go-get' not in request.GET:
2092 if 'go-get' not in request.GET:
2093 return
2093 return
2094
2094
2095 clone_url = db_repo.clone_url()
2095 clone_url = db_repo.clone_url()
2096 prefix = re.split(r'^https?:\/\/', clone_url)[-1]
2096 prefix = re.split(r'^https?:\/\/', clone_url)[-1]
2097 # we have a repo and go-get flag,
2097 # we have a repo and go-get flag,
2098 return literal('<meta name="go-import" content="{} {} {}">'.format(
2098 return literal('<meta name="go-import" content="{} {} {}">'.format(
2099 prefix, db_repo.repo_type, clone_url))
2099 prefix, db_repo.repo_type, clone_url))
2100
2100
2101
2101
2102 def reviewer_as_json(*args, **kwargs):
2102 def reviewer_as_json(*args, **kwargs):
2103 from rhodecode.apps.repository.utils import reviewer_as_json as _reviewer_as_json
2103 from rhodecode.apps.repository.utils import reviewer_as_json as _reviewer_as_json
2104 return _reviewer_as_json(*args, **kwargs)
2104 return _reviewer_as_json(*args, **kwargs)
2105
2105
2106
2106
2107 def get_repo_view_type(request):
2107 def get_repo_view_type(request):
2108 route_name = request.matched_route.name
2108 route_name = request.matched_route.name
2109 route_to_view_type = {
2109 route_to_view_type = {
2110 'repo_changelog': 'commits',
2110 'repo_changelog': 'commits',
2111 'repo_commits': 'commits',
2111 'repo_commits': 'commits',
2112 'repo_files': 'files',
2112 'repo_files': 'files',
2113 'repo_summary': 'summary',
2113 'repo_summary': 'summary',
2114 'repo_commit': 'commit'
2114 'repo_commit': 'commit'
2115 }
2115 }
2116
2116
2117 return route_to_view_type.get(route_name)
2117 return route_to_view_type.get(route_name)
2118
2118
2119
2119
2120 def is_active(menu_entry, selected):
2120 def is_active(menu_entry, selected):
2121 """
2121 """
2122 Returns active class for selecting menus in templates
2122 Returns active class for selecting menus in templates
2123 <li class=${h.is_active('settings', current_active)}></li>
2123 <li class=${h.is_active('settings', current_active)}></li>
2124 """
2124 """
2125 if not isinstance(menu_entry, list):
2125 if not isinstance(menu_entry, list):
2126 menu_entry = [menu_entry]
2126 menu_entry = [menu_entry]
2127
2127
2128 if selected in menu_entry:
2128 if selected in menu_entry:
2129 return "active"
2129 return "active"
2130
2130
2131
2131
2132 class IssuesRegistry(object):
2132 class IssuesRegistry(object):
2133 """
2133 """
2134 issue_registry = IssuesRegistry()
2134 issue_registry = IssuesRegistry()
2135 some_func(issues_callback=issues_registry(...))
2135 some_func(issues_callback=issues_registry(...))
2136 """
2136 """
2137
2137
2138 def __init__(self):
2138 def __init__(self):
2139 self.issues = []
2139 self.issues = []
2140 self.unique_issues = collections.defaultdict(lambda: [])
2140 self.unique_issues = collections.defaultdict(lambda: [])
2141
2141
2142 def __call__(self, commit_dict=None):
2142 def __call__(self, commit_dict=None):
2143 def callback(issue):
2143 def callback(issue):
2144 if commit_dict and issue:
2144 if commit_dict and issue:
2145 issue['commit'] = commit_dict
2145 issue['commit'] = commit_dict
2146 self.issues.append(issue)
2146 self.issues.append(issue)
2147 self.unique_issues[issue['id']].append(issue)
2147 self.unique_issues[issue['id']].append(issue)
2148 return callback
2148 return callback
2149
2149
2150 def get_issues(self):
2150 def get_issues(self):
2151 return self.issues
2151 return self.issues
2152
2152
2153 @property
2153 @property
2154 def issues_unique_count(self):
2154 def issues_unique_count(self):
2155 return len(set(i['id'] for i in self.issues))
2155 return len(set(i['id'] for i in self.issues))
@@ -1,1012 +1,1013 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2020 RhodeCode GmbH
3 # Copyright (C) 2014-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 HG repository module
22 HG repository module
23 """
23 """
24 import os
24 import os
25 import logging
25 import logging
26 import binascii
26 import binascii
27 import configparser
27 import configparser
28 import urllib.request, urllib.parse, urllib.error
28 import urllib.request, urllib.parse, urllib.error
29
29
30 from zope.cachedescriptors.property import Lazy as LazyProperty
30 from zope.cachedescriptors.property import Lazy as LazyProperty
31
31
32 from collections import OrderedDict
32 from collections import OrderedDict
33 from rhodecode.lib.datelib import (
33 from rhodecode.lib.datelib import (
34 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
34 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
35 from rhodecode.lib.utils import safe_unicode, safe_str
35 from rhodecode.lib.utils import safe_unicode, safe_str
36 from rhodecode.lib.utils2 import CachedProperty
36 from rhodecode.lib.utils2 import CachedProperty
37 from rhodecode.lib.vcs import connection, exceptions
37 from rhodecode.lib.vcs import connection, exceptions
38 from rhodecode.lib.vcs.backends.base import (
38 from rhodecode.lib.vcs.backends.base import (
39 BaseRepository, CollectionGenerator, Config, MergeResponse,
39 BaseRepository, CollectionGenerator, Config, MergeResponse,
40 MergeFailureReason, Reference, BasePathPermissionChecker)
40 MergeFailureReason, Reference, BasePathPermissionChecker)
41 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
41 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
42 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
42 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
43 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
43 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
44 from rhodecode.lib.vcs.exceptions import (
44 from rhodecode.lib.vcs.exceptions import (
45 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
45 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
46 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError, UnresolvedFilesInRepo)
46 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError, UnresolvedFilesInRepo)
47
47
48 hexlify = binascii.hexlify
48 hexlify = binascii.hexlify
49 nullid = "\0" * 20
49 nullid = "\0" * 20
50
50
51 log = logging.getLogger(__name__)
51 log = logging.getLogger(__name__)
52
52
53
53
54 class MercurialRepository(BaseRepository):
54 class MercurialRepository(BaseRepository):
55 """
55 """
56 Mercurial repository backend
56 Mercurial repository backend
57 """
57 """
58 DEFAULT_BRANCH_NAME = 'default'
58 DEFAULT_BRANCH_NAME = 'default'
59
59
60 def __init__(self, repo_path, config=None, create=False, src_url=None,
60 def __init__(self, repo_path, config=None, create=False, src_url=None,
61 do_workspace_checkout=False, with_wire=None, bare=False):
61 do_workspace_checkout=False, with_wire=None, bare=False):
62 """
62 """
63 Raises RepositoryError if repository could not be find at the given
63 Raises RepositoryError if repository could not be find at the given
64 ``repo_path``.
64 ``repo_path``.
65
65
66 :param repo_path: local path of the repository
66 :param repo_path: local path of the repository
67 :param config: config object containing the repo configuration
67 :param config: config object containing the repo configuration
68 :param create=False: if set to True, would try to create repository if
68 :param create=False: if set to True, would try to create repository if
69 it does not exist rather than raising exception
69 it does not exist rather than raising exception
70 :param src_url=None: would try to clone repository from given location
70 :param src_url=None: would try to clone repository from given location
71 :param do_workspace_checkout=False: sets update of working copy after
71 :param do_workspace_checkout=False: sets update of working copy after
72 making a clone
72 making a clone
73 :param bare: not used, compatible with other VCS
73 :param bare: not used, compatible with other VCS
74 """
74 """
75
75
76 self.path = safe_str(os.path.abspath(repo_path))
76 self.path = safe_str(os.path.abspath(repo_path))
77 # mercurial since 4.4.X requires certain configuration to be present
77 # mercurial since 4.4.X requires certain configuration to be present
78 # because sometimes we init the repos with config we need to meet
78 # because sometimes we init the repos with config we need to meet
79 # special requirements
79 # special requirements
80 self.config = config if config else self.get_default_config(
80 self.config = config if config else self.get_default_config(
81 default=[('extensions', 'largefiles', '1')])
81 default=[('extensions', 'largefiles', '1')])
82 self.with_wire = with_wire or {"cache": False} # default should not use cache
82 self.with_wire = with_wire or {"cache": False} # default should not use cache
83
83
84 self._init_repo(create, src_url, do_workspace_checkout)
84 self._init_repo(create, src_url, do_workspace_checkout)
85
85
86 # caches
86 # caches
87 self._commit_ids = {}
87 self._commit_ids = {}
88
88
89 @LazyProperty
89 @LazyProperty
90 def _remote(self):
90 def _remote(self):
91 repo_id = self.path
91 repo_id = self.path
92 return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire)
92 return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire)
93
93
94 @CachedProperty
94 @CachedProperty
95 def commit_ids(self):
95 def commit_ids(self):
96 """
96 """
97 Returns list of commit ids, in ascending order. Being lazy
97 Returns list of commit ids, in ascending order. Being lazy
98 attribute allows external tools to inject shas from cache.
98 attribute allows external tools to inject shas from cache.
99 """
99 """
100 commit_ids = self._get_all_commit_ids()
100 commit_ids = self._get_all_commit_ids()
101 self._rebuild_cache(commit_ids)
101 self._rebuild_cache(commit_ids)
102 return commit_ids
102 return commit_ids
103
103
104 def _rebuild_cache(self, commit_ids):
104 def _rebuild_cache(self, commit_ids):
105 self._commit_ids = dict((commit_id, index)
105 self._commit_ids = dict((commit_id, index)
106 for index, commit_id in enumerate(commit_ids))
106 for index, commit_id in enumerate(commit_ids))
107
107
108 @CachedProperty
108 @CachedProperty
109 def branches(self):
109 def branches(self):
110 return self._get_branches()
110 return self._get_branches()
111
111
112 @CachedProperty
112 @CachedProperty
113 def branches_closed(self):
113 def branches_closed(self):
114 return self._get_branches(active=False, closed=True)
114 return self._get_branches(active=False, closed=True)
115
115
116 @CachedProperty
116 @CachedProperty
117 def branches_all(self):
117 def branches_all(self):
118 all_branches = {}
118 all_branches = {}
119 all_branches.update(self.branches)
119 all_branches.update(self.branches)
120 all_branches.update(self.branches_closed)
120 all_branches.update(self.branches_closed)
121 return all_branches
121 return all_branches
122
122
123 def _get_branches(self, active=True, closed=False):
123 def _get_branches(self, active=True, closed=False):
124 """
124 """
125 Gets branches for this repository
125 Gets branches for this repository
126 Returns only not closed active branches by default
126 Returns only not closed active branches by default
127
127
128 :param active: return also active branches
128 :param active: return also active branches
129 :param closed: return also closed branches
129 :param closed: return also closed branches
130
130
131 """
131 """
132 if self.is_empty():
132 if self.is_empty():
133 return {}
133 return {}
134
134
135 def get_name(ctx):
135 def get_name(ctx):
136 return ctx[0]
136 return ctx[0]
137
137
138 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
138 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
139 self._remote.branches(active, closed).items()]
139 self._remote.branches(active, closed).items()]
140
140
141 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
141 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
142
142
143 @CachedProperty
143 @CachedProperty
144 def tags(self):
144 def tags(self):
145 """
145 """
146 Gets tags for this repository
146 Gets tags for this repository
147 """
147 """
148 return self._get_tags()
148 return self._get_tags()
149
149
150 def _get_tags(self):
150 def _get_tags(self):
151 if self.is_empty():
151 if self.is_empty():
152 return {}
152 return {}
153
153
154 def get_name(ctx):
154 def get_name(ctx):
155 return ctx[0]
155 return ctx[0]
156
156
157 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
157 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
158 self._remote.tags().items()]
158 self._remote.tags().items()]
159
159
160 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
160 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
161
161
162 def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
162 def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
163 """
163 """
164 Creates and returns a tag for the given ``commit_id``.
164 Creates and returns a tag for the given ``commit_id``.
165
165
166 :param name: name for new tag
166 :param name: name for new tag
167 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
167 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
168 :param commit_id: commit id for which new tag would be created
168 :param commit_id: commit id for which new tag would be created
169 :param message: message of the tag's commit
169 :param message: message of the tag's commit
170 :param date: date of tag's commit
170 :param date: date of tag's commit
171
171
172 :raises TagAlreadyExistError: if tag with same name already exists
172 :raises TagAlreadyExistError: if tag with same name already exists
173 """
173 """
174 if name in self.tags:
174 if name in self.tags:
175 raise TagAlreadyExistError("Tag %s already exists" % name)
175 raise TagAlreadyExistError("Tag %s already exists" % name)
176
176
177 commit = self.get_commit(commit_id=commit_id)
177 commit = self.get_commit(commit_id=commit_id)
178 local = kwargs.setdefault('local', False)
178 local = kwargs.setdefault('local', False)
179
179
180 if message is None:
180 if message is None:
181 message = "Added tag %s for commit %s" % (name, commit.short_id)
181 message = "Added tag %s for commit %s" % (name, commit.short_id)
182
182
183 date, tz = date_to_timestamp_plus_offset(date)
183 date, tz = date_to_timestamp_plus_offset(date)
184
184
185 self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
185 self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
186 self._remote.invalidate_vcs_cache()
186 self._remote.invalidate_vcs_cache()
187
187
188 # Reinitialize tags
188 # Reinitialize tags
189 self._invalidate_prop_cache('tags')
189 self._invalidate_prop_cache('tags')
190 tag_id = self.tags[name]
190 tag_id = self.tags[name]
191
191
192 return self.get_commit(commit_id=tag_id)
192 return self.get_commit(commit_id=tag_id)
193
193
194 def remove_tag(self, name, user, message=None, date=None):
194 def remove_tag(self, name, user, message=None, date=None):
195 """
195 """
196 Removes tag with the given `name`.
196 Removes tag with the given `name`.
197
197
198 :param name: name of the tag to be removed
198 :param name: name of the tag to be removed
199 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
199 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
200 :param message: message of the tag's removal commit
200 :param message: message of the tag's removal commit
201 :param date: date of tag's removal commit
201 :param date: date of tag's removal commit
202
202
203 :raises TagDoesNotExistError: if tag with given name does not exists
203 :raises TagDoesNotExistError: if tag with given name does not exists
204 """
204 """
205 if name not in self.tags:
205 if name not in self.tags:
206 raise TagDoesNotExistError("Tag %s does not exist" % name)
206 raise TagDoesNotExistError("Tag %s does not exist" % name)
207
207
208 if message is None:
208 if message is None:
209 message = "Removed tag %s" % name
209 message = "Removed tag %s" % name
210 local = False
210 local = False
211
211
212 date, tz = date_to_timestamp_plus_offset(date)
212 date, tz = date_to_timestamp_plus_offset(date)
213
213
214 self._remote.tag(name, nullid, message, local, user, date, tz)
214 self._remote.tag(name, nullid, message, local, user, date, tz)
215 self._remote.invalidate_vcs_cache()
215 self._remote.invalidate_vcs_cache()
216 self._invalidate_prop_cache('tags')
216 self._invalidate_prop_cache('tags')
217
217
218 @LazyProperty
218 @LazyProperty
219 def bookmarks(self):
219 def bookmarks(self):
220 """
220 """
221 Gets bookmarks for this repository
221 Gets bookmarks for this repository
222 """
222 """
223 return self._get_bookmarks()
223 return self._get_bookmarks()
224
224
225 def _get_bookmarks(self):
225 def _get_bookmarks(self):
226 if self.is_empty():
226 if self.is_empty():
227 return {}
227 return {}
228
228
229 def get_name(ctx):
229 def get_name(ctx):
230 return ctx[0]
230 return ctx[0]
231
231
232 _bookmarks = [
232 _bookmarks = [
233 (safe_unicode(n), hexlify(h)) for n, h in
233 (safe_unicode(n), hexlify(h)) for n, h in
234 self._remote.bookmarks().items()]
234 self._remote.bookmarks().items()]
235
235
236 return OrderedDict(sorted(_bookmarks, key=get_name))
236 return OrderedDict(sorted(_bookmarks, key=get_name))
237
237
238 def _get_all_commit_ids(self):
238 def _get_all_commit_ids(self):
239 return self._remote.get_all_commit_ids('visible')
239 return self._remote.get_all_commit_ids('visible')
240
240
241 def get_diff(
241 def get_diff(
242 self, commit1, commit2, path='', ignore_whitespace=False,
242 self, commit1, commit2, path='', ignore_whitespace=False,
243 context=3, path1=None):
243 context=3, path1=None):
244 """
244 """
245 Returns (git like) *diff*, as plain text. Shows changes introduced by
245 Returns (git like) *diff*, as plain text. Shows changes introduced by
246 `commit2` since `commit1`.
246 `commit2` since `commit1`.
247
247
248 :param commit1: Entry point from which diff is shown. Can be
248 :param commit1: Entry point from which diff is shown. Can be
249 ``self.EMPTY_COMMIT`` - in this case, patch showing all
249 ``self.EMPTY_COMMIT`` - in this case, patch showing all
250 the changes since empty state of the repository until `commit2`
250 the changes since empty state of the repository until `commit2`
251 :param commit2: Until which commit changes should be shown.
251 :param commit2: Until which commit changes should be shown.
252 :param ignore_whitespace: If set to ``True``, would not show whitespace
252 :param ignore_whitespace: If set to ``True``, would not show whitespace
253 changes. Defaults to ``False``.
253 changes. Defaults to ``False``.
254 :param context: How many lines before/after changed lines should be
254 :param context: How many lines before/after changed lines should be
255 shown. Defaults to ``3``.
255 shown. Defaults to ``3``.
256 """
256 """
257 self._validate_diff_commits(commit1, commit2)
257 self._validate_diff_commits(commit1, commit2)
258 if path1 is not None and path1 != path:
258 if path1 is not None and path1 != path:
259 raise ValueError("Diff of two different paths not supported.")
259 raise ValueError("Diff of two different paths not supported.")
260
260
261 if path:
261 if path:
262 file_filter = [self.path, path]
262 file_filter = [self.path, path]
263 else:
263 else:
264 file_filter = None
264 file_filter = None
265
265
266 diff = self._remote.diff(
266 diff = self._remote.diff(
267 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
267 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
268 opt_git=True, opt_ignorews=ignore_whitespace,
268 opt_git=True, opt_ignorews=ignore_whitespace,
269 context=context)
269 context=context)
270 return MercurialDiff(diff)
270 return MercurialDiff(diff)
271
271
272 def strip(self, commit_id, branch=None):
272 def strip(self, commit_id, branch=None):
273 self._remote.strip(commit_id, update=False, backup="none")
273 self._remote.strip(commit_id, update=False, backup="none")
274
274
275 self._remote.invalidate_vcs_cache()
275 self._remote.invalidate_vcs_cache()
276 # clear cache
276 # clear cache
277 self._invalidate_prop_cache('commit_ids')
277 self._invalidate_prop_cache('commit_ids')
278
278
279 return len(self.commit_ids)
279 return len(self.commit_ids)
280
280
281 def verify(self):
281 def verify(self):
282 verify = self._remote.verify()
282 verify = self._remote.verify()
283
283
284 self._remote.invalidate_vcs_cache()
284 self._remote.invalidate_vcs_cache()
285 return verify
285 return verify
286
286
287 def hg_update_cache(self):
287 def hg_update_cache(self):
288 update_cache = self._remote.hg_update_cache()
288 update_cache = self._remote.hg_update_cache()
289
289
290 self._remote.invalidate_vcs_cache()
290 self._remote.invalidate_vcs_cache()
291 return update_cache
291 return update_cache
292
292
293 def hg_rebuild_fn_cache(self):
293 def hg_rebuild_fn_cache(self):
294 update_cache = self._remote.hg_rebuild_fn_cache()
294 update_cache = self._remote.hg_rebuild_fn_cache()
295
295
296 self._remote.invalidate_vcs_cache()
296 self._remote.invalidate_vcs_cache()
297 return update_cache
297 return update_cache
298
298
299 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
299 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
300 log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
300 log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
301 self, commit_id1, repo2, commit_id2)
301 self, commit_id1, repo2, commit_id2)
302
302
303 if commit_id1 == commit_id2:
303 if commit_id1 == commit_id2:
304 return commit_id1
304 return commit_id1
305
305
306 ancestors = self._remote.revs_from_revspec(
306 ancestors = self._remote.revs_from_revspec(
307 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
307 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
308 other_path=repo2.path)
308 other_path=repo2.path)
309
309
310 ancestor_id = repo2[ancestors[0]].raw_id if ancestors else None
310 ancestor_id = repo2[ancestors[0]].raw_id if ancestors else None
311
311
312 log.debug('Found common ancestor with sha: %s', ancestor_id)
312 log.debug('Found common ancestor with sha: %s', ancestor_id)
313 return ancestor_id
313 return ancestor_id
314
314
315 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
315 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
316 if commit_id1 == commit_id2:
316 if commit_id1 == commit_id2:
317 commits = []
317 commits = []
318 else:
318 else:
319 if merge:
319 if merge:
320 indexes = self._remote.revs_from_revspec(
320 indexes = self._remote.revs_from_revspec(
321 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
321 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
322 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
322 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
323 else:
323 else:
324 indexes = self._remote.revs_from_revspec(
324 indexes = self._remote.revs_from_revspec(
325 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
325 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
326 commit_id1, other_path=repo2.path)
326 commit_id1, other_path=repo2.path)
327
327
328 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
328 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
329 for idx in indexes]
329 for idx in indexes]
330
330
331 return commits
331 return commits
332
332
333 @staticmethod
333 @staticmethod
334 def check_url(url, config):
334 def check_url(url, config):
335 """
335 """
336 Function will check given url and try to verify if it's a valid
336 Function will check given url and try to verify if it's a valid
337 link. Sometimes it may happened that mercurial will issue basic
337 link. Sometimes it may happened that mercurial will issue basic
338 auth request that can cause whole API to hang when used from python
338 auth request that can cause whole API to hang when used from python
339 or other external calls.
339 or other external calls.
340
340
341 On failures it'll raise urllib2.HTTPError, exception is also thrown
341 On failures it'll raise urllib2.HTTPError, exception is also thrown
342 when the return code is non 200
342 when the return code is non 200
343 """
343 """
344 # check first if it's not an local url
344 # check first if it's not an local url
345 if os.path.isdir(url) or url.startswith('file:'):
345 if os.path.isdir(url) or url.startswith('file:'):
346 return True
346 return True
347
347
348 # Request the _remote to verify the url
348 # Request the _remote to verify the url
349 return connection.Hg.check_url(url, config.serialize())
349 return connection.Hg.check_url(url, config.serialize())
350
350
351 @staticmethod
351 @staticmethod
352 def is_valid_repository(path):
352 def is_valid_repository(path):
353 return os.path.isdir(os.path.join(path, '.hg'))
353 return os.path.isdir(os.path.join(path, '.hg'))
354
354
    def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
        """
        Function will check for mercurial repository in given path. If there
        is no repository in that path it will raise an exception unless
        `create` parameter is set to True - in that case repository would
        be created.

        If `src_url` is given, would try to clone repository from the
        location at given clone_point. Additionally it'll make update to
        working copy accordingly to `do_workspace_checkout` flag.

        :param create: create a new repository at ``self.path`` when True
        :param src_url: optional url to clone the repository from
        :param do_workspace_checkout: update the working copy after clone
        :raises RepositoryError: when ``create`` is requested but the
            target path already exists
        """
        # refuse to create on top of an existing location
        if create and os.path.exists(self.path):
            raise RepositoryError(
                "Cannot create repository at %s, location already exist"
                % self.path)

        if src_url:
            # normalize and verify the source url before cloning
            url = str(self._get_url(src_url))
            MercurialRepository.check_url(url, self.config)

            self._remote.clone(url, self.path, do_workspace_checkout)

            # Don't try to create if we've already cloned repo
            create = False

        if create:
            os.makedirs(self.path, mode=0o755)
            self._remote.localrepository(create)
383
383
384 @LazyProperty
384 @LazyProperty
385 def in_memory_commit(self):
385 def in_memory_commit(self):
386 return MercurialInMemoryCommit(self)
386 return MercurialInMemoryCommit(self)
387
387
388 @LazyProperty
388 @LazyProperty
389 def description(self):
389 def description(self):
390 description = self._remote.get_config_value(
390 description = self._remote.get_config_value(
391 'web', 'description', untrusted=True)
391 'web', 'description', untrusted=True)
392 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
392 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
393
393
394 @LazyProperty
394 @LazyProperty
395 def contact(self):
395 def contact(self):
396 contact = (
396 contact = (
397 self._remote.get_config_value("web", "contact") or
397 self._remote.get_config_value("web", "contact") or
398 self._remote.get_config_value("ui", "username"))
398 self._remote.get_config_value("ui", "username"))
399 return safe_unicode(contact or self.DEFAULT_CONTACT)
399 return safe_unicode(contact or self.DEFAULT_CONTACT)
400
400
401 @LazyProperty
401 @LazyProperty
402 def last_change(self):
402 def last_change(self):
403 """
403 """
404 Returns last change made on this repository as
404 Returns last change made on this repository as
405 `datetime.datetime` object.
405 `datetime.datetime` object.
406 """
406 """
407 try:
407 try:
408 return self.get_commit().date
408 return self.get_commit().date
409 except RepositoryError:
409 except RepositoryError:
410 tzoffset = makedate()[1]
410 tzoffset = makedate()[1]
411 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
411 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
412
412
413 def _get_fs_mtime(self):
413 def _get_fs_mtime(self):
414 # fallback to filesystem
414 # fallback to filesystem
415 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
415 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
416 st_path = os.path.join(self.path, '.hg', "store")
416 st_path = os.path.join(self.path, '.hg', "store")
417 if os.path.exists(cl_path):
417 if os.path.exists(cl_path):
418 return os.stat(cl_path).st_mtime
418 return os.stat(cl_path).st_mtime
419 else:
419 else:
420 return os.stat(st_path).st_mtime
420 return os.stat(st_path).st_mtime
421
421
422 def _get_url(self, url):
422 def _get_url(self, url):
423 """
423 """
424 Returns normalized url. If schema is not given, would fall
424 Returns normalized url. If schema is not given, would fall
425 to filesystem
425 to filesystem
426 (``file:///``) schema.
426 (``file:///``) schema.
427 """
427 """
428 url = url.encode('utf8')
428 url = url.encode('utf8')
429 if url != 'default' and '://' not in url:
429 if url != 'default' and '://' not in url:
430 url = "file:" + urllib.request.pathname2url(url)
430 url = "file:" + urllib.request.pathname2url(url)
431 return url
431 return url
432
432
433 def get_hook_location(self):
433 def get_hook_location(self):
434 """
434 """
435 returns absolute path to location where hooks are stored
435 returns absolute path to location where hooks are stored
436 """
436 """
437 return os.path.join(self.path, '.hg', '.hgrc')
437 return os.path.join(self.path, '.hg', '.hgrc')
438
438
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
                   translate_tag=None, maybe_unreachable=False, reference_obj=None):
        """
        Returns ``MercurialCommit`` object representing repository's
        commit at the given `commit_id` or `commit_idx`.

        :param commit_id: commit hash or symbolic name resolvable by the
            remote ``lookup`` call; defaults to "tip" when neither id nor
            idx is given
        :param commit_idx: numeric index into ``commit_ids``
        :param pre_load: optional list of commit attributes to pre-load
        :param translate_tag: accepted for interface compatibility; unused here
        :param maybe_unreachable: accepted for interface compatibility; unused here
        :param reference_obj: accepted for interface compatibility; unused here
        :raises EmptyRepositoryError: if the repository has no commits
        :raises CommitDoesNotExistError: if nothing resolves for the input
        """
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")

        if commit_id is not None:
            self._validate_commit_id(commit_id)
            try:
                # we have cached idx, use it without contacting the remote
                idx = self._commit_ids[commit_id]
                return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
            except KeyError:
                pass

        elif commit_idx is not None:
            self._validate_commit_idx(commit_idx)
            try:
                _commit_id = self.commit_ids[commit_idx]
                if commit_idx < 0:
                    # normalize a negative index to its positive position
                    commit_idx = self.commit_ids.index(_commit_id)

                return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
            except IndexError:
                # fall through to the remote lookup below with the raw value
                commit_id = commit_idx
        else:
            commit_id = "tip"

        #TODO: decide if we pass bytes or str into lookup ?
        # if isinstance(commit_id, unicode):
        #     commit_id = safe_str(commit_id)

        try:
            raw_id, idx = self._remote.lookup(commit_id, both=True)
        except CommitDoesNotExistError:
            msg = "Commit {} does not exist for `{}`".format(
                *map(safe_str, [commit_id, self.name]))
            raise CommitDoesNotExistError(msg)

        return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
481
482
    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
        """
        Returns generator of ``MercurialCommit`` objects from start to end
        (both are inclusive)

        :param start_id: None, str(commit_id)
        :param end_id: None, str(commit_id)
        :param start_date: if specified, commits with commit date less than
            ``start_date`` would be filtered out from returned set
        :param end_date: if specified, commits with commit date greater than
            ``end_date`` would be filtered out from returned set
        :param branch_name: if specified, commits not reachable from given
            branch would be filtered out from returned set
        :param show_hidden: Show hidden commits such as obsolete or hidden from
            Mercurial evolve
        :param pre_load: optional list of commit attributes to pre-load
        :param translate_tags: accepted for interface compatibility; unused here
        :raise BranchDoesNotExistError: If given ``branch_name`` does not
            exist.
        :raise CommitDoesNotExistError: If commit for given ``start`` or
            ``end`` could not be found.
        """
        # actually we should check now if it's not an empty repo
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")
        self._validate_branch_name(branch_name)

        branch_ancestors = False
        # resolve the start/end boundaries to positions in commit_ids
        if start_id is not None:
            self._validate_commit_id(start_id)
            c_start = self.get_commit(commit_id=start_id)
            start_pos = self._commit_ids[c_start.raw_id]
        else:
            start_pos = None

        if end_id is not None:
            self._validate_commit_id(end_id)
            c_end = self.get_commit(commit_id=end_id)
            end_pos = max(0, self._commit_ids[c_end.raw_id])
        else:
            end_pos = None

        if None not in [start_id, end_id] and start_pos > end_pos:
            raise RepositoryError(
                "Start commit '%s' cannot be after end commit '%s'" %
                (start_id, end_id))

        if end_pos is not None:
            # make the end boundary inclusive for the slice below
            end_pos += 1

        # build a mercurial revset expression from the requested filters
        commit_filter = []

        if branch_name and not branch_ancestors:
            commit_filter.append('branch("%s")' % (branch_name,))
        elif branch_name and branch_ancestors:
            commit_filter.append('ancestors(branch("%s"))' % (branch_name,))

        if start_date and not end_date:
            commit_filter.append('date(">%s")' % (start_date,))
        if end_date and not start_date:
            commit_filter.append('date("<%s")' % (end_date,))
        if start_date and end_date:
            commit_filter.append(
                'date(">%s") and date("<%s")' % (start_date, end_date))

        if not show_hidden:
            commit_filter.append('not obsolete()')
            commit_filter.append('not hidden()')

        # TODO: johbo: Figure out a simpler way for this solution
        collection_generator = CollectionGenerator
        if commit_filter:
            # filtered set: ask the remote to evaluate the revset
            commit_filter = ' and '.join(map(safe_str, commit_filter))
            revisions = self._remote.rev_range([commit_filter])
            collection_generator = MercurialIndexBasedCollectionGenerator
        else:
            revisions = self.commit_ids

        if start_pos or end_pos:
            revisions = revisions[start_pos:end_pos]

        return collection_generator(self, revisions, pre_load=pre_load)
564
565
565 def pull(self, url, commit_ids=None):
566 def pull(self, url, commit_ids=None):
566 """
567 """
567 Pull changes from external location.
568 Pull changes from external location.
568
569
569 :param commit_ids: Optional. Can be set to a list of commit ids
570 :param commit_ids: Optional. Can be set to a list of commit ids
570 which shall be pulled from the other repository.
571 which shall be pulled from the other repository.
571 """
572 """
572 url = self._get_url(url)
573 url = self._get_url(url)
573 self._remote.pull(url, commit_ids=commit_ids)
574 self._remote.pull(url, commit_ids=commit_ids)
574 self._remote.invalidate_vcs_cache()
575 self._remote.invalidate_vcs_cache()
575
576
576 def fetch(self, url, commit_ids=None):
577 def fetch(self, url, commit_ids=None):
577 """
578 """
578 Backward compatibility with GIT fetch==pull
579 Backward compatibility with GIT fetch==pull
579 """
580 """
580 return self.pull(url, commit_ids=commit_ids)
581 return self.pull(url, commit_ids=commit_ids)
581
582
582 def push(self, url):
583 def push(self, url):
583 url = self._get_url(url)
584 url = self._get_url(url)
584 self._remote.sync_push(url)
585 self._remote.sync_push(url)
585
586
586 def _local_clone(self, clone_path):
587 def _local_clone(self, clone_path):
587 """
588 """
588 Create a local clone of the current repo.
589 Create a local clone of the current repo.
589 """
590 """
590 self._remote.clone(self.path, clone_path, update_after_clone=True,
591 self._remote.clone(self.path, clone_path, update_after_clone=True,
591 hooks=False)
592 hooks=False)
592
593
    def _update(self, revision, clean=False):
        """
        Update the working copy to the specified revision.

        :param revision: revision/bookmark/branch to check out
        :param clean: forwarded to the remote update call (presumably
            ``hg update --clean`` semantics - discard local changes)
        """
        log.debug('Doing checkout to commit: `%s` for %s', revision, self)
        self._remote.update(revision, clean=clean)
599
600
600 def _identify(self):
601 def _identify(self):
601 """
602 """
602 Return the current state of the working directory.
603 Return the current state of the working directory.
603 """
604 """
604 return self._remote.identify().strip().rstrip('+')
605 return self._remote.identify().strip().rstrip('+')
605
606
606 def _heads(self, branch=None):
607 def _heads(self, branch=None):
607 """
608 """
608 Return the commit ids of the repository heads.
609 Return the commit ids of the repository heads.
609 """
610 """
610 return self._remote.heads(branch=branch).strip().split(' ')
611 return self._remote.heads(branch=branch).strip().split(' ')
611
612
612 def _ancestor(self, revision1, revision2):
613 def _ancestor(self, revision1, revision2):
613 """
614 """
614 Return the common ancestor of the two revisions.
615 Return the common ancestor of the two revisions.
615 """
616 """
616 return self._remote.ancestor(revision1, revision2)
617 return self._remote.ancestor(revision1, revision2)
617
618
618 def _local_push(
619 def _local_push(
619 self, revision, repository_path, push_branches=False,
620 self, revision, repository_path, push_branches=False,
620 enable_hooks=False):
621 enable_hooks=False):
621 """
622 """
622 Push the given revision to the specified repository.
623 Push the given revision to the specified repository.
623
624
624 :param push_branches: allow to create branches in the target repo.
625 :param push_branches: allow to create branches in the target repo.
625 """
626 """
626 self._remote.push(
627 self._remote.push(
627 [revision], repository_path, hooks=enable_hooks,
628 [revision], repository_path, hooks=enable_hooks,
628 push_branches=push_branches)
629 push_branches=push_branches)
629
630
    def _local_merge(self, target_ref, merge_message, user_name, user_email,
                     source_ref, use_rebase=False, close_commit_id=None, dry_run=False):
        """
        Merge the given source_revision into the checked out revision.

        Returns the commit id of the merge and a boolean indicating if the
        commit needs to be pushed.

        :param target_ref: ref of the merge target (checked out first)
        :param merge_message: commit message for the merge commit
        :param user_name: author name for the merge/rebase commit
        :param user_email: author email for the merge/rebase commit
        :param source_ref: ref being merged in
        :param use_rebase: rebase the source onto the target instead of
            creating a merge commit
        :param close_commit_id: optional commit that closed the source
            branch; when given it replaces the source commit id
        :param dry_run: accepted for interface compatibility; unused here
        :raises UnresolvedFilesInRepo: when the merge/rebase fails with
            unresolved conflicts
        """
        source_ref_commit_id = source_ref.commit_id
        target_ref_commit_id = target_ref.commit_id

        # update our workdir to target ref, for proper merge
        self._update(target_ref_commit_id, clean=True)

        ancestor = self._ancestor(target_ref_commit_id, source_ref_commit_id)
        is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)

        if close_commit_id:
            # NOTE(marcink): if we get the close commit, this is our new source
            # which will include the close commit itself.
            source_ref_commit_id = close_commit_id

        if ancestor == source_ref_commit_id:
            # Nothing to do, the changes were already integrated
            return target_ref_commit_id, False

        elif ancestor == target_ref_commit_id and is_the_same_branch:
            # In this case we should force a commit message
            return source_ref_commit_id, True

        unresolved = None
        if use_rebase:
            try:
                # a temporary bookmark marks the rebased head so we can
                # check it out afterwards
                bookmark_name = 'rcbook%s%s' % (source_ref_commit_id, target_ref_commit_id)
                self.bookmark(bookmark_name, revision=source_ref.commit_id)
                self._remote.rebase(
                    source=source_ref_commit_id, dest=target_ref_commit_id)
                self._remote.invalidate_vcs_cache()
                self._update(bookmark_name, clean=True)
                return self._identify(), True
            except RepositoryError as e:
                # The rebase-abort may raise another exception which 'hides'
                # the original one, therefore we log it here.
                log.exception('Error while rebasing shadow repo during merge.')
                if 'unresolved conflicts' in safe_str(e):
                    unresolved = self._remote.get_unresolved_files()
                    log.debug('unresolved files: %s', unresolved)

                # Cleanup any rebase leftovers
                self._remote.invalidate_vcs_cache()
                self._remote.rebase(abort=True)
                self._remote.invalidate_vcs_cache()
                self._remote.update(clean=True)
                if unresolved:
                    raise UnresolvedFilesInRepo(unresolved)
                else:
                    raise
        else:
            try:
                self._remote.merge(source_ref_commit_id)
                self._remote.invalidate_vcs_cache()
                self._remote.commit(
                    message=safe_str(merge_message),
                    username=safe_str('%s <%s>' % (user_name, user_email)))
                self._remote.invalidate_vcs_cache()
                return self._identify(), True
            except RepositoryError as e:
                # The merge-abort may raise another exception which 'hides'
                # the original one, therefore we log it here.
                log.exception('Error while merging shadow repo during merge.')
                if 'unresolved merge conflicts' in safe_str(e):
                    unresolved = self._remote.get_unresolved_files()
                    log.debug('unresolved files: %s', unresolved)

                # Cleanup any merge leftovers
                self._remote.update(clean=True)
                if unresolved:
                    raise UnresolvedFilesInRepo(unresolved)
                else:
                    raise
710
711
    def _local_close(self, target_ref, user_name, user_email,
                     source_ref, close_message=''):
        """
        Close the branch of the given source_revision

        Returns the commit id of the close and a boolean indicating if the
        commit needs to be pushed.

        :param target_ref: accepted for interface compatibility; unused here
        :param user_name: author name for the closing commit
        :param user_email: author email for the closing commit
        :param source_ref: ref whose branch gets closed
        :param close_message: optional message; a default one is generated
            from the source branch name when empty
        """
        self._update(source_ref.commit_id)
        message = close_message or "Closing branch: `{}`".format(source_ref.name)
        try:
            self._remote.commit(
                message=safe_str(message),
                username=safe_str('%s <%s>' % (user_name, user_email)),
                close_branch=True)
            self._remote.invalidate_vcs_cache()
            return self._identify(), True
        except RepositoryError:
            # Cleanup any commit leftovers
            self._remote.update(clean=True)
            raise
732
733
733 def _is_the_same_branch(self, target_ref, source_ref):
734 def _is_the_same_branch(self, target_ref, source_ref):
734 return (
735 return (
735 self._get_branch_name(target_ref) ==
736 self._get_branch_name(target_ref) ==
736 self._get_branch_name(source_ref))
737 self._get_branch_name(source_ref))
737
738
738 def _get_branch_name(self, ref):
739 def _get_branch_name(self, ref):
739 if ref.type == 'branch':
740 if ref.type == 'branch':
740 return ref.name
741 return ref.name
741 return self._remote.ctx_branch(ref.commit_id)
742 return self._remote.ctx_branch(ref.commit_id)
742
743
743 def _maybe_prepare_merge_workspace(
744 def _maybe_prepare_merge_workspace(
744 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
745 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
745 shadow_repository_path = self._get_shadow_repository_path(
746 shadow_repository_path = self._get_shadow_repository_path(
746 self.path, repo_id, workspace_id)
747 self.path, repo_id, workspace_id)
747 if not os.path.exists(shadow_repository_path):
748 if not os.path.exists(shadow_repository_path):
748 self._local_clone(shadow_repository_path)
749 self._local_clone(shadow_repository_path)
749 log.debug(
750 log.debug(
750 'Prepared shadow repository in %s', shadow_repository_path)
751 'Prepared shadow repository in %s', shadow_repository_path)
751
752
752 return shadow_repository_path
753 return shadow_repository_path
753
754
754 def _merge_repo(self, repo_id, workspace_id, target_ref,
755 def _merge_repo(self, repo_id, workspace_id, target_ref,
755 source_repo, source_ref, merge_message,
756 source_repo, source_ref, merge_message,
756 merger_name, merger_email, dry_run=False,
757 merger_name, merger_email, dry_run=False,
757 use_rebase=False, close_branch=False):
758 use_rebase=False, close_branch=False):
758
759
759 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
760 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
760 'rebase' if use_rebase else 'merge', dry_run)
761 'rebase' if use_rebase else 'merge', dry_run)
761 if target_ref.commit_id not in self._heads():
762 if target_ref.commit_id not in self._heads():
762 return MergeResponse(
763 return MergeResponse(
763 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
764 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
764 metadata={'target_ref': target_ref})
765 metadata={'target_ref': target_ref})
765
766
766 try:
767 try:
767 if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
768 if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
768 heads_all = self._heads(target_ref.name)
769 heads_all = self._heads(target_ref.name)
769 max_heads = 10
770 max_heads = 10
770 if len(heads_all) > max_heads:
771 if len(heads_all) > max_heads:
771 heads = '\n,'.join(
772 heads = '\n,'.join(
772 heads_all[:max_heads] +
773 heads_all[:max_heads] +
773 ['and {} more.'.format(len(heads_all)-max_heads)])
774 ['and {} more.'.format(len(heads_all)-max_heads)])
774 else:
775 else:
775 heads = '\n,'.join(heads_all)
776 heads = '\n,'.join(heads_all)
776 metadata = {
777 metadata = {
777 'target_ref': target_ref,
778 'target_ref': target_ref,
778 'source_ref': source_ref,
779 'source_ref': source_ref,
779 'heads': heads
780 'heads': heads
780 }
781 }
781 return MergeResponse(
782 return MergeResponse(
782 False, False, None,
783 False, False, None,
783 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
784 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
784 metadata=metadata)
785 metadata=metadata)
785 except CommitDoesNotExistError:
786 except CommitDoesNotExistError:
786 log.exception('Failure when looking up branch heads on hg target')
787 log.exception('Failure when looking up branch heads on hg target')
787 return MergeResponse(
788 return MergeResponse(
788 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
789 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
789 metadata={'target_ref': target_ref})
790 metadata={'target_ref': target_ref})
790
791
791 shadow_repository_path = self._maybe_prepare_merge_workspace(
792 shadow_repository_path = self._maybe_prepare_merge_workspace(
792 repo_id, workspace_id, target_ref, source_ref)
793 repo_id, workspace_id, target_ref, source_ref)
793 shadow_repo = self.get_shadow_instance(shadow_repository_path)
794 shadow_repo = self.get_shadow_instance(shadow_repository_path)
794
795
795 log.debug('Pulling in target reference %s', target_ref)
796 log.debug('Pulling in target reference %s', target_ref)
796 self._validate_pull_reference(target_ref)
797 self._validate_pull_reference(target_ref)
797 shadow_repo._local_pull(self.path, target_ref)
798 shadow_repo._local_pull(self.path, target_ref)
798
799
799 try:
800 try:
800 log.debug('Pulling in source reference %s', source_ref)
801 log.debug('Pulling in source reference %s', source_ref)
801 source_repo._validate_pull_reference(source_ref)
802 source_repo._validate_pull_reference(source_ref)
802 shadow_repo._local_pull(source_repo.path, source_ref)
803 shadow_repo._local_pull(source_repo.path, source_ref)
803 except CommitDoesNotExistError:
804 except CommitDoesNotExistError:
804 log.exception('Failure when doing local pull on hg shadow repo')
805 log.exception('Failure when doing local pull on hg shadow repo')
805 return MergeResponse(
806 return MergeResponse(
806 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
807 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
807 metadata={'source_ref': source_ref})
808 metadata={'source_ref': source_ref})
808
809
809 merge_ref = None
810 merge_ref = None
810 merge_commit_id = None
811 merge_commit_id = None
811 close_commit_id = None
812 close_commit_id = None
812 merge_failure_reason = MergeFailureReason.NONE
813 merge_failure_reason = MergeFailureReason.NONE
813 metadata = {}
814 metadata = {}
814
815
815 # enforce that close branch should be used only in case we source from
816 # enforce that close branch should be used only in case we source from
816 # an actual Branch
817 # an actual Branch
817 close_branch = close_branch and source_ref.type == 'branch'
818 close_branch = close_branch and source_ref.type == 'branch'
818
819
819 # don't allow to close branch if source and target are the same
820 # don't allow to close branch if source and target are the same
820 close_branch = close_branch and source_ref.name != target_ref.name
821 close_branch = close_branch and source_ref.name != target_ref.name
821
822
822 needs_push_on_close = False
823 needs_push_on_close = False
823 if close_branch and not use_rebase and not dry_run:
824 if close_branch and not use_rebase and not dry_run:
824 try:
825 try:
825 close_commit_id, needs_push_on_close = shadow_repo._local_close(
826 close_commit_id, needs_push_on_close = shadow_repo._local_close(
826 target_ref, merger_name, merger_email, source_ref)
827 target_ref, merger_name, merger_email, source_ref)
827 merge_possible = True
828 merge_possible = True
828 except RepositoryError:
829 except RepositoryError:
829 log.exception('Failure when doing close branch on '
830 log.exception('Failure when doing close branch on '
830 'shadow repo: %s', shadow_repo)
831 'shadow repo: %s', shadow_repo)
831 merge_possible = False
832 merge_possible = False
832 merge_failure_reason = MergeFailureReason.MERGE_FAILED
833 merge_failure_reason = MergeFailureReason.MERGE_FAILED
833 else:
834 else:
834 merge_possible = True
835 merge_possible = True
835
836
836 needs_push = False
837 needs_push = False
837 if merge_possible:
838 if merge_possible:
838
839
839 try:
840 try:
840 merge_commit_id, needs_push = shadow_repo._local_merge(
841 merge_commit_id, needs_push = shadow_repo._local_merge(
841 target_ref, merge_message, merger_name, merger_email,
842 target_ref, merge_message, merger_name, merger_email,
842 source_ref, use_rebase=use_rebase,
843 source_ref, use_rebase=use_rebase,
843 close_commit_id=close_commit_id, dry_run=dry_run)
844 close_commit_id=close_commit_id, dry_run=dry_run)
844 merge_possible = True
845 merge_possible = True
845
846
846 # read the state of the close action, if it
847 # read the state of the close action, if it
847 # maybe required a push
848 # maybe required a push
848 needs_push = needs_push or needs_push_on_close
849 needs_push = needs_push or needs_push_on_close
849
850
850 # Set a bookmark pointing to the merge commit. This bookmark
851 # Set a bookmark pointing to the merge commit. This bookmark
851 # may be used to easily identify the last successful merge
852 # may be used to easily identify the last successful merge
852 # commit in the shadow repository.
853 # commit in the shadow repository.
853 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
854 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
854 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
855 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
855 except SubrepoMergeError:
856 except SubrepoMergeError:
856 log.exception(
857 log.exception(
857 'Subrepo merge error during local merge on hg shadow repo.')
858 'Subrepo merge error during local merge on hg shadow repo.')
858 merge_possible = False
859 merge_possible = False
859 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
860 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
860 needs_push = False
861 needs_push = False
861 except RepositoryError as e:
862 except RepositoryError as e:
862 log.exception('Failure when doing local merge on hg shadow repo')
863 log.exception('Failure when doing local merge on hg shadow repo')
863 if isinstance(e, UnresolvedFilesInRepo):
864 if isinstance(e, UnresolvedFilesInRepo):
864 all_conflicts = list(e.args[0])
865 all_conflicts = list(e.args[0])
865 max_conflicts = 20
866 max_conflicts = 20
866 if len(all_conflicts) > max_conflicts:
867 if len(all_conflicts) > max_conflicts:
867 conflicts = all_conflicts[:max_conflicts] \
868 conflicts = all_conflicts[:max_conflicts] \
868 + ['and {} more.'.format(len(all_conflicts)-max_conflicts)]
869 + ['and {} more.'.format(len(all_conflicts)-max_conflicts)]
869 else:
870 else:
870 conflicts = all_conflicts
871 conflicts = all_conflicts
871 metadata['unresolved_files'] = \
872 metadata['unresolved_files'] = \
872 '\n* conflict: ' + \
873 '\n* conflict: ' + \
873 ('\n * conflict: '.join(conflicts))
874 ('\n * conflict: '.join(conflicts))
874
875
875 merge_possible = False
876 merge_possible = False
876 merge_failure_reason = MergeFailureReason.MERGE_FAILED
877 merge_failure_reason = MergeFailureReason.MERGE_FAILED
877 needs_push = False
878 needs_push = False
878
879
879 if merge_possible and not dry_run:
880 if merge_possible and not dry_run:
880 if needs_push:
881 if needs_push:
881 # In case the target is a bookmark, update it, so after pushing
882 # In case the target is a bookmark, update it, so after pushing
882 # the bookmarks is also updated in the target.
883 # the bookmarks is also updated in the target.
883 if target_ref.type == 'book':
884 if target_ref.type == 'book':
884 shadow_repo.bookmark(
885 shadow_repo.bookmark(
885 target_ref.name, revision=merge_commit_id)
886 target_ref.name, revision=merge_commit_id)
886 try:
887 try:
887 shadow_repo_with_hooks = self.get_shadow_instance(
888 shadow_repo_with_hooks = self.get_shadow_instance(
888 shadow_repository_path,
889 shadow_repository_path,
889 enable_hooks=True)
890 enable_hooks=True)
890 # This is the actual merge action, we push from shadow
891 # This is the actual merge action, we push from shadow
891 # into origin.
892 # into origin.
892 # Note: the push_branches option will push any new branch
893 # Note: the push_branches option will push any new branch
893 # defined in the source repository to the target. This may
894 # defined in the source repository to the target. This may
894 # be dangerous as branches are permanent in Mercurial.
895 # be dangerous as branches are permanent in Mercurial.
895 # This feature was requested in issue #441.
896 # This feature was requested in issue #441.
896 shadow_repo_with_hooks._local_push(
897 shadow_repo_with_hooks._local_push(
897 merge_commit_id, self.path, push_branches=True,
898 merge_commit_id, self.path, push_branches=True,
898 enable_hooks=True)
899 enable_hooks=True)
899
900
900 # maybe we also need to push the close_commit_id
901 # maybe we also need to push the close_commit_id
901 if close_commit_id:
902 if close_commit_id:
902 shadow_repo_with_hooks._local_push(
903 shadow_repo_with_hooks._local_push(
903 close_commit_id, self.path, push_branches=True,
904 close_commit_id, self.path, push_branches=True,
904 enable_hooks=True)
905 enable_hooks=True)
905 merge_succeeded = True
906 merge_succeeded = True
906 except RepositoryError:
907 except RepositoryError:
907 log.exception(
908 log.exception(
908 'Failure when doing local push from the shadow '
909 'Failure when doing local push from the shadow '
909 'repository to the target repository at %s.', self.path)
910 'repository to the target repository at %s.', self.path)
910 merge_succeeded = False
911 merge_succeeded = False
911 merge_failure_reason = MergeFailureReason.PUSH_FAILED
912 merge_failure_reason = MergeFailureReason.PUSH_FAILED
912 metadata['target'] = 'hg shadow repo'
913 metadata['target'] = 'hg shadow repo'
913 metadata['merge_commit'] = merge_commit_id
914 metadata['merge_commit'] = merge_commit_id
914 else:
915 else:
915 merge_succeeded = True
916 merge_succeeded = True
916 else:
917 else:
917 merge_succeeded = False
918 merge_succeeded = False
918
919
919 return MergeResponse(
920 return MergeResponse(
920 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
921 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
921 metadata=metadata)
922 metadata=metadata)
922
923
923 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
924 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
924 config = self.config.copy()
925 config = self.config.copy()
925 if not enable_hooks:
926 if not enable_hooks:
926 config.clear_section('hooks')
927 config.clear_section('hooks')
927 return MercurialRepository(shadow_repository_path, config, with_wire={"cache": cache})
928 return MercurialRepository(shadow_repository_path, config, with_wire={"cache": cache})
928
929
929 def _validate_pull_reference(self, reference):
930 def _validate_pull_reference(self, reference):
930 if not (reference.name in self.bookmarks or
931 if not (reference.name in self.bookmarks or
931 reference.name in self.branches or
932 reference.name in self.branches or
932 self.get_commit(reference.commit_id)):
933 self.get_commit(reference.commit_id)):
933 raise CommitDoesNotExistError(
934 raise CommitDoesNotExistError(
934 'Unknown branch, bookmark or commit id')
935 'Unknown branch, bookmark or commit id')
935
936
936 def _local_pull(self, repository_path, reference):
937 def _local_pull(self, repository_path, reference):
937 """
938 """
938 Fetch a branch, bookmark or commit from a local repository.
939 Fetch a branch, bookmark or commit from a local repository.
939 """
940 """
940 repository_path = os.path.abspath(repository_path)
941 repository_path = os.path.abspath(repository_path)
941 if repository_path == self.path:
942 if repository_path == self.path:
942 raise ValueError('Cannot pull from the same repository')
943 raise ValueError('Cannot pull from the same repository')
943
944
944 reference_type_to_option_name = {
945 reference_type_to_option_name = {
945 'book': 'bookmark',
946 'book': 'bookmark',
946 'branch': 'branch',
947 'branch': 'branch',
947 }
948 }
948 option_name = reference_type_to_option_name.get(
949 option_name = reference_type_to_option_name.get(
949 reference.type, 'revision')
950 reference.type, 'revision')
950
951
951 if option_name == 'revision':
952 if option_name == 'revision':
952 ref = reference.commit_id
953 ref = reference.commit_id
953 else:
954 else:
954 ref = reference.name
955 ref = reference.name
955
956
956 options = {option_name: [ref]}
957 options = {option_name: [ref]}
957 self._remote.pull_cmd(repository_path, hooks=False, **options)
958 self._remote.pull_cmd(repository_path, hooks=False, **options)
958 self._remote.invalidate_vcs_cache()
959 self._remote.invalidate_vcs_cache()
959
960
960 def bookmark(self, bookmark, revision=None):
961 def bookmark(self, bookmark, revision=None):
961 if isinstance(bookmark, unicode):
962 if isinstance(bookmark, unicode):
962 bookmark = safe_str(bookmark)
963 bookmark = safe_str(bookmark)
963 self._remote.bookmark(bookmark, revision=revision)
964 self._remote.bookmark(bookmark, revision=revision)
964 self._remote.invalidate_vcs_cache()
965 self._remote.invalidate_vcs_cache()
965
966
966 def get_path_permissions(self, username):
967 def get_path_permissions(self, username):
967 hgacl_file = os.path.join(self.path, '.hg/hgacl')
968 hgacl_file = os.path.join(self.path, '.hg/hgacl')
968
969
969 def read_patterns(suffix):
970 def read_patterns(suffix):
970 svalue = None
971 svalue = None
971 for section, option in [
972 for section, option in [
972 ('narrowacl', username + suffix),
973 ('narrowacl', username + suffix),
973 ('narrowacl', 'default' + suffix),
974 ('narrowacl', 'default' + suffix),
974 ('narrowhgacl', username + suffix),
975 ('narrowhgacl', username + suffix),
975 ('narrowhgacl', 'default' + suffix)
976 ('narrowhgacl', 'default' + suffix)
976 ]:
977 ]:
977 try:
978 try:
978 svalue = hgacl.get(section, option)
979 svalue = hgacl.get(section, option)
979 break # stop at the first value we find
980 break # stop at the first value we find
980 except configparser.NoOptionError:
981 except configparser.NoOptionError:
981 pass
982 pass
982 if not svalue:
983 if not svalue:
983 return None
984 return None
984 result = ['/']
985 result = ['/']
985 for pattern in svalue.split():
986 for pattern in svalue.split():
986 result.append(pattern)
987 result.append(pattern)
987 if '*' not in pattern and '?' not in pattern:
988 if '*' not in pattern and '?' not in pattern:
988 result.append(pattern + '/*')
989 result.append(pattern + '/*')
989 return result
990 return result
990
991
991 if os.path.exists(hgacl_file):
992 if os.path.exists(hgacl_file):
992 try:
993 try:
993 hgacl = configparser.RawConfigParser()
994 hgacl = configparser.RawConfigParser()
994 hgacl.read(hgacl_file)
995 hgacl.read(hgacl_file)
995
996
996 includes = read_patterns('.includes')
997 includes = read_patterns('.includes')
997 excludes = read_patterns('.excludes')
998 excludes = read_patterns('.excludes')
998 return BasePathPermissionChecker.create_from_patterns(
999 return BasePathPermissionChecker.create_from_patterns(
999 includes, excludes)
1000 includes, excludes)
1000 except BaseException as e:
1001 except BaseException as e:
1001 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
1002 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
1002 hgacl_file, self.name, e)
1003 hgacl_file, self.name, e)
1003 raise exceptions.RepositoryRequirementError(msg)
1004 raise exceptions.RepositoryRequirementError(msg)
1004 else:
1005 else:
1005 return None
1006 return None
1006
1007
1007
1008
1008 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
1009 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
1009
1010
1010 def _commit_factory(self, commit_id):
1011 def _commit_factory(self, commit_id):
1011 return self.repo.get_commit(
1012 return self.repo.get_commit(
1012 commit_idx=commit_id, pre_load=self.pre_load)
1013 commit_idx=commit_id, pre_load=self.pre_load)
@@ -1,876 +1,876 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2020 RhodeCode GmbH
3 # Copyright (C) 2014-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Module holding everything related to vcs nodes, with vcs2 architecture.
22 Module holding everything related to vcs nodes, with vcs2 architecture.
23 """
23 """
24
24
25 import os
25 import os
26 import stat
26 import stat
27
27
28 from zope.cachedescriptors.property import Lazy as LazyProperty
28 from zope.cachedescriptors.property import Lazy as LazyProperty
29
29
30 import rhodecode
30 import rhodecode
31 from rhodecode.config.conf import LANGUAGES_EXTENSIONS_MAP
31 from rhodecode.config.conf import LANGUAGES_EXTENSIONS_MAP
32 from rhodecode.lib.utils import safe_unicode, safe_str
32 from rhodecode.lib.utils import safe_unicode, safe_str
33 from rhodecode.lib.utils2 import md5
33 from rhodecode.lib.utils2 import md5
34 from rhodecode.lib.vcs import path as vcspath
34 from rhodecode.lib.vcs import path as vcspath
35 from rhodecode.lib.vcs.backends.base import EmptyCommit, FILEMODE_DEFAULT
35 from rhodecode.lib.vcs.backends.base import EmptyCommit, FILEMODE_DEFAULT
36 from rhodecode.lib.vcs.conf.mtypes import get_mimetypes_db
36 from rhodecode.lib.vcs.conf.mtypes import get_mimetypes_db
37 from rhodecode.lib.vcs.exceptions import NodeError, RemovedFileNodeError
37 from rhodecode.lib.vcs.exceptions import NodeError, RemovedFileNodeError
38
38
39 LARGEFILE_PREFIX = '.hglf'
39 LARGEFILE_PREFIX = '.hglf'
40
40
41
41
42 class NodeKind:
42 class NodeKind:
43 SUBMODULE = -1
43 SUBMODULE = -1
44 DIR = 1
44 DIR = 1
45 FILE = 2
45 FILE = 2
46 LARGEFILE = 3
46 LARGEFILE = 3
47
47
48
48
49 class NodeState:
49 class NodeState:
50 ADDED = u'added'
50 ADDED = 'added'
51 CHANGED = u'changed'
51 CHANGED = 'changed'
52 NOT_CHANGED = u'not changed'
52 NOT_CHANGED = 'not changed'
53 REMOVED = u'removed'
53 REMOVED = 'removed'
54
54
55
55
56 class NodeGeneratorBase(object):
56 class NodeGeneratorBase(object):
57 """
57 """
58 Base class for removed added and changed filenodes, it's a lazy generator
58 Base class for removed added and changed filenodes, it's a lazy generator
59 class that will create filenodes only on iteration or call
59 class that will create filenodes only on iteration or call
60
60
61 The len method doesn't need to create filenodes at all
61 The len method doesn't need to create filenodes at all
62 """
62 """
63
63
64 def __init__(self, current_paths, cs):
64 def __init__(self, current_paths, cs):
65 self.cs = cs
65 self.cs = cs
66 self.current_paths = current_paths
66 self.current_paths = current_paths
67
67
68 def __call__(self):
68 def __call__(self):
69 return [n for n in self]
69 return [n for n in self]
70
70
71 def __getslice__(self, i, j):
71 def __getslice__(self, i, j):
72 for p in self.current_paths[i:j]:
72 for p in self.current_paths[i:j]:
73 yield self.cs.get_node(p)
73 yield self.cs.get_node(p)
74
74
75 def __len__(self):
75 def __len__(self):
76 return len(self.current_paths)
76 return len(self.current_paths)
77
77
78 def __iter__(self):
78 def __iter__(self):
79 for p in self.current_paths:
79 for p in self.current_paths:
80 yield self.cs.get_node(p)
80 yield self.cs.get_node(p)
81
81
82
82
83 class AddedFileNodesGenerator(NodeGeneratorBase):
83 class AddedFileNodesGenerator(NodeGeneratorBase):
84 """
84 """
85 Class holding added files for current commit
85 Class holding added files for current commit
86 """
86 """
87
87
88
88
89 class ChangedFileNodesGenerator(NodeGeneratorBase):
89 class ChangedFileNodesGenerator(NodeGeneratorBase):
90 """
90 """
91 Class holding changed files for current commit
91 Class holding changed files for current commit
92 """
92 """
93
93
94
94
95 class RemovedFileNodesGenerator(NodeGeneratorBase):
95 class RemovedFileNodesGenerator(NodeGeneratorBase):
96 """
96 """
97 Class holding removed files for current commit
97 Class holding removed files for current commit
98 """
98 """
99 def __iter__(self):
99 def __iter__(self):
100 for p in self.current_paths:
100 for p in self.current_paths:
101 yield RemovedFileNode(path=p)
101 yield RemovedFileNode(path=p)
102
102
103 def __getslice__(self, i, j):
103 def __getslice__(self, i, j):
104 for p in self.current_paths[i:j]:
104 for p in self.current_paths[i:j]:
105 yield RemovedFileNode(path=p)
105 yield RemovedFileNode(path=p)
106
106
107
107
108 class Node(object):
108 class Node(object):
109 """
109 """
110 Simplest class representing file or directory on repository. SCM backends
110 Simplest class representing file or directory on repository. SCM backends
111 should use ``FileNode`` and ``DirNode`` subclasses rather than ``Node``
111 should use ``FileNode`` and ``DirNode`` subclasses rather than ``Node``
112 directly.
112 directly.
113
113
114 Node's ``path`` cannot start with slash as we operate on *relative* paths
114 Node's ``path`` cannot start with slash as we operate on *relative* paths
115 only. Moreover, every single node is identified by the ``path`` attribute,
115 only. Moreover, every single node is identified by the ``path`` attribute,
116 so it cannot end with slash, too. Otherwise, path could lead to mistakes.
116 so it cannot end with slash, too. Otherwise, path could lead to mistakes.
117 """
117 """
118 RTLO_MARKER = u"\u202E" # RTLO marker allows swapping text, and certain
118 RTLO_MARKER = "\u202E" # RTLO marker allows swapping text, and certain
119 # security attacks could be used with this
119 # security attacks could be used with this
120 commit = None
120 commit = None
121
121
122 def __init__(self, path, kind):
122 def __init__(self, path, kind):
123 self._validate_path(path) # can throw exception if path is invalid
123 self._validate_path(path) # can throw exception if path is invalid
124 self.path = safe_str(path.rstrip('/')) # we store paths as str
124 self.path = safe_str(path.rstrip('/')) # we store paths as str
125 if path == '' and kind != NodeKind.DIR:
125 if path == '' and kind != NodeKind.DIR:
126 raise NodeError("Only DirNode and its subclasses may be "
126 raise NodeError("Only DirNode and its subclasses may be "
127 "initialized with empty path")
127 "initialized with empty path")
128 self.kind = kind
128 self.kind = kind
129
129
130 if self.is_root() and not self.is_dir():
130 if self.is_root() and not self.is_dir():
131 raise NodeError("Root node cannot be FILE kind")
131 raise NodeError("Root node cannot be FILE kind")
132
132
133 def _validate_path(self, path):
133 def _validate_path(self, path):
134 if path.startswith('/'):
134 if path.startswith('/'):
135 raise NodeError(
135 raise NodeError(
136 "Cannot initialize Node objects with slash at "
136 "Cannot initialize Node objects with slash at "
137 "the beginning as only relative paths are supported. "
137 "the beginning as only relative paths are supported. "
138 "Got %s" % (path,))
138 "Got %s" % (path,))
139
139
140 @LazyProperty
140 @LazyProperty
141 def parent(self):
141 def parent(self):
142 parent_path = self.get_parent_path()
142 parent_path = self.get_parent_path()
143 if parent_path:
143 if parent_path:
144 if self.commit:
144 if self.commit:
145 return self.commit.get_node(parent_path)
145 return self.commit.get_node(parent_path)
146 return DirNode(parent_path)
146 return DirNode(parent_path)
147 return None
147 return None
148
148
149 @LazyProperty
149 @LazyProperty
150 def unicode_path(self):
150 def unicode_path(self):
151 return safe_unicode(self.path)
151 return safe_unicode(self.path)
152
152
153 @LazyProperty
153 @LazyProperty
154 def has_rtlo(self):
154 def has_rtlo(self):
155 """Detects if a path has right-to-left-override marker"""
155 """Detects if a path has right-to-left-override marker"""
156 return self.RTLO_MARKER in self.unicode_path
156 return self.RTLO_MARKER in self.unicode_path
157
157
158 @LazyProperty
158 @LazyProperty
159 def unicode_path_safe(self):
159 def unicode_path_safe(self):
160 """
160 """
161 Special SAFE representation of path without the right-to-left-override.
161 Special SAFE representation of path without the right-to-left-override.
162 This should be only used for "showing" the file, cannot be used for any
162 This should be only used for "showing" the file, cannot be used for any
163 urls etc.
163 urls etc.
164 """
164 """
165 return safe_unicode(self.path).replace(self.RTLO_MARKER, '')
165 return safe_unicode(self.path).replace(self.RTLO_MARKER, '')
166
166
167 @LazyProperty
167 @LazyProperty
168 def dir_path(self):
168 def dir_path(self):
169 """
169 """
170 Returns name of the directory from full path of this vcs node. Empty
170 Returns name of the directory from full path of this vcs node. Empty
171 string is returned if there's no directory in the path
171 string is returned if there's no directory in the path
172 """
172 """
173 _parts = self.path.rstrip('/').rsplit('/', 1)
173 _parts = self.path.rstrip('/').rsplit('/', 1)
174 if len(_parts) == 2:
174 if len(_parts) == 2:
175 return safe_unicode(_parts[0])
175 return safe_unicode(_parts[0])
176 return u''
176 return ''
177
177
178 @LazyProperty
178 @LazyProperty
179 def name(self):
179 def name(self):
180 """
180 """
181 Returns name of the node so if its path
181 Returns name of the node so if its path
182 then only last part is returned.
182 then only last part is returned.
183 """
183 """
184 return safe_unicode(self.path.rstrip('/').split('/')[-1])
184 return safe_unicode(self.path.rstrip('/').split('/')[-1])
185
185
186 @property
186 @property
187 def kind(self):
187 def kind(self):
188 return self._kind
188 return self._kind
189
189
190 @kind.setter
190 @kind.setter
191 def kind(self, kind):
191 def kind(self, kind):
192 if hasattr(self, '_kind'):
192 if hasattr(self, '_kind'):
193 raise NodeError("Cannot change node's kind")
193 raise NodeError("Cannot change node's kind")
194 else:
194 else:
195 self._kind = kind
195 self._kind = kind
196 # Post setter check (path's trailing slash)
196 # Post setter check (path's trailing slash)
197 if self.path.endswith('/'):
197 if self.path.endswith('/'):
198 raise NodeError("Node's path cannot end with slash")
198 raise NodeError("Node's path cannot end with slash")
199
199
200 def __cmp__(self, other):
200 def __cmp__(self, other):
201 """
201 """
202 Comparator using name of the node, needed for quick list sorting.
202 Comparator using name of the node, needed for quick list sorting.
203 """
203 """
204
204
205 kind_cmp = cmp(self.kind, other.kind)
205 kind_cmp = cmp(self.kind, other.kind)
206 if kind_cmp:
206 if kind_cmp:
207 if isinstance(self, SubModuleNode):
207 if isinstance(self, SubModuleNode):
208 # we make submodules equal to dirnode for "sorting" purposes
208 # we make submodules equal to dirnode for "sorting" purposes
209 return NodeKind.DIR
209 return NodeKind.DIR
210 return kind_cmp
210 return kind_cmp
211 return cmp(self.name, other.name)
211 return cmp(self.name, other.name)
212
212
213 def __eq__(self, other):
213 def __eq__(self, other):
214 for attr in ['name', 'path', 'kind']:
214 for attr in ['name', 'path', 'kind']:
215 if getattr(self, attr) != getattr(other, attr):
215 if getattr(self, attr) != getattr(other, attr):
216 return False
216 return False
217 if self.is_file():
217 if self.is_file():
218 if self.content != other.content:
218 if self.content != other.content:
219 return False
219 return False
220 else:
220 else:
221 # For DirNode's check without entering each dir
221 # For DirNode's check without entering each dir
222 self_nodes_paths = list(sorted(n.path for n in self.nodes))
222 self_nodes_paths = list(sorted(n.path for n in self.nodes))
223 other_nodes_paths = list(sorted(n.path for n in self.nodes))
223 other_nodes_paths = list(sorted(n.path for n in self.nodes))
224 if self_nodes_paths != other_nodes_paths:
224 if self_nodes_paths != other_nodes_paths:
225 return False
225 return False
226 return True
226 return True
227
227
228 def __ne__(self, other):
228 def __ne__(self, other):
229 return not self.__eq__(other)
229 return not self.__eq__(other)
230
230
231 def __repr__(self):
231 def __repr__(self):
232 return '<%s %r>' % (self.__class__.__name__, self.path)
232 return '<%s %r>' % (self.__class__.__name__, self.path)
233
233
234 def __str__(self):
234 def __str__(self):
235 return self.__repr__()
235 return self.__repr__()
236
236
237 def __unicode__(self):
237 def __unicode__(self):
238 return self.name
238 return self.name
239
239
240 def get_parent_path(self):
240 def get_parent_path(self):
241 """
241 """
242 Returns node's parent path or empty string if node is root.
242 Returns node's parent path or empty string if node is root.
243 """
243 """
244 if self.is_root():
244 if self.is_root():
245 return ''
245 return ''
246 return vcspath.dirname(self.path.rstrip('/')) + '/'
246 return vcspath.dirname(self.path.rstrip('/')) + '/'
247
247
248 def is_file(self):
248 def is_file(self):
249 """
249 """
250 Returns ``True`` if node's kind is ``NodeKind.FILE``, ``False``
250 Returns ``True`` if node's kind is ``NodeKind.FILE``, ``False``
251 otherwise.
251 otherwise.
252 """
252 """
253 return self.kind == NodeKind.FILE
253 return self.kind == NodeKind.FILE
254
254
255 def is_dir(self):
255 def is_dir(self):
256 """
256 """
257 Returns ``True`` if node's kind is ``NodeKind.DIR``, ``False``
257 Returns ``True`` if node's kind is ``NodeKind.DIR``, ``False``
258 otherwise.
258 otherwise.
259 """
259 """
260 return self.kind == NodeKind.DIR
260 return self.kind == NodeKind.DIR
261
261
262 def is_root(self):
262 def is_root(self):
263 """
263 """
264 Returns ``True`` if node is a root node and ``False`` otherwise.
264 Returns ``True`` if node is a root node and ``False`` otherwise.
265 """
265 """
266 return self.kind == NodeKind.DIR and self.path == ''
266 return self.kind == NodeKind.DIR and self.path == ''
267
267
268 def is_submodule(self):
268 def is_submodule(self):
269 """
269 """
270 Returns ``True`` if node's kind is ``NodeKind.SUBMODULE``, ``False``
270 Returns ``True`` if node's kind is ``NodeKind.SUBMODULE``, ``False``
271 otherwise.
271 otherwise.
272 """
272 """
273 return self.kind == NodeKind.SUBMODULE
273 return self.kind == NodeKind.SUBMODULE
274
274
275 def is_largefile(self):
275 def is_largefile(self):
276 """
276 """
277 Returns ``True`` if node's kind is ``NodeKind.LARGEFILE``, ``False``
277 Returns ``True`` if node's kind is ``NodeKind.LARGEFILE``, ``False``
278 otherwise
278 otherwise
279 """
279 """
280 return self.kind == NodeKind.LARGEFILE
280 return self.kind == NodeKind.LARGEFILE
281
281
282 def is_link(self):
282 def is_link(self):
283 if self.commit:
283 if self.commit:
284 return self.commit.is_link(self.path)
284 return self.commit.is_link(self.path)
285 return False
285 return False
286
286
287 @LazyProperty
287 @LazyProperty
288 def added(self):
288 def added(self):
289 return self.state is NodeState.ADDED
289 return self.state is NodeState.ADDED
290
290
291 @LazyProperty
291 @LazyProperty
292 def changed(self):
292 def changed(self):
293 return self.state is NodeState.CHANGED
293 return self.state is NodeState.CHANGED
294
294
295 @LazyProperty
295 @LazyProperty
296 def not_changed(self):
296 def not_changed(self):
297 return self.state is NodeState.NOT_CHANGED
297 return self.state is NodeState.NOT_CHANGED
298
298
299 @LazyProperty
299 @LazyProperty
300 def removed(self):
300 def removed(self):
301 return self.state is NodeState.REMOVED
301 return self.state is NodeState.REMOVED
302
302
303
303
class FileNode(Node):
    """
    Class representing file nodes.

    :attribute: path: path to the node, relative to repository's root
    :attribute: content: if given arbitrary sets content of the file
    :attribute: commit: if given, first time content is accessed, callback
    :attribute: mode: stat mode for a node. Default is `FILEMODE_DEFAULT`.
    """
    # attribute names listed here are excluded from ``pre_load`` bulk
    # fetching (see _set_bulk_properties); subclasses may extend it
    _filter_pre_load = []

    def __init__(self, path, content=None, commit=None, mode=None, pre_load=None):
        """
        Only one of ``content`` and ``commit`` may be given. Passing both
        would raise ``NodeError`` exception.

        :param path: relative path to the node
        :param content: content may be passed to constructor
        :param commit: if given, will use it to lazily fetch content
        :param mode: ST_MODE (i.e. 0100644)
        """
        if content and commit:
            raise NodeError("Cannot use both content and commit")
        super(FileNode, self).__init__(path, kind=NodeKind.FILE)
        self.commit = commit
        self._content = content
        self._mode = mode or FILEMODE_DEFAULT

        self._set_bulk_properties(pre_load)

    def _set_bulk_properties(self, pre_load):
        """
        Eagerly resolve the attributes named in ``pre_load`` and cache the
        results in ``self.__dict__`` so later access does not hit the vcs
        backend again.
        """
        if not pre_load:
            return
        pre_load = [entry for entry in pre_load
                    if entry not in self._filter_pre_load]
        if not pre_load:
            return

        for attr_name in pre_load:
            result = getattr(self, attr_name)
            if callable(result):
                result = result()
            # write straight into the instance dict, which is exactly what
            # LazyProperty would do on first access
            self.__dict__[attr_name] = result

    @LazyProperty
    def mode(self):
        """
        Returns lazily mode of the FileNode. If `commit` is not set, would
        use value given at initialization or `FILEMODE_DEFAULT` (default).
        """
        if self.commit:
            mode = self.commit.get_file_mode(self.path)
        else:
            mode = self._mode
        return mode

    @LazyProperty
    def raw_bytes(self):
        """
        Returns lazily the raw bytes of the FileNode.
        """
        if self.commit:
            # fetch once from the commit and memoize in _content
            if self._content is None:
                self._content = self.commit.get_file_content(self.path)
            content = self._content
        else:
            content = self._content
        return content

    def stream_bytes(self):
        """
        Returns an iterator that will stream the content of the file directly from
        vcsserver without loading it to memory.
        """
        if self.commit:
            return self.commit.get_file_content_streamed(self.path)
        raise NodeError("Cannot retrieve stream_bytes without related commit attribute")

    @LazyProperty
    def md5(self):
        """
        Returns md5 of the file node.
        """
        return md5(self.raw_bytes)

    def metadata_uncached(self):
        """
        Returns md5, binary flag of the file node, without any cache usage.
        """

        content = self.content_uncached()

        # NUL-byte heuristic: presence of '\0' marks the content binary
        is_binary = content and '\0' in content
        size = 0
        if content:
            size = len(content)

        return is_binary, md5(content), size, content

    def content_uncached(self):
        """
        Returns lazily content of the FileNode. If possible, would try to
        decode content from UTF-8.
        """
        if self.commit:
            content = self.commit.get_file_content(self.path)
        else:
            content = self._content
        return content

    @LazyProperty
    def content(self):
        """
        Returns lazily content of the FileNode. If possible, would try to
        decode content from UTF-8.
        """
        content = self.raw_bytes

        if self.is_binary:
            # binary payloads are returned untouched
            return content
        return safe_unicode(content)

    @LazyProperty
    def size(self):
        """Size in bytes, as reported by the related commit."""
        if self.commit:
            return self.commit.get_file_size(self.path)
        raise NodeError(
            "Cannot retrieve size of the file without related "
            "commit attribute")

    @LazyProperty
    def message(self):
        """Commit message of the last commit that touched this file."""
        if self.commit:
            return self.last_commit.message
        raise NodeError(
            "Cannot retrieve message of the file without related "
            "commit attribute")

    @LazyProperty
    def last_commit(self):
        """Last commit that touched this path, with common fields pre-loaded."""
        if self.commit:
            pre_load = ["author", "date", "message", "parents"]
            return self.commit.get_path_commit(self.path, pre_load=pre_load)
        raise NodeError(
            "Cannot retrieve last commit of the file without "
            "related commit attribute")

    def get_mimetype(self):
        """
        Mimetype is calculated based on the file's content. If ``_mimetype``
        attribute is available, it will be returned (backends which store
        mimetypes or can easily recognize them, should set this private
        attribute to indicate that type should *NOT* be calculated).
        """

        if hasattr(self, '_mimetype'):
            if (isinstance(self._mimetype, (tuple, list,)) and
                    len(self._mimetype) == 2):
                return self._mimetype
            else:
                raise NodeError('given _mimetype attribute must be an 2 '
                                'element list or tuple')

        db = get_mimetypes_db()
        mtype, encoding = db.guess_type(self.name)

        if mtype is None:
            # unknown by extension: binary vs text fallback
            if not self.is_largefile() and self.is_binary:
                mtype = 'application/octet-stream'
                encoding = None
            else:
                mtype = 'text/plain'
                encoding = None

        # try with pygments
        try:
            from pygments.lexers import get_lexer_for_filename
            mt = get_lexer_for_filename(self.name).mimetypes
        except Exception:
            mt = None

        if mt:
            mtype = mt[0]

        return mtype, encoding

    @LazyProperty
    def mimetype(self):
        """
        Wrapper around full mimetype info. It returns only type of fetched
        mimetype without the encoding part. use get_mimetype function to fetch
        full set of (type,encoding)
        """
        return self.get_mimetype()[0]

    @LazyProperty
    def mimetype_main(self):
        """Main part of the mimetype, e.g. ``text`` for ``text/plain``."""
        return self.mimetype.split('/')[0]

    @classmethod
    def get_lexer(cls, filename, content=None):
        """
        Return a pygments lexer for ``filename``/``content``, falling back
        first to LANGUAGES_EXTENSIONS_MAP and finally to a plain TextLexer.
        """
        from pygments import lexers

        extension = filename.split('.')[-1]
        lexer = None

        try:
            lexer = lexers.guess_lexer_for_filename(
                filename, content, stripnl=False)
        except lexers.ClassNotFound:
            lexer = None

        # try our EXTENSION_MAP
        if not lexer:
            try:
                lexer_class = LANGUAGES_EXTENSIONS_MAP.get(extension)
                if lexer_class:
                    lexer = lexers.get_lexer_by_name(lexer_class[0])
            except lexers.ClassNotFound:
                lexer = None

        if not lexer:
            lexer = lexers.TextLexer(stripnl=False)

        return lexer

    @LazyProperty
    def lexer(self):
        """
        Returns pygment's lexer class. Would try to guess lexer taking file's
        content, name and mimetype.
        """
        return self.get_lexer(self.name, self.content)

    @LazyProperty
    def lexer_alias(self):
        """
        Returns first alias of the lexer guessed for this file.
        """
        return self.lexer.aliases[0]

    @LazyProperty
    def history(self):
        """
        Returns a list of commit for this file in which the file was changed
        """
        if self.commit is None:
            raise NodeError('Unable to get commit for this FileNode')
        return self.commit.get_path_history(self.path)

    @LazyProperty
    def annotate(self):
        """
        Returns a list of three element tuples with lineno, commit and line
        """
        if self.commit is None:
            raise NodeError('Unable to get commit for this FileNode')
        pre_load = ["author", "date", "message", "parents"]
        return self.commit.get_file_annotate(self.path, pre_load=pre_load)

    @LazyProperty
    def state(self):
        """
        NodeState of this file within its commit: ADDED, CHANGED or
        NOT_CHANGED (REMOVED is handled by RemovedFileNode).
        """
        if not self.commit:
            raise NodeError(
                "Cannot check state of the node if it's not "
                "linked with commit")
        elif self.path in (node.path for node in self.commit.added):
            return NodeState.ADDED
        elif self.path in (node.path for node in self.commit.changed):
            return NodeState.CHANGED
        else:
            return NodeState.NOT_CHANGED

    @LazyProperty
    def is_binary(self):
        """
        Returns True if file has binary content.
        """
        if self.commit:
            return self.commit.is_node_binary(self.path)
        else:
            # no commit: fall back to the NUL-byte heuristic on raw content
            raw_bytes = self._content
            return raw_bytes and '\0' in raw_bytes

    @LazyProperty
    def extension(self):
        """Returns filenode extension"""
        return self.name.split('.')[-1]

    @property
    def is_executable(self):
        """
        Returns ``True`` if file has executable flag turned on.
        """
        return bool(self.mode & stat.S_IXUSR)

    def get_largefile_node(self):
        """
        Try to return a Mercurial FileNode from this node. It does internal
        checks inside largefile store, if that file exist there it will
        create special instance of LargeFileNode which can get content from
        LF store.
        """
        if self.commit:
            return self.commit.get_largefile_node(self.path)

    def count_lines(self, content, count_empty=False):
        """
        Count lines of ``content``.

        With ``count_empty=True`` returns ``(all_lines, non_empty_lines)``;
        otherwise returns ``(all_lines, all_lines)`` based on a fast newline
        count.

        NOTE(review): the meaning of the second tuple element differs between
        the two branches -- confirm the intended contract with callers.
        """

        if count_empty:
            all_lines = 0
            empty_lines = 0
            for line in content.splitlines(True):
                if line == '\n':
                    empty_lines += 1
                all_lines += 1

            return all_lines, all_lines - empty_lines
        else:
            # fast method
            empty_lines = all_lines = content.count('\n')
            if all_lines == 0 and content:
                # one-line without a newline
                empty_lines = all_lines = 1

            return all_lines, empty_lines

    def lines(self, count_empty=False):
        """
        Return the ``count_lines`` tuple for this file's content, or
        ``(0, 0)`` for binary files.
        """
        all_lines, empty_lines = 0, 0

        if not self.is_binary:
            content = self.content
            all_lines, empty_lines = self.count_lines(content, count_empty=count_empty)
        return all_lines, empty_lines

    def __repr__(self):
        return '<%s %r @ %s>' % (self.__class__.__name__, self.path,
                                 getattr(self.commit, 'short_id', ''))
641
641
642
642
class RemovedFileNode(FileNode):
    """
    Dummy FileNode class - trying to access any public attribute except path,
    name, kind or state (or methods/attributes checking those two) would raise
    RemovedFileNodeError.
    """
    # the only public attributes that may be read without raising
    ALLOWED_ATTRIBUTES = [
        'name', 'path', 'state', 'is_root', 'is_file', 'is_dir', 'kind',
        'added', 'changed', 'not_changed', 'removed'
    ]

    def __init__(self, path):
        """
        :param path: relative path to the node
        """
        super(RemovedFileNode, self).__init__(path=path)

    def __getattribute__(self, attr):
        # private names and the whitelist above resolve normally; any other
        # access signals use of a file that no longer exists in the commit
        if attr.startswith('_') or attr in RemovedFileNode.ALLOWED_ATTRIBUTES:
            return super(RemovedFileNode, self).__getattribute__(attr)
        raise RemovedFileNodeError(
            "Cannot access attribute %s on RemovedFileNode" % attr)

    @LazyProperty
    def state(self):
        """A removed file is always in ``NodeState.REMOVED``."""
        return NodeState.REMOVED
669
669
670
670
class DirNode(Node):
    """
    DirNode stores list of files and directories within this node.
    Nodes may be used standalone but within repository context they
    lazily fetch data within same repository's commit.
    """

    def __init__(self, path, nodes=(), commit=None):
        """
        Only one of ``nodes`` and ``commit`` may be given. Passing both
        would raise ``NodeError`` exception.

        :param path: relative path to the node
        :param nodes: content may be passed to constructor
        :param commit: if given, will use it to lazily fetch content
        """
        if nodes and commit:
            raise NodeError("Cannot use both nodes and commit")
        super(DirNode, self).__init__(path, NodeKind.DIR)
        self.commit = commit
        self._nodes = nodes

    @LazyProperty
    def content(self):
        """Directories have no content; accessing it is always an error."""
        raise NodeError(
            "%s represents a dir and has no `content` attribute" % self)

    @LazyProperty
    def nodes(self):
        """
        Sorted child nodes of this directory. As a side effect populates
        ``self._nodes_dict`` (path -> node), which ``get_node`` relies on.
        """
        if self.commit:
            nodes = self.commit.get_nodes(self.path)
        else:
            nodes = self._nodes
        self._nodes_dict = dict((node.path, node) for node in nodes)
        return sorted(nodes)

    @LazyProperty
    def files(self):
        """Sorted child nodes that are files."""
        return sorted((node for node in self.nodes if node.is_file()))

    @LazyProperty
    def dirs(self):
        """Sorted child nodes that are directories."""
        return sorted((node for node in self.nodes if node.is_dir()))

    def __iter__(self):
        for node in self.nodes:
            yield node

    def get_node(self, path):
        """
        Returns node from within this particular ``DirNode``, so it is now
        allowed to fetch, i.e. node located at 'docs/api/index.rst' from node
        'docs'. In order to access deeper nodes one must fetch nodes between
        them first - this would work::

           docs = root.get_node('docs')
           docs.get_node('api').get_node('index.rst')

        :param: path - relative to the current node

        .. note::
           To access lazily (as in example above) node have to be initialized
           with related commit object - without it node is out of
           context and may know nothing about anything else than nearest
           (located at same level) nodes.
        """
        try:
            path = path.rstrip('/')
            if path == '':
                raise NodeError("Cannot retrieve node without path")
            self.nodes  # access nodes first in order to set _nodes_dict
            paths = path.split('/')
            if len(paths) == 1:
                if not self.is_root():
                    path = '/'.join((self.path, paths[0]))
                else:
                    path = paths[0]
                return self._nodes_dict[path]
            elif len(paths) > 1:
                if self.commit is None:
                    raise NodeError("Cannot access deeper nodes without commit")
                else:
                    # recurse one level at a time down the path
                    path1, path2 = paths[0], '/'.join(paths[1:])
                    return self.get_node(path1).get_node(path2)
            else:
                raise KeyError
        except KeyError:
            raise NodeError("Node does not exist at %s" % path)

    @LazyProperty
    def state(self):
        """Directories have no NodeState; accessing it is always an error."""
        raise NodeError("Cannot access state of DirNode")

    @LazyProperty
    def size(self):
        """Total size in bytes of all files under this directory (recursive)."""
        size = 0
        for root, dirs, files in self.commit.walk(self.path):
            for f in files:
                size += f.size

        return size

    @LazyProperty
    def last_commit(self):
        """Last commit that touched this path, with common fields pre-loaded."""
        if self.commit:
            pre_load = ["author", "date", "message", "parents"]
            return self.commit.get_path_commit(self.path, pre_load=pre_load)
        raise NodeError(
            "Cannot retrieve last commit of the file without "
            "related commit attribute")

    def __repr__(self):
        return '<%s %r @ %s>' % (self.__class__.__name__, self.path,
                                 getattr(self.commit, 'short_id', ''))
785
785
786
786
class RootNode(DirNode):
    """
    Directory node anchored at the repository root (empty relative path).
    """

    def __init__(self, nodes=(), commit=None):
        # the root always lives at the empty path; delegate everything else
        super(RootNode, self).__init__(path='', nodes=nodes, commit=commit)

    def __repr__(self):
        return '<{}>'.format(self.__class__.__name__)
797
797
798
798
class SubModuleNode(Node):
    """
    represents a SubModule of Git or SubRepo of Mercurial
    """
    # submodules never expose content directly
    is_binary = False
    size = 0

    def __init__(self, name, url=None, commit=None, alias=None):
        self.path = name
        self.kind = NodeKind.SUBMODULE
        self.alias = alias

        # we have to use EmptyCommit here since this can point to svn/git/hg
        # submodules we cannot get from repository
        self.commit = EmptyCommit(str(commit), alias=alias)
        self.url = url or self._extract_submodule_url()

    def __repr__(self):
        return '<%s %r @ %s>' % (self.__class__.__name__, self.path,
                                 getattr(self.commit, 'short_id', ''))

    def _extract_submodule_url(self):
        # TODO: find a way to parse gits submodule file and extract the
        # linking URL
        return self.path

    @LazyProperty
    def name(self):
        """
        Returns name of the node so if its path
        then only last part is returned.
        """
        org = safe_unicode(self.path.rstrip('/').split('/')[-1])
        return '%s @ %s' % (org, self.commit.short_id)
833
833
834
834
class LargeFileNode(FileNode):
    """
    FileNode variant whose content lives in the largefile store on disk;
    ``path`` is an absolute filesystem path into that store.
    """

    def __init__(self, path, url=None, commit=None, alias=None, org_path=None):
        # note: FileNode.__init__ is deliberately not called; largefile
        # nodes are backed by the filesystem, not a commit
        self.path = path
        self.org_path = org_path
        self.kind = NodeKind.LARGEFILE
        self.alias = alias
        self._content = ''

    def _validate_path(self, path):
        """
        we override check since the LargeFileNode path is system absolute
        """
        pass

    def __repr__(self):
        return '<%s %r>' % (type(self).__name__, self.path)

    @LazyProperty
    def size(self):
        """Size in bytes of the stored file on disk."""
        stat_result = os.stat(self.path)
        return stat_result.st_size

    @LazyProperty
    def raw_bytes(self):
        """Read and return the whole largefile content from disk."""
        with open(self.path, 'rb') as file_obj:
            return file_obj.read()

    @LazyProperty
    def name(self):
        """
        Overwrites name to be the org lf path
        """
        return self.org_path

    def stream_bytes(self):
        """Yield the file content from disk in 16 KiB chunks."""
        chunk_size = 16 * 1024
        with open(self.path, 'rb') as file_obj:
            chunk = file_obj.read(chunk_size)
            while chunk:
                yield chunk
                chunk = file_obj.read(chunk_size)
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
General Comments 0
You need to be logged in to leave comments. Login now