##// END OF EJS Templates
FOLD: into unicode changes
super-admin -
r4959:00826968 default
parent child Browse files
Show More

The requested changes are too big and content was truncated. Show full diff

@@ -1,2530 +1,2533 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 authentication and permission libraries
23 23 """
24 24
25 25 import os
26 26
27 27 import colander
28 28 import time
29 29 import collections
30 30 import fnmatch
31 31 import itertools
32 32 import logging
33 33 import random
34 34 import traceback
35 35 from functools import wraps
36 36
37 37 import ipaddress
38 38
39 39 from pyramid.httpexceptions import HTTPForbidden, HTTPFound, HTTPNotFound
40 40 from sqlalchemy.orm.exc import ObjectDeletedError
41 41 from sqlalchemy.orm import joinedload
42 42 from zope.cachedescriptors.property import Lazy as LazyProperty
43 43
44 44 import rhodecode
45 45 from rhodecode.model import meta
46 46 from rhodecode.model.meta import Session
47 47 from rhodecode.model.user import UserModel
48 48 from rhodecode.model.db import (
49 49 false, User, Repository, Permission, UserToPerm, UserGroupToPerm, UserGroupMember,
50 50 UserIpMap, UserApiKeys, RepoGroup, UserGroup, UserNotice)
51 51 from rhodecode.lib import rc_cache
52 52 from rhodecode.lib.utils import (
53 53 get_repo_slug, get_repo_group_slug, get_user_group_slug)
54 54 from rhodecode.lib.type_utils import aslist
55 55 from rhodecode.lib.hash_utils import sha1, sha256, md5
56 56 from rhodecode.lib.str_utils import ascii_bytes, safe_str, safe_int, safe_bytes
57 57 from rhodecode.lib.caching_query import FromCache
58 58
59 59
60 60 if rhodecode.is_unix:
61 61 import bcrypt
62 62
63 63 log = logging.getLogger(__name__)
64 64
65 65 csrf_token_key = "csrf_token"
66 66
67 67
class PasswordGenerator(object):
    """
    Simple generator of random passwords drawn from configurable
    character sets.

    usage::

        passwd_gen = PasswordGenerator()
        # generate an 8-character password containing only big and
        # small letters of the alphabet
        passwd_gen.gen_password(8, passwd_gen.ALPHABETS_BIG_SMALL)
    """
    ALPHABETS_NUM = r'''1234567890'''
    ALPHABETS_SMALL = r'''qwertyuiopasdfghjklzxcvbnm'''
    ALPHABETS_BIG = r'''QWERTYUIOPASDFGHJKLZXCVBNM'''
    ALPHABETS_SPECIAL = r'''`-=[]\;',./~!@#$%^&*()_+{}|:"<>?'''
    ALPHABETS_FULL = ALPHABETS_BIG + ALPHABETS_SMALL \
        + ALPHABETS_NUM + ALPHABETS_SPECIAL
    ALPHABETS_ALPHANUM = ALPHABETS_BIG + ALPHABETS_SMALL + ALPHABETS_NUM
    ALPHABETS_BIG_SMALL = ALPHABETS_BIG + ALPHABETS_SMALL
    ALPHABETS_ALPHANUM_BIG = ALPHABETS_BIG + ALPHABETS_NUM
    ALPHABETS_ALPHANUM_SMALL = ALPHABETS_SMALL + ALPHABETS_NUM

    def __init__(self, passwd=''):
        self.passwd = passwd

    def gen_password(self, length, type_=None):
        """
        Generate, store and return a random password.

        :param length: number of characters to generate
        :param type_: character set to draw from, defaults to
            ``ALPHABETS_FULL``
        """
        # passwords are security-sensitive: use the `secrets` CSPRNG
        # instead of the predictable `random` module
        import secrets
        if type_ is None:
            type_ = self.ALPHABETS_FULL
        self.passwd = ''.join(secrets.choice(type_) for _ in range(length))
        return self.passwd
97 97
98 98
99 99 class _RhodeCodeCryptoBase(object):
100 100 ENC_PREF = None
101 101
102 102 def hash_create(self, str_):
103 103 """
104 104 hash the string using
105 105
106 106 :param str_: password to hash
107 107 """
108 108 raise NotImplementedError
109 109
110 110 def hash_check_with_upgrade(self, password, hashed):
111 111 """
112 112 Returns tuple in which first element is boolean that states that
113 113 given password matches it's hashed version, and the second is new hash
114 114 of the password, in case this password should be migrated to new
115 115 cipher.
116 116 """
117 117 checked_hash = self.hash_check(password, hashed)
118 118 return checked_hash, None
119 119
120 120 def hash_check(self, password, hashed):
121 121 """
122 122 Checks matching password with it's hashed value.
123 123
124 124 :param password: password
125 125 :param hashed: password in hashed form
126 126 """
127 127 raise NotImplementedError
128 128
129 129 def _assert_bytes(self, value):
130 130 """
131 131 Passing in an `unicode` object can lead to hard to detect issues
132 132 if passwords contain non-ascii characters. Doing a type check
133 133 during runtime, so that such mistakes are detected early on.
134 134 """
135 135 if not isinstance(value, str):
136 136 raise TypeError(
137 137 "Bytestring required as input, got %r." % (value, ))
138 138
139 139
class _RhodeCodeCryptoBCrypt(_RhodeCodeCryptoBase):
    # bcrypt output with cost factor 10 starts with one of these prefixes
    ENC_PREF = ('$2a$10', '$2b$10')

    def hash_create(self, str_):
        """
        Create a bcrypt hash (cost factor 10) of the given password.

        :param str_: password to hash
        """
        self._assert_bytes(str_)
        # NOTE(review): `bcrypt.hashpw` expects bytes on Python 3 while
        # `_assert_bytes` enforces `str` -- confirm the encoding contract
        # of this call as part of the unicode migration
        return bcrypt.hashpw(str_, bcrypt.gensalt(10))

    def hash_check_with_upgrade(self, password, hashed):
        """
        Returns tuple in which first element is boolean that states that
        given password matches it's hashed version, and the second is new hash
        of the password, in case this password should be migrated to new
        cipher.

        This implements special upgrade logic which works like that:
         - check if the given password == bcrypted hash, if yes then we
           properly used password and it was already in bcrypt. Proceed
           without any changes
         - if bcrypt hash check is not working try with sha256. If hash compare
           is ok, it means we using correct but old hashed password. indicate
           hash change and proceed
        """

        new_hash = None

        # regular pw check
        password_match_bcrypt = self.hash_check(password, hashed)

        # now we want to know if the password was maybe from sha256
        # basically calling _RhodeCodeCryptoSha256().hash_check()
        if not password_match_bcrypt:
            if _RhodeCodeCryptoSha256().hash_check(password, hashed):
                new_hash = self.hash_create(password)  # make new bcrypt hash
                password_match_bcrypt = True

        return password_match_bcrypt, new_hash

    def hash_check(self, password, hashed):
        """
        Checks matching password with it's hashed value.

        :param password: password
        :param hashed: password in hashed form
        """
        self._assert_bytes(password)
        try:
            # re-hash with the salt embedded in `hashed` and compare
            return bcrypt.hashpw(password, hashed) == hashed
        except ValueError as e:
            # we're having a invalid salt here probably, we should not crash
            # just return with False as it would be a wrong password.
            log.debug('Failed to check password hash using bcrypt %s',
                      safe_str(e))

        return False
194 194
195 195
class _RhodeCodeCryptoSha256(_RhodeCodeCryptoBase):
    """Legacy sha256-based password backend, kept for hash migration."""

    ENC_PREF = '_'

    def hash_create(self, str_):
        """
        Create a sha256 hash of the given password.

        :param str_: password to hash
        """
        self._assert_bytes(str_)
        return sha256(str_)

    def hash_check(self, password, hashed):
        """
        Check a cleartext password against its sha256-hashed form.

        :param password: password
        :param hashed: password in hashed form
        """
        self._assert_bytes(password)
        computed = sha256(password)
        return computed == hashed
212 212
213 213
class _RhodeCodeCryptoTest(_RhodeCodeCryptoBase):
    """Fast sha1-based backend used only when running the test suite."""

    ENC_PREF = '_'

    def hash_create(self, str_):
        """
        Create a sha1 hash of the given password.

        :param str_: password to hash
        """
        self._assert_bytes(str_)
        return sha1(str_)

    def hash_check(self, password, hashed):
        """
        Check a cleartext password against its sha1-hashed form.

        :param password: password
        :param hashed: password in hashed form
        """
        self._assert_bytes(password)
        computed = sha1(password)
        return computed == hashed
230 230
231 231
def crypto_backend():
    """
    Return the matching crypto backend.

    Under the test suite the cheap sha1-based backend is selected, because
    bcrypt is expensive to calculate and would slow tests down.
    """
    if rhodecode.is_test:
        return _RhodeCodeCryptoTest()
    return _RhodeCodeCryptoBCrypt()
245 245
246 246
def get_crypt_password(password):
    """
    Create the hash of `password` with the active crypto backend.

    :param password: The cleartext password.
    :type password: unicode
    """
    cleartext = safe_str(password)
    return crypto_backend().hash_create(cleartext)
256 256
257 257
def check_password(password, hashed):
    """
    Check if the value in `password` matches the hash in `hashed`.

    :param password: The cleartext password.
    :type password: unicode

    :param hashed: The expected hashed version of the password.
    :type hashed: The hash has to be passed in in text representation.
    """
    cleartext = safe_str(password)
    return crypto_backend().hash_check(cleartext, hashed)
270 270
271 271
def generate_auth_token(data, salt=None):
    """
    Generates API KEY (auth token) digest from given string.

    :param data: seed string the token is derived from
    :param salt: optional salt bytes; 16 random bytes when not given
    """

    if salt is None:
        salt = os.urandom(16)
    # BUG FIX: `data` is typically a str while os.urandom() returns bytes;
    # concatenating the two raises TypeError on Python 3 -- normalize to
    # bytes before hashing
    return sha1(safe_bytes(data) + salt)
280 280
281 281
def get_came_from(request):
    """
    Return the request path plus query string, sanitized by removing any
    ``auth_token`` parameter so the token cannot leak into redirect URLs.
    """
    came_from = request.path

    if 'auth_token' in request.GET:
        # sanitize the request and remove auth_token for redirection
        request.GET.pop('auth_token')

    query_string = request.query_string
    if query_string:
        came_from = f'{came_from}?{query_string}'

    return came_from
297 297
298 298
class CookieStoreWrapper(object):
    """
    Uniform ``get`` access over a session cookie store, which may be either
    a plain dict or an ``AuthUser`` instance.
    """

    def __init__(self, cookie_store):
        # raw underlying store; its type is inspected on every lookup
        self.cookie_store = cookie_store

    def __repr__(self):
        return f'CookieStore<{self.cookie_store}>'

    def get(self, key, other=None):
        """Return `key` from the store, or `other` when missing."""
        store = self.cookie_store
        if isinstance(store, dict):
            return store.get(key, other)
        if isinstance(store, AuthUser):
            return store.__dict__.get(key, other)
312 312
313 313
def _cached_perms_data(user_id, scope, user_is_admin,
                       user_inherit_default_permissions, explicit, algo,
                       calculate_super_admin):
    """
    Compute the full permission structure for a user; this is the worker
    function wrapped by the permission cache (see `PermissionCalculator`
    for parameter semantics).
    """

    permissions = PermissionCalculator(
        user_id, scope, user_is_admin, user_inherit_default_permissions,
        explicit, algo, calculate_super_admin)
    return permissions.calculate()
322 322
323 323
class PermOrigin(object):
    """
    Labels describing where a calculated permission came from; stored next
    to each permission in the `PermOriginDict` stacks for auditing.
    """

    SUPER_ADMIN = 'superadmin'
    ARCHIVED = 'archived'

    # %s placeholders are filled with the granting user/user-group name
    REPO_USER = 'user:%s'
    REPO_USERGROUP = 'usergroup:%s'
    REPO_OWNER = 'repo.owner'
    REPO_DEFAULT = 'repo.default'
    REPO_DEFAULT_NO_INHERIT = 'repo.default.no.inherit'
    REPO_PRIVATE = 'repo.private'

    REPOGROUP_USER = 'user:%s'
    REPOGROUP_USERGROUP = 'usergroup:%s'
    REPOGROUP_OWNER = 'group.owner'
    REPOGROUP_DEFAULT = 'group.default'
    REPOGROUP_DEFAULT_NO_INHERIT = 'group.default.no.inherit'

    USERGROUP_USER = 'user:%s'
    USERGROUP_USERGROUP = 'usergroup:%s'
    USERGROUP_OWNER = 'usergroup.owner'
    USERGROUP_DEFAULT = 'usergroup.default'
    USERGROUP_DEFAULT_NO_INHERIT = 'usergroup.default.no.inherit'
346 346
347 347
class PermOriginDict(dict):
    """
    A dict subclass that records, for every key, the full history of
    permission assignments together with their origins.

    ``__setitem__`` expects a ``(perm, origin, obj_id)`` tuple and stores
    only ``perm`` as the mapping value, so ``__getitem__`` returns just the
    permission. The assignment history is kept in ``perm_origin_stack``.

    >>> perms = PermOriginDict()
    >>> perms['resource'] = 'read', 'default', 1
    >>> perms['resource']
    'read'
    >>> perms['resource'] = 'write', 'admin', 2
    >>> perms['resource']
    'write'
    >>> perms.perm_origin_stack
    {'resource': [('read', 'default', 1), ('write', 'admin', 2)]}
    """

    def __init__(self, *args, **kw):
        super().__init__(*args, **kw)
        self.perm_origin_stack = collections.OrderedDict()

    def __setitem__(self, key, perm_origin_obj_id):
        if isinstance(perm_origin_obj_id, tuple):
            # regular assignment: record the (perm, origin, obj_id) entry
            perm, origin, obj_id = perm_origin_obj_id
            self.perm_origin_stack.setdefault(key, []).append(
                (perm, origin, obj_id))
        else:
            # plain value (most likely restored via pickle): no origin info
            perm = perm_origin_obj_id
        dict.__setitem__(self, key, perm)
381 381
382 382
class BranchPermOriginDict(dict):
    """
    Dedicated branch permissions dict, tracking pattern permissions along
    with their origins.

    ``__setitem__`` expects a ``({pattern: perm}, origin)`` tuple; the
    pattern mapping itself is stored as the value, while the per-pattern
    history of ``(perm, origin)`` entries is aggregated in
    ``perm_origin_stack``.

    >>> perms = BranchPermOriginDict()
    >>> perms['resource'] = {'*pattern': 'read'}, 'default'
    >>> perms['resource']
    {'*pattern': 'read'}
    >>> perms['resource'] = {'*pattern': 'write'}, 'admin'
    >>> perms['resource']
    {'*pattern': 'write'}
    >>> perms.perm_origin_stack
    {'resource': {'*pattern': [('read', 'default'), ('write', 'admin')]}}
    """

    def __init__(self, *args, **kw):
        super().__init__(*args, **kw)
        self.perm_origin_stack = collections.OrderedDict()

    def __setitem__(self, key, pattern_perm_origin):
        if isinstance(pattern_perm_origin, tuple):
            pattern_perm, origin = pattern_perm_origin
            # aggregate the (perm, origin) history for every branch pattern
            key_stack = self.perm_origin_stack.setdefault(key, {})
            for pattern, perm in pattern_perm.items():
                key_stack.setdefault(pattern, []).append((perm, origin))
        else:
            # plain value (most likely restored via pickle): nothing to track
            pattern_perm = pattern_perm_origin
        dict.__setitem__(self, key, pattern_perm)
415 415
416 416
417 417 class PermissionCalculator(object):
418 418
    def __init__(
            self, user_id, scope, user_is_admin,
            user_inherit_default_permissions, explicit, algo,
            calculate_super_admin_as_user=False):
        """
        :param user_id: id of the user permissions are calculated for
        :param scope: optional dict narrowing the calculation to a single
            object; recognized keys are `repo_id`, `repo_group_id` and
            `user_group_id`
        :param user_is_admin: whether the user is a super-admin
        :param user_inherit_default_permissions: inherit permissions from
            the `default` user
        :param explicit: use only explicitly set permissions instead of
            merging with current ones
        :param algo: conflict resolution strategy consumed by
            `_choose_permission` -- allowed values not visible here,
            TODO confirm
        :param calculate_super_admin_as_user: calculate a super-admin's
            permissions like a regular user's, skipping the admin
            short-circuit
        """

        self.user_id = user_id
        self.user_is_admin = user_is_admin
        self.inherit_default_permissions = user_inherit_default_permissions
        self.explicit = explicit
        self.algo = algo
        self.calculate_super_admin_as_user = calculate_super_admin_as_user

        scope = scope or {}
        self.scope_repo_id = scope.get('repo_id')
        self.scope_repo_group_id = scope.get('repo_group_id')
        self.scope_user_group_id = scope.get('user_group_id')

        # permissions of the `default` user are the inheritance base
        self.default_user_id = User.get_default_user(cache=True).user_id

        self.permissions_repositories = PermOriginDict()
        self.permissions_repository_groups = PermOriginDict()
        self.permissions_user_groups = PermOriginDict()
        self.permissions_repository_branches = BranchPermOriginDict()
        self.permissions_global = set()

        # pre-fetch default-user permissions per object type, narrowed to
        # the requested scope
        self.default_repo_perms = Permission.get_default_repo_perms(
            self.default_user_id, self.scope_repo_id)
        self.default_repo_groups_perms = Permission.get_default_group_perms(
            self.default_user_id, self.scope_repo_group_id)
        self.default_user_group_perms = \
            Permission.get_default_user_group_perms(
                self.default_user_id, self.scope_user_group_id)

        # default branch perms
        self.default_branch_repo_perms = \
            Permission.get_default_repo_branch_perms(
                self.default_user_id, self.scope_repo_id)
456 456
457 457 def calculate(self):
458 458 if self.user_is_admin and not self.calculate_super_admin_as_user:
459 459 return self._calculate_super_admin_permissions()
460 460
461 461 self._calculate_global_default_permissions()
462 462 self._calculate_global_permissions()
463 463 self._calculate_default_permissions()
464 464 self._calculate_repository_permissions()
465 465 self._calculate_repository_branch_permissions()
466 466 self._calculate_repository_group_permissions()
467 467 self._calculate_user_group_permissions()
468 468 return self._permission_structure()
469 469
    def _calculate_super_admin_permissions(self):
        """
        super-admin user have all default rights for repositories
        and groups set to admin
        """
        self.permissions_global.add('hg.admin')
        self.permissions_global.add('hg.create.write_on_repogroup.true')

        # repositories: everything becomes repository.admin
        for perm in self.default_repo_perms:
            r_k = perm.UserRepoToPerm.repository.repo_name
            obj_id = perm.UserRepoToPerm.repository.repo_id
            archived = perm.UserRepoToPerm.repository.archived
            p = 'repository.admin'
            self.permissions_repositories[r_k] = p, PermOrigin.SUPER_ADMIN, obj_id
            # special case for archived repositories, which we block still even for
            # super admins
            if archived:
                p = 'repository.read'
                self.permissions_repositories[r_k] = p, PermOrigin.ARCHIVED, obj_id

        # repository groups: everything becomes group.admin
        for perm in self.default_repo_groups_perms:
            rg_k = perm.UserRepoGroupToPerm.group.group_name
            obj_id = perm.UserRepoGroupToPerm.group.group_id
            p = 'group.admin'
            self.permissions_repository_groups[rg_k] = p, PermOrigin.SUPER_ADMIN, obj_id

        # user groups: everything becomes usergroup.admin
        for perm in self.default_user_group_perms:
            u_k = perm.UserUserGroupToPerm.user_group.users_group_name
            obj_id = perm.UserUserGroupToPerm.user_group.users_group_id
            p = 'usergroup.admin'
            self.permissions_user_groups[u_k] = p, PermOrigin.SUPER_ADMIN, obj_id

        # branch permissions
        # since super-admin also can have custom rule permissions
        # we *always* need to calculate those inherited from default, and also explicit
        self._calculate_default_permissions_repository_branches(
            user_inherit_object_permissions=False)
        self._calculate_repository_branch_permissions()

        return self._permission_structure()
513 513
    def _calculate_global_default_permissions(self):
        """
        global permissions taken from the default user
        """
        default_global_perms = UserToPerm.query()\
            .filter(UserToPerm.user_id == self.default_user_id)\
            .options(joinedload(UserToPerm.permission))

        for perm in default_global_perms:
            self.permissions_global.add(perm.permission.permission_name)

        if self.user_is_admin:
            # super-admins always get the global admin flags on top
            self.permissions_global.add('hg.admin')
            self.permissions_global.add('hg.create.write_on_repogroup.true')
528 528
    def _calculate_global_permissions(self):
        """
        Set global system permissions with user permissions or permissions
        taken from the user groups of the current user.

        The permissions include repo creating, repo group creating, forking
        etc.
        """

        # now we read the defined permissions and overwrite what we have set
        # before those can be configured from groups or users explicitly.

        # In case we want to extend this list we should make sure
        # this is in sync with User.DEFAULT_USER_PERMISSIONS definitions
        from rhodecode.model.permission import PermissionModel

        _configurable = frozenset([
            PermissionModel.FORKING_DISABLED, PermissionModel.FORKING_ENABLED,
            'hg.create.none', 'hg.create.repository',
            'hg.usergroup.create.false', 'hg.usergroup.create.true',
            'hg.repogroup.create.false', 'hg.repogroup.create.true',
            'hg.create.write_on_repogroup.false', 'hg.create.write_on_repogroup.true',
            'hg.inherit_default_perms.false', 'hg.inherit_default_perms.true'
        ])

        # USER GROUPS comes first user group global permissions
        user_perms_from_users_groups = Session().query(UserGroupToPerm)\
            .options(joinedload(UserGroupToPerm.permission))\
            .join((UserGroupMember, UserGroupToPerm.users_group_id ==
                   UserGroupMember.users_group_id))\
            .filter(UserGroupMember.user_id == self.user_id)\
            .order_by(UserGroupToPerm.users_group_id)\
            .all()

        # need to group here by groups since user can be in more than
        # one group, so we get all groups
        _explicit_grouped_perms = [
            [x, list(y)] for x, y in
            itertools.groupby(user_perms_from_users_groups,
                              lambda _x: _x.users_group)]

        for gr, perms in _explicit_grouped_perms:
            # since user can be in multiple groups iterate over them and
            # select the lowest permissions first (more explicit)
            # TODO(marcink): do this^^

            # group doesn't inherit default permissions so we actually set them
            if not gr.inherit_default_permissions:
                # NEED TO IGNORE all previously set configurable permissions
                # and replace them with explicitly set from this user
                # group permissions
                self.permissions_global = self.permissions_global.difference(
                    _configurable)
                for perm in perms:
                    self.permissions_global.add(perm.permission.permission_name)

        # user explicit global permissions
        user_perms = Session().query(UserToPerm)\
            .options(joinedload(UserToPerm.permission))\
            .filter(UserToPerm.user_id == self.user_id).all()

        if not self.inherit_default_permissions:
            # NEED TO IGNORE all configurable permissions and
            # replace them with explicitly set from this user permissions
            self.permissions_global = self.permissions_global.difference(
                _configurable)
            for perm in user_perms:
                self.permissions_global.add(perm.permission.permission_name)
597 597
    def _calculate_default_permissions_repositories(self, user_inherit_object_permissions):
        """
        Apply default-user permissions to repositories, then adjust for
        inheritance, private repos, ownership, super-admin status and
        archived state (in that order; later writes win).
        """
        for perm in self.default_repo_perms:
            r_k = perm.UserRepoToPerm.repository.repo_name
            obj_id = perm.UserRepoToPerm.repository.repo_id
            archived = perm.UserRepoToPerm.repository.archived
            p = perm.Permission.permission_name
            o = PermOrigin.REPO_DEFAULT
            self.permissions_repositories[r_k] = p, o, obj_id

            # if we decide this user isn't inheriting permissions from
            # default user we set him to .none so only explicit
            # permissions work
            if not user_inherit_object_permissions:
                p = 'repository.none'
                o = PermOrigin.REPO_DEFAULT_NO_INHERIT
                self.permissions_repositories[r_k] = p, o, obj_id

            if perm.Repository.private and not (
                    perm.Repository.user_id == self.user_id):
                # disable defaults for private repos,
                p = 'repository.none'
                o = PermOrigin.REPO_PRIVATE
                self.permissions_repositories[r_k] = p, o, obj_id

            elif perm.Repository.user_id == self.user_id:
                # set admin if owner
                p = 'repository.admin'
                o = PermOrigin.REPO_OWNER
                self.permissions_repositories[r_k] = p, o, obj_id

            if self.user_is_admin:
                p = 'repository.admin'
                o = PermOrigin.SUPER_ADMIN
                self.permissions_repositories[r_k] = p, o, obj_id

            # finally in case of archived repositories, we downgrade higher
            # permissions to read
            if archived:
                current_perm = self.permissions_repositories[r_k]
                if current_perm in ['repository.write', 'repository.admin']:
                    p = 'repository.read'
                    o = PermOrigin.ARCHIVED
                    self.permissions_repositories[r_k] = p, o, obj_id
641 641
    def _calculate_default_permissions_repository_branches(self, user_inherit_object_permissions):
        """
        Apply default-user branch pattern permissions.

        NOTE(review): unlike the sibling `_calculate_default_permissions_*`
        methods, `user_inherit_object_permissions` is not used here --
        confirm whether branch perms should also honor no-inherit.
        """
        for perm in self.default_branch_repo_perms:

            r_k = perm.UserRepoToPerm.repository.repo_name
            p = perm.Permission.permission_name
            pattern = perm.UserToRepoBranchPermission.branch_pattern
            o = PermOrigin.REPO_USER % perm.UserRepoToPerm.user.username

            if not self.explicit:
                # merge with the currently registered permission for the
                # same pattern, picking the winner via the configured algo
                cur_perm = self.permissions_repository_branches.get(r_k)
                if cur_perm:
                    # NOTE(review): raises KeyError if `pattern` was not
                    # registered for this repo yet -- verify upstream
                    cur_perm = cur_perm[pattern]
                cur_perm = cur_perm or 'branch.none'

                p = self._choose_permission(p, cur_perm)

            # NOTE(marcink): register all pattern/perm instances in this
            # special dict that aggregates entries
            self.permissions_repository_branches[r_k] = {pattern: p}, o
661 661
    def _calculate_default_permissions_repository_groups(self, user_inherit_object_permissions):
        """
        Apply default-user permissions to repository groups, then adjust
        for inheritance, ownership and super-admin status (later writes win).
        """
        for perm in self.default_repo_groups_perms:
            rg_k = perm.UserRepoGroupToPerm.group.group_name
            obj_id = perm.UserRepoGroupToPerm.group.group_id
            p = perm.Permission.permission_name
            o = PermOrigin.REPOGROUP_DEFAULT
            self.permissions_repository_groups[rg_k] = p, o, obj_id

            # if we decide this user isn't inheriting permissions from default
            # user we set him to .none so only explicit permissions work
            if not user_inherit_object_permissions:
                p = 'group.none'
                o = PermOrigin.REPOGROUP_DEFAULT_NO_INHERIT
                self.permissions_repository_groups[rg_k] = p, o, obj_id

            if perm.RepoGroup.user_id == self.user_id:
                # set admin if owner
                p = 'group.admin'
                o = PermOrigin.REPOGROUP_OWNER
                self.permissions_repository_groups[rg_k] = p, o, obj_id

            if self.user_is_admin:
                p = 'group.admin'
                o = PermOrigin.SUPER_ADMIN
                self.permissions_repository_groups[rg_k] = p, o, obj_id
687 687
    def _calculate_default_permissions_user_groups(self, user_inherit_object_permissions):
        """
        Apply default-user permissions to user groups, then adjust for
        inheritance, ownership and super-admin status (later writes win).
        """
        for perm in self.default_user_group_perms:
            u_k = perm.UserUserGroupToPerm.user_group.users_group_name
            obj_id = perm.UserUserGroupToPerm.user_group.users_group_id
            p = perm.Permission.permission_name
            o = PermOrigin.USERGROUP_DEFAULT
            self.permissions_user_groups[u_k] = p, o, obj_id

            # if we decide this user isn't inheriting permissions from default
            # user we set him to .none so only explicit permissions work
            if not user_inherit_object_permissions:
                p = 'usergroup.none'
                o = PermOrigin.USERGROUP_DEFAULT_NO_INHERIT
                self.permissions_user_groups[u_k] = p, o, obj_id

            if perm.UserGroup.user_id == self.user_id:
                # set admin if owner
                p = 'usergroup.admin'
                o = PermOrigin.USERGROUP_OWNER
                self.permissions_user_groups[u_k] = p, o, obj_id

            if self.user_is_admin:
                p = 'usergroup.admin'
                o = PermOrigin.SUPER_ADMIN
                self.permissions_user_groups[u_k] = p, o, obj_id
713 713
714 714 def _calculate_default_permissions(self):
715 715 """
716 716 Set default user permissions for repositories, repository branches,
717 717 repository groups, user groups taken from the default user.
718 718
719 719 Calculate inheritance of object permissions based on what we have now
720 720 in GLOBAL permissions. We check if .false is in GLOBAL since this is
721 721 explicitly set. Inherit is the opposite of .false being there.
722 722
723 723 .. note::
724 724
725 725 the syntax is little bit odd but what we need to check here is
726 726 the opposite of .false permission being in the list so even for
727 727 inconsistent state when both .true/.false is there
728 728 .false is more important
729 729
730 730 """
731 731 user_inherit_object_permissions = not ('hg.inherit_default_perms.false'
732 732 in self.permissions_global)
733 733
734 734 # default permissions inherited from `default` user permissions
735 735 self._calculate_default_permissions_repositories(
736 736 user_inherit_object_permissions)
737 737
738 738 self._calculate_default_permissions_repository_branches(
739 739 user_inherit_object_permissions)
740 740
741 741 self._calculate_default_permissions_repository_groups(
742 742 user_inherit_object_permissions)
743 743
744 744 self._calculate_default_permissions_user_groups(
745 745 user_inherit_object_permissions)
746 746
    def _calculate_repository_permissions(self):
        """
        Repository access permissions for the current user.

        Check if the user is part of user groups for this repository and
        fill in the permission from it. `_choose_permission` decides of which
        permission should be selected based on selected method.
        """

        # user group for repositories permissions
        user_repo_perms_from_user_group = Permission\
            .get_default_repo_perms_from_user_group(
                self.user_id, self.scope_repo_id)

        # counts how many of the user's groups set a perm on each repo, so
        # conflicts between groups can be resolved via _choose_permission
        multiple_counter = collections.defaultdict(int)
        for perm in user_repo_perms_from_user_group:
            r_k = perm.UserGroupRepoToPerm.repository.repo_name
            obj_id = perm.UserGroupRepoToPerm.repository.repo_id
            multiple_counter[r_k] += 1
            p = perm.Permission.permission_name
            o = PermOrigin.REPO_USERGROUP % perm.UserGroupRepoToPerm\
                .users_group.users_group_name

            if multiple_counter[r_k] > 1:
                # more than one of the user's groups targets this repo --
                # resolve against the previously stored permission
                cur_perm = self.permissions_repositories[r_k]
                p = self._choose_permission(p, cur_perm)

            self.permissions_repositories[r_k] = p, o, obj_id

            if perm.Repository.user_id == self.user_id:
                # set admin if owner
                p = 'repository.admin'
                o = PermOrigin.REPO_OWNER
                self.permissions_repositories[r_k] = p, o, obj_id

            if self.user_is_admin:
                p = 'repository.admin'
                o = PermOrigin.SUPER_ADMIN
                self.permissions_repositories[r_k] = p, o, obj_id

        # user explicit permissions for repositories, overrides any specified
        # by the group permission
        user_repo_perms = Permission.get_default_repo_perms(
            self.user_id, self.scope_repo_id)
        for perm in user_repo_perms:
            r_k = perm.UserRepoToPerm.repository.repo_name
            obj_id = perm.UserRepoToPerm.repository.repo_id
            archived = perm.UserRepoToPerm.repository.archived
            p = perm.Permission.permission_name
            o = PermOrigin.REPO_USER % perm.UserRepoToPerm.user.username

            if not self.explicit:
                # merge with whatever was computed so far instead of
                # overriding it outright
                cur_perm = self.permissions_repositories.get(
                    r_k, 'repository.none')
                p = self._choose_permission(p, cur_perm)

            self.permissions_repositories[r_k] = p, o, obj_id

            if perm.Repository.user_id == self.user_id:
                # set admin if owner
                p = 'repository.admin'
                o = PermOrigin.REPO_OWNER
                self.permissions_repositories[r_k] = p, o, obj_id

            if self.user_is_admin:
                p = 'repository.admin'
                o = PermOrigin.SUPER_ADMIN
                self.permissions_repositories[r_k] = p, o, obj_id

            # finally in case of archived repositories, we downgrade higher
            # permissions to read
            if archived:
                current_perm = self.permissions_repositories[r_k]
                if current_perm in ['repository.write', 'repository.admin']:
                    p = 'repository.read'
                    o = PermOrigin.ARCHIVED
                    self.permissions_repositories[r_k] = p, o, obj_id
824 824
    def _calculate_repository_branch_permissions(self):
        """
        Branch permissions for the current user, per repository.

        Two passes: first permissions inherited via user groups (merged with
        `_choose_permission` when the user is in several groups), then the
        user's explicit branch permissions, which override the group ones
        unless `self.explicit` is False (then they are merged as well).
        """
        # user group for repositories permissions
        user_repo_branch_perms_from_user_group = Permission\
            .get_default_repo_branch_perms_from_user_group(
                self.user_id, self.scope_repo_id)

        multiple_counter = collections.defaultdict(int)
        for perm in user_repo_branch_perms_from_user_group:
            r_k = perm.UserGroupRepoToPerm.repository.repo_name
            p = perm.Permission.permission_name
            pattern = perm.UserGroupToRepoBranchPermission.branch_pattern
            o = PermOrigin.REPO_USERGROUP % perm.UserGroupRepoToPerm\
                .users_group.users_group_name

            multiple_counter[r_k] += 1
            if multiple_counter[r_k] > 1:
                # same repo reachable through more than one user group:
                # resolve the conflict for this pattern via the chosen algo
                cur_perm = self.permissions_repository_branches[r_k][pattern]
                p = self._choose_permission(p, cur_perm)

            # value is a ({pattern: perm}, origin) pair; presumably the target
            # dict type aggregates patterns per repo — TODO(review): confirm
            self.permissions_repository_branches[r_k] = {pattern: p}, o

        # user explicit branch permissions for repositories, overrides
        # any specified by the group permission
        user_repo_branch_perms = Permission.get_default_repo_branch_perms(
            self.user_id, self.scope_repo_id)

        for perm in user_repo_branch_perms:

            r_k = perm.UserRepoToPerm.repository.repo_name
            p = perm.Permission.permission_name
            pattern = perm.UserToRepoBranchPermission.branch_pattern
            o = PermOrigin.REPO_USER % perm.UserRepoToPerm.user.username

            if not self.explicit:
                # merge with whatever was computed in the group pass
                cur_perm = self.permissions_repository_branches.get(r_k)
                if cur_perm:
                    cur_perm = cur_perm[pattern]
                cur_perm = cur_perm or 'branch.none'
                p = self._choose_permission(p, cur_perm)

            # NOTE(marcink): register all pattern/perm instances in this
            # special dict that aggregates entries
            self.permissions_repository_branches[r_k] = {pattern: p}, o
868 868
    def _calculate_repository_group_permissions(self):
        """
        Repository group permissions for the current user.

        Check if the user is part of user groups for repository groups and
        fill in the permissions from it. `_choose_permission` decides of which
        permission should be selected based on selected method.
        """
        # user group for repo groups permissions
        user_repo_group_perms_from_user_group = Permission\
            .get_default_group_perms_from_user_group(
                self.user_id, self.scope_repo_group_id)

        multiple_counter = collections.defaultdict(int)
        for perm in user_repo_group_perms_from_user_group:
            rg_k = perm.UserGroupRepoGroupToPerm.group.group_name
            obj_id = perm.UserGroupRepoGroupToPerm.group.group_id
            multiple_counter[rg_k] += 1
            o = PermOrigin.REPOGROUP_USERGROUP % perm.UserGroupRepoGroupToPerm\
                .users_group.users_group_name
            p = perm.Permission.permission_name

            if multiple_counter[rg_k] > 1:
                # same repo group via several user groups: merge per algo
                cur_perm = self.permissions_repository_groups[rg_k]
                p = self._choose_permission(p, cur_perm)
            self.permissions_repository_groups[rg_k] = p, o, obj_id

            if perm.RepoGroup.user_id == self.user_id:
                # set admin if owner, even for member of other user group
                p = 'group.admin'
                o = PermOrigin.REPOGROUP_OWNER
                self.permissions_repository_groups[rg_k] = p, o, obj_id

            if self.user_is_admin:
                # super-admin always wins; intentionally applied last
                p = 'group.admin'
                o = PermOrigin.SUPER_ADMIN
                self.permissions_repository_groups[rg_k] = p, o, obj_id

        # user explicit permissions for repository groups
        user_repo_groups_perms = Permission.get_default_group_perms(
            self.user_id, self.scope_repo_group_id)
        for perm in user_repo_groups_perms:
            rg_k = perm.UserRepoGroupToPerm.group.group_name
            obj_id = perm.UserRepoGroupToPerm.group.group_id
            o = PermOrigin.REPOGROUP_USER % perm.UserRepoGroupToPerm\
                .user.username
            p = perm.Permission.permission_name

            if not self.explicit:
                # non-explicit mode merges with the group-derived permission
                cur_perm = self.permissions_repository_groups.get(rg_k, 'group.none')
                p = self._choose_permission(p, cur_perm)

            self.permissions_repository_groups[rg_k] = p, o, obj_id

            if perm.RepoGroup.user_id == self.user_id:
                # set admin if owner
                p = 'group.admin'
                o = PermOrigin.REPOGROUP_OWNER
                self.permissions_repository_groups[rg_k] = p, o, obj_id

            if self.user_is_admin:
                p = 'group.admin'
                o = PermOrigin.SUPER_ADMIN
                self.permissions_repository_groups[rg_k] = p, o, obj_id
933 933
    def _calculate_user_group_permissions(self):
        """
        User group permissions for the current user.

        Same two-pass scheme as the repo/repo-group calculators: permissions
        inherited through user-group membership first, then the user's
        explicit per-user-group permissions, with owner and super-admin
        overrides applied after each assignment.
        """
        # user group for user group permissions
        user_group_from_user_group = Permission\
            .get_default_user_group_perms_from_user_group(
                self.user_id, self.scope_user_group_id)

        multiple_counter = collections.defaultdict(int)
        for perm in user_group_from_user_group:
            ug_k = perm.UserGroupUserGroupToPerm.target_user_group.users_group_name
            obj_id = perm.UserGroupUserGroupToPerm.target_user_group.users_group_id
            multiple_counter[ug_k] += 1
            o = PermOrigin.USERGROUP_USERGROUP % perm.UserGroupUserGroupToPerm\
                .user_group.users_group_name
            p = perm.Permission.permission_name

            if multiple_counter[ug_k] > 1:
                # target user group reachable via several user groups: merge
                cur_perm = self.permissions_user_groups[ug_k]
                p = self._choose_permission(p, cur_perm)

            self.permissions_user_groups[ug_k] = p, o, obj_id

            if perm.UserGroup.user_id == self.user_id:
                # set admin if owner, even for member of other user group
                p = 'usergroup.admin'
                o = PermOrigin.USERGROUP_OWNER
                self.permissions_user_groups[ug_k] = p, o, obj_id

            if self.user_is_admin:
                p = 'usergroup.admin'
                o = PermOrigin.SUPER_ADMIN
                self.permissions_user_groups[ug_k] = p, o, obj_id

        # user explicit permission for user groups
        user_user_groups_perms = Permission.get_default_user_group_perms(
            self.user_id, self.scope_user_group_id)
        for perm in user_user_groups_perms:
            ug_k = perm.UserUserGroupToPerm.user_group.users_group_name
            obj_id = perm.UserUserGroupToPerm.user_group.users_group_id
            o = PermOrigin.USERGROUP_USER % perm.UserUserGroupToPerm\
                .user.username
            p = perm.Permission.permission_name

            if not self.explicit:
                # non-explicit mode merges with the group-derived permission
                cur_perm = self.permissions_user_groups.get(ug_k, 'usergroup.none')
                p = self._choose_permission(p, cur_perm)

            self.permissions_user_groups[ug_k] = p, o, obj_id

            if perm.UserGroup.user_id == self.user_id:
                # set admin if owner
                p = 'usergroup.admin'
                o = PermOrigin.USERGROUP_OWNER
                self.permissions_user_groups[ug_k] = p, o, obj_id

            if self.user_is_admin:
                p = 'usergroup.admin'
                o = PermOrigin.SUPER_ADMIN
                self.permissions_user_groups[ug_k] = p, o, obj_id
995 995
996 996 def _choose_permission(self, new_perm, cur_perm):
997 997 new_perm_val = Permission.PERM_WEIGHTS[new_perm]
998 998 cur_perm_val = Permission.PERM_WEIGHTS[cur_perm]
999 999 if self.algo == 'higherwin':
1000 1000 if new_perm_val > cur_perm_val:
1001 1001 return new_perm
1002 1002 return cur_perm
1003 1003 elif self.algo == 'lowerwin':
1004 1004 if new_perm_val < cur_perm_val:
1005 1005 return new_perm
1006 1006 return cur_perm
1007 1007
1008 1008 def _permission_structure(self):
1009 1009 return {
1010 1010 'global': self.permissions_global,
1011 1011 'repositories': self.permissions_repositories,
1012 1012 'repository_branches': self.permissions_repository_branches,
1013 1013 'repositories_groups': self.permissions_repository_groups,
1014 1014 'user_groups': self.permissions_user_groups,
1015 1015 }
1016 1016
1017 1017
def allowed_auth_token_access(view_name, auth_token, whitelist=None):
    """
    Check if given controller_name is in whitelist of auth token access
    """
    if not whitelist:
        from rhodecode import CONFIG
        whitelist = aslist(
            CONFIG.get('api_access_controllers_whitelist'), sep=',')

    # backward compat translation table: old controller -> new Pyramid view
    compat = {
        'ChangesetController:*': 'RepoCommitsView:*',
        'ChangesetController:changeset_patch': 'RepoCommitsView:repo_commit_patch',
        'ChangesetController:changeset_raw': 'RepoCommitsView:repo_commit_raw',
        'FilesController:raw': 'RepoCommitsView:repo_commit_raw',
        'FilesController:archivefile': 'RepoFilesView:repo_archivefile',
        'GistsController:*': 'GistView:*',
    }

    log.debug(
        'Allowed views for AUTH TOKEN access: %s', whitelist)

    auth_token_access_valid = False
    for entry in whitelist:
        # translate from old Controllers to Pyramid Views
        entry = compat.get(entry, entry)

        token_match = True
        if '@' in entry:
            # an entry may pin a specific AuthToken: `view@token`
            entry, allowed_token = entry.split('@', 1)
            token_match = auth_token == allowed_token

        if fnmatch.fnmatch(view_name, entry) and token_match:
            auth_token_access_valid = True
            break

    if auth_token_access_valid:
        log.debug('view: `%s` matches entry in whitelist: %s',
                  view_name, whitelist)
    else:
        msg = ('view: `%s` does *NOT* match any entry in whitelist: %s'
               % (view_name, whitelist))
        if auth_token:
            # if we use auth token key and don't have access it's a warning
            log.warning(msg)
        else:
            log.debug(msg)

    return auth_token_access_valid
1070 1070
1071 1071
class AuthUser(object):
    """
    A simple object that handles all attributes of a user in RhodeCode.

    It does a lookup based on API key, given user, or user present in session.
    Then it fills all required information for such a user. It also checks if
    anonymous access is enabled and if so, returns the default user as
    logged in.
    """
    # all known global permission names, sourced from the Permission model
    GLOBAL_PERMS = [x[0] for x in Permission.PERMS]
    # permission levels granting at least read access, per object type
    repo_read_perms = ['repository.read', 'repository.admin', 'repository.write']
    repo_group_read_perms = ['group.read', 'group.write', 'group.admin']
    user_group_read_perms = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
    def __init__(self, user_id=None, api_key=None, username=None, ip_addr=None):
        """
        Initialize and immediately resolve the user via `propagate_data`.

        :param user_id: database id of the user, if known
        :param api_key: raw auth token used for token-based lookup
        :param username: username used for name-based lookup
        :param ip_addr: client IP, used later for IP-restriction checks
        """
        self.user_id = user_id
        self._api_key = api_key

        # the attributes below are placeholders; propagate_data() overwrites
        # them from the database row of the resolved user
        self.api_key = None
        self.username = username
        self.ip_addr = ip_addr
        self.name = ''
        self.lastname = ''
        self.first_name = ''
        self.last_name = ''
        self.email = ''
        self.is_authenticated = False
        self.admin = False
        self.inherit_default_permissions = False
        self.password = ''

        self.anonymous_user = None  # propagated on propagate_data
        self.propagate_data()
        self._instance = None
        self._permissions_scoped_cache = {}  # used to bind scoped calculation
1107 1107
    @LazyProperty
    def permissions(self):
        # full permission tree, computed once per AuthUser instance
        return self.get_perms(user=self, cache=None)
1111 1111
1112 1112 @LazyProperty
1113 1113 def permissions_safe(self):
1114 1114 """
1115 1115 Filtered permissions excluding not allowed repositories
1116 1116 """
1117 1117 perms = self.get_perms(user=self, cache=None)
1118 1118
1119 1119 perms['repositories'] = {
1120 1120 k: v for k, v in perms['repositories'].items()
1121 1121 if v != 'repository.none'}
1122 1122 perms['repositories_groups'] = {
1123 1123 k: v for k, v in perms['repositories_groups'].items()
1124 1124 if v != 'group.none'}
1125 1125 perms['user_groups'] = {
1126 1126 k: v for k, v in perms['user_groups'].items()
1127 1127 if v != 'usergroup.none'}
1128 1128 perms['repository_branches'] = {
1129 1129 k: v for k, v in perms['repository_branches'].items()
1130 1130 if v != 'branch.none'}
1131 1131 return perms
1132 1132
    @LazyProperty
    def permissions_full_details(self):
        # like `permissions`, but super-admins get the fully calculated tree
        # instead of the usual admin shortcut
        return self.get_perms(
            user=self, cache=None, calculate_super_admin=True)
1137 1137
    def permissions_with_scope(self, scope):
        """
        Call the get_perms function with scoped data. The scope in that function
        narrows the SQL calls to the given ID of objects resulting in fetching
        just the particular permission we want to obtain. If scope is an empty
        dict then it basically narrows the scope to GLOBAL permissions only.

        :param scope: dict with optional repo_name/repo_id/user_group_id/
            repo_group_id keys. NOTE: mutated in place (repo_id injection).
        """
        if 'repo_name' in scope:
            obj = Repository.get_by_repo_name(scope['repo_name'])
            if obj:
                scope['repo_id'] = obj.repo_id
        # normalize into an ordered dict with fixed defaults so the derived
        # cache key is deterministic regardless of caller key order
        _scope = collections.OrderedDict()
        _scope['repo_id'] = -1
        _scope['user_group_id'] = -1
        _scope['repo_group_id'] = -1

        for k in sorted(scope.keys()):
            _scope[k] = scope[k]

        # store in cache to mimic how the @LazyProperty works,
        # the difference here is that we use the unique key calculated
        # from params and values
        return self.get_perms(user=self, cache=None, scope=_scope)
1163 1163
    def get_instance(self):
        """Return the `User` database object backing this AuthUser."""
        return User.get(self.user_id)
1166 1166
    def propagate_data(self):
        """
        Fills in user data and propagates values to this instance. Maps fetched
        user attributes to this class instance attributes.

        Lookup priority: user_id, then api_key, then username; on failure the
        anonymous/default user is used when active, otherwise the instance is
        left in a deliberately "corrupted", unauthenticated state.
        """
        log.debug('AuthUser: starting data propagation for new potential user')
        user_model = UserModel()
        anon_user = self.anonymous_user = User.get_default_user(cache=True)
        is_user_loaded = False

        # lookup by userid
        if self.user_id is not None and self.user_id != anon_user.user_id:
            log.debug('Trying Auth User lookup by USER ID: `%s`', self.user_id)
            is_user_loaded = user_model.fill_data(self, user_id=self.user_id)

        # try go get user by api key
        elif self._api_key and self._api_key != anon_user.api_key:
            log.debug('Trying Auth User lookup by API KEY: `...%s`', self._api_key[-4:])
            is_user_loaded = user_model.fill_data(self, api_key=self._api_key)

        # lookup by username
        elif self.username:
            log.debug('Trying Auth User lookup by USER NAME: `%s`', self.username)
            is_user_loaded = user_model.fill_data(self, username=self.username)
        else:
            log.debug('No data in %s that could been used to log in', self)

        if not is_user_loaded:
            log.debug(
                'Failed to load user. Fallback to default user %s', anon_user)
            # if we cannot authenticate user try anonymous
            if anon_user.active:
                log.debug('default user is active, using it as a session user')
                user_model.fill_data(self, user_id=anon_user.user_id)
                # then we set this user is logged in
                self.is_authenticated = True
            else:
                log.debug('default user is NOT active')
                # in case of disabled anonymous user we reset some of the
                # parameters so such user is "corrupted", skipping the fill_data
                for attr in ['user_id', 'username', 'admin', 'active']:
                    setattr(self, attr, None)
                self.is_authenticated = False

        # keep username a string — note this is the literal 'None', not None
        if not self.username:
            self.username = 'None'

        log.debug('AuthUser: propagated user is now %s', self)
1215 1215
    def get_perms(self, user, scope=None, explicit=True, algo='higherwin',
                  calculate_super_admin=False, cache=None):
        """
        Fills user permission attribute with permissions taken from database
        works for permissions given for repositories, and for permissions that
        are granted to groups

        :param user: instance of User object from database
        :param scope: optional dict narrowing the SQL queries to specific
            object ids (see `permissions_with_scope`)
        :param explicit: In case there are permissions both for user and a group
            that user is part of, explicit flag will defiine if user will
            explicitly override permissions from group, if it's False it will
            make decision based on the algo
        :param algo: algorithm to decide what permission should be choose if
            it's multiple defined, eg user in two different groups. It also
            decides if explicit flag is turned off how to specify the permission
            for case when user is in a group + have defined separate permission
        :param calculate_super_admin: calculate permissions for super-admin in the
            same way as for regular user without speedups
        :param cache: Use caching for calculation, None = let the cache backend decide
        """
        user_id = user.user_id
        user_is_admin = user.is_admin

        # inheritance of global permissions like create repo/fork repo etc
        user_inherit_default_permissions = user.inherit_default_permissions

        cache_seconds = safe_int(
            rhodecode.CONFIG.get('rc_cache.cache_perms.expiration_time'))

        if cache is None:
            # let the backend cache decide
            cache_on = cache_seconds > 0
        else:
            cache_on = cache

        log.debug(
            'Computing PERMISSION tree for user %s scope `%s` '
            'with caching: %s[TTL: %ss]', user, scope, cache_on, cache_seconds or 0)

        # cache region is namespaced per user id so invalidation is per-user
        cache_namespace_uid = 'cache_user_auth.{}'.format(user_id)
        region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)

        @region.conditional_cache_on_arguments(namespace=cache_namespace_uid,
                                               condition=cache_on)
        def compute_perm_tree(cache_name, cache_ver,
                user_id, scope, user_is_admin,user_inherit_default_permissions,
                explicit, algo, calculate_super_admin):
            return _cached_perms_data(
                user_id, scope, user_is_admin, user_inherit_default_permissions,
                explicit, algo, calculate_super_admin)

        start = time.time()
        result = compute_perm_tree(
            'permissions', 'v1', user_id, scope, user_is_admin,
            user_inherit_default_permissions, explicit, algo,
            calculate_super_admin)

        # summary of section sizes for the debug log below
        result_repr = []
        for k in result:
            result_repr.append((k, len(result[k])))
        total = time.time() - start
        log.debug('PERMISSION tree for user %s computed in %.4fs: %s',
                  user, total, result_repr)

        return result
1281 1281
    @property
    def is_default(self):
        # True when this AuthUser represents the anonymous/default user
        return self.username == User.DEFAULT_USER
1285 1285
    @property
    def is_admin(self):
        # super-admin flag propagated from the backing User row
        return self.admin
1289 1289
    @property
    def is_user_object(self):
        # False when no backing DB user was resolved (e.g. anonymous disabled)
        return self.user_id is not None
1293 1293
1294 1294 @property
1295 1295 def repositories_admin(self):
1296 1296 """
1297 1297 Returns list of repositories you're an admin of
1298 1298 """
1299 1299 return [
1300 1300 x[0] for x in self.permissions['repositories'].items()
1301 1301 if x[1] == 'repository.admin']
1302 1302
1303 1303 @property
1304 1304 def repository_groups_admin(self):
1305 1305 """
1306 1306 Returns list of repository groups you're an admin of
1307 1307 """
1308 1308 return [
1309 1309 x[0] for x in self.permissions['repositories_groups'].items()
1310 1310 if x[1] == 'group.admin']
1311 1311
1312 1312 @property
1313 1313 def user_groups_admin(self):
1314 1314 """
1315 1315 Returns list of user groups you're an admin of
1316 1316 """
1317 1317 return [
1318 1318 x[0] for x in self.permissions['user_groups'].items()
1319 1319 if x[1] == 'usergroup.admin']
1320 1320
1321 1321 def repo_acl_ids_from_stack(self, perms=None, prefix_filter=None, cache=False):
1322 1322 if not perms:
1323 1323 perms = AuthUser.repo_read_perms
1324 1324 allowed_ids = []
1325 1325 for k, stack_data in self.permissions['repositories'].perm_origin_stack.items():
1326 1326 perm, origin, obj_id = stack_data[-1] # last item is the current permission
1327 1327 if prefix_filter and not k.startswith(prefix_filter):
1328 1328 continue
1329 1329 if perm in perms:
1330 1330 allowed_ids.append(obj_id)
1331 1331 return allowed_ids
1332 1332
    def repo_acl_ids(self, perms=None, name_filter=None, cache=False):
        """
        Returns list of repository ids that user have access to based on given
        perms. The cache flag should be only used in cases that are used for
        display purposes, NOT IN ANY CASE for permission checks.
        """
        from rhodecode.model.scm import RepoList
        if not perms:
            perms = AuthUser.repo_read_perms

        if not isinstance(perms, list):
            raise ValueError('perms parameter must be a list got {} instead'.format(perms))

        def _cached_repo_acl(perm_def, _name_filter):
            # build the (optionally name-filtered) query and let RepoList
            # apply the per-user permission filtering
            qry = Repository.query()
            if _name_filter:
                ilike_expression = '%{}%'.format(_name_filter)
                qry = qry.filter(
                    Repository.repo_name.ilike(ilike_expression))

            return [x.repo_id for x in
                    RepoList(qry, perm_set=perm_def, extra_kwargs={'user': self})]

        log.debug('Computing REPO ACL IDS user %s', self)

        # cache namespace is per-user; the argument values (perms, filter)
        # become part of the cache key
        cache_namespace_uid = 'cache_user_repo_acl_ids.{}'.format(self.user_id)
        region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)

        @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=cache)
        def compute_repo_acl_ids(cache_ver, user_id, perm_def, _name_filter):
            return _cached_repo_acl(perm_def, _name_filter)

        start = time.time()
        result = compute_repo_acl_ids('v1', self.user_id, perms, name_filter)
        total = time.time() - start
        log.debug('REPO ACL IDS for user %s computed in %.4fs', self, total)

        return result
1371 1371
1372 1372 def repo_group_acl_ids_from_stack(self, perms=None, prefix_filter=None, cache=False):
1373 1373 if not perms:
1374 1374 perms = AuthUser.repo_group_read_perms
1375 1375 allowed_ids = []
1376 1376 for k, stack_data in self.permissions['repositories_groups'].perm_origin_stack.items():
1377 1377 perm, origin, obj_id = stack_data[-1] # last item is the current permission
1378 1378 if prefix_filter and not k.startswith(prefix_filter):
1379 1379 continue
1380 1380 if perm in perms:
1381 1381 allowed_ids.append(obj_id)
1382 1382 return allowed_ids
1383 1383
    def repo_group_acl_ids(self, perms=None, name_filter=None, cache=False):
        """
        Returns list of repository group ids that user have access to based on given
        perms. The cache flag should be only used in cases that are used for
        display purposes, NOT IN ANY CASE for permission checks.
        """
        from rhodecode.model.scm import RepoGroupList
        if not perms:
            perms = AuthUser.repo_group_read_perms

        if not isinstance(perms, list):
            raise ValueError('perms parameter must be a list got {} instead'.format(perms))

        def _cached_repo_group_acl(perm_def, _name_filter):
            # build the (optionally name-filtered) query and let RepoGroupList
            # apply the per-user permission filtering
            qry = RepoGroup.query()
            if _name_filter:
                ilike_expression = '%{}%'.format(_name_filter)
                qry = qry.filter(
                    RepoGroup.group_name.ilike(ilike_expression))

            return [x.group_id for x in
                    RepoGroupList(qry, perm_set=perm_def, extra_kwargs={'user': self})]

        log.debug('Computing REPO GROUP ACL IDS user %s', self)

        # cache namespace is per-user; argument values form the cache key
        cache_namespace_uid = 'cache_user_repo_group_acl_ids.{}'.format(self.user_id)
        region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)

        @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=cache)
        def compute_repo_group_acl_ids(cache_ver, user_id, perm_def, _name_filter):
            return _cached_repo_group_acl(perm_def, _name_filter)

        start = time.time()
        result = compute_repo_group_acl_ids('v1', self.user_id, perms, name_filter)
        total = time.time() - start
        log.debug('REPO GROUP ACL IDS for user %s computed in %.4fs', self, total)

        return result
1422 1422
1423 1423 def user_group_acl_ids_from_stack(self, perms=None, cache=False):
1424 1424 if not perms:
1425 1425 perms = AuthUser.user_group_read_perms
1426 1426 allowed_ids = []
1427 1427 for k, stack_data in self.permissions['user_groups'].perm_origin_stack.items():
1428 1428 perm, origin, obj_id = stack_data[-1] # last item is the current permission
1429 1429 if perm in perms:
1430 1430 allowed_ids.append(obj_id)
1431 1431 return allowed_ids
1432 1432
    def user_group_acl_ids(self, perms=None, name_filter=None, cache=False):
        """
        Returns list of user group ids that user have access to based on given
        perms. The cache flag should be only used in cases that are used for
        display purposes, NOT IN ANY CASE for permission checks.
        """
        from rhodecode.model.scm import UserGroupList
        if not perms:
            perms = AuthUser.user_group_read_perms

        if not isinstance(perms, list):
            raise ValueError('perms parameter must be a list got {} instead'.format(perms))

        def _cached_user_group_acl(perm_def, _name_filter):
            # build the (optionally name-filtered) query and let UserGroupList
            # apply the per-user permission filtering
            qry = UserGroup.query()
            if _name_filter:
                ilike_expression = '%{}%'.format(_name_filter)
                qry = qry.filter(
                    UserGroup.users_group_name.ilike(ilike_expression))

            return [x.users_group_id for x in
                    UserGroupList(qry, perm_set=perm_def, extra_kwargs={'user': self})]

        log.debug('Computing USER GROUP ACL IDS user %s', self)

        # cache namespace is per-user; argument values form the cache key
        cache_namespace_uid = 'cache_user_user_group_acl_ids.{}'.format(self.user_id)
        region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)

        @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=cache)
        def compute_user_group_acl_ids(cache_ver, user_id, perm_def, _name_filter):
            return _cached_user_group_acl(perm_def, _name_filter)

        start = time.time()
        result = compute_user_group_acl_ids('v1', self.user_id, perms, name_filter)
        total = time.time() - start
        log.debug('USER GROUP ACL IDS for user %s computed in %.4fs', self, total)

        return result
1471 1471
    @property
    def ip_allowed(self):
        """
        Checks if ip_addr used in constructor is allowed from defined list of
        allowed ip_addresses for user

        :returns: boolean, True if ip is in allowed ip range
        """
        # check IP
        inherit = self.inherit_default_permissions
        return AuthUser.check_ip_allowed(self.user_id, self.ip_addr,
                                         inherit_from_default=inherit)
1484 1484
    @property
    def personal_repo_group(self):
        # the user's personal repository group, resolved via the RepoGroup
        # model (presumably None when not yet created — TODO confirm)
        return RepoGroup.get_user_personal_repo_group(self.user_id)
1488 1488
    @LazyProperty
    def feed_token(self):
        # lazily fetched from the backing User row
        return self.get_instance().feed_token
1492 1492
    @LazyProperty
    def artifact_token(self):
        # lazily fetched from the backing User row
        return self.get_instance().artifact_token
1496 1496
1497 1497 @classmethod
1498 1498 def check_ip_allowed(cls, user_id, ip_addr, inherit_from_default):
1499 1499 allowed_ips = AuthUser.get_allowed_ips(
1500 1500 user_id, cache=True, inherit_from_default=inherit_from_default)
1501 1501 if check_ip_access(source_ip=ip_addr, allowed_ips=allowed_ips):
1502 1502 log.debug('IP:%s for user %s is in range of %s',
1503 1503 ip_addr, user_id, allowed_ips)
1504 1504 return True
1505 1505 else:
1506 1506 log.info('Access for IP:%s forbidden for user %s, '
1507 1507 'not in %s', ip_addr, user_id, allowed_ips,
1508 1508 extra={"ip": ip_addr, "user_id": user_id})
1509 1509 return False
1510 1510
1511 1511 def get_branch_permissions(self, repo_name, perms=None):
1512 1512 perms = perms or self.permissions_with_scope({'repo_name': repo_name})
1513 1513 branch_perms = perms.get('repository_branches', {})
1514 1514 if not branch_perms:
1515 1515 return {}
1516 1516 repo_branch_perms = branch_perms.get(repo_name)
1517 1517 return repo_branch_perms or {}
1518 1518
1519 1519 def get_rule_and_branch_permission(self, repo_name, branch_name):
1520 1520 """
1521 1521 Check if this AuthUser has defined any permissions for branches. If any of
1522 1522 the rules match in order, we return the matching permissions
1523 1523 """
1524 1524
1525 1525 rule = default_perm = ''
1526 1526
1527 1527 repo_branch_perms = self.get_branch_permissions(repo_name=repo_name)
1528 1528 if not repo_branch_perms:
1529 1529 return rule, default_perm
1530 1530
1531 1531 # now calculate the permissions
1532 1532 for pattern, branch_perm in repo_branch_perms.items():
1533 1533 if fnmatch.fnmatch(branch_name, pattern):
1534 1534 rule = '`{}`=>{}'.format(pattern, branch_perm)
1535 1535 return rule, branch_perm
1536 1536
1537 1537 return rule, default_perm
1538 1538
1539 1539 def get_notice_messages(self):
1540 1540
1541 1541 notice_level = 'notice-error'
1542 1542 notice_messages = []
1543 1543 if self.is_default:
1544 1544 return [], notice_level
1545 1545
1546 1546 notices = UserNotice.query()\
1547 1547 .filter(UserNotice.user_id == self.user_id)\
1548 1548 .filter(UserNotice.notice_read == false())\
1549 1549 .all()
1550 1550
1551 1551 try:
1552 1552 for entry in notices:
1553 1553
1554 1554 msg = {
1555 1555 'msg_id': entry.user_notice_id,
1556 1556 'level': entry.notification_level,
1557 1557 'subject': entry.notice_subject,
1558 1558 'body': entry.notice_body,
1559 1559 }
1560 1560 notice_messages.append(msg)
1561 1561
1562 1562 log.debug('Got user %s %s messages', self, len(notice_messages))
1563 1563
1564 1564 levels = [x['level'] for x in notice_messages]
1565 1565 notice_level = 'notice-error' if 'error' in levels else 'notice-warning'
1566 1566 except Exception:
1567 1567 pass
1568 1568
1569 1569 return notice_messages, notice_level
1570 1570
    def __repr__(self):
        # delegate to the shared classmethod so repr format is single-sourced
        return self.repr_user(self.user_id, self.username, self.ip_addr, self.is_authenticated)
1573 1573
    def set_authenticated(self, authenticated=True):
        # the anonymous/default user can never be marked as authenticated
        if self.user_id != self.anonymous_user.user_id:
            self.is_authenticated = authenticated
1577 1577
    def get_cookie_store(self):
        """
        Return the minimal dict persisted in the session cookie for this user.

        Only an md5 digest of the password is stored — presumably so sessions
        can be invalidated when the password changes; TODO(review) confirm.
        """
        return {
            'username': self.username,
            'password': md5(safe_bytes(self.password or '')),
            'user_id': self.user_id,
            'is_authenticated': self.is_authenticated
        }
1585 1585
1586 1586 @classmethod
1587 1587 def repr_user(cls, user_id=0, username='ANONYMOUS', ip='0.0.0.0', is_authenticated=False):
1588 1588 tmpl = "<AuthUser('id:{}[{}] ip:{} auth:{}')>"
1589 1589 return tmpl.format(user_id, username, ip, is_authenticated)
1590 1590
    @classmethod
    def from_cookie_store(cls, cookie_store):
        """
        Creates AuthUser from a cookie store

        :param cls:
        :param cookie_store: dict-like session payload, see get_cookie_store()
        """
        user_id = cookie_store.get('user_id')
        username = cookie_store.get('username')
        # NOTE(review): get_cookie_store() never writes an 'api_key' entry,
        # so this is normally None -- confirm against AuthUser.__init__
        api_key = cookie_store.get('api_key')
        return AuthUser(user_id, api_key, username)
1603 1603
    @classmethod
    def get_allowed_ips(cls, user_id, cache=False, inherit_from_default=False):
        """
        Return the set of whitelisted IP addresses/ranges for ``user_id``.

        :param user_id: user to look up; a falsy value skips per-user rules
        :param cache: use the short SQL cache region for the queries
        :param inherit_from_default: also include the default user's IP rules
        :returns: set of IP strings; when no rules exist at all, the
            allow-everything set ``{'0.0.0.0/0', '::/0'}`` is returned
        """
        _set = set()

        if inherit_from_default:
            def_user_id = User.get_default_user(cache=True).user_id
            default_ips = UserIpMap.query().filter(UserIpMap.user_id == def_user_id)
            if cache:
                default_ips = default_ips.options(
                    FromCache("sql_cache_short", "get_user_ips_default"))

            # populate from default user
            for ip in default_ips:
                try:
                    _set.add(ip.ip_addr)
                except ObjectDeletedError:
                    # since we use heavy caching sometimes it happens that
                    # we get deleted objects here, we just skip them
                    pass

        # NOTE:(marcink) we don't want to load any rules for empty
        # user_id which is the case of access of non logged users when anonymous
        # access is disabled
        user_ips = []
        if user_id:
            user_ips = UserIpMap.query().filter(UserIpMap.user_id == user_id)
            if cache:
                user_ips = user_ips.options(
                    FromCache("sql_cache_short", "get_user_ips_%s" % user_id))

        for ip in user_ips:
            try:
                _set.add(ip.ip_addr)
            except ObjectDeletedError:
                # since we use heavy caching sometimes it happens that we get
                # deleted objects here, we just skip them
                pass
        return _set or {ip for ip in ['0.0.0.0/0', '::/0']}
1642 1642
1643 1643
def set_available_permissions(settings):
    """
    Propagate the pyramid ``settings`` with every permission name defined in
    the database. Done once, since adding a new permission also requires an
    application restart (new views must be decorated with it).

    :param settings: current pyramid registry.settings
    """
    log.debug('auth: getting information about all available permissions')
    try:
        db_session = meta.Session
        permission_names = [
            perm.permission_name
            for perm in db_session.query(Permission).all()]
        settings.setdefault('available_permissions', permission_names)
        log.debug('auth: set available permissions')
    except Exception:
        log.exception('Failed to fetch permissions from the database.')
        raise
1664 1664
1665 1665
def get_csrf_token(session, force_new=False, save_if_missing=True):
    """
    Return the current authentication token, creating one if one doesn't
    already exist and the save_if_missing flag is present.

    :param session: pass in the pyramid session, else we use the global ones
    :param force_new: force to re-generate the token and store it in session
    :param save_if_missing: save the newly generated token if it's missing in
        session
    """
    # NOTE(marcink): probably should be replaced with below one from pyramid 1.9
    # from pyramid.csrf import get_csrf_token

    token_missing = csrf_token_key not in session
    if force_new or (token_missing and save_if_missing):
        fresh_token = sha1(ascii_bytes(str(random.getrandbits(128))))
        session[csrf_token_key] = fresh_token
        # beaker-style sessions need an explicit save call
        if hasattr(session, 'save'):
            session.save()
    return session.get(csrf_token_key)
1685 1685
1686 1686
def get_request(perm_class_instance):
    """Fetch the current pyramid request from the thread-local registry."""
    from pyramid.threadlocal import get_current_request
    return get_current_request()
1691 1691
1692 1692
1693 1693 # CHECK DECORATORS
class CSRFRequired(object):
    """
    Decorator for authenticating a form

    This decorator uses an authorization token stored in the client's
    session for prevention of certain Cross-site request forgery (CSRF)
    attacks (See
    http://en.wikipedia.org/wiki/Cross-site_request_forgery for more
    information).

    For use with the ``secure_form`` helper functions.

    """
    def __init__(self, token=csrf_token_key, header='X-CSRF-Token', except_methods=None):
        self.token = token
        self.header = header
        self.except_methods = except_methods or []

    def __call__(self, func):
        return get_cython_compat_decorator(self.__wrapper, func)

    def _get_csrf(self, _request):
        # token from the POST body wins, falling back to the request header
        return _request.POST.get(self.token, _request.headers.get(self.header))

    def check_csrf(self, _request, cur_token):
        supplied_token = self._get_csrf(_request)
        return supplied_token and supplied_token == cur_token

    def _get_request(self):
        return get_request(self)

    def __wrapper(self, func, *fargs, **fkwargs):
        request = self._get_request()

        if request.method in self.except_methods:
            return func(*fargs, **fkwargs)

        cur_token = get_csrf_token(request.session, save_if_missing=False)
        if self.check_csrf(request, cur_token):
            if request.POST.get(self.token):
                # consume the token so it doesn't leak into form handling
                del request.POST[self.token]
            return func(*fargs, **fkwargs)
        else:
            reason = 'token-missing'
            supplied_token = self._get_csrf(request)
            if supplied_token and cur_token != supplied_token:
                # BUGFIX: the original sliced the empty-string fallback
                # (`cur_token or ''[:6]`) instead of the token itself; slice
                # the token prefix so the log is actually informative
                reason = 'token-mismatch [%s:%s]' % (
                    (cur_token or '')[:6], (supplied_token or '')[:6])

            csrf_message = \
                ("Cross-site request forgery detected, request denied. See "
                 "http://en.wikipedia.org/wiki/Cross-site_request_forgery for "
                 "more information.")
            # use non-deprecated .warning() with lazy %-style arguments
            log.warning('Cross-site request forgery detected, request %r DENIED: %s '
                        'REMOTE_ADDR:%s, HEADERS:%s',
                        request, reason, request.remote_addr, request.headers)

            raise HTTPForbidden(explanation=csrf_message)
1752 1752
1753 1753
class LoginRequired(object):
    """
    Must be logged in to execute this function else
    redirect to login page

    :param auth_token_access: if enabled this checks only for valid auth token
        and grants access based on valid token
    """
    def __init__(self, auth_token_access=None):
        self.auth_token_access = auth_token_access
        if self.auth_token_access:
            valid_type = set(auth_token_access).intersection(set(UserApiKeys.ROLES))
            if not valid_type:
                # FIX: typo in the original message ("must be on of")
                raise ValueError('auth_token_access must be one of {}, got {}'.format(
                    UserApiKeys.ROLES, auth_token_access))

    def __call__(self, func):
        return get_cython_compat_decorator(self.__wrapper, func)

    def _get_request(self):
        return get_request(self)

    def __wrapper(self, func, *fargs, **fkwargs):
        from rhodecode.lib import helpers as h
        cls = fargs[0]
        user = cls._rhodecode_user
        request = self._get_request()
        _ = request.translate

        loc = "%s:%s" % (cls.__class__.__name__, func.__name__)
        log.debug('Starting login restriction checks for user: %s', user)
        # check if our IP is allowed
        ip_access_valid = True
        if not user.ip_allowed:
            h.flash(h.literal(_('IP {} not allowed'.format(user.ip_addr))),
                    category='warning')
            ip_access_valid = False

        # we used stored token that is extract from GET or URL param (if any)
        _auth_token = request.user_auth_token

        # check if we used an AUTH_TOKEN and it's a valid one
        # defined white-list of controllers which API access will be enabled
        whitelist = None
        if self.auth_token_access:
            # since this location is allowed by @LoginRequired decorator it's our
            # only whitelist
            whitelist = [loc]
        auth_token_access_valid = allowed_auth_token_access(
            loc, whitelist=whitelist, auth_token=_auth_token)

        # explicit controller is enabled or API is in our whitelist
        if auth_token_access_valid:
            log.debug('Checking AUTH TOKEN access for %s', cls)
            db_user = user.get_instance()

            if db_user:
                if self.auth_token_access:
                    roles = self.auth_token_access
                else:
                    roles = [UserApiKeys.ROLE_HTTP]
                log.debug('AUTH TOKEN: checking auth for user %s and roles %s',
                          db_user, roles)
                token_match = db_user.authenticate_by_token(
                    _auth_token, roles=roles)
            else:
                log.debug('Unable to fetch db instance for auth user: %s', user)
                token_match = False

            if _auth_token and token_match:
                auth_token_access_valid = True
                log.debug('AUTH TOKEN ****%s is VALID', _auth_token[-4:])
            else:
                auth_token_access_valid = False
                if not _auth_token:
                    log.debug("AUTH TOKEN *NOT* present in request")
                else:
                    log.warning("AUTH TOKEN ****%s *NOT* valid", _auth_token[-4:])

        log.debug('Checking if %s is authenticated @ %s', user.username, loc)
        reason = 'RHODECODE_AUTH' if user.is_authenticated \
            else 'AUTH_TOKEN_AUTH'

        if ip_access_valid and (
                user.is_authenticated or auth_token_access_valid):
            log.info('user %s authenticating with:%s IS authenticated on func %s',
                     user, reason, loc)

            return func(*fargs, **fkwargs)
        else:
            log.warning(
                'user %s authenticating with:%s NOT authenticated on '
                'func: %s: IP_ACCESS:%s AUTH_TOKEN_ACCESS:%s',
                user, reason, loc, ip_access_valid, auth_token_access_valid)
            # we preserve the get PARAM
            came_from = get_came_from(request)

            log.debug('redirecting to login page with %s', came_from)
            raise HTTPFound(
                h.route_path('login', _query={'came_from': came_from}))
1854 1854
1855 1855
class NotAnonymous(object):
    """
    Must be logged in to execute this function else
    redirect to login page
    """

    def __call__(self, func):
        return get_cython_compat_decorator(self.__wrapper, func)

    def _get_request(self):
        return get_request(self)

    def __wrapper(self, func, *fargs, **fkwargs):
        import rhodecode.lib.helpers as h
        cls = fargs[0]
        self.user = cls._rhodecode_user
        request = self._get_request()
        _ = request.translate
        log.debug('Checking if user is not anonymous @%s', cls)

        if self.user.username != User.DEFAULT_USER:
            # a real, registered user -- let the call through
            return func(*fargs, **fkwargs)

        # anonymous visitor: flash a warning and bounce to the login page
        came_from = get_came_from(request)
        h.flash(_('You need to be a registered user to '
                  'perform this action'),
                category='warning')
        raise HTTPFound(
            h.route_path('login', _query={'came_from': came_from}))
1887 1887
1888 1888
class PermsDecorator(object):
    """
    Base class for controller decorators, we extract the current user from
    the class itself, which has it stored in base controllers
    """

    def __init__(self, *required_perms):
        self.required_perms = set(required_perms)

    def __call__(self, func):
        return get_cython_compat_decorator(self.__wrapper, func)

    def _get_request(self):
        return get_request(self)

    def __wrapper(self, func, *fargs, **fkwargs):
        import rhodecode.lib.helpers as h
        cls = fargs[0]
        _user = cls._rhodecode_user
        request = self._get_request()
        _ = request.translate

        log.debug('checking %s permissions %s for %s %s',
                  self.__class__.__name__, self.required_perms, cls, _user)

        if self.check_permissions(_user):
            log.debug('Permission granted for %s %s', cls, _user)
            return func(*fargs, **fkwargs)

        log.debug('Permission denied for %s %s', cls, _user)
        if _user.username == User.DEFAULT_USER:
            # anonymous visitor: send through the login flow
            came_from = get_came_from(self._get_request())
            h.flash(_('You need to be signed in to view this page'),
                    category='warning')
            raise HTTPFound(
                h.route_path('login', _query={'came_from': came_from}))

        # authenticated but unauthorized: 404 to prevent resource discovery
        raise HTTPNotFound()

    def check_permissions(self, user):
        """Subclasses implement the actual policy; base class has none."""
        raise NotImplementedError(
            'You have to write this function in child class')
1937 1937
1938 1938
class HasPermissionAllDecorator(PermsDecorator):
    """
    Checks for access permission for all given predicates. All of them
    have to be meet in order to fulfill the request
    """

    def check_permissions(self, user):
        granted_global = user.permissions_with_scope({})['global']
        return self.required_perms.issubset(granted_global)
1950 1950
1951 1951
class HasPermissionAnyDecorator(PermsDecorator):
    """
    Checks for access permission for any of given predicates. In order to
    fulfill the request any of predicates must be meet
    """

    def check_permissions(self, user):
        granted_global = user.permissions_with_scope({})['global']
        return bool(self.required_perms.intersection(granted_global))
1963 1963
1964 1964
class HasRepoPermissionAllDecorator(PermsDecorator):
    """
    Checks for access permission for all given predicates for specific
    repository. All of them have to be meet in order to fulfill the request
    """

    def _get_repo_name(self):
        # repo is derived from the slug of the current request
        return get_repo_slug(self._get_request())

    def check_permissions(self, user):
        perms = user.permissions
        repo_name = self._get_repo_name()
        try:
            granted = {perms['repositories'][repo_name]}
        except KeyError:
            log.debug('cannot locate repo with name: `%s` in permissions defs',
                      repo_name)
            return False

        log.debug('checking `%s` permissions for repo `%s`',
                  granted, repo_name)
        return self.required_perms.issubset(granted)
1990 1990
1991 1991
class HasRepoPermissionAnyDecorator(PermsDecorator):
    """
    Checks for access permission for any of given predicates for specific
    repository. In order to fulfill the request any of predicates must be meet
    """

    def _get_repo_name(self):
        # repo is derived from the slug of the current request
        return get_repo_slug(self._get_request())

    def check_permissions(self, user):
        perms = user.permissions
        repo_name = self._get_repo_name()
        try:
            granted = {perms['repositories'][repo_name]}
        except KeyError:
            log.debug(
                'cannot locate repo with name: `%s` in permissions defs',
                repo_name)
            return False

        log.debug('checking `%s` permissions for repo `%s`',
                  granted, repo_name)
        return bool(self.required_perms.intersection(granted))
2018 2018
2019 2019
class HasRepoGroupPermissionAllDecorator(PermsDecorator):
    """
    Checks for access permission for all given predicates for specific
    repository group. All of them have to be meet in order to
    fulfill the request
    """

    def _get_repo_group_name(self):
        # repo group is derived from the slug of the current request
        return get_repo_group_slug(self._get_request())

    def check_permissions(self, user):
        perms = user.permissions
        group_name = self._get_repo_group_name()
        try:
            granted = {perms['repositories_groups'][group_name]}
        except KeyError:
            log.debug(
                'cannot locate repo group with name: `%s` in permissions defs',
                group_name)
            return False

        log.debug('checking `%s` permissions for repo group `%s`',
                  granted, group_name)
        return self.required_perms.issubset(granted)
2046 2046
2047 2047
class HasRepoGroupPermissionAnyDecorator(PermsDecorator):
    """
    Checks for access permission for any of given predicates for specific
    repository group. In order to fulfill the request any
    of predicates must be met
    """

    def _get_repo_group_name(self):
        # repo group is derived from the slug of the current request
        return get_repo_group_slug(self._get_request())

    def check_permissions(self, user):
        perms = user.permissions
        group_name = self._get_repo_group_name()
        try:
            granted = {perms['repositories_groups'][group_name]}
        except KeyError:
            log.debug(
                'cannot locate repo group with name: `%s` in permissions defs',
                group_name)
            return False

        log.debug('checking `%s` permissions for repo group `%s`',
                  granted, group_name)
        return bool(self.required_perms.intersection(granted))
2075 2075
2076 2076
class HasUserGroupPermissionAllDecorator(PermsDecorator):
    """
    Checks for access permission for all given predicates for specific
    user group. All of them have to be meet in order to fulfill the request
    """

    def _get_user_group_name(self):
        # user group is derived from the slug of the current request
        return get_user_group_slug(self._get_request())

    def check_permissions(self, user):
        perms = user.permissions
        group_name = self._get_user_group_name()
        try:
            granted = {perms['user_groups'][group_name]}
        except KeyError:
            return False
        return self.required_perms.issubset(granted)
2097 2097
2098 2098
class HasUserGroupPermissionAnyDecorator(PermsDecorator):
    """
    Checks for access permission for any of given predicates for specific
    user group. In order to fulfill the request any of predicates must be meet
    """

    def _get_user_group_name(self):
        # user group is derived from the slug of the current request
        return get_user_group_slug(self._get_request())

    def check_permissions(self, user):
        perms = user.permissions
        group_name = self._get_user_group_name()
        try:
            granted = {perms['user_groups'][group_name]}
        except KeyError:
            return False
        return bool(self.required_perms.intersection(granted))
2119 2119
2120 2120
2121 2121 # CHECK FUNCTIONS
class PermsFunction(object):
    """Base function for other check functions"""

    def __init__(self, *perms):
        self.required_perms = set(perms)
        # scope attributes, filled in by subclasses' __call__ overrides
        self.repo_name = None
        self.repo_group_name = None
        self.user_group_name = None

    def __bool__(self):
        import inspect
        frame = inspect.currentframe()
        stack_trace = traceback.format_stack(frame)
        log.error('Checking bool value on a class instance of perm '
                  'function is not allowed: %s', ''.join(stack_trace))
        # rather than throwing errors, here we always return False so if by
        # accident someone checks truth for just an instance it will always end
        # up in returning False
        return False
    __nonzero__ = __bool__  # python2 name kept for backward compatibility

    def __call__(self, check_location='', user=None):
        if not user:
            log.debug('Using user attribute from global request')
            request = self._get_request()
            user = request.user

        # init auth user if not already given
        if not isinstance(user, AuthUser):
            log.debug('Wrapping user %s into AuthUser', user)
            user = AuthUser(user.user_id)

        cls_name = self.__class__.__name__
        check_scope = self._get_check_scope(cls_name)
        check_location = check_location or 'unspecified location'

        log.debug('checking cls:%s %s usr:%s %s @ %s', cls_name,
                  self.required_perms, user, check_scope, check_location)
        if not user:
            log.warning('Empty user given for permission check')
            return False

        if self.check_permissions(user):
            log.debug('Permission to repo:`%s` GRANTED for user:`%s` @ %s',
                      check_scope, user, check_location)
            return True

        else:
            log.debug('Permission to repo:`%s` DENIED for user:`%s` @ %s',
                      check_scope, user, check_location)
            return False

    def _get_request(self):
        return get_request(self)

    def _get_check_scope(self, cls_name):
        # map the concrete subclass to a human-readable scope description
        return {
            'HasPermissionAll': 'GLOBAL',
            'HasPermissionAny': 'GLOBAL',
            'HasRepoPermissionAll': 'repo:%s' % self.repo_name,
            'HasRepoPermissionAny': 'repo:%s' % self.repo_name,
            'HasRepoGroupPermissionAll': 'repo_group:%s' % self.repo_group_name,
            'HasRepoGroupPermissionAny': 'repo_group:%s' % self.repo_group_name,
            'HasUserGroupPermissionAll': 'user_group:%s' % self.user_group_name,
            'HasUserGroupPermissionAny': 'user_group:%s' % self.user_group_name,
        }.get(cls_name, '?:%s' % cls_name)

    def check_permissions(self, user):
        """Dummy function for overriding"""
        # NotImplementedError for consistency with PermsDecorator; it is
        # still an Exception subclass, so existing broad handlers keep working
        raise NotImplementedError(
            'You have to write this function in child class')
2192 2192
2193 2193
class HasPermissionAll(PermsFunction):
    """True only when every required permission is in the user's global set."""

    def check_permissions(self, user):
        granted_global = user.permissions_with_scope({}).get('global')
        return self.required_perms.issubset(granted_global)
2200 2200
2201 2201
class HasPermissionAny(PermsFunction):
    """True when at least one required permission is in the user's global set."""

    def check_permissions(self, user):
        granted_global = user.permissions_with_scope({}).get('global')
        return bool(self.required_perms.intersection(granted_global))
2208 2208
2209 2209
class HasRepoPermissionAll(PermsFunction):
    """True only when every required permission is granted on the repository."""

    def __call__(self, repo_name=None, check_location='', user=None):
        self.repo_name = repo_name
        return super(HasRepoPermissionAll, self).__call__(check_location, user)

    def _get_repo_name(self):
        # fall back to the repo slug of the current request
        if not self.repo_name:
            self.repo_name = get_repo_slug(self._get_request())
        return self.repo_name

    def check_permissions(self, user):
        self.repo_name = self._get_repo_name()
        perms = user.permissions
        try:
            granted = {perms['repositories'][self.repo_name]}
        except KeyError:
            return False
        return self.required_perms.issubset(granted)
2231 2231
2232 2232
class HasRepoPermissionAny(PermsFunction):
    """True when at least one required permission is granted on the repository."""

    def __call__(self, repo_name=None, check_location='', user=None):
        self.repo_name = repo_name
        return super(HasRepoPermissionAny, self).__call__(check_location, user)

    def _get_repo_name(self):
        # fall back to the repo slug of the current request
        if not self.repo_name:
            self.repo_name = get_repo_slug(self._get_request())
        return self.repo_name

    def check_permissions(self, user):
        self.repo_name = self._get_repo_name()
        perms = user.permissions
        try:
            granted = {perms['repositories'][self.repo_name]}
        except KeyError:
            return False
        return bool(self.required_perms.intersection(granted))
2254 2254
2255 2255
class HasRepoGroupPermissionAny(PermsFunction):
    """True when at least one required permission is granted on the repo group."""

    def __call__(self, group_name=None, check_location='', user=None):
        self.repo_group_name = group_name
        return super(HasRepoGroupPermissionAny, self).__call__(check_location, user)

    def check_permissions(self, user):
        perms = user.permissions
        try:
            granted = {perms['repositories_groups'][self.repo_group_name]}
        except KeyError:
            return False
        return bool(self.required_perms.intersection(granted))
2270 2270
2271 2271
class HasRepoGroupPermissionAll(PermsFunction):
    """True only when every required permission is granted on the repo group."""

    def __call__(self, group_name=None, check_location='', user=None):
        self.repo_group_name = group_name
        return super(HasRepoGroupPermissionAll, self).__call__(check_location, user)

    def check_permissions(self, user):
        perms = user.permissions
        try:
            granted = {perms['repositories_groups'][self.repo_group_name]}
        except KeyError:
            return False
        return self.required_perms.issubset(granted)
2286 2286
2287 2287
class HasUserGroupPermissionAny(PermsFunction):
    """True when at least one required permission is granted on the user group."""

    def __call__(self, user_group_name=None, check_location='', user=None):
        self.user_group_name = user_group_name
        return super(HasUserGroupPermissionAny, self).__call__(check_location, user)

    def check_permissions(self, user):
        perms = user.permissions
        try:
            granted = {perms['user_groups'][self.user_group_name]}
        except KeyError:
            return False
        return bool(self.required_perms.intersection(granted))
2302 2302
2303 2303
class HasUserGroupPermissionAll(PermsFunction):
    """True only when every required permission is granted on the user group."""

    def __call__(self, user_group_name=None, check_location='', user=None):
        self.user_group_name = user_group_name
        return super(HasUserGroupPermissionAll, self).__call__(check_location, user)

    def check_permissions(self, user):
        perms = user.permissions
        try:
            granted = {perms['user_groups'][self.user_group_name]}
        except KeyError:
            return False
        return self.required_perms.issubset(granted)
2318 2318
2319 2319
2320 2320 # SPECIAL VERSION TO HANDLE MIDDLEWARE AUTH
class HasPermissionAnyMiddleware(object):
    """
    VCS-middleware permission check: grants access when the user holds any
    of the required permissions on the given repository.
    """

    def __init__(self, *perms):
        self.required_perms = set(perms)

    def __call__(self, auth_user, repo_name):
        # NOTE: under python3 repo_name is always str (unicode), so the old
        # safe_unicode() coercion of permission-dict keys is no longer needed;
        # the commented-out remnant of it has been removed
        log.debug(
            'Checking VCS protocol permissions %s for user:%s repo:`%s`',
            self.required_perms, auth_user, repo_name)

        if self.check_permissions(auth_user, repo_name):
            log.debug('Permission to repo:`%s` GRANTED for user:%s @ %s',
                      repo_name, auth_user, 'PermissionMiddleware')
            return True

        else:
            log.debug('Permission to repo:`%s` DENIED for user:%s @ %s',
                      repo_name, auth_user, 'PermissionMiddleware')
            return False

    def check_permissions(self, user, repo_name):
        perms = user.permissions_with_scope({'repo_name': repo_name})

        try:
            user_perms = {perms['repositories'][repo_name]}
        except Exception:
            log.exception('Error while accessing user permissions')
            return False

        if self.required_perms.intersection(user_perms):
            return True
        return False
2355 2357
2356 2358
2357 2359 # SPECIAL VERSION TO HANDLE API AUTH
2358 2360 class _BaseApiPerm(object):
2359 2361 def __init__(self, *perms):
2360 2362 self.required_perms = set(perms)
2361 2363
2362 2364 def __call__(self, check_location=None, user=None, repo_name=None,
2363 2365 group_name=None, user_group_name=None):
2364 2366 cls_name = self.__class__.__name__
2365 2367 check_scope = 'global:%s' % (self.required_perms,)
2366 2368 if repo_name:
2367 2369 check_scope += ', repo_name:%s' % (repo_name,)
2368 2370
2369 2371 if group_name:
2370 2372 check_scope += ', repo_group_name:%s' % (group_name,)
2371 2373
2372 2374 if user_group_name:
2373 2375 check_scope += ', user_group_name:%s' % (user_group_name,)
2374 2376
2375 2377 log.debug('checking cls:%s %s %s @ %s',
2376 2378 cls_name, self.required_perms, check_scope, check_location)
2377 2379 if not user:
2378 2380 log.debug('Empty User passed into arguments')
2379 2381 return False
2380 2382
2381 2383 # process user
2382 2384 if not isinstance(user, AuthUser):
2383 2385 user = AuthUser(user.user_id)
2384 2386 if not check_location:
2385 2387 check_location = 'unspecified'
2386 2388 if self.check_permissions(user.permissions, repo_name, group_name,
2387 2389 user_group_name):
2388 2390 log.debug('Permission to repo:`%s` GRANTED for user:`%s` @ %s',
2389 2391 check_scope, user, check_location)
2390 2392 return True
2391 2393
2392 2394 else:
2393 2395 log.debug('Permission to repo:`%s` DENIED for user:`%s` @ %s',
2394 2396 check_scope, user, check_location)
2395 2397 return False
2396 2398
2397 2399 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
2398 2400 user_group_name=None):
2399 2401 """
2400 2402 implement in child class should return True if permissions are ok,
2401 2403 False otherwise
2402 2404
2403 2405 :param perm_defs: dict with permission definitions
2404 2406 :param repo_name: repo name
2405 2407 """
2406 2408 raise NotImplementedError()
2407 2409
2408 2410
class HasPermissionAllApi(_BaseApiPerm):
    """API check: user must hold every required global permission."""

    def check_permissions(self, perm_defs, repo_name=None, group_name=None,
                          user_group_name=None):
        return self.required_perms.issubset(perm_defs.get('global'))
2415 2417
2416 2418
class HasPermissionAnyApi(_BaseApiPerm):
    """API check: user must hold at least one required global permission."""

    def check_permissions(self, perm_defs, repo_name=None, group_name=None,
                          user_group_name=None):
        return bool(self.required_perms.intersection(perm_defs.get('global')))
2423 2425
2424 2426
2425 2427 class HasRepoPermissionAllApi(_BaseApiPerm):
2426 2428 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
2427 2429 user_group_name=None):
2428 2430 try:
2429 2431 _user_perms = {perm_defs['repositories'][repo_name]}
2430 2432 except KeyError:
2431 2433 log.warning(traceback.format_exc())
2432 2434 return False
2433 2435 if self.required_perms.issubset(_user_perms):
2434 2436 return True
2435 2437 return False
2436 2438
2437 2439
2438 2440 class HasRepoPermissionAnyApi(_BaseApiPerm):
2439 2441 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
2440 2442 user_group_name=None):
2441 2443 try:
2442 2444 _user_perms = {perm_defs['repositories'][repo_name]}
2443 2445 except KeyError:
2444 2446 log.warning(traceback.format_exc())
2445 2447 return False
2446 2448 if self.required_perms.intersection(_user_perms):
2447 2449 return True
2448 2450 return False
2449 2451
2450 2452
2451 2453 class HasRepoGroupPermissionAnyApi(_BaseApiPerm):
2452 2454 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
2453 2455 user_group_name=None):
2454 2456 try:
2455 2457 _user_perms = {perm_defs['repositories_groups'][group_name]}
2456 2458 except KeyError:
2457 2459 log.warning(traceback.format_exc())
2458 2460 return False
2459 2461 if self.required_perms.intersection(_user_perms):
2460 2462 return True
2461 2463 return False
2462 2464
2463 2465
2464 2466 class HasRepoGroupPermissionAllApi(_BaseApiPerm):
2465 2467 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
2466 2468 user_group_name=None):
2467 2469 try:
2468 2470 _user_perms = {perm_defs['repositories_groups'][group_name]}
2469 2471 except KeyError:
2470 2472 log.warning(traceback.format_exc())
2471 2473 return False
2472 2474 if self.required_perms.issubset(_user_perms):
2473 2475 return True
2474 2476 return False
2475 2477
2476 2478
2477 2479 class HasUserGroupPermissionAnyApi(_BaseApiPerm):
2478 2480 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
2479 2481 user_group_name=None):
2480 2482 try:
2481 2483 _user_perms = {perm_defs['user_groups'][user_group_name]}
2482 2484 except KeyError:
2483 2485 log.warning(traceback.format_exc())
2484 2486 return False
2485 2487 if self.required_perms.intersection(_user_perms):
2486 2488 return True
2487 2489 return False
2488 2490
2489 2491
2490 2492 def check_ip_access(source_ip, allowed_ips=None):
2491 2493 """
2492 2494 Checks if source_ip is a subnet of any of allowed_ips.
2493 2495
2494 2496 :param source_ip:
2495 2497 :param allowed_ips: list of allowed ips together with mask
2496 2498 """
2497 2499 log.debug('checking if ip:%s is subnet of %s', source_ip, allowed_ips)
2498 source_ip_address = ipaddress.ip_address(safe_unicode(source_ip))
2500 source_ip_address = ipaddress.ip_address(source_ip)
2499 2501 if isinstance(allowed_ips, (tuple, list, set)):
2500 2502 for ip in allowed_ips:
2501 ip = safe_unicode(ip)
2503 #TODO: verify
2504 #ip = safe_unicode(ip)
2502 2505 try:
2503 2506 network_address = ipaddress.ip_network(ip, strict=False)
2504 2507 if source_ip_address in network_address:
2505 2508 log.debug('IP %s is network %s', source_ip_address, network_address)
2506 2509 return True
2507 2510 # for any case we cannot determine the IP, don't crash just
2508 2511 # skip it and log as error, we want to say forbidden still when
2509 2512 # sending bad IP
2510 2513 except Exception:
2511 2514 log.error(traceback.format_exc())
2512 2515 continue
2513 2516 return False
2514 2517
2515 2518
2516 2519 def get_cython_compat_decorator(wrapper, func):
2517 2520 """
2518 2521 Creates a cython compatible decorator. The previously used
2519 2522 decorator.decorator() function seems to be incompatible with cython.
2520 2523
2521 2524 :param wrapper: __wrapper method of the decorator class
2522 2525 :param func: decorated function
2523 2526 """
2524 2527 @wraps(func)
2525 2528 def local_wrapper(*args, **kwds):
2526 2529 return wrapper(func, *args, **kwds)
2527 2530 local_wrapper.__wrapped__ = func
2528 2531 return local_wrapper
2529 2532
2530 2533
@@ -1,611 +1,611 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 The base Controller API
23 23 Provides the BaseController class for subclassing. And usage in different
24 24 controllers
25 25 """
26 26
27 27 import logging
28 28 import socket
29 29
30 30 import markupsafe
31 31 import ipaddress
32 32
33 33 from paste.auth.basic import AuthBasicAuthenticator
34 34 from paste.httpexceptions import HTTPUnauthorized, HTTPForbidden, get_exception
35 35 from paste.httpheaders import WWW_AUTHENTICATE, AUTHORIZATION
36 36
37 37 import rhodecode
38 38 from rhodecode.authentication.base import VCS_TYPE
39 39 from rhodecode.lib import auth, utils2
40 40 from rhodecode.lib import helpers as h
41 41 from rhodecode.lib.auth import AuthUser, CookieStoreWrapper
42 42 from rhodecode.lib.exceptions import UserCreationError
43 43 from rhodecode.lib.utils import (password_changed, get_enabled_hook_classes)
44 44 from rhodecode.lib.utils2 import (
45 45 str2bool, safe_unicode, AttributeDict, safe_int, sha1, aslist, safe_str)
46 46 from rhodecode.model.db import Repository, User, ChangesetComment, UserBookmark
47 47 from rhodecode.model.notification import NotificationModel
48 48 from rhodecode.model.settings import VcsSettingsModel, SettingsModel
49 49
50 50 log = logging.getLogger(__name__)
51 51
52 52
53 53 def _filter_proxy(ip):
54 54 """
55 55 Passed in IP addresses in HEADERS can be in a special format of multiple
56 56 ips. Those comma separated IPs are passed from various proxies in the
57 57 chain of request processing. The left-most being the original client.
58 58 We only care about the first IP which came from the org. client.
59 59
60 60 :param ip: ip string from headers
61 61 """
62 62 if ',' in ip:
63 63 _ips = ip.split(',')
64 64 _first_ip = _ips[0].strip()
65 65 log.debug('Got multiple IPs %s, using %s', ','.join(_ips), _first_ip)
66 66 return _first_ip
67 67 return ip
68 68
69 69
70 70 def _filter_port(ip):
71 71 """
72 72 Removes a port from ip, there are 4 main cases to handle here.
73 73 - ipv4 eg. 127.0.0.1
74 74 - ipv6 eg. ::1
75 75 - ipv4+port eg. 127.0.0.1:8080
76 76 - ipv6+port eg. [::1]:8080
77 77
78 78 :param ip:
79 79 """
80 80 def is_ipv6(ip_addr):
81 81 if hasattr(socket, 'inet_pton'):
82 82 try:
83 83 socket.inet_pton(socket.AF_INET6, ip_addr)
84 84 except socket.error:
85 85 return False
86 86 else:
87 87 # fallback to ipaddress
88 88 try:
89 ipaddress.IPv6Address(safe_unicode(ip_addr))
89 ipaddress.IPv6Address(safe_str(ip_addr))
90 90 except Exception:
91 91 return False
92 92 return True
93 93
94 94 if ':' not in ip: # must be ipv4 pure ip
95 95 return ip
96 96
97 97 if '[' in ip and ']' in ip: # ipv6 with port
98 98 return ip.split(']')[0][1:].lower()
99 99
100 100 # must be ipv6 or ipv4 with port
101 101 if is_ipv6(ip):
102 102 return ip
103 103 else:
104 104 ip, _port = ip.split(':')[:2] # means ipv4+port
105 105 return ip
106 106
107 107
108 108 def get_ip_addr(environ):
109 109 proxy_key = 'HTTP_X_REAL_IP'
110 110 proxy_key2 = 'HTTP_X_FORWARDED_FOR'
111 111 def_key = 'REMOTE_ADDR'
112 112 _filters = lambda x: _filter_port(_filter_proxy(x))
113 113
114 114 ip = environ.get(proxy_key)
115 115 if ip:
116 116 return _filters(ip)
117 117
118 118 ip = environ.get(proxy_key2)
119 119 if ip:
120 120 return _filters(ip)
121 121
122 122 ip = environ.get(def_key, '0.0.0.0')
123 123 return _filters(ip)
124 124
125 125
126 126 def get_server_ip_addr(environ, log_errors=True):
127 127 hostname = environ.get('SERVER_NAME')
128 128 try:
129 129 return socket.gethostbyname(hostname)
130 130 except Exception as e:
131 131 if log_errors:
132 132 # in some cases this lookup is not possible, and we don't want to
133 133 # make it an exception in logs
134 134 log.exception('Could not retrieve server ip address: %s', e)
135 135 return hostname
136 136
137 137
138 138 def get_server_port(environ):
139 139 return environ.get('SERVER_PORT')
140 140
141 141
142 142 def get_access_path(environ):
143 143 path = environ.get('PATH_INFO')
144 144 org_req = environ.get('pylons.original_request')
145 145 if org_req:
146 146 path = org_req.environ.get('PATH_INFO')
147 147 return path
148 148
149 149
150 150 def get_user_agent(environ):
151 151 return environ.get('HTTP_USER_AGENT')
152 152
153 153
154 154 def vcs_operation_context(
155 155 environ, repo_name, username, action, scm, check_locking=True,
156 156 is_shadow_repo=False, check_branch_perms=False, detect_force_push=False):
157 157 """
158 158 Generate the context for a vcs operation, e.g. push or pull.
159 159
160 160 This context is passed over the layers so that hooks triggered by the
161 161 vcs operation know details like the user, the user's IP address etc.
162 162
163 163 :param check_locking: Allows to switch of the computation of the locking
164 164 data. This serves mainly the need of the simplevcs middleware to be
165 165 able to disable this for certain operations.
166 166
167 167 """
168 168 # Tri-state value: False: unlock, None: nothing, True: lock
169 169 make_lock = None
170 170 locked_by = [None, None, None]
171 171 is_anonymous = username == User.DEFAULT_USER
172 172 user = User.get_by_username(username)
173 173 if not is_anonymous and check_locking:
174 174 log.debug('Checking locking on repository "%s"', repo_name)
175 175 repo = Repository.get_by_repo_name(repo_name)
176 176 make_lock, __, locked_by = repo.get_locking_state(
177 177 action, user.user_id)
178 178 user_id = user.user_id
179 179 settings_model = VcsSettingsModel(repo=repo_name)
180 180 ui_settings = settings_model.get_ui_settings()
181 181
182 182 # NOTE(marcink): This should be also in sync with
183 183 # rhodecode/apps/ssh_support/lib/backends/base.py:update_environment scm_data
184 184 store = [x for x in ui_settings if x.key == '/']
185 185 repo_store = ''
186 186 if store:
187 187 repo_store = store[0].value
188 188
189 189 scm_data = {
190 190 'ip': get_ip_addr(environ),
191 191 'username': username,
192 192 'user_id': user_id,
193 193 'action': action,
194 194 'repository': repo_name,
195 195 'scm': scm,
196 196 'config': rhodecode.CONFIG['__file__'],
197 197 'repo_store': repo_store,
198 198 'make_lock': make_lock,
199 199 'locked_by': locked_by,
200 200 'server_url': utils2.get_server_url(environ),
201 201 'user_agent': get_user_agent(environ),
202 202 'hooks': get_enabled_hook_classes(ui_settings),
203 203 'is_shadow_repo': is_shadow_repo,
204 204 'detect_force_push': detect_force_push,
205 205 'check_branch_perms': check_branch_perms,
206 206 }
207 207 return scm_data
208 208
209 209
210 210 class BasicAuth(AuthBasicAuthenticator):
211 211
212 212 def __init__(self, realm, authfunc, registry, auth_http_code=None,
213 213 initial_call_detection=False, acl_repo_name=None, rc_realm=''):
214 214 self.realm = realm
215 215 self.rc_realm = rc_realm
216 216 self.initial_call = initial_call_detection
217 217 self.authfunc = authfunc
218 218 self.registry = registry
219 219 self.acl_repo_name = acl_repo_name
220 220 self._rc_auth_http_code = auth_http_code
221 221
222 222 def _get_response_from_code(self, http_code):
223 223 try:
224 224 return get_exception(safe_int(http_code))
225 225 except Exception:
226 226 log.exception('Failed to fetch response for code %s', http_code)
227 227 return HTTPForbidden
228 228
229 229 def get_rc_realm(self):
230 230 return safe_str(self.rc_realm)
231 231
232 232 def build_authentication(self):
233 233 head = WWW_AUTHENTICATE.tuples('Basic realm="%s"' % self.realm)
234 234 if self._rc_auth_http_code and not self.initial_call:
235 235 # return alternative HTTP code if alternative http return code
236 236 # is specified in RhodeCode config, but ONLY if it's not the
237 237 # FIRST call
238 238 custom_response_klass = self._get_response_from_code(
239 239 self._rc_auth_http_code)
240 240 return custom_response_klass(headers=head)
241 241 return HTTPUnauthorized(headers=head)
242 242
243 243 def authenticate(self, environ):
244 244 authorization = AUTHORIZATION(environ)
245 245 if not authorization:
246 246 return self.build_authentication()
247 247 (authmeth, auth) = authorization.split(' ', 1)
248 248 if 'basic' != authmeth.lower():
249 249 return self.build_authentication()
250 250 auth = auth.strip().decode('base64')
251 251 _parts = auth.split(':', 1)
252 252 if len(_parts) == 2:
253 253 username, password = _parts
254 254 auth_data = self.authfunc(
255 255 username, password, environ, VCS_TYPE,
256 256 registry=self.registry, acl_repo_name=self.acl_repo_name)
257 257 if auth_data:
258 258 return {'username': username, 'auth_data': auth_data}
259 259 if username and password:
260 260 # we mark that we actually executed authentication once, at
261 261 # that point we can use the alternative auth code
262 262 self.initial_call = False
263 263
264 264 return self.build_authentication()
265 265
266 266 __call__ = authenticate
267 267
268 268
269 269 def calculate_version_hash(config):
270 270 return sha1(
271 271 config.get('beaker.session.secret', '') +
272 272 rhodecode.__version__)[:8]
273 273
274 274
275 275 def get_current_lang(request):
276 276 # NOTE(marcink): remove after pyramid move
277 277 try:
278 278 return translation.get_lang()[0]
279 279 except:
280 280 pass
281 281
282 282 return getattr(request, '_LOCALE_', request.locale_name)
283 283
284 284
285 285 def attach_context_attributes(context, request, user_id=None, is_api=None):
286 286 """
287 287 Attach variables into template context called `c`.
288 288 """
289 289 config = request.registry.settings
290 290
291 291 rc_config = SettingsModel().get_all_settings(cache=True, from_request=False)
292 292 context.rc_config = rc_config
293 293 context.rhodecode_version = rhodecode.__version__
294 294 context.rhodecode_edition = config.get('rhodecode.edition')
295 295 context.rhodecode_edition_id = config.get('rhodecode.edition_id')
296 296 # unique secret + version does not leak the version but keep consistency
297 297 context.rhodecode_version_hash = calculate_version_hash(config)
298 298
299 299 # Default language set for the incoming request
300 300 context.language = get_current_lang(request)
301 301
302 302 # Visual options
303 303 context.visual = AttributeDict({})
304 304
305 305 # DB stored Visual Items
306 306 context.visual.show_public_icon = str2bool(
307 307 rc_config.get('rhodecode_show_public_icon'))
308 308 context.visual.show_private_icon = str2bool(
309 309 rc_config.get('rhodecode_show_private_icon'))
310 310 context.visual.stylify_metatags = str2bool(
311 311 rc_config.get('rhodecode_stylify_metatags'))
312 312 context.visual.dashboard_items = safe_int(
313 313 rc_config.get('rhodecode_dashboard_items', 100))
314 314 context.visual.admin_grid_items = safe_int(
315 315 rc_config.get('rhodecode_admin_grid_items', 100))
316 316 context.visual.show_revision_number = str2bool(
317 317 rc_config.get('rhodecode_show_revision_number', True))
318 318 context.visual.show_sha_length = safe_int(
319 319 rc_config.get('rhodecode_show_sha_length', 100))
320 320 context.visual.repository_fields = str2bool(
321 321 rc_config.get('rhodecode_repository_fields'))
322 322 context.visual.show_version = str2bool(
323 323 rc_config.get('rhodecode_show_version'))
324 324 context.visual.use_gravatar = str2bool(
325 325 rc_config.get('rhodecode_use_gravatar'))
326 326 context.visual.gravatar_url = rc_config.get('rhodecode_gravatar_url')
327 327 context.visual.default_renderer = rc_config.get(
328 328 'rhodecode_markup_renderer', 'rst')
329 329 context.visual.comment_types = ChangesetComment.COMMENT_TYPES
330 330 context.visual.rhodecode_support_url = \
331 331 rc_config.get('rhodecode_support_url') or h.route_url('rhodecode_support')
332 332
333 333 context.visual.affected_files_cut_off = 60
334 334
335 335 context.pre_code = rc_config.get('rhodecode_pre_code')
336 336 context.post_code = rc_config.get('rhodecode_post_code')
337 337 context.rhodecode_name = rc_config.get('rhodecode_title')
338 338 context.default_encodings = aslist(config.get('default_encoding'), sep=',')
339 339 # if we have specified default_encoding in the request, it has more
340 340 # priority
341 341 if request.GET.get('default_encoding'):
342 342 context.default_encodings.insert(0, request.GET.get('default_encoding'))
343 343 context.clone_uri_tmpl = rc_config.get('rhodecode_clone_uri_tmpl')
344 344 context.clone_uri_id_tmpl = rc_config.get('rhodecode_clone_uri_id_tmpl')
345 345 context.clone_uri_ssh_tmpl = rc_config.get('rhodecode_clone_uri_ssh_tmpl')
346 346
347 347 # INI stored
348 348 context.labs_active = str2bool(
349 349 config.get('labs_settings_active', 'false'))
350 350 context.ssh_enabled = str2bool(
351 351 config.get('ssh.generate_authorized_keyfile', 'false'))
352 352 context.ssh_key_generator_enabled = str2bool(
353 353 config.get('ssh.enable_ui_key_generator', 'true'))
354 354
355 355 context.visual.allow_repo_location_change = str2bool(
356 356 config.get('allow_repo_location_change', True))
357 357 context.visual.allow_custom_hooks_settings = str2bool(
358 358 config.get('allow_custom_hooks_settings', True))
359 359 context.debug_style = str2bool(config.get('debug_style', False))
360 360
361 361 context.rhodecode_instanceid = config.get('instance_id')
362 362
363 363 context.visual.cut_off_limit_diff = safe_int(
364 364 config.get('cut_off_limit_diff'))
365 365 context.visual.cut_off_limit_file = safe_int(
366 366 config.get('cut_off_limit_file'))
367 367
368 368 context.license = AttributeDict({})
369 369 context.license.hide_license_info = str2bool(
370 370 config.get('license.hide_license_info', False))
371 371
372 372 # AppEnlight
373 373 context.appenlight_enabled = config.get('appenlight', False)
374 374 context.appenlight_api_public_key = config.get(
375 375 'appenlight.api_public_key', '')
376 376 context.appenlight_server_url = config.get('appenlight.server_url', '')
377 377
378 378 diffmode = {
379 379 "unified": "unified",
380 380 "sideside": "sideside"
381 381 }.get(request.GET.get('diffmode'))
382 382
383 383 if is_api is not None:
384 384 is_api = hasattr(request, 'rpc_user')
385 385 session_attrs = {
386 386 # defaults
387 387 "clone_url_format": "http",
388 388 "diffmode": "sideside",
389 389 "license_fingerprint": request.session.get('license_fingerprint')
390 390 }
391 391
392 392 if not is_api:
393 393 # don't access pyramid session for API calls
394 394 if diffmode and diffmode != request.session.get('rc_user_session_attr.diffmode'):
395 395 request.session['rc_user_session_attr.diffmode'] = diffmode
396 396
397 397 # session settings per user
398 398
399 399 for k, v in request.session.items():
400 400 pref = 'rc_user_session_attr.'
401 401 if k and k.startswith(pref):
402 402 k = k[len(pref):]
403 403 session_attrs[k] = v
404 404
405 405 context.user_session_attrs = session_attrs
406 406
407 407 # JS template context
408 408 context.template_context = {
409 409 'repo_name': None,
410 410 'repo_type': None,
411 411 'repo_landing_commit': None,
412 412 'rhodecode_user': {
413 413 'username': None,
414 414 'email': None,
415 415 'notification_status': False
416 416 },
417 417 'session_attrs': session_attrs,
418 418 'visual': {
419 419 'default_renderer': None
420 420 },
421 421 'commit_data': {
422 422 'commit_id': None
423 423 },
424 424 'pull_request_data': {'pull_request_id': None},
425 425 'timeago': {
426 426 'refresh_time': 120 * 1000,
427 427 'cutoff_limit': 1000 * 60 * 60 * 24 * 7
428 428 },
429 429 'pyramid_dispatch': {
430 430
431 431 },
432 432 'extra': {'plugins': {}}
433 433 }
434 434 # END CONFIG VARS
435 435 if is_api:
436 436 csrf_token = None
437 437 else:
438 438 csrf_token = auth.get_csrf_token(session=request.session)
439 439
440 440 context.csrf_token = csrf_token
441 441 context.backends = rhodecode.BACKENDS.keys()
442 442
443 443 unread_count = 0
444 444 user_bookmark_list = []
445 445 if user_id:
446 446 unread_count = NotificationModel().get_unread_cnt_for_user(user_id)
447 447 user_bookmark_list = UserBookmark.get_bookmarks_for_user(user_id)
448 448 context.unread_notifications = unread_count
449 449 context.bookmark_items = user_bookmark_list
450 450
451 451 # web case
452 452 if hasattr(request, 'user'):
453 453 context.auth_user = request.user
454 454 context.rhodecode_user = request.user
455 455
456 456 # api case
457 457 if hasattr(request, 'rpc_user'):
458 458 context.auth_user = request.rpc_user
459 459 context.rhodecode_user = request.rpc_user
460 460
461 461 # attach the whole call context to the request
462 462 request.set_call_context(context)
463 463
464 464
465 465 def get_auth_user(request):
466 466 environ = request.environ
467 467 session = request.session
468 468
469 469 ip_addr = get_ip_addr(environ)
470 470
471 471 # make sure that we update permissions each time we call controller
472 472 _auth_token = (
473 473 # ?auth_token=XXX
474 474 request.GET.get('auth_token', '')
475 475 # ?api_key=XXX !LEGACY
476 476 or request.GET.get('api_key', '')
477 477 # or headers....
478 478 or request.headers.get('X-Rc-Auth-Token', '')
479 479 )
480 480 if not _auth_token and request.matchdict:
481 481 url_auth_token = request.matchdict.get('_auth_token')
482 482 _auth_token = url_auth_token
483 483 if _auth_token:
484 484 log.debug('Using URL extracted auth token `...%s`', _auth_token[-4:])
485 485
486 486 if _auth_token:
487 487 # when using API_KEY we assume user exists, and
488 488 # doesn't need auth based on cookies.
489 489 auth_user = AuthUser(api_key=_auth_token, ip_addr=ip_addr)
490 490 authenticated = False
491 491 else:
492 492 cookie_store = CookieStoreWrapper(session.get('rhodecode_user'))
493 493 try:
494 494 auth_user = AuthUser(user_id=cookie_store.get('user_id', None),
495 495 ip_addr=ip_addr)
496 496 except UserCreationError as e:
497 497 h.flash(e, 'error')
498 498 # container auth or other auth functions that create users
499 499 # on the fly can throw this exception signaling that there's
500 500 # issue with user creation, explanation should be provided
501 501 # in Exception itself. We then create a simple blank
502 502 # AuthUser
503 503 auth_user = AuthUser(ip_addr=ip_addr)
504 504
505 505 # in case someone changes a password for user it triggers session
506 506 # flush and forces a re-login
507 507 if password_changed(auth_user, session):
508 508 session.invalidate()
509 509 cookie_store = CookieStoreWrapper(session.get('rhodecode_user'))
510 510 auth_user = AuthUser(ip_addr=ip_addr)
511 511
512 512 authenticated = cookie_store.get('is_authenticated')
513 513
514 514 if not auth_user.is_authenticated and auth_user.is_user_object:
515 515 # user is not authenticated and not empty
516 516 auth_user.set_authenticated(authenticated)
517 517
518 518 return auth_user, _auth_token
519 519
520 520
521 521 def h_filter(s):
522 522 """
523 523 Custom filter for Mako templates. Mako by standard uses `markupsafe.escape`
524 524 we wrap this with additional functionality that converts None to empty
525 525 strings
526 526 """
527 527 if s is None:
528 528 return markupsafe.Markup()
529 529 return markupsafe.escape(s)
530 530
531 531
532 532 def add_events_routes(config):
533 533 """
534 534 Adds routing that can be used in events. Because some events are triggered
535 535 outside of pyramid context, we need to bootstrap request with some
536 536 routing registered
537 537 """
538 538
539 539 from rhodecode.apps._base import ADMIN_PREFIX
540 540
541 541 config.add_route(name='home', pattern='/')
542 542 config.add_route(name='main_page_repos_data', pattern='/_home_repos')
543 543 config.add_route(name='main_page_repo_groups_data', pattern='/_home_repo_groups')
544 544
545 545 config.add_route(name='login', pattern=ADMIN_PREFIX + '/login')
546 546 config.add_route(name='logout', pattern=ADMIN_PREFIX + '/logout')
547 547 config.add_route(name='repo_summary', pattern='/{repo_name}')
548 548 config.add_route(name='repo_summary_explicit', pattern='/{repo_name}/summary')
549 549 config.add_route(name='repo_group_home', pattern='/{repo_group_name}')
550 550
551 551 config.add_route(name='pullrequest_show',
552 552 pattern='/{repo_name}/pull-request/{pull_request_id}')
553 553 config.add_route(name='pull_requests_global',
554 554 pattern='/pull-request/{pull_request_id}')
555 555
556 556 config.add_route(name='repo_commit',
557 557 pattern='/{repo_name}/changeset/{commit_id}')
558 558 config.add_route(name='repo_files',
559 559 pattern='/{repo_name}/files/{commit_id}/{f_path}')
560 560
561 561 config.add_route(name='hovercard_user',
562 562 pattern='/_hovercard/user/{user_id}')
563 563
564 564 config.add_route(name='hovercard_user_group',
565 565 pattern='/_hovercard/user_group/{user_group_id}')
566 566
567 567 config.add_route(name='hovercard_pull_request',
568 568 pattern='/_hovercard/pull_request/{pull_request_id}')
569 569
570 570 config.add_route(name='hovercard_repo_commit',
571 571 pattern='/_hovercard/commit/{repo_name}/{commit_id}')
572 572
573 573
574 574 def bootstrap_config(request, registry_name='RcTestRegistry'):
575 575 import pyramid.testing
576 576 registry = pyramid.testing.Registry(registry_name)
577 577
578 578 config = pyramid.testing.setUp(registry=registry, request=request)
579 579
580 580 # allow pyramid lookup in testing
581 581 config.include('pyramid_mako')
582 582 config.include('rhodecode.lib.rc_beaker')
583 583 config.include('rhodecode.lib.rc_cache')
584 584
585 585 add_events_routes(config)
586 586
587 587 return config
588 588
589 589
590 590 def bootstrap_request(**kwargs):
591 591 """
592 592 Returns a thin version of Request Object that is used in non-web context like testing/celery
593 593 """
594 594
595 595 import pyramid.testing
596 596 from rhodecode.lib.request import ThinRequest as _ThinRequest
597 597
598 598 class ThinRequest(_ThinRequest):
599 599 application_url = kwargs.pop('application_url', 'http://example.com')
600 600 host = kwargs.pop('host', 'example.com:80')
601 601 domain = kwargs.pop('domain', 'example.com')
602 602
603 603 class ThinSession(pyramid.testing.DummySession):
604 604 def save(*arg, **kw):
605 605 pass
606 606
607 607 request = ThinRequest(**kwargs)
608 608 request.session = ThinSession()
609 609
610 610 return request
611 611
@@ -1,2155 +1,2155 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Helper functions
23 23
24 24 Consists of functions to typically be used within templates, but also
25 25 available to Controllers. This module is available to both as 'h'.
26 26 """
27 27 import base64
28 28 import collections
29 29
30 30 import os
31 31 import random
32 32 import hashlib
33 33 from io import StringIO
34 34 import textwrap
35 35 import urllib.request, urllib.parse, urllib.error
36 36 import math
37 37 import logging
38 38 import re
39 39 import time
40 40 import string
41 41 import hashlib
42 42 import regex
43 43 from collections import OrderedDict
44 44
45 45 import pygments
46 46 import itertools
47 47 import fnmatch
48 48 import bleach
49 49
50 50 from datetime import datetime
51 51 from functools import partial
52 52 from pygments.formatters.html import HtmlFormatter
53 53 from pygments.lexers import (
54 54 get_lexer_by_name, get_lexer_for_filename, get_lexer_for_mimetype)
55 55
56 56 from pyramid.threadlocal import get_current_request
57 57 from tempita import looper
58 58 from webhelpers2.html import literal, HTML, escape
59 59 from webhelpers2.html._autolink import _auto_link_urls
60 60 from webhelpers2.html.tools import (
61 61 button_to, highlight, js_obfuscate, strip_links, strip_tags)
62 62
63 63 from webhelpers2.text import (
64 64 chop_at, collapse, convert_accented_entities,
65 65 convert_misc_entities, lchop, plural, rchop, remove_formatting,
66 66 replace_whitespace, urlify, truncate, wrap_paragraphs)
67 67 from webhelpers2.date import time_ago_in_words
68 68
69 69 from webhelpers2.html.tags import (
70 70 _input, NotGiven, _make_safe_id_component as safeid,
71 71 form as insecure_form,
72 72 auto_discovery_link, checkbox, end_form, file,
73 73 hidden, image, javascript_link, link_to, link_to_if, link_to_unless, ol,
74 74 select as raw_select, stylesheet_link, submit, text, password, textarea,
75 75 ul, radio, Options)
76 76
77 77 from webhelpers2.number import format_byte_size
78 78
79 79 from rhodecode.lib.action_parser import action_parser
80 80 from rhodecode.lib.pagination import Page, RepoPage, SqlPage
81 81 from rhodecode.lib.ext_json import json
82 82 from rhodecode.lib.utils import repo_name_slug, get_custom_lexer
83 83 from rhodecode.lib.utils2 import (
84 84 str2bool, safe_unicode, safe_str,
85 85 get_commit_safe, datetime_to_time, time_to_datetime, time_to_utcdatetime,
86 86 AttributeDict, safe_int, md5, md5_safe, get_host_info)
87 87 from rhodecode.lib.markup_renderer import MarkupRenderer, relative_links
88 88 from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError
89 89 from rhodecode.lib.vcs.backends.base import BaseChangeset, EmptyCommit
90 90 from rhodecode.lib.vcs.conf.settings import ARCHIVE_SPECS
91 91 from rhodecode.lib.index.search_utils import get_matching_line_offsets
92 92 from rhodecode.config.conf import DATE_FORMAT, DATETIME_FORMAT
93 93 from rhodecode.model.changeset_status import ChangesetStatusModel
94 94 from rhodecode.model.db import Permission, User, Repository, UserApiKeys, FileStore
95 95 from rhodecode.model.repo_group import RepoGroupModel
96 96 from rhodecode.model.settings import IssueTrackerSettingsModel
97 97
98 98
99 99 log = logging.getLogger(__name__)
100 100
101 101
102 102 DEFAULT_USER = User.DEFAULT_USER
103 103 DEFAULT_USER_EMAIL = User.DEFAULT_USER_EMAIL
104 104
105 105
def asset(path, ver=None, **kwargs):
    """
    Helper to generate a static asset file path for rhodecode assets

    eg. h.asset('images/image.png', ver='3923')

    :param path: path of asset
    :param ver: optional version query param to append as ?ver=
    """
    request = get_current_request()
    query = {}
    query.update(kwargs)
    if ver:
        # FIX: append the version marker instead of replacing the whole
        # query dict, which silently discarded any **kwargs params
        query['ver'] = ver
    return request.static_path(
        'rhodecode:public/{}'.format(path), _query=query)
122 122
123 123
# translation table for :func:`html_escape`: maps each HTML-special
# character (keyed by codepoint, as str.translate requires) to its entity.
default_html_escape_table = str.maketrans({
    '&': u'&amp;',
    '<': u'&lt;',
    '>': u'&gt;',
    '"': u'&quot;',
    "'": u'&#39;',
})


def html_escape(text, html_escape_table=default_html_escape_table):
    """Produce entities within text."""
    return text.translate(html_escape_table)
136 136
137 137
138 138 def chop_at_smart(s, sub, inclusive=False, suffix_if_chopped=None):
139 139 """
140 140 Truncate string ``s`` at the first occurrence of ``sub``.
141 141
142 142 If ``inclusive`` is true, truncate just after ``sub`` rather than at it.
143 143 """
144 144 suffix_if_chopped = suffix_if_chopped or ''
145 145 pos = s.find(sub)
146 146 if pos == -1:
147 147 return s
148 148
149 149 if inclusive:
150 150 pos += len(sub)
151 151
152 152 chopped = s[:pos]
153 153 left = s[pos:].strip()
154 154
155 155 if left and suffix_if_chopped:
156 156 chopped += suffix_if_chopped
157 157
158 158 return chopped
159 159
160 160
def shorter(text, size=20, prefix=False):
    """Ellipsize ``text`` to at most ``size`` chars, from front or back."""
    ellipsis = '...'
    if len(text) <= size:
        return text
    keep = size - len(ellipsis)
    # keep the tail when shortening in front, the head otherwise
    return (ellipsis + text[-keep:]) if prefix else (text[:keep] + ellipsis)
170 170
171 171
def reset(name, value=None, id=NotGiven, type="reset", **attrs):
    """
    Reset button

    Thin wrapper over webhelpers2 ``_input`` producing an
    ``<input type="reset">``; ``attrs`` become extra HTML attributes.
    """
    return _input(type, name, value, id, attrs)
177 177
178 178
def select(name, selected_values, options, id=NotGiven, **attrs):
    """Render a <select>; accepts a webhelpers2 ``Options`` object or the
    legacy list form of (value, label) tuples / plain strings, where a
    list-valued ``value`` describes an <optgroup> of nested pairs."""

    if isinstance(options, (list, tuple)):
        options_iter = options
        # Handle old value,label lists ... where value also can be value,label lists
        options = Options()
        for opt in options_iter:
            if isinstance(opt, tuple) and len(opt) == 2:
                value, label = opt
            elif isinstance(opt, str):
                value = label = opt
            else:
                raise ValueError('invalid select option type %r' % type(opt))

            if isinstance(value, (list, tuple)):
                # nested list -> this entry describes an <optgroup>
                option_group = options.add_optgroup(label)
                for opt2 in value:
                    if isinstance(opt2, tuple) and len(opt2) == 2:
                        group_value, group_label = opt2
                    elif isinstance(opt2, str):
                        group_value = group_label = opt2
                    else:
                        raise ValueError('invalid select option type %r' % type(opt2))

                    option_group.add_option(group_label, group_value)
            else:
                options.add_option(label, value)

    return raw_select(name, selected_values, options, id=id, **attrs)
208 208
209 209
def branding(name, length=40):
    # hard-truncate (no "..." indicator) for page branding/titles
    return truncate(name, length, indicator="")
212 212
213 213
def FID(raw_id, path):
    """
    Creates a unique ID for filenode based on it's hash of path and commit
    it's safe to use in urls

    :param raw_id: commit hash
    :param path: file path inside the repository
    """
    return 'c-{}-{}'.format(short_id(raw_id), md5_safe(path)[:12])
224 224
225 225
class _GetError(object):
    """Get error from form_errors, and represent it as span wrapped error
    message

    :param field_name: field to fetch errors for
    :param form_errors: form errors dict
    """

    def __call__(self, field_name, form_errors):
        template = """<span class="error_msg">%s</span>"""
        has_error = form_errors and field_name in form_errors
        if has_error:
            return literal(template % form_errors.get(field_name))


get_error = _GetError()
241 241
242 242
class _ToolTip(object):
    # callable helper exposed at module level as ``tooltip``

    def __call__(self, tooltip_title, trim_at=50):
        """
        Special function just to wrap our text into nice formatted
        autowrapped text

        :param tooltip_title: text to be escaped for a title attribute
        """
        # NOTE(review): escape() is applied first and then '<'/'>' are
        # entity-encoded again — presumably to survive a markup-safe
        # literal; confirm double-encoding is intended before changing.
        tooltip_title = escape(tooltip_title)
        tooltip_title = tooltip_title.replace('<', '&lt;').replace('>', '&gt;')
        return tooltip_title


tooltip = _ToolTip()

# clipboard-copy icon appended after breadcrumbs; {} receives the file path
files_icon = u'<i class="file-breadcrumb-copy tooltip icon-clipboard clipboard-action" data-clipboard-text="{}" title="Copy file path"></i>'
260 260
261 261
def files_breadcrumbs(repo_name, repo_type, commit_id, file_path, landing_ref_name=None, at_ref=None,
                      limit_items=False, linkify_last_item=False, hide_last_item=False,
                      copy_path_icon=True):
    """Render ' / '-joined breadcrumb links for a file path inside a repo.

    Starts with a HOME icon linking to the files root; each path segment
    links to its sub-tree. Behaviour toggles: ``limit_items`` collapses long
    paths with '...', ``linkify_last_item``/``hide_last_item`` control the
    final segment, ``copy_path_icon`` appends a copy-to-clipboard icon.
    """
    if isinstance(file_path, str):
        file_path = safe_unicode(file_path)

    # when browsing "at" a ref, carry it in the query string and prefer it
    # as the landing ref for generated links
    if at_ref:
        route_qry = {'at': at_ref}
        default_landing_ref = at_ref or landing_ref_name or commit_id
    else:
        route_qry = None
        default_landing_ref = commit_id

    # first segment is a `HOME` link to repo files root location
    root_name = literal(u'<i class="icon-home"></i>')

    url_segments = [
        link_to(
            root_name,
            repo_files_by_ref_url(
                repo_name,
                repo_type,
                f_path=None,  # None here is a special case for SVN repos,
                # that won't prefix with a ref
                ref_name=default_landing_ref,
                commit_id=commit_id,
                query=route_qry
            )
        )]

    path_segments = file_path.split('/')
    last_cnt = len(path_segments) - 1
    for cnt, segment in enumerate(path_segments):
        if not segment:
            continue
        segment_html = escape(segment)

        last_item = cnt == last_cnt

        if last_item and hide_last_item:
            # iterate over and hide last element
            continue

        if last_item and linkify_last_item is False:
            # plain version
            url_segments.append(segment_html)
        else:
            url_segments.append(
                link_to(
                    segment_html,
                    repo_files_by_ref_url(
                        repo_name,
                        repo_type,
                        # link each segment to its cumulative sub-path
                        f_path='/'.join(path_segments[:cnt + 1]),
                        ref_name=default_landing_ref,
                        commit_id=commit_id,
                        query=route_qry
                    ),
                ))

    # collapse to "HOME / ... / last-5" only when that is actually shorter
    limited_url_segments = url_segments[:1] + ['...'] + url_segments[-5:]
    if limit_items and len(limited_url_segments) < len(url_segments):
        url_segments = limited_url_segments

    full_path = file_path
    if copy_path_icon:
        icon = files_icon.format(escape(full_path))
    else:
        icon = ''

    if file_path == '':
        return root_name
    else:
        return literal(' / '.join(url_segments) + icon)
336 336
337 337
def files_url_data(request):
    """Serialize the request's matchdict (with file-view defaults filled
    in) to a JSON string for client-side use."""
    import urllib.request, urllib.parse, urllib.error
    matchdict = request.matchdict

    # default to empty path / 'tip' commit; quote an existing path for URLs
    if 'f_path' not in matchdict:
        matchdict['f_path'] = ''
    else:
        matchdict['f_path'] = urllib.parse.quote(safe_str(matchdict['f_path']))

    matchdict.setdefault('commit_id', 'tip')

    return json.dumps(matchdict)
350 350
351 351
def repo_files_by_ref_url(db_repo_name, db_repo_type, f_path, ref_name, commit_id, query=None, ):
    """Build the 'repo_files' route URL, handling the SVN vs git/hg
    difference in how refs map onto the URL."""
    _is_svn = is_svn(db_repo_type)
    final_f_path = f_path

    if _is_svn:
        """
        For SVN the ref_name cannot be used as a commit_id, it needs to be prefixed with
        actually commit_id followed by the ref_name. This should be done only in case
        This is a initial landing url, without additional paths.

        like: /1000/tags/1.0.0/?at=tags/1.0.0
        """

        if ref_name and ref_name != 'tip':
            # NOTE(marcink): for svn the ref_name is actually the stored path, so we prefix it
            # for SVN we only do this magic prefix if it's root, .eg landing revision
            # of files link. If we are in the tree we don't need this since we traverse the url
            # that has everything stored
            if f_path in ['', '/']:
                final_f_path = '/'.join([ref_name, f_path])

        # SVN always needs a commit_id explicitly, without a named REF
        default_commit_id = commit_id
    else:
        """
        For git and mercurial we construct a new URL using the names instead of commit_id
        like: /master/some_path?at=master
        """
        # We currently do not support branches with slashes
        if '/' in ref_name:
            default_commit_id = commit_id
        else:
            default_commit_id = ref_name

    # sometimes we pass f_path as None, to indicate explicit no prefix,
    # we translate it to string to not have None
    final_f_path = final_f_path or ''

    files_url = route_path(
        'repo_files',
        repo_name=db_repo_name,
        commit_id=default_commit_id,
        f_path=final_f_path,
        _query=query
    )
    return files_url
398 398
399 399
def code_highlight(code, lexer, formatter, use_hl_filter=False):
    """
    Lex ``code`` with ``lexer`` and format it with the formatter ``formatter``.

    If ``use_hl_filter`` is set, an ElasticSearch highlight filter is added
    to the lexer so search-term markers survive highlighting.

    :returns: the formatted output as a string
    """
    if use_hl_filter:
        # add HL filter
        from rhodecode.lib.index import search_utils
        lexer.add_filter(search_utils.ElasticSearchHLFilter())
    return pygments.format(pygments.lex(code, lexer), formatter)
413 413
414 414
class CodeHtmlFormatter(HtmlFormatter):
    """
    My code Html Formatter for source codes

    Wraps each source line in a ``<div id="L<n>">`` anchor and renders the
    line-number gutter as a two-column table.
    """

    def wrap(self, source, outfile):
        return self._wrap_div(self._wrap_pre(self._wrap_code(source)))

    def _wrap_code(self, source):
        # give every emitted source line its own id for deep-linking
        for cnt, it in enumerate(source):
            i, t = it
            t = '<div id="L%s">%s</div>' % (cnt + 1, t)
            yield i, t

    def _wrap_tablelinenos(self, inner):
        # FIX (py3 port): module imports `from io import StringIO`, so the
        # old `StringIO.StringIO()` raised AttributeError; call it directly.
        dummyoutfile = StringIO()
        lncount = 0
        for t, line in inner:
            if t:
                lncount += 1
            dummyoutfile.write(line)

        fl = self.linenostart
        mw = len(str(lncount + fl - 1))
        sp = self.linenospecial
        st = self.linenostep
        la = self.lineanchors
        aln = self.anchorlinenos
        nocls = self.noclasses
        if sp:
            # variant with "special" (highlighted) every-sp-th line numbers
            lines = []

            for i in range(fl, fl + lncount):
                if i % st == 0:
                    if i % sp == 0:
                        if aln:
                            lines.append('<a href="#%s%d" class="special">%*d</a>' %
                                         (la, i, mw, i))
                        else:
                            lines.append('<span class="special">%*d</span>' % (mw, i))
                    else:
                        if aln:
                            lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
                        else:
                            lines.append('%*d' % (mw, i))
                else:
                    lines.append('')
            ls = '\n'.join(lines)
        else:
            lines = []
            for i in range(fl, fl + lncount):
                if i % st == 0:
                    if aln:
                        lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
                    else:
                        lines.append('%*d' % (mw, i))
                else:
                    lines.append('')
            ls = '\n'.join(lines)

        # in case you wonder about the seemingly redundant <div> here: since the
        # content in the other cell also is wrapped in a div, some browsers in
        # some configurations seem to mess up the formatting...
        if nocls:
            yield 0, ('<table class="%stable">' % self.cssclass +
                      '<tr><td><div class="linenodiv" '
                      'style="background-color: #f0f0f0; padding-right: 10px">'
                      '<pre style="line-height: 125%">' +
                      ls + '</pre></div></td><td id="hlcode" class="code">')
        else:
            yield 0, ('<table class="%stable">' % self.cssclass +
                      '<tr><td class="linenos"><div class="linenodiv"><pre>' +
                      ls + '</pre></div></td><td id="hlcode" class="code">')
        yield 0, dummyoutfile.getvalue()
        yield 0, '</td></tr></table>'
490 490
491 491
class SearchContentCodeHtmlFormatter(CodeHtmlFormatter):
    """Formatter for search results: renders only the matched line numbers
    (``only_line_numbers``), separated by '...' rows, with each line number
    linking back to the file view when ``url`` is given."""

    def __init__(self, **kw):
        # only show these line numbers if set
        self.only_lines = kw.pop('only_line_numbers', [])
        self.query_terms = kw.pop('query_terms', [])
        self.max_lines = kw.pop('max_lines', 5)
        self.line_context = kw.pop('line_context', 3)
        self.url = kw.pop('url', None)

        # NOTE(review): super(CodeHtmlFormatter, self) skips CodeHtmlFormatter
        # in the MRO; since CodeHtmlFormatter defines no __init__ the effect
        # equals a plain super() call here — confirm before "fixing".
        super(CodeHtmlFormatter, self).__init__(**kw)

    def _wrap_code(self, source):
        # search hits are wrapped in plain <pre>, no per-line anchors
        for cnt, it in enumerate(source):
            i, t = it
            t = '<pre>%s</pre>' % t
            yield i, t

    def _wrap_tablelinenos(self, inner):
        yield 0, '<table class="code-highlight %stable">' % self.cssclass

        last_shown_line_number = 0
        current_line_number = 1

        for t, line in inner:
            if not t:
                yield t, line
                continue

            if current_line_number in self.only_lines:
                # emit an ellipsis row between non-adjacent matched lines
                if last_shown_line_number + 1 != current_line_number:
                    yield 0, '<tr>'
                    yield 0, '<td class="line">...</td>'
                    yield 0, '<td id="hlcode" class="code"></td>'
                    yield 0, '</tr>'

                yield 0, '<tr>'
                if self.url:
                    yield 0, '<td class="line"><a href="%s#L%i">%i</a></td>' % (
                        self.url, current_line_number, current_line_number)
                else:
                    yield 0, '<td class="line"><a href="">%i</a></td>' % (
                        current_line_number)
                yield 0, '<td id="hlcode" class="code">' + line + '</td>'
                yield 0, '</tr>'

                last_shown_line_number = current_line_number

            current_line_number += 1

        yield 0, '</table>'
542 542
543 543
def hsv_to_rgb(h, s, v):
    """ Convert hsv color values to rgb """

    if s == 0.0:
        return v, v, v
    i = int(h * 6.0)  # XXX assume int() truncates!
    f = (h * 6.0) - i
    p = v * (1.0 - s)
    q = v * (1.0 - s * f)
    t = v * (1.0 - s * (1.0 - f))
    i = i % 6
    if i == 0:
        return v, t, p
    if i == 1:
        return q, v, p
    if i == 2:
        return p, v, t
    if i == 3:
        return p, q, v
    if i == 4:
        return t, p, v
    if i == 5:
        return v, p, q


def unique_color_generator(n=10000, saturation=0.10, lightness=0.95):
    """
    Generator for getting n of evenly distributed colors using
    hsv color and golden ratio. It always return same order of colors

    :param n: number of colors to generate
    :param saturation: saturation of returned colors
    :param lightness: lightness of returned colors
    :returns: RGB tuple
    """

    golden_ratio = 0.618033988749895
    h = 0.22717784590367374

    for _ in range(n):
        h += golden_ratio
        h %= 1
        HSV_tuple = [h, saturation, lightness]
        RGB_tuple = hsv_to_rgb(*HSV_tuple)
        # FIX (py3 port): yield a concrete tuple instead of a lazy ``map``
        # object. A map iterator is single-use, so consumers that cache the
        # yielded value (e.g. color_hasher) got an empty sequence on re-use.
        yield tuple(str(int(x * 256)) for x in RGB_tuple)


def color_hasher(n=10000, saturation=0.10, lightness=0.95):
    """
    Returns a function which when called with an argument returns a unique
    color for that argument, eg.

    :param n: number of colors to generate
    :param saturation: saturation of returned colors
    :param lightness: lightness of returned colors
    :returns: css RGB string

    >>> color_hash = color_hasher()
    >>> color_hash('hello')
    'rgb(34, 12, 59)'
    >>> color_hash('hello')
    'rgb(34, 12, 59)'
    >>> color_hash('other')
    'rgb(90, 224, 159)'
    """

    color_dict = {}
    cgenerator = unique_color_generator(
        saturation=saturation, lightness=lightness)

    def get_color_string(thing):
        if thing in color_dict:
            col = color_dict[thing]
        else:
            # materialize before caching so the stored value can be joined
            # more than once (guards against iterator-valued colors)
            col = color_dict[thing] = tuple(next(cgenerator))
        return "rgb(%s)" % (', '.join(col))

    return get_color_string
622 622
623 623
def get_lexer_safe(mimetype=None, filepath=None):
    """
    Tries to return a relevant pygments lexer using mimetype/filepath name,
    defaulting to plain text if none could be found
    """
    lexer = None
    # FIX: look up mimetype and filename independently — previously a
    # ClassNotFound from the mimetype lookup aborted the shared try block
    # and the filename fallback was never attempted.
    if mimetype:
        try:
            lexer = get_lexer_for_mimetype(mimetype)
        except pygments.util.ClassNotFound:
            pass
    if not lexer and filepath:
        try:
            lexer = get_lexer_for_filename(filepath)
        except pygments.util.ClassNotFound:
            pass

    if not lexer:
        lexer = get_lexer_by_name('text')

    return lexer
642 642
643 643
def get_lexer_for_filenode(filenode):
    # a custom (user-configured) lexer for the extension wins over the
    # lexer the filenode detects itself
    lexer = get_custom_lexer(filenode.extension) or filenode.lexer
    return lexer
647 647
648 648
def pygmentize(filenode, **kwargs):
    """
    pygmentize function using pygments

    :param filenode: filenode whose content gets highlighted
    :param kwargs: passed through to :class:`CodeHtmlFormatter`
    :returns: markup-safe ``literal`` with the highlighted HTML
    """
    lexer = get_lexer_for_filenode(filenode)
    return literal(code_highlight(filenode.content, lexer,
                                  CodeHtmlFormatter(**kwargs)))
658 658
659 659
def is_following_repo(repo_name, user_id):
    # thin wrapper around ScmModel; import kept local to avoid import cycles
    from rhodecode.model.scm import ScmModel
    return ScmModel().is_following_repo(repo_name, user_id)
663 663
664 664
class _Message(object):
    """A message returned by ``Flash.pop_messages()``.

    Converting the message to a string returns the message text. Instances
    also have the following attributes:

    * ``message``: the message text.
    * ``category``: the category specified when the message was created.
    """

    def __init__(self, category, message, sub_data=None):
        self.category = category
        self.message = message
        # optional structured payload attached to the flash message
        self.sub_data = sub_data or {}

    def __str__(self):
        return self.message

    # py2-era alias; harmless on py3
    __unicode__ = __str__

    def __html__(self):
        # markup-aware renderers use this for safe HTML embedding
        return escape(safe_unicode(self.message))
687 687
688 688
class Flash(object):
    # List of allowed categories. If None, allow any category.
    categories = ["warning", "notice", "error", "success"]

    # Default category if none is specified.
    default_category = "notice"

    def __init__(self, session_key="flash", categories=None,
                 default_category=None):
        """
        Instantiate a ``Flash`` object.

        ``session_key`` is the key to save the messages under in the user's
        session.

        ``categories`` is an optional list which overrides the default list
        of categories.

        ``default_category`` overrides the default category used for messages
        when none is specified.
        """
        self.session_key = session_key
        if categories is not None:
            self.categories = categories
        if default_category is not None:
            self.default_category = default_category
        if self.categories and self.default_category not in self.categories:
            raise ValueError(
                "unrecognized default category %r" % (self.default_category,))

    def pop_messages(self, session=None, request=None):
        """
        Return all accumulated messages and delete them from the session.

        The return value is a list of ``Message`` objects.
        """
        messages = []

        # fall back to the current request's session when none is given
        if not session:
            if not request:
                request = get_current_request()
            session = request.session

        # Pop the 'old' pylons flash messages. They are tuples of the form
        # (category, message)
        for cat, msg in session.pop(self.session_key, []):
            messages.append(_Message(cat, msg))

        # Pop the 'new' pyramid flash messages for each category as list
        # of strings.
        for cat in self.categories:
            for msg in session.pop_flash(queue=cat):
                sub_data = {}
                if hasattr(msg, 'rsplit'):
                    # messages may carry JSON sub-data appended after a
                    # |DELIM| marker; split it off and decode it
                    flash_data = msg.rsplit('|DELIM|', 1)
                    org_message = flash_data[0]
                    if len(flash_data) > 1:
                        sub_data = json.loads(flash_data[1])
                else:
                    org_message = msg

                messages.append(_Message(cat, org_message, sub_data=sub_data))

        # Map messages from the default queue to the 'notice' category.
        for msg in session.pop_flash():
            messages.append(_Message('notice', msg))

        session.save()
        return messages

    def json_alerts(self, session=None, request=None):
        # serialize pending flash messages for client-side alert rendering
        payloads = []
        messages = flash.pop_messages(session=session, request=request) or []
        for message in messages:
            payloads.append({
                'message': {
                    'message': u'{}'.format(message.message),
                    'level': message.category,
                    'force': True,
                    'subdata': message.sub_data
                }
            })
        return json.dumps(payloads)

    def __call__(self, message, category=None, ignore_duplicate=True,
                 session=None, request=None):
        # queue a new flash message on the (request) session
        if not session:
            if not request:
                request = get_current_request()
            session = request.session

        session.flash(
            message, queue=category, allow_duplicate=not ignore_duplicate)


flash = Flash()
786 786
787 787 #==============================================================================
788 788 # SCM FILTERS available via h.
789 789 #==============================================================================
790 790 from rhodecode.lib.vcs.utils import author_name, author_email
791 791 from rhodecode.lib.utils2 import age, age_from_seconds
792 792 from rhodecode.model.db import User, ChangesetStatus
793 793
794 794
795 795 email = author_email
796 796
797 797
def capitalize(raw_text):
    """Upper-case the first character and lower-case the rest."""
    capitalized = raw_text.capitalize()
    return capitalized
800 800
801 801
def short_id(long_id):
    """Return the first 12 characters of a full commit hash."""
    return long_id[0:12]
804 804
805 805
def hide_credentials(url):
    # mask user:password pairs embedded in clone/repo URLs before display
    from rhodecode.lib.utils2 import credentials_filter
    return credentials_filter(url)
809 809
810 810
811 811 import pytz
812 812 import tzlocal
813 813 local_timezone = tzlocal.get_localzone()
814 814
815 815
def get_timezone(datetime_iso, time_is_local=False):
    """Return a ``+HH:MM`` UTC-offset string for ``datetime_iso``.

    Defaults to '+00:00'; when the value is a naive local datetime the
    offset of the server's timezone (or RC_TIMEZONE override) is used.
    """
    tzinfo = '+00:00'

    # detect if we have a timezone info, otherwise, add it
    if time_is_local and isinstance(datetime_iso, datetime) and not datetime_iso.tzinfo:
        # RC_TIMEZONE env var overrides the auto-detected local timezone
        force_timezone = os.environ.get('RC_TIMEZONE', '')
        if force_timezone:
            force_timezone = pytz.timezone(force_timezone)
        timezone = force_timezone or local_timezone
        # strftime('%z') yields e.g. '+0200'; reformat as '+02:00'
        offset = timezone.localize(datetime_iso).strftime('%z')
        tzinfo = '{}:{}'.format(offset[:-2], offset[-2:])
    return tzinfo
828 828
829 829
def age_component(datetime_iso, value=None, time_is_local=False, tooltip=True):
    """Render a ``<time class="timeago">`` element for client-side
    relative-age display; ``value`` overrides the formatted title."""
    title = value or format_date(datetime_iso)
    tzinfo = get_timezone(datetime_iso, time_is_local=time_is_local)

    return literal(
        '<time class="timeago {cls}" title="{tt_title}" datetime="{dt}{tzinfo}">{title}</time>'.format(
            cls='tooltip' if tooltip else '',
            tt_title=('{title}{tzinfo}'.format(title=title, tzinfo=tzinfo)) if tooltip else '',
            title=title, dt=datetime_iso, tzinfo=tzinfo
        ))
840 840
841 841
def _shorten_commit_id(commit_id, commit_len=None):
    # default shortening length comes from the per-request visual settings
    if commit_len is None:
        request = get_current_request()
        commit_len = request.call_context.visual.show_sha_length
    return commit_id[:commit_len]
847 847
848 848
def show_id(commit, show_idx=None, commit_len=None):
    """
    Configurable function that shows ID
    by default it's r123:fffeeefffeee

    :param commit: commit instance
    """
    if show_idx is None:
        # fall back to the per-request visual setting
        request = get_current_request()
        show_idx = request.call_context.visual.show_revision_number

    raw_id = _shorten_commit_id(commit.raw_id, commit_len=commit_len)
    if show_idx:
        return 'r{}:{}'.format(commit.idx, raw_id)
    return '{}'.format(raw_id)
865 865
866 866
def format_date(date):
    """
    use a standardized formatting for dates used in RhodeCode

    :param date: date/datetime object
    :return: formatted date
    """
    if not date:
        return u""
    return safe_unicode(date.strftime("%a, %d %b %Y %H:%M:%S"))
880 880
881 881
class _RepoChecker(object):
    """Callable predicate testing whether a repository (object or plain
    backend-alias string) belongs to the configured backend."""

    def __init__(self, backend_alias):
        self._backend_alias = backend_alias

    def __call__(self, repository):
        # accept a Repository-like object (alias / repo_type attribute)
        # or a bare alias string
        if hasattr(repository, 'alias'):
            backend = repository.alias
        elif hasattr(repository, 'repo_type'):
            backend = repository.repo_type
        else:
            backend = repository
        return backend == self._backend_alias


is_git = _RepoChecker('git')
is_hg = _RepoChecker('hg')
is_svn = _RepoChecker('svn')
900 900
901 901
def get_repo_type_by_name(repo_name):
    # backend alias ('git'/'hg'/'svn') of a named repo, or None if missing
    repo = Repository.get_by_repo_name(repo_name)
    if repo:
        return repo.repo_type
906 906
907 907
def is_svn_without_proxy(repository):
    # True when the repo is SVN and the SVN http proxy is disabled in the
    # VCS settings; local import avoids an import cycle
    if is_svn(repository):
        from rhodecode.model.settings import VcsSettingsModel
        conf = VcsSettingsModel().get_ui_settings_as_config_obj()
        return not str2bool(conf.get('vcs_svn_proxy', 'http_requests_enabled'))
    return False
914 914
915 915
def discover_user(author):
    """
    Tries to discover RhodeCode User based on the author string. Author string
    is typically `FirstName LastName <email@address.com>`

    Returns a ``User`` instance or ``None`` when neither the email nor the
    extracted name matches a known user.
    """

    # if author is already an instance use it for extraction
    if isinstance(author, User):
        return author

    # Valid email in the attribute passed, see if they're in the system
    _email = author_email(author)
    if _email != '':
        user = User.get_by_email(_email, case_insensitive=True, cache=True)
        if user is not None:
            return user

    # Maybe it's a username, we try to extract it and fetch by username ?
    _author = author_name(author)
    user = User.get_by_username(_author, case_insensitive=True, cache=True)
    if user is not None:
        return user

    return None
940 940
941 941
def email_or_none(author):
    """Best-effort email for an author string; ``None`` when none found."""
    # extract email from the commit string
    _email = author_email(author)
    if _email != '':
        return _email

    # no raw email: maybe the author string contains a known username
    user = User.get_by_username(
        author_name(author), case_insensitive=True, cache=True)
    if user is not None:
        return user.email

    # No valid email, not a valid user in the system, none!
    return None
959 959
960 960
def link_to_user(author, length=0, **kwargs):
    """Render *author* as a profile link when it maps to a real (non-default) user."""
    user = discover_user(author)
    # re-use the resolved user inside person() below; saves one intensive query
    if user:
        author = user

    label = person(author, 'username_or_name_or_email')
    if length:
        label = shorter(label, length)

    if not user or user.username == user.DEFAULT_USER:
        return escape(label)

    return link_to(
        escape(label),
        route_path('user_profile', username=user.username),
        **kwargs)
979 979
980 980
def link_to_group(users_group_name, **kwargs):
    """Render a link to the profile page of the named user group."""
    profile_url = route_path(
        'user_group_profile', user_group_name=users_group_name)
    return link_to(escape(users_group_name), profile_url, **kwargs)
986 986
987 987
def person(author, show_attr="username_and_name"):
    """Return *show_attr* of the matching user, else the raw name or e-mail from *author*."""
    user = discover_user(author)
    if user:
        return getattr(user, show_attr)
    extracted_name = author_name(author)
    extracted_email = email(author)
    return extracted_name or extracted_email
996 996
997 997
def author_string(email):
    """Format as ``First Last &lt;email&gt;`` when a named user is known, else echo the e-mail."""
    if not email:
        return None
    user = User.get_by_email(email, case_insensitive=True, cache=True)
    if user and (user.first_name or user.last_name):
        return '%s %s &lt;%s&gt;' % (
            user.first_name, user.last_name, email)
    return email
1011 1011
1012 1012
def person_by_id(id_, show_attr="username_and_name"):
    """Resolve a numeric user id to *show_attr*; echo *id_* back when unresolvable."""
    def person_getter(usr):
        # attribute to read from the fetched user
        return getattr(usr, show_attr)

    # maybe it's an ID?
    if str(id_).isdigit() or isinstance(id_, int):
        id_ = int(id_)
        user = User.get(id_)
        if user is not None:
            return person_getter(user)
    return id_
1024 1024
1025 1025
def gravatar_with_user(request, author, show_disabled=False, tooltip=False):
    """Render the ``gravatar_with_user`` def of the base template for *author*."""
    renderer = request.get_partial_renderer('rhodecode:templates/base/base.mako')
    return renderer('gravatar_with_user', author,
                    show_disabled=show_disabled, tooltip=tooltip)
1029 1029
1030 1030
# Meta-tag extraction table: tag name -> (compiled pattern, HTML replacement).
# Order matters (OrderedDict): specific tags are tried before the generic
# catch-alls below.  Patterns match against HTML-escaped text, hence the
# literal `\&gt;` / `\&amp;` sequences.
tags_paterns = OrderedDict((
    ('lang', (re.compile(r'\[(lang|language)\ \=\&gt;\ *([a-zA-Z\-\/\#\+\.]*)\]'),
              '<div class="metatag" tag="lang">\\2</div>')),

    ('see', (re.compile(r'\[see\ \=\&gt;\ *([a-zA-Z0-9\/\=\?\&amp;\ \:\/\.\-]*)\]'),
             '<div class="metatag" tag="see">see: \\1 </div>')),

    ('url', (re.compile(r'\[url\ \=\&gt;\ \[([a-zA-Z0-9\ \.\-\_]+)\]\((http://|https://|/)(.*?)\)\]'),
             '<div class="metatag" tag="url"> <a href="\\2\\3">\\1</a> </div>')),

    ('license', (re.compile(r'\[license\ \=\&gt;\ *([a-zA-Z0-9\/\=\?\&amp;\ \:\/\.\-]*)\]'),
                 '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/\\1">\\1</a></div>')),

    ('ref', (re.compile(r'\[(requires|recommends|conflicts|base)\ \=\&gt;\ *([a-zA-Z0-9\-\/]*)\]'),
             '<div class="metatag" tag="ref \\1">\\1: <a href="/\\2">\\2</a></div>')),

    ('state', (re.compile(r'\[(stable|featured|stale|dead|dev|deprecated)\]'),
               '<div class="metatag" tag="state \\1">\\1</div>')),

    # label in grey
    ('label', (re.compile(r'\[([a-z]+)\]'),
               '<div class="metatag" tag="label">\\1</div>')),

    # generic catch all in grey
    ('generic', (re.compile(r'\[([a-zA-Z0-9\.\-\_]+)\]'),
                 '<div class="metatag" tag="generic">\\1</div>')),
))
1058 1058
1059 1059
def extract_metatags(value):
    """
    Extract supported meta-tags from given text value
    """
    found = []
    if not value:
        return found, ''

    remaining = value
    for tag_name, (pattern, _replacement) in tags_paterns.items():
        # collect every occurrence of this tag, then strip it from the text
        found.extend((tag_name, m.group()) for m in pattern.finditer(remaining))
        remaining = pattern.sub('', remaining)

    return found, remaining
1074 1074
1075 1075
def style_metatag(tag_type, value):
    """
    converts tags from value into html equivalent

    :param tag_type: key into ``tags_paterns`` selecting the pattern to apply
    :param value: raw text to transform; returned unchanged when falsy or when
        *tag_type* is unknown
    """
    if not value:
        return ''

    html_value = value
    tag_data = tags_paterns.get(tag_type)
    if tag_data:
        pat, replace_html = tag_data
        # py3 port: `value` is already a native str, substitute directly
        # (the old py2 `unicode(value)` coercion is gone)
        html_value = pat.sub(replace_html, value)

    return html_value
1092 1092
1093 1093
def bool2icon(value, show_at_false=True):
    """
    Returns boolean value of a given value, represented as html element with
    classes that will represent icons

    :param value: given value to convert to html node
    """
    if value:  # does bool conversion
        return HTML.tag('i', class_="icon-true", title='True')
    # falsy: render the explicit "false" icon unless suppressed
    if show_at_false:
        return HTML.tag('i', class_="icon-false", title='False')
    return HTML.tag('i')
1108 1108
1109 1109
def b64(inp):
    """
    Base64-encode *inp* and return the encoded bytes.

    py3: ``base64.b64encode`` only accepts bytes; text input is UTF-8
    encoded first so str callers keep working.
    """
    if isinstance(inp, str):
        inp = inp.encode('utf-8')
    return base64.b64encode(inp)
1112 1112
1113 1113 #==============================================================================
1114 1114 # PERMS
1115 1115 #==============================================================================
1116 1116 from rhodecode.lib.auth import (
1117 1117 HasPermissionAny, HasPermissionAll,
1118 1118 HasRepoPermissionAny, HasRepoPermissionAll, HasRepoGroupPermissionAll,
1119 1119 HasRepoGroupPermissionAny, HasRepoPermissionAnyApi, get_csrf_token,
1120 1120 csrf_token_key, AuthUser)
1121 1121
1122 1122
1123 1123 #==============================================================================
1124 1124 # GRAVATAR URL
1125 1125 #==============================================================================
class InitialsGravatar(object):
    """
    Generates an initials-based SVG avatar for a user, with a background
    color derived deterministically from the e-mail address.
    """

    def __init__(self, email_address, first_name, last_name, size=30,
                 background=None, text_color='#fff'):
        self.size = size
        self.first_name = first_name
        self.last_name = last_name
        self.email_address = email_address
        # explicit background wins; otherwise derive a stable color from email
        self.background = background or self.str2color(email_address)
        self.text_color = text_color

    def get_color_bank(self):
        """
        returns a predefined list of colors that gravatars can use.
        Those are randomized distinct colors that guarantee readability and
        uniqueness.

        generated with: http://phrogz.net/css/distinct-colors.html
        """
        return [
            '#bf3030', '#a67f53', '#00ff00', '#5989b3', '#392040', '#d90000',
            '#402910', '#204020', '#79baf2', '#a700b3', '#bf6060', '#7f5320',
            '#008000', '#003059', '#ee00ff', '#ff0000', '#8c4b00', '#007300',
            '#005fb3', '#de73e6', '#ff4040', '#ffaa00', '#3df255', '#203140',
            '#47004d', '#591616', '#664400', '#59b365', '#0d2133', '#83008c',
            '#592d2d', '#bf9f60', '#73e682', '#1d3f73', '#73006b', '#402020',
            '#b2862d', '#397341', '#597db3', '#e600d6', '#a60000', '#736039',
            '#00b318', '#79aaf2', '#330d30', '#ff8080', '#403010', '#16591f',
            '#002459', '#8c4688', '#e50000', '#ffbf40', '#00732e', '#102340',
            '#bf60ac', '#8c4646', '#cc8800', '#00a642', '#1d3473', '#b32d98',
            '#660e00', '#ffd580', '#80ffb2', '#7391e6', '#733967', '#d97b6c',
            '#8c5e00', '#59b389', '#3967e6', '#590047', '#73281d', '#665200',
            '#00e67a', '#2d50b3', '#8c2377', '#734139', '#b2982d', '#16593a',
            '#001859', '#ff00aa', '#a65e53', '#ffcc00', '#0d3321', '#2d3959',
            '#731d56', '#401610', '#4c3d00', '#468c6c', '#002ca6', '#d936a3',
            '#d94c36', '#403920', '#36d9a3', '#0d1733', '#592d4a', '#993626',
            '#cca300', '#00734d', '#46598c', '#8c005e', '#7f1100', '#8c7000',
            '#00a66f', '#7382e6', '#b32d74', '#d9896c', '#ffe680', '#1d7362',
            '#364cd9', '#73003d', '#d93a00', '#998a4d', '#59b3a1', '#5965b3',
            '#e5007a', '#73341d', '#665f00', '#00b38f', '#0018b3', '#59163a',
            '#b2502d', '#bfb960', '#00ffcc', '#23318c', '#a6537f', '#734939',
            '#b2a700', '#104036', '#3d3df2', '#402031', '#e56739', '#736f39',
            '#79f2ea', '#000059', '#401029', '#4c1400', '#ffee00', '#005953',
            '#101040', '#990052', '#402820', '#403d10', '#00ffee', '#0000d9',
            '#ff80c4', '#a66953', '#eeff00', '#00ccbe', '#8080ff', '#e673a1',
            '#a62c00', '#474d00', '#1a3331', '#46468c', '#733950', '#662900',
            '#858c23', '#238c85', '#0f0073', '#b20047', '#d9986c', '#becc00',
            '#396f73', '#281d73', '#ff0066', '#ff6600', '#dee673', '#59adb3',
            '#6559b3', '#590024', '#b2622d', '#98b32d', '#36ced9', '#332d59',
            '#40001a', '#733f1d', '#526600', '#005359', '#242040', '#bf6079',
            '#735039', '#cef23d', '#007780', '#5630bf', '#66001b', '#b24700',
            '#acbf60', '#1d6273', '#25008c', '#731d34', '#a67453', '#50592d',
            '#00ccff', '#6600ff', '#ff0044', '#4c1f00', '#8a994d', '#79daf2',
            '#a173e6', '#d93662', '#402310', '#aaff00', '#2d98b3', '#8c40ff',
            '#592d39', '#ff8c40', '#354020', '#103640', '#1a0040', '#331a20',
            '#331400', '#334d00', '#1d5673', '#583973', '#7f0022', '#4c3626',
            '#88cc00', '#36a3d9', '#3d0073', '#d9364c', '#33241a', '#698c23',
            '#5995b3', '#300059', '#e57382', '#7f3300', '#366600', '#00aaff',
            '#3a1659', '#733941', '#663600', '#74b32d', '#003c59', '#7f53a6',
            '#73000f', '#ff8800', '#baf279', '#79caf2', '#291040', '#a6293a',
            '#b2742d', '#587339', '#0077b3', '#632699', '#400009', '#d9a66c',
            '#294010', '#2d4a59', '#aa00ff', '#4c131b', '#b25f00', '#5ce600',
            '#267399', '#a336d9', '#990014', '#664e33', '#86bf60', '#0088ff',
            '#7700b3', '#593a16', '#073300', '#1d4b73', '#ac60bf', '#e59539',
            '#4f8c46', '#368dd9', '#5c0073'
        ]

    def rgb_to_hex_color(self, rgb_tuple):
        """
        Converts an rgb_tuple passed to an hex color.

        :param rgb_tuple: tuple with 3 ints represents rgb color space
        """
        # py3 port: str.encode('hex') no longer exists; format each channel
        # as two lowercase hex digits instead
        return '#' + ''.join('%02x' % c for c in rgb_tuple)

    def email_to_int_list(self, email_str):
        """
        Get every byte of the hex digest value of email and turn it to integer.
        It's going to be always between 0-255
        """
        digest = md5_safe(email_str.lower())
        return [int(digest[i * 2:i * 2 + 2], 16) for i in range(16)]

    def pick_color_bank_index(self, email_str, color_bank):
        return self.email_to_int_list(email_str)[0] % len(color_bank)

    def str2color(self, email_str):
        """
        Tries to map in a stable algorithm an email to color

        :param email_str:
        """
        color_bank = self.get_color_bank()
        # pick position (module it's length so we always find it in the
        # bank even if it's smaller than 256 values
        pos = self.pick_color_bank_index(email_str, color_bank)
        return color_bank[pos]

    def normalize_email(self, email_address):
        """Coerce *email_address* into an ascii-only `user@host` str."""
        import unicodedata
        # default host used to fill in the fake/missing email
        default_host = 'localhost'

        if not email_address:
            email_address = '%s@%s' % (User.DEFAULT_USER, default_host)

        email_address = safe_unicode(email_address)

        if '@' not in email_address:
            email_address = '%s@%s' % (email_address, default_host)

        if email_address.endswith('@'):
            email_address = '%s%s' % (email_address, default_host)

        # py3 port: decode back to str so callers (get_initials) can keep
        # using str operations like split('@')
        email_address = unicodedata.normalize('NFKD', email_address)\
            .encode('ascii', 'ignore').decode('ascii')
        return email_address

    def get_initials(self):
        """
        Returns 2 letter initials calculated based on the input.
        The algorithm picks first given email address, and takes first letter
        of part before @, and then the first letter of server name. In case
        the part before @ is in a format of `somestring.somestring2` it replaces
        the server letter with first letter of somestring2

        In case function was initialized with both first and lastname, this
        overrides the extraction from email by first letter of the first and
        last name. We add special logic to that functionality, In case Full name
        is compound, like Guido Von Rossum, we use last part of the last name
        (Von Rossum) picking `R`.

        Function also normalizes the non-ascii characters to they ascii
        representation, eg Ą => A
        """
        import unicodedata
        # replace non-ascii to ascii; decode back to str (py3 port)
        first_name = unicodedata.normalize(
            'NFKD', safe_unicode(self.first_name))\
            .encode('ascii', 'ignore').decode('ascii')
        last_name = unicodedata.normalize(
            'NFKD', safe_unicode(self.last_name))\
            .encode('ascii', 'ignore').decode('ascii')

        # do NFKD encoding, and also make sure email has proper format
        email_address = self.normalize_email(self.email_address)

        # first push the email initials
        prefix, server = email_address.split('@', 1)

        # check if prefix is maybe a 'first_name.last_name' syntax
        _dot_split = prefix.rsplit('.', 1)
        if len(_dot_split) == 2 and _dot_split[1]:
            initials = [_dot_split[0][0], _dot_split[1][0]]
        else:
            initials = [prefix[0], server[0]]

        # then try to replace either first_name or last_name
        fn_letter = (first_name or " ")[0].strip()
        ln_letter = (last_name.split(' ', 1)[-1] or " ")[0].strip()

        if fn_letter:
            initials[0] = fn_letter

        if ln_letter:
            initials[1] = ln_letter

        return ''.join(initials).upper()

    def get_img_data_by_type(self, font_family, img_type):
        default_user = """
        <svg xmlns="http://www.w3.org/2000/svg"
        version="1.1" x="0px" y="0px" width="{size}" height="{size}"
        viewBox="-15 -10 439.165 429.164"

        xml:space="preserve"
        style="background:{background};" >

        <path d="M204.583,216.671c50.664,0,91.74-48.075,
        91.74-107.378c0-82.237-41.074-107.377-91.74-107.377
        c-50.668,0-91.74,25.14-91.74,107.377C112.844,
        168.596,153.916,216.671,
        204.583,216.671z" fill="{text_color}"/>
        <path d="M407.164,374.717L360.88,
        270.454c-2.117-4.771-5.836-8.728-10.465-11.138l-71.83-37.392
        c-1.584-0.823-3.502-0.663-4.926,0.415c-20.316,
        15.366-44.203,23.488-69.076,23.488c-24.877,
        0-48.762-8.122-69.078-23.488
        c-1.428-1.078-3.346-1.238-4.93-0.415L58.75,
        259.316c-4.631,2.41-8.346,6.365-10.465,11.138L2.001,374.717
        c-3.191,7.188-2.537,15.412,1.75,22.005c4.285,
        6.592,11.537,10.526,19.4,10.526h362.861c7.863,0,15.117-3.936,
        19.402-10.527 C409.699,390.129,
        410.355,381.902,407.164,374.717z" fill="{text_color}"/>
        </svg>""".format(
            size=self.size,
            background='#979797',  # @grey4
            text_color=self.text_color,
            font_family=font_family)

        return {
            "default_user": default_user
        }[img_type]

    def get_img_data(self, svg_type=None):
        """
        generates the svg metadata for image
        """
        fonts = [
            '-apple-system',
            'BlinkMacSystemFont',
            'Segoe UI',
            'Roboto',
            'Oxygen-Sans',
            'Ubuntu',
            'Cantarell',
            'Helvetica Neue',
            'sans-serif'
        ]
        font_family = ','.join(fonts)
        if svg_type:
            return self.get_img_data_by_type(font_family, svg_type)

        initials = self.get_initials()
        img_data = """
        <svg xmlns="http://www.w3.org/2000/svg" pointer-events="none"
             width="{size}" height="{size}"
             style="width: 100%; height: 100%; background-color: {background}"
             viewBox="0 0 {size} {size}">
            <text text-anchor="middle" y="50%" x="50%" dy="0.35em"
                  pointer-events="auto" fill="{text_color}"
                  font-family="{font_family}"
                  style="font-weight: 400; font-size: {f_size}px;">{text}
            </text>
        </svg>""".format(
            size=self.size,
            f_size=self.size/2.05,  # scale the text inside the box nicely
            background=self.background,
            text_color=self.text_color,
            text=initials.upper(),
            font_family=font_family)

        return img_data

    def generate_svg(self, svg_type=None):
        """Return the avatar as a `data:image/svg+xml;base64,...` URI str."""
        img_data = self.get_img_data(svg_type)
        # py3 port: b64encode needs bytes and returns bytes; round-trip to str
        b64_data = base64.b64encode(img_data.encode('utf-8')).decode('ascii')
        return "data:image/svg+xml;base64,%s" % b64_data
1370 1370
1371 1371
def initials_gravatar(request, email_address, first_name, last_name, size=30, store_on_disk=False):
    """
    Build an initials-SVG gravatar for the given identity.

    Returns either an inline ``data:`` URI (default) or, with
    ``store_on_disk=True``, persists the SVG into the file store once and
    returns a ``download_file`` route path to it.
    """

    # the default user gets a dedicated silhouette image instead of initials
    svg_type = None
    if email_address == User.DEFAULT_USER_EMAIL:
        svg_type = 'default_user'

    klass = InitialsGravatar(email_address, first_name, last_name, size)

    if store_on_disk:
        from rhodecode.apps.file_store import utils as store_utils
        from rhodecode.apps.file_store.exceptions import FileNotAllowedException, \
            FileOverSizeException
        from rhodecode.model.db import Session

        # deterministic key so the same identity maps to the same stored file
        image_key = md5_safe(email_address.lower()
                             + first_name.lower() + last_name.lower())

        storage = store_utils.get_file_storage(request.registry.settings)
        filename = '{}.svg'.format(image_key)
        subdir = 'gravatars'
        # since final name has a counter, we apply the 0
        uid = storage.apply_counter(0, store_utils.uid_filename(filename, randomized=False))
        store_uid = os.path.join(subdir, uid)

        # already stored once — just serve the existing entry
        db_entry = FileStore.get_by_store_uid(store_uid)
        if db_entry:
            return request.route_path('download_file', fid=store_uid)

        img_data = klass.get_img_data(svg_type=svg_type)
        img_file = store_utils.bytes_to_file_obj(img_data)

        try:
            store_uid, metadata = storage.save_file(
                img_file, filename, directory=subdir,
                extensions=['.svg'], randomized_name=False)
        except (FileNotAllowedException, FileOverSizeException):
            raise

        try:
            entry = FileStore.create(
                file_uid=store_uid, filename=metadata["filename"],
                file_hash=metadata["sha256"], file_size=metadata["size"],
                file_display_name=filename,
                file_description=u'user gravatar `{}`'.format(safe_unicode(filename)),
                hidden=True, check_acl=False, user_id=1
            )
            Session().add(entry)
            Session().commit()
            log.debug('Stored upload in DB as %s', entry)
        except Exception:
            raise

        return request.route_path('download_file', fid=store_uid)

    else:
        return klass.generate_svg(svg_type=svg_type)
1428 1428
1429 1429
def gravatar_external(request, gravatar_url_tmpl, email_address, size=30):
    """Expand an external gravatar URL template's placeholders."""
    expanded = safe_str(gravatar_url_tmpl)
    substitutions = (
        ('{email}', email_address),
        ('{md5email}', md5_safe(email_address.lower())),
        ('{netloc}', request.host),
        ('{scheme}', request.scheme),
        ('{size}', safe_str(size)),
    )
    for placeholder, real_value in substitutions:
        expanded = expanded.replace(placeholder, real_value)
    return expanded
1437 1437
1438 1438
def gravatar_url(email_address, size=30, request=None):
    """
    Return the avatar URL for *email_address*: the configured external
    gravatar service when enabled, otherwise a generated initials SVG.
    """
    request = request or get_current_request()
    _use_gravatar = request.call_context.visual.use_gravatar

    email_address = email_address or User.DEFAULT_USER_EMAIL
    if isinstance(email_address, str):
        # hashlib crashes on unicode items
        email_address = safe_str(email_address)

    # empty email or default user
    if not email_address or email_address == User.DEFAULT_USER_EMAIL:
        return initials_gravatar(request, User.DEFAULT_USER_EMAIL, '', '', size=size)

    if _use_gravatar:
        # per-instance template, falling back to the application default
        gravatar_url_tmpl = request.call_context.visual.gravatar_url \
            or User.DEFAULT_GRAVATAR_URL
        return gravatar_external(request, gravatar_url_tmpl, email_address, size=size)

    else:
        return initials_gravatar(request, email_address, '', '', size=size)
1459 1459
1460 1460
def breadcrumb_repo_link(repo):
    """
    Makes a breadcrumbs path link to repo

    ex::
        group >> subgroup >> repo

    :param repo: a Repository instance
    """

    segments = []
    for group in repo.groups_with_parents:
        segments.append(link_to(
            group.name,
            route_path('repo_group_home', repo_group_name=group.group_name),
            title='last change:{}'.format(format_date(group.last_commit_change))))
    segments.append(link_to(
        repo.just_name,
        route_path('repo_summary', repo_name=repo.repo_name),
        title='last change:{}'.format(format_date(repo.last_commit_change))))

    return literal(' &raquo; '.join(segments))
1481 1481
1482 1482
def breadcrumb_repo_group_link(repo_group):
    """
    Makes a breadcrumbs path link to repo

    ex::
        group >> subgroup

    :param repo_group: a Repository Group instance
    """

    # every parent group, then the group itself, all rendered the same way
    chain = list(repo_group.parents) + [repo_group]
    segments = [
        link_to(group.name,
                route_path('repo_group_home', repo_group_name=group.group_name),
                title='last change:{}'.format(format_date(group.last_commit_change)))
        for group in chain
    ]

    return literal(' &raquo; '.join(segments))
1505 1505
1506 1506
def format_byte_size_binary(file_size):
    """
    Formats file/folder sizes to standard.
    """
    # treat missing size as zero bytes
    size = 0 if file_size is None else file_size
    return format_byte_size(size, binary=True)
1516 1516
1517 1517
def urlify_text(text_, safe=True, **href_attrs):
    """
    Extract urls from text and make html links out of them
    """

    url_pat = re.compile(r'''(http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@#.&+]'''
                         '''|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)''')

    def make_anchor(match_obj):
        target = match_obj.groups()[0]
        anchor_attrs = dict(href_attrs)
        anchor_attrs['href'] = target
        return HTML.tag("a", target, **anchor_attrs)

    linked = url_pat.sub(make_anchor, text_)

    return literal(linked) if safe else linked
1538 1538
1539 1539
def urlify_commits(text_, repo_name):
    """
    Extract commit ids from text and make link from them

    :param text_:
    :param repo_name: repo name to build the URL with
    """

    commit_pat = re.compile(r'(^|\s)([0-9a-fA-F]{12,40})($|\s)')

    def make_link(match_obj):
        pref, commit_id, suf = match_obj.groups()

        tmpl = (
            '%(pref)s<a class="tooltip-hovercard %(cls)s" href="%(url)s" data-hovercard-alt="%(hovercard_alt)s" data-hovercard-url="%(hovercard_url)s">'
            '%(commit_id)s</a>%(suf)s'
        )
        return tmpl % {
            'pref': pref,
            'cls': 'revision-link',
            'url': route_url(
                'repo_commit', repo_name=repo_name, commit_id=commit_id),
            'commit_id': commit_id,
            'suf': suf,
            'hovercard_alt': 'Commit: {}'.format(commit_id),
            'hovercard_url': route_url(
                'hovercard_repo_commit', repo_name=repo_name, commit_id=commit_id)
        }

    return commit_pat.sub(make_link, text_)
1574 1574
1575 1575
def _process_url_func(match_obj, repo_name, uid, entry,
                      return_raw_data=False, link_format='html'):
    """
    Turn one issue-tracker pattern match into a rendered link (or raw data).

    :param match_obj: regex match produced by the entry's compiled pattern
    :param repo_name: full repo name, also split into name/group vars below
    :param uid: settings uid of the entry (unused here, kept by callers)
    :param entry: issue tracker settings dict (`pat`, `url`, `pref`, `desc`, ...)
    :param return_raw_data: return ``{'id', 'url'}`` instead of markup
    :param link_format: one of html / html+hovercard / rst(+hovercard) /
        markdown(+hovercard)
    :raises ValueError: on an unknown *link_format*
    """
    # preserve a single leading space the pattern may have consumed
    pref = ''
    if match_obj.group().startswith(' '):
        pref = ' '

    issue_id = ''.join(match_obj.groups())

    if link_format == 'html':
        tmpl = (
            '%(pref)s<a class="tooltip %(cls)s" href="%(url)s" title="%(title)s">'
            '%(issue-prefix)s%(id-repr)s'
            '</a>')
    elif link_format == 'html+hovercard':
        tmpl = (
            '%(pref)s<a class="tooltip-hovercard %(cls)s" href="%(url)s" data-hovercard-url="%(hovercard_url)s">'
            '%(issue-prefix)s%(id-repr)s'
            '</a>')
    elif link_format in ['rst', 'rst+hovercard']:
        tmpl = '`%(issue-prefix)s%(id-repr)s <%(url)s>`_'
    elif link_format in ['markdown', 'markdown+hovercard']:
        tmpl = '[%(pref)s%(issue-prefix)s%(id-repr)s](%(url)s)'
    else:
        raise ValueError('Bad link_format:{}'.format(link_format))

    (repo_name_cleaned,
     parent_group_name) = RepoGroupModel()._get_group_name_and_parent(repo_name)

    # variables replacement
    named_vars = {
        'id': issue_id,
        'repo': repo_name,
        'repo_name': repo_name_cleaned,
        'group_name': parent_group_name,
        # set dummy keys so we always have them
        'hostname': '',
        'netloc': '',
        'scheme': ''
    }

    request = get_current_request()
    if request:
        # exposes, hostname, netloc, scheme
        host_data = get_host_info(request)
        named_vars.update(host_data)

    # named regex variables
    named_vars.update(match_obj.groupdict())
    _url = string.Template(entry['url']).safe_substitute(**named_vars)
    desc = string.Template(escape(entry['desc'])).safe_substitute(**named_vars)
    hovercard_url = string.Template(entry.get('hovercard_url', '')).safe_substitute(**named_vars)

    def quote_cleaner(input_str):
        """Remove quotes as it's HTML"""
        return input_str.replace('"', '')

    data = {
        'pref': pref,
        'cls': quote_cleaner('issue-tracker-link'),
        'url': quote_cleaner(_url),
        'id-repr': issue_id,
        'issue-prefix': entry['pref'],
        'serv': entry['url'],
        'title': bleach.clean(desc, strip=True),
        'hovercard_url': hovercard_url
    }

    if return_raw_data:
        return {
            'id': issue_id,
            'url': _url
        }
    return tmpl % data
1649 1649
1650 1650
def get_active_pattern_entries(repo_name):
    """Fetch (cached) issue-tracker pattern settings, scoped to *repo_name* when given."""
    target_repo = None
    if repo_name:
        # Retrieving repo_name to avoid invalid repo_name to explode on
        # IssueTrackerSettingsModel but still passing invalid name further down
        target_repo = Repository.get_by_repo_name(repo_name, cache=True)

    settings_model = IssueTrackerSettingsModel(repo=target_repo)
    return settings_model.get_settings(cache=True)
1661 1661
1662 1662
# matches pull-request references like `!123` at line start or after a space
pr_pattern_re = regex.compile(r'(?:(?:^!)|(?: !))(\d+)')

# link formats accepted by process_patterns()
allowed_link_formats = [
    'html', 'rst', 'markdown', 'html+hovercard', 'rst+hovercard', 'markdown+hovercard']

# process-wide cache of compiled issue-tracker patterns, keyed by raw pattern
compile_cache = {

}
1671 1671
1672 1672
def process_patterns(text_string, repo_name, link_format='html', active_entries=None):
    """
    Apply every active issue-tracker pattern to *text_string*, replacing
    matches with links in the requested *link_format*, then globally link
    `!123`-style pull-request references.

    :param active_entries: pre-fetched pattern settings; fetched for
        *repo_name* when ``None``
    :returns: tuple ``(new_text, issues_data, errors)`` — the rewritten text,
        raw per-match issue data, and pattern-compilation errors
    :raises ValueError: on an unsupported *link_format*
    """

    if link_format not in allowed_link_formats:
        raise ValueError('Link format can be only one of:{} got {}'.format(
            allowed_link_formats, link_format))
    issues_data = []
    errors = []
    new_text = text_string

    if active_entries is None:
        log.debug('Fetch active issue tracker patterns for repo: %s', repo_name)
        active_entries = get_active_pattern_entries(repo_name)

    log.debug('Got %s pattern entries to process', len(active_entries))

    for uid, entry in active_entries.items():

        if not (entry['pat'] and entry['url']):
            log.debug('skipping due to missing data')
            continue

        log.debug('issue tracker entry: uid: `%s` PAT:%s URL:%s PREFIX:%s',
                  uid, entry['pat'], entry['url'], entry['pref'])

        # prefer a pre-compiled pattern, then the module-level cache,
        # and compile (and cache) only as a last resort
        if entry.get('pat_compiled'):
            pattern = entry['pat_compiled']
        elif entry['pat'] in compile_cache:
            pattern = compile_cache[entry['pat']]
        else:
            try:
                pattern = regex.compile(r'%s' % entry['pat'])
            except regex.error as e:
                regex_err = ValueError('{}:{}'.format(entry['pat'], e))
                log.exception('issue tracker pattern: `%s` failed to compile', regex_err)
                errors.append(regex_err)
                continue
            compile_cache[entry['pat']] = pattern

        # raw data is collected against the ORIGINAL text so earlier
        # substitutions don't hide later matches
        data_func = partial(
            _process_url_func, repo_name=repo_name, entry=entry, uid=uid,
            return_raw_data=True)

        for match_obj in pattern.finditer(text_string):
            issues_data.append(data_func(match_obj))

        url_func = partial(
            _process_url_func, repo_name=repo_name, entry=entry, uid=uid,
            link_format=link_format)

        new_text = pattern.sub(url_func, new_text)
        log.debug('processed prefix:uid `%s`', uid)

    # finally use global replace, eg !123 -> pr-link, those will not catch
    # if already similar pattern exists
    server_url = '${scheme}://${netloc}'
    pr_entry = {
        'pref': '!',
        'url': server_url + '/_admin/pull-requests/${id}',
        'desc': 'Pull Request !${id}',
        'hovercard_url': server_url + '/_hovercard/pull_request/${id}'
    }
    pr_url_func = partial(
        _process_url_func, repo_name=repo_name, entry=pr_entry, uid=None,
        link_format=link_format+'+hovercard')
    new_text = pr_pattern_re.sub(pr_url_func, new_text)
    log.debug('processed !pr pattern')

    return new_text, issues_data, errors
1741 1741
1742 1742
def urlify_commit_message(commit_text, repository=None, active_pattern_entries=None,
                          issues_container_callback=None, error_container=None):
    """
    Parses given text message and makes proper links.
    issues are linked to given issue-server, and rest is a commit link
    """

    # escape angle brackets up-front so raw markup never survives
    new_text = commit_text.replace('<', '&lt;').replace('>', '&gt;')

    # extract http/https links and make them real urls
    new_text = urlify_text(new_text, safe=False)

    # urlify commits - extract commit ids and make link out of them, if we have
    # the scope of repository present.
    if repository:
        new_text = urlify_commits(new_text, repository)

    # process issue tracker patterns
    new_text, issues, errors = process_patterns(
        new_text, repository or '', active_entries=active_pattern_entries)

    if issues_container_callback is not None:
        for issue in issues:
            issues_container_callback(issue)

    if error_container is not None:
        error_container.extend(errors)

    return literal(new_text)
1775 1775
1776 1776
def render_binary(repo_name, file_obj):
    """
    Choose how to render a binary file.

    Currently only image types get an inline renderer; anything else
    implicitly returns ``None`` (no rendering).
    """
    filename = file_obj.name

    image_patterns = ('*.png', '*.jpeg', '*.jpg', '*.ico', '*.gif')
    if any(fnmatch.fnmatch(filename, pat=pat) for pat in image_patterns):
        src = route_path(
            'repo_file_raw', repo_name=repo_name,
            commit_id=file_obj.commit.raw_id,
            f_path=file_obj.path)

        return literal(
            '<img class="rendered-binary" alt="rendered-image" src="{}">'.format(src))
1795 1795
1796 1796
def renderer_from_filename(filename, exclude=None):
    """
    Choose a renderer based on filename; works only for text based files.

    :return: renderer name (e.g. 'jupyter', a markup renderer) or None
    """
    # jupyter notebooks get their own dedicated renderer
    if fnmatch.fnmatch(filename, pat='*.ipynb'):
        return 'jupyter'

    # fall back to the markup renderer registry; normalize falsy to None
    return MarkupRenderer.renderer_from_filename(filename, exclude=exclude) or None
1811 1811
1812 1812
def render(source, renderer='rst', mentions=False, relative_urls=None,
           repo_name=None, active_pattern_entries=None, issues_container_callback=None):
    """
    Render `source` markup to HTML using the given renderer.

    :param source: raw markup text
    :param renderer: one of 'plain', 'rst', 'markdown', 'jupyter'
    :param mentions: enable @mention linking for rst/markdown
    :param relative_urls: base used to rewrite relative links, when given
    :param repo_name: enables issue-tracker pattern linking when given
    :param active_pattern_entries: pre-fetched issue tracker patterns
    :param issues_container_callback: called once per discovered issue dict
    :return: ``literal`` HTML, or None meaning "just show the file source"
    """

    def maybe_convert_relative_links(html_source):
        if relative_urls:
            return relative_links(html_source, relative_urls)
        return html_source

    def link_issues(_source, link_format):
        # shared issue-pattern processing for rst/markdown branches;
        # pattern compile errors are intentionally ignored here
        _source, issues, _errors = process_patterns(
            _source, repo_name, link_format=link_format,
            active_entries=active_pattern_entries)
        if issues_container_callback is not None:
            for issue in issues:
                issues_container_callback(issue)
        return _source

    if renderer == 'plain':
        return literal(
            MarkupRenderer.plain(source, leading_newline=False))

    elif renderer == 'rst':
        if repo_name:
            # process patterns on comments if we pass in repo name
            source = link_issues(source, 'rst')

        return literal(
            '<div class="rst-block">%s</div>' %
            maybe_convert_relative_links(
                MarkupRenderer.rst(source, mentions=mentions)))

    elif renderer == 'markdown':
        if repo_name:
            # process patterns on comments if we pass in repo name
            source = link_issues(source, 'markdown')

        return literal(
            '<div class="markdown-block">%s</div>' %
            maybe_convert_relative_links(
                MarkupRenderer.markdown(source, flavored=True,
                                        mentions=mentions)))

    elif renderer == 'jupyter':
        return literal(
            '<div class="ipynb">%s</div>' %
            maybe_convert_relative_links(
                MarkupRenderer.jupyter(source)))

    # None means just show the file-source
    return None
1865 1865
1866 1866
def commit_status(repo, commit_id):
    """Return the review status recorded for `commit_id` in `repo`."""
    return ChangesetStatusModel().get_status(repo, commit_id)
1869 1869
1870 1870
def commit_status_lbl(commit_status):
    """Return the human readable label for a commit status code, or None.

    NOTE: the parameter shares its name with the sibling ``commit_status``
    helper; renaming it would break keyword-argument callers.
    """
    return dict(ChangesetStatus.STATUSES).get(commit_status)
1873 1873
1874 1874
def commit_time(repo_name, commit_id):
    """Return the date of `commit_id` in the repository named `repo_name`."""
    repo = Repository.get_by_repo_name(repo_name)
    commit = repo.get_commit(commit_id=commit_id)
    return commit.date
1879 1879
1880 1880
def get_permission_name(key):
    """Return the display name of permission `key`, or None when unknown."""
    return dict(Permission.PERMS).get(key)
1883 1883
1884 1884
def journal_filter_help(request):
    """Return a translated help text describing the journal filter syntax."""
    _ = request.translate
    # local import; ACTIONS is the registry of auditable action names
    from rhodecode.lib.audit_logger import ACTIONS
    actions = '\n'.join(textwrap.wrap(', '.join(sorted(ACTIONS.keys())), 80))

    return _(
        'Example filter terms:\n' +
        '     repository:vcs\n' +
        '     username:marcin\n' +
        '     username:(NOT marcin)\n' +
        '     action:*push*\n' +
        '     ip:127.0.0.1\n' +
        '     date:20120101\n' +
        '     date:[20120101100000 TO 20120102]\n' +
        '\n' +
        'Actions: {actions}\n' +
        '\n' +
        'Generate wildcards using \'*\' character:\n' +
        '     "repository:vcs*" - search everything starting with \'vcs\'\n' +
        '     "repository:*vcs*" - search for repository containing \'vcs\'\n' +
        '\n' +
        'Optional AND / OR operators in queries\n' +
        '     "repository:vcs OR repository:test"\n' +
        '     "username:test AND repository:test*"\n'
    ).format(actions=actions)
1910 1910
1911 1911
def not_mapped_error(repo_name):
    """Flash an error saying `repo_name` exists on disk but not in the db."""
    from rhodecode.translation import _
    flash(_('%s repository is not mapped to db perhaps'
            ' it was created or renamed from the filesystem'
            ' please run the application again'
            ' in order to rescan repositories') % repo_name, category='error')
1918 1918
1919 1919
def ip_range(ip_addr):
    """Return a human readable ``start - end`` range for `ip_addr`."""
    from rhodecode.model.db import UserIpMap
    start_ip, end_ip = UserIpMap._get_ip_range(ip_addr)
    return '%s - %s' % (start_ip, end_ip)
1924 1924
1925 1925
def form(url, method='post', needs_csrf_token=True, **attrs):
    """Wrapper around webhelpers.tags.form to prevent CSRF attacks."""
    # non-GET forms must carry a CSRF token unless explicitly waived
    if needs_csrf_token and method.lower() != 'get':
        raise Exception(
            'Forms to POST/PUT/DELETE endpoints should have (in general) a ' +
            'CSRF token. If the endpoint does not require such token you can ' +
            'explicitly set the parameter needs_csrf_token to false.')

    return insecure_form(url, method=method, **attrs)
1935 1935
1936 1936
def secure_form(form_url, method="POST", multipart=False, **attrs):
    """Start a form tag that points the action to an url. This
    form tag will also include the hidden field containing
    the auth token.

    The url options should be given either as a string, or as a
    ``url()`` function. The method for the form defaults to POST.

    Options:

    ``multipart``
        If set to True, the enctype is set to "multipart/form-data".
    ``method``
        The method to use when submitting the form, usually either
        "GET" or "POST". If "PUT", "DELETE", or another verb is used, a
        hidden input with name _method is added to simulate the verb
        over POST.

    """
    # the request is required to read the session-bound CSRF token
    if 'request' not in attrs:
        raise ValueError(
            'Calling this form requires request= to be passed as argument')
    session = attrs.pop('request').session

    _form = insecure_form(form_url, method, multipart, **attrs)
    token = literal(
        '<input type="hidden" name="{}" value="{}">'.format(
            csrf_token_key, get_csrf_token(session)))

    return literal("%s\n%s" % (_form, token))
1970 1970
1971 1971
def dropdownmenu(name, selected, options, enable_filter=False, **attrs):
    """Render a ``<select>`` enhanced by a select2 drop-down widget.

    :param name: input name, also used as the element id unless ``id`` is
        given in `attrs`
    :param selected: currently selected option value
    :param options: options passed through to ``select()``
    :param enable_filter: when True the select2 search box is shown
    """
    select_html = select(name, selected, options, **attrs)

    # select2 initialization snippet; placeholders are (id, classes, filter)
    select2 = """
        <script>
            $(document).ready(function() {
                $('#%s').select2({
                    containerCssClass: 'drop-menu %s',
                    dropdownCssClass: 'drop-menu-dropdown',
                    dropdownAutoWidth: true%s
                });
            });
        </script>
    """

    # select2 hides its search box when minimumResultsForSearch is -1
    filter_option = """,
            minimumResultsForSearch: -1
    """
    input_id = attrs.get('id') or name
    extra_classes = ' '.join(attrs.pop('extra_classes', []))
    filter_enabled = "" if enable_filter else filter_option
    select_script = literal(select2 % (input_id, extra_classes, filter_enabled))

    return literal(select_html+select_script)
1996 1996
1997 1997
def get_visual_attr(tmpl_context_var, attr_name):
    """
    A safe way to get a variable from visual variable of template context

    :param tmpl_context_var: instance of tmpl_context, usually present as `c`
    :param attr_name: name of the attribute we fetch from the c.visual
    :return: attribute value, or None when `visual` or the attribute is absent
    """
    visual = getattr(tmpl_context_var, 'visual', None)
    if visual:
        return getattr(visual, attr_name, None)
    return None
2010 2010
2011 2011
def get_last_path_part(file_node):
    """Return ``../<basename>`` for a file node path, or ``/`` at the root."""
    if not file_node.path:
        return u'/'

    basename = safe_unicode(file_node.path.split('/')[-1])
    return u'../' + basename
2018 2018
2019 2019
def route_url(*args, **kwargs):
    """
    Wrapper around pyramids `route_url` (fully qualified url) function.
    """
    return get_current_request().route_url(*args, **kwargs)
2026 2026
2027 2027
def route_path(*args, **kwargs):
    """
    Wrapper around pyramids `route_path` function.
    """
    return get_current_request().route_path(*args, **kwargs)
2034 2034
2035 2035
def route_path_or_none(*args, **kwargs):
    """Like :func:`route_path`, but returns None for unknown route names."""
    try:
        return route_path(*args, **kwargs)
    except KeyError:
        return None
2041 2041
2042 2042
def current_route_path(request, **kw):
    """Return the current route path with its query args updated by `kw`."""
    query_args = request.GET.mixed()
    query_args.update(kw)
    return request.current_route_path(_query=query_args)
2047 2047
2048 2048
def curl_api_example(method, args):
    """Build a ready-to-run ``curl`` command for a JSON-RPC API call."""
    # key order matters for readability of the generated example
    payload = OrderedDict()
    payload['id'] = 1
    payload['auth_token'] = 'SECRET'
    payload['method'] = method
    payload['args'] = args
    args_json = json.dumps(payload)

    return "curl {api_url} -X POST -H 'content-type:text/plain' --data-binary '{args_json}'".format(
        api_url=route_url('apiv2'),
        args_json=args_json
    )
2061 2061
2062 2062
def api_call_example(method, args):
    """
    Generates an API call example via CURL
    """
    token_url = route_url('my_account_auth_tokens')

    return literal(
        curl_api_example(method, args) +
        "<br/><br/>SECRET can be found in <a href=\"{token_url}\">auth-tokens</a> page, "
        "and needs to be of `api calls` role."
        .format(token_url=token_url))
2074 2074
2075 2075
def notification_description(notification, request):
    """
    Generate notification human readable description based on notification type
    """
    # local import, presumably to avoid an import cycle with the model layer
    from rhodecode.model.notification import NotificationModel
    return NotificationModel().make_description(
        notification, translate=request.translate)
2083 2083
2084 2084
def go_import_header(request, db_repo=None):
    """
    Creates a header for go-import functionality in Go Lang
    """
    # only emit the meta tag when we have a repo AND the go-get flag is set
    if not db_repo or 'go-get' not in request.GET:
        return

    clone_url = db_repo.clone_url()
    # strip the scheme; go-import wants host/path as the import prefix
    prefix = re.split(r'^https?:\/\/', clone_url)[-1]
    return literal('<meta name="go-import" content="{} {} {}">'.format(
        prefix, db_repo.repo_type, clone_url))
2100 2100
2101 2101
def reviewer_as_json(*args, **kwargs):
    """Proxy to the repository app's ``reviewer_as_json`` serializer."""
    from rhodecode.apps.repository.utils import reviewer_as_json as _reviewer_as_json
    return _reviewer_as_json(*args, **kwargs)
2105 2105
2106 2106
def get_repo_view_type(request):
    """Map the matched route name to a repo view type, or None if unknown."""
    route_to_view_type = {
        'repo_changelog': 'commits',
        'repo_commits': 'commits',
        'repo_files': 'files',
        'repo_summary': 'summary',
        'repo_commit': 'commit'
    }
    return route_to_view_type.get(request.matched_route.name)
2118 2118
2119 2119
def is_active(menu_entry, selected):
    """
    Returns active class for selecting menus in templates
    <li class=${h.is_active('settings', current_active)}></li>

    `menu_entry` may be a single value or a list of values.
    """
    entries = menu_entry if isinstance(menu_entry, list) else [menu_entry]

    if selected in entries:
        return "active"
2130 2130
2131 2131
class IssuesRegistry(object):
    """
    Collects issue references discovered while rendering commit messages.

    Usage::

        issue_registry = IssuesRegistry()
        some_func(issues_callback=issues_registry(...))
    """

    def __init__(self):
        # every issue occurrence, in discovery order
        self.issues = []
        # issue id -> list of occurrences of that issue
        # (defaultdict(list) instead of the needless lambda wrapper)
        self.unique_issues = collections.defaultdict(list)

    def __call__(self, commit_dict=None):
        """Return a callback that records issues, tagged with `commit_dict`."""
        def callback(issue):
            if commit_dict and issue:
                issue['commit'] = commit_dict
            self.issues.append(issue)
            self.unique_issues[issue['id']].append(issue)
        return callback

    def get_issues(self):
        return self.issues

    @property
    def issues_unique_count(self):
        return len(set(i['id'] for i in self.issues))
@@ -1,1012 +1,1013 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 HG repository module
23 23 """
24 24 import os
25 25 import logging
26 26 import binascii
27 27 import configparser
28 28 import urllib.request, urllib.parse, urllib.error
29 29
30 30 from zope.cachedescriptors.property import Lazy as LazyProperty
31 31
32 32 from collections import OrderedDict
33 33 from rhodecode.lib.datelib import (
34 34 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
35 35 from rhodecode.lib.utils import safe_unicode, safe_str
36 36 from rhodecode.lib.utils2 import CachedProperty
37 37 from rhodecode.lib.vcs import connection, exceptions
38 38 from rhodecode.lib.vcs.backends.base import (
39 39 BaseRepository, CollectionGenerator, Config, MergeResponse,
40 40 MergeFailureReason, Reference, BasePathPermissionChecker)
41 41 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
42 42 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
43 43 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
44 44 from rhodecode.lib.vcs.exceptions import (
45 45 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
46 46 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError, UnresolvedFilesInRepo)
47 47
# shorthand for converting binary node ids to hex strings
hexlify = binascii.hexlify
# mercurial's null revision id, used by remove_tag() to record tag removal.
# NOTE(review): this is a str; confirm the remote layer does not expect
# bytes (b"\0" * 20) under py3
nullid = "\0" * 20

log = logging.getLogger(__name__)
52 52
53 53
54 54 class MercurialRepository(BaseRepository):
55 55 """
56 56 Mercurial repository backend
57 57 """
58 58 DEFAULT_BRANCH_NAME = 'default'
59 59
    def __init__(self, repo_path, config=None, create=False, src_url=None,
                 do_workspace_checkout=False, with_wire=None, bare=False):
        """
        Raises RepositoryError if repository could not be find at the given
        ``repo_path``.

        :param repo_path: local path of the repository
        :param config: config object containing the repo configuration
        :param create=False: if set to True, would try to create repository if
            it does not exist rather than raising exception
        :param src_url=None: would try to clone repository from given location
        :param do_workspace_checkout=False: sets update of working copy after
            making a clone
        :param with_wire: options forwarded to the remote connection
        :param bare: not used, compatible with other VCS
        """

        self.path = safe_str(os.path.abspath(repo_path))
        # mercurial since 4.4.X requires certain configuration to be present
        # because sometimes we init the repos with config we need to meet
        # special requirements
        self.config = config if config else self.get_default_config(
            default=[('extensions', 'largefiles', '1')])
        self.with_wire = with_wire or {"cache": False}  # default should not use cache

        self._init_repo(create, src_url, do_workspace_checkout)

        # caches
        # commit_id -> index lookup, kept in sync by _rebuild_cache()
        self._commit_ids = {}
88 88
    @LazyProperty
    def _remote(self):
        """Lazily created connection to the vcsserver for this repo path."""
        repo_id = self.path
        return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire)
93 93
    @CachedProperty
    def commit_ids(self):
        """
        Returns list of commit ids, in ascending order. Being lazy
        attribute allows external tools to inject shas from cache.
        """
        commit_ids = self._get_all_commit_ids()
        # keep the id -> index lookup in sync with the freshly loaded list
        self._rebuild_cache(commit_ids)
        return commit_ids
103 103
104 104 def _rebuild_cache(self, commit_ids):
105 105 self._commit_ids = dict((commit_id, index)
106 106 for index, commit_id in enumerate(commit_ids))
107 107
    @CachedProperty
    def branches(self):
        """Active (not closed) branches as an ordered name -> sha mapping."""
        return self._get_branches()
111 111
    @CachedProperty
    def branches_closed(self):
        """Closed branches only, as an ordered name -> sha mapping."""
        return self._get_branches(active=False, closed=True)
115 115
116 116 @CachedProperty
117 117 def branches_all(self):
118 118 all_branches = {}
119 119 all_branches.update(self.branches)
120 120 all_branches.update(self.branches_closed)
121 121 return all_branches
122 122
    def _get_branches(self, active=True, closed=False):
        """
        Gets branches for this repository
        Returns only not closed active branches by default

        :param active: return also active branches
        :param closed: return also closed branches

        """
        if self.is_empty():
            return {}

        def get_name(ctx):
            # sort key: the branch name of the (name, sha) pair
            return ctx[0]

        # NOTE(review): hexlify() returns bytes on py3 while names go through
        # safe_unicode - confirm consumers expect that mix
        _branches = [(safe_unicode(n), hexlify(h),) for n, h in
                     self._remote.branches(active, closed).items()]

        return OrderedDict(sorted(_branches, key=get_name, reverse=False))
142 142
    @CachedProperty
    def tags(self):
        """
        Gets tags for this repository.
        Cached; invalidated via ``_invalidate_prop_cache('tags')`` after
        ``tag()`` / ``remove_tag()``.
        """
        return self._get_tags()
149 149
    def _get_tags(self):
        """Fetch tags from the remote as an ordered name -> sha mapping."""
        if self.is_empty():
            return {}

        def get_name(ctx):
            # sort key: the tag name of the (name, sha) pair
            return ctx[0]

        # NOTE(review): hexlify() returns bytes on py3 while names go through
        # safe_unicode - confirm consumers expect that mix
        _tags = [(safe_unicode(n), hexlify(h),) for n, h in
                 self._remote.tags().items()]

        return OrderedDict(sorted(_tags, key=get_name, reverse=True))
161 161
    def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
        """
        Creates and returns a tag for the given ``commit_id``.

        :param name: name for new tag
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param commit_id: commit id for which new tag would be created
        :param message: message of the tag's commit
        :param date: date of tag's commit
        :param kwargs: ``local`` (default False) marks the tag as local-only

        :raises TagAlreadyExistError: if tag with same name already exists
        """
        if name in self.tags:
            raise TagAlreadyExistError("Tag %s already exists" % name)

        commit = self.get_commit(commit_id=commit_id)
        local = kwargs.setdefault('local', False)

        if message is None:
            message = "Added tag %s for commit %s" % (name, commit.short_id)

        date, tz = date_to_timestamp_plus_offset(date)

        # order matters: tag on the remote, drop its vcs cache, then drop
        # our cached 'tags' property so the new tag becomes visible below
        self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
        self._remote.invalidate_vcs_cache()

        # Reinitialize tags
        self._invalidate_prop_cache('tags')
        tag_id = self.tags[name]

        return self.get_commit(commit_id=tag_id)
193 193
    def remove_tag(self, name, user, message=None, date=None):
        """
        Removes tag with the given `name`.

        :param name: name of the tag to be removed
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param message: message of the tag's removal commit
        :param date: date of tag's removal commit

        :raises TagDoesNotExistError: if tag with given name does not exists
        """
        if name not in self.tags:
            raise TagDoesNotExistError("Tag %s does not exist" % name)

        if message is None:
            message = "Removed tag %s" % name
        # removals are always recorded as regular (non-local) tag commits
        local = False

        date, tz = date_to_timestamp_plus_offset(date)

        # tagging the null revision records the removal of the tag
        self._remote.tag(name, nullid, message, local, user, date, tz)
        self._remote.invalidate_vcs_cache()
        self._invalidate_prop_cache('tags')
217 217
    @LazyProperty
    def bookmarks(self):
        """
        Gets bookmarks for this repository
        """
        return self._get_bookmarks()
224 224
    def _get_bookmarks(self):
        """Fetch bookmarks from the remote as an ordered name -> sha mapping."""
        if self.is_empty():
            return {}

        def get_name(ctx):
            # sort key: the bookmark name of the (name, sha) pair
            return ctx[0]

        # NOTE(review): hexlify() returns bytes on py3 while names go through
        # safe_unicode - confirm consumers expect that mix
        _bookmarks = [
            (safe_unicode(n), hexlify(h)) for n, h in
            self._remote.bookmarks().items()]

        return OrderedDict(sorted(_bookmarks, key=get_name))
237 237
    def _get_all_commit_ids(self):
        # 'visible' filters out obsolete/hidden changesets on the remote side
        return self._remote.get_all_commit_ids('visible')
240 240
    def get_diff(
            self, commit1, commit2, path='', ignore_whitespace=False,
            context=3, path1=None):
        """
        Returns (git like) *diff*, as plain text. Shows changes introduced by
        `commit2` since `commit1`.

        :param commit1: Entry point from which diff is shown. Can be
            ``self.EMPTY_COMMIT`` - in this case, patch showing all
            the changes since empty state of the repository until `commit2`
        :param commit2: Until which commit changes should be shown.
        :param path: limit the diff to this path (optional)
        :param ignore_whitespace: If set to ``True``, would not show whitespace
            changes. Defaults to ``False``.
        :param context: How many lines before/after changed lines should be
            shown. Defaults to ``3``.
        :param path1: must equal `path` when given; differing paths are not
            supported by this backend
        """
        self._validate_diff_commits(commit1, commit2)
        if path1 is not None and path1 != path:
            raise ValueError("Diff of two different paths not supported.")

        if path:
            file_filter = [self.path, path]
        else:
            file_filter = None

        diff = self._remote.diff(
            commit1.raw_id, commit2.raw_id, file_filter=file_filter,
            opt_git=True, opt_ignorews=ignore_whitespace,
            context=context)
        return MercurialDiff(diff)
271 271
272 272 def strip(self, commit_id, branch=None):
273 273 self._remote.strip(commit_id, update=False, backup="none")
274 274
275 275 self._remote.invalidate_vcs_cache()
276 276 # clear cache
277 277 self._invalidate_prop_cache('commit_ids')
278 278
279 279 return len(self.commit_ids)
280 280
281 281 def verify(self):
282 282 verify = self._remote.verify()
283 283
284 284 self._remote.invalidate_vcs_cache()
285 285 return verify
286 286
    def hg_update_cache(self):
        """Refresh mercurial caches via the remote; returns its result."""
        update_cache = self._remote.hg_update_cache()

        self._remote.invalidate_vcs_cache()
        return update_cache
292 292
293 293 def hg_rebuild_fn_cache(self):
294 294 update_cache = self._remote.hg_rebuild_fn_cache()
295 295
296 296 self._remote.invalidate_vcs_cache()
297 297 return update_cache
298 298
    def get_common_ancestor(self, commit_id1, commit_id2, repo2):
        """Return raw id of the common ancestor of the two commits, or None."""
        log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
                  self, commit_id1, repo2, commit_id2)

        # identical ids are trivially their own ancestor
        if commit_id1 == commit_id2:
            return commit_id1

        ancestors = self._remote.revs_from_revspec(
            "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
            other_path=repo2.path)

        # resolve the (single) ancestor revision, if any, within repo2
        ancestor_id = repo2[ancestors[0]].raw_id if ancestors else None

        log.debug('Found common ancestor with sha: %s', ancestor_id)
        return ancestor_id
314 314
    def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
        """
        Return the commits of `repo2` between `commit_id1` and `commit_id2`.

        :param merge: when True use ancestor-set difference (merge preview);
            otherwise the linear ``id(a)..id(b)`` range, excluding `commit_id1`
        :return: list of commit objects (empty when both ids are equal)
        """
        if commit_id1 == commit_id2:
            commits = []
        else:
            if merge:
                indexes = self._remote.revs_from_revspec(
                    "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
                    commit_id2, commit_id1, commit_id1, other_path=repo2.path)
            else:
                indexes = self._remote.revs_from_revspec(
                    "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
                    commit_id1, other_path=repo2.path)

            commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
                       for idx in indexes]

        return commits
332 332
    @staticmethod
    def check_url(url, config):
        """
        Function will check given url and try to verify if it's a valid
        link. Sometimes it may happened that mercurial will issue basic
        auth request that can cause whole API to hang when used from python
        or other external calls.

        On failures it'll raise urllib2.HTTPError, exception is also thrown
        when the return code is non 200
        """
        # check first if it's not a local url - those are always considered ok
        if os.path.isdir(url) or url.startswith('file:'):
            return True

        # Request the _remote to verify the url
        return connection.Hg.check_url(url, config.serialize())
350 350
    @staticmethod
    def is_valid_repository(path):
        """A path is a mercurial repository when it contains ``.hg``."""
        return os.path.isdir(os.path.join(path, '.hg'))
354 354
    def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
        """
        Function will check for mercurial repository in given path. If there
        is no repository in that path it will raise an exception unless
        `create` parameter is set to True - in that case repository would
        be created.

        If `src_url` is given, would try to clone repository from the
        location at given clone_point. Additionally it'll make update to
        working copy accordingly to `do_workspace_checkout` flag.

        :raises RepositoryError: when `create` is set but the path exists
        """
        if create and os.path.exists(self.path):
            raise RepositoryError(
                "Cannot create repository at %s, location already exist"
                % self.path)

        if src_url:
            url = str(self._get_url(src_url))
            MercurialRepository.check_url(url, self.config)

            self._remote.clone(url, self.path, do_workspace_checkout)

            # Don't try to create if we've already cloned repo
            create = False

        if create:
            os.makedirs(self.path, mode=0o755)
            self._remote.localrepository(create)
383 383
    @LazyProperty
    def in_memory_commit(self):
        """A ``MercurialInMemoryCommit`` bound to this repository."""
        return MercurialInMemoryCommit(self)
387 387
    @LazyProperty
    def description(self):
        """Repository description from hgrc ``[web] description``, or default."""
        description = self._remote.get_config_value(
            'web', 'description', untrusted=True)
        return safe_unicode(description or self.DEFAULT_DESCRIPTION)
393 393
    @LazyProperty
    def contact(self):
        """Contact from hgrc ``[web] contact`` or ``[ui] username``, or default."""
        contact = (
            self._remote.get_config_value("web", "contact") or
            self._remote.get_config_value("ui", "username"))
        return safe_unicode(contact or self.DEFAULT_CONTACT)
400 400
    @LazyProperty
    def last_change(self):
        """
        Returns last change made on this repository as
        `datetime.datetime` object.
        """
        try:
            return self.get_commit().date
        except RepositoryError:
            # empty repository: fall back to filesystem timestamps
            tzoffset = makedate()[1]
            return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
412 412
413 413 def _get_fs_mtime(self):
414 414 # fallback to filesystem
415 415 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
416 416 st_path = os.path.join(self.path, '.hg', "store")
417 417 if os.path.exists(cl_path):
418 418 return os.stat(cl_path).st_mtime
419 419 else:
420 420 return os.stat(st_path).st_mtime
421 421
422 422 def _get_url(self, url):
423 423 """
424 424 Returns normalized url. If schema is not given, would fall
425 425 to filesystem
426 426 (``file:///``) schema.
427 427 """
428 428 url = url.encode('utf8')
429 429 if url != 'default' and '://' not in url:
430 430 url = "file:" + urllib.request.pathname2url(url)
431 431 return url
432 432
    def get_hook_location(self):
        """
        returns absolute path to location where hooks are stored
        """
        # mercurial hooks are configured in the per-repo .hg/.hgrc file
        return os.path.join(self.path, '.hg', '.hgrc')
438 438
439 439 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
440 440 translate_tag=None, maybe_unreachable=False, reference_obj=None):
441 441 """
442 442 Returns ``MercurialCommit`` object representing repository's
443 443 commit at the given `commit_id` or `commit_idx`.
444 444 """
445 445 if self.is_empty():
446 446 raise EmptyRepositoryError("There are no commits yet")
447 447
448 448 if commit_id is not None:
449 449 self._validate_commit_id(commit_id)
450 450 try:
451 451 # we have cached idx, use it without contacting the remote
452 452 idx = self._commit_ids[commit_id]
453 453 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
454 454 except KeyError:
455 455 pass
456 456
457 457 elif commit_idx is not None:
458 458 self._validate_commit_idx(commit_idx)
459 459 try:
460 460 _commit_id = self.commit_ids[commit_idx]
461 461 if commit_idx < 0:
462 462 commit_idx = self.commit_ids.index(_commit_id)
463 463
464 464 return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
465 465 except IndexError:
466 466 commit_id = commit_idx
467 467 else:
468 468 commit_id = "tip"
469 469
470 if isinstance(commit_id, unicode):
471 commit_id = safe_str(commit_id)
470 #TODO: decide if we pass bytes or str into lookup ?
471 # if isinstance(commit_id, unicode):
472 # commit_id = safe_str(commit_id)
472 473
473 474 try:
474 475 raw_id, idx = self._remote.lookup(commit_id, both=True)
475 476 except CommitDoesNotExistError:
476 477 msg = "Commit {} does not exist for `{}`".format(
477 478 *map(safe_str, [commit_id, self.name]))
478 479 raise CommitDoesNotExistError(msg)
479 480
480 481 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
481 482
    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
        """
        Returns generator of ``MercurialCommit`` objects from start to end
        (both are inclusive)

        :param start_id: None, str(commit_id)
        :param end_id: None, str(commit_id)
        :param start_date: if specified, commits with commit date less than
          ``start_date`` would be filtered out from returned set
        :param end_date: if specified, commits with commit date greater than
          ``end_date`` would be filtered out from returned set
        :param branch_name: if specified, commits not reachable from given
          branch would be filtered out from returned set
        :param show_hidden: Show hidden commits such as obsolete or hidden from
          Mercurial evolve
        :raise BranchDoesNotExistError: If given ``branch_name`` does not
            exist.
        :raise CommitDoesNotExistError: If commit for given ``start`` or
            ``end`` could not be found.
        """
        # actually we should check now if it's not an empty repo
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")
        self._validate_branch_name(branch_name)

        branch_ancestors = False
        if start_id is not None:
            self._validate_commit_id(start_id)
            c_start = self.get_commit(commit_id=start_id)
            start_pos = self._commit_ids[c_start.raw_id]
        else:
            start_pos = None

        if end_id is not None:
            self._validate_commit_id(end_id)
            c_end = self.get_commit(commit_id=end_id)
            end_pos = max(0, self._commit_ids[c_end.raw_id])
        else:
            end_pos = None

        if None not in [start_id, end_id] and start_pos > end_pos:
            raise RepositoryError(
                "Start commit '%s' cannot be after end commit '%s'" %
                (start_id, end_id))

        if end_pos is not None:
            # make the slice below inclusive of the end commit
            end_pos += 1

        # build a Mercurial revset expression out of the requested filters
        commit_filter = []

        if branch_name and not branch_ancestors:
            commit_filter.append('branch("%s")' % (branch_name,))
        elif branch_name and branch_ancestors:
            commit_filter.append('ancestors(branch("%s"))' % (branch_name,))

        if start_date and not end_date:
            commit_filter.append('date(">%s")' % (start_date,))
        if end_date and not start_date:
            commit_filter.append('date("<%s")' % (end_date,))
        if start_date and end_date:
            commit_filter.append(
                'date(">%s") and date("<%s")' % (start_date, end_date))

        if not show_hidden:
            commit_filter.append('not obsolete()')
            commit_filter.append('not hidden()')

        # TODO: johbo: Figure out a simpler way for this solution
        collection_generator = CollectionGenerator
        if commit_filter:
            commit_filter = ' and '.join(map(safe_str, commit_filter))
            # rev_range yields numeric revision indexes, hence the
            # index-based generator is used for filtered sets
            revisions = self._remote.rev_range([commit_filter])
            collection_generator = MercurialIndexBasedCollectionGenerator
        else:
            revisions = self.commit_ids

        if start_pos or end_pos:
            revisions = revisions[start_pos:end_pos]

        return collection_generator(self, revisions, pre_load=pre_load)
564 565
    def pull(self, url, commit_ids=None):
        """
        Pull changes from external location.

        :param url: location to pull from
        :param commit_ids: Optional. Can be set to a list of commit ids
           which shall be pulled from the other repository.
        """
        url = self._get_url(url)
        self._remote.pull(url, commit_ids=commit_ids)
        # pulling mutates remote repo state; drop the vcsserver-side cache
        self._remote.invalidate_vcs_cache()
575 576
576 577 def fetch(self, url, commit_ids=None):
577 578 """
578 579 Backward compatibility with GIT fetch==pull
579 580 """
580 581 return self.pull(url, commit_ids=commit_ids)
581 582
    def push(self, url):
        """
        Push all changes to the repository at the given location.
        """
        url = self._get_url(url)
        self._remote.sync_push(url)
585 586
    def _local_clone(self, clone_path):
        """
        Create a local clone of the current repo.

        :param clone_path: destination path; the clone's working directory
            is updated after cloning and hooks are disabled.
        """
        self._remote.clone(self.path, clone_path, update_after_clone=True,
                           hooks=False)
592 593
    def _update(self, revision, clean=False):
        """
        Update the working copy to the specified revision.

        :param clean: presumably discards uncommitted changes, like
            ``hg update --clean`` -- TODO confirm against vcsserver.
        """
        log.debug('Doing checkout to commit: `%s` for %s', revision, self)
        self._remote.update(revision, clean=clean)
599 600
600 601 def _identify(self):
601 602 """
602 603 Return the current state of the working directory.
603 604 """
604 605 return self._remote.identify().strip().rstrip('+')
605 606
606 607 def _heads(self, branch=None):
607 608 """
608 609 Return the commit ids of the repository heads.
609 610 """
610 611 return self._remote.heads(branch=branch).strip().split(' ')
611 612
    def _ancestor(self, revision1, revision2):
        """
        Return the common ancestor of the two revisions.
        Delegates the computation to the vcsserver.
        """
        return self._remote.ancestor(revision1, revision2)
617 618
    def _local_push(
            self, revision, repository_path, push_branches=False,
            enable_hooks=False):
        """
        Push the given revision to the specified repository.

        :param revision: single revision to push
        :param repository_path: target repository path
        :param push_branches: allow to create branches in the target repo.
        :param enable_hooks: run hooks in the target repo during the push
        """
        self._remote.push(
            [revision], repository_path, hooks=enable_hooks,
            push_branches=push_branches)
629 630
    def _local_merge(self, target_ref, merge_message, user_name, user_email,
                     source_ref, use_rebase=False, close_commit_id=None, dry_run=False):
        """
        Merge the given source_revision into the checked out revision.

        Returns the commit id of the merge and a boolean indicating if the
        commit needs to be pushed.

        :param use_rebase: rebase the source onto the target instead of
            creating a merge commit
        :param close_commit_id: commit that closed the source branch; when
            given it becomes the effective merge source
        :param dry_run: appears unused in this body -- kept for interface
            symmetry with the git backend (TODO confirm)
        """
        source_ref_commit_id = source_ref.commit_id
        target_ref_commit_id = target_ref.commit_id

        # update our workdir to target ref, for proper merge
        self._update(target_ref_commit_id, clean=True)

        ancestor = self._ancestor(target_ref_commit_id, source_ref_commit_id)
        is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)

        if close_commit_id:
            # NOTE(marcink): if we get the close commit, this is our new source
            # which will include the close commit itself.
            source_ref_commit_id = close_commit_id

        if ancestor == source_ref_commit_id:
            # Nothing to do, the changes were already integrated
            return target_ref_commit_id, False

        elif ancestor == target_ref_commit_id and is_the_same_branch:
            # Fast-forward case: target is an ancestor of source on the same
            # branch. In this case we should force a commit message.
            return source_ref_commit_id, True

        unresolved = None
        if use_rebase:
            try:
                bookmark_name = 'rcbook%s%s' % (source_ref_commit_id, target_ref_commit_id)
                self.bookmark(bookmark_name, revision=source_ref.commit_id)
                self._remote.rebase(
                    source=source_ref_commit_id, dest=target_ref_commit_id)
                self._remote.invalidate_vcs_cache()
                self._update(bookmark_name, clean=True)
                return self._identify(), True
            except RepositoryError as e:
                # The rebase-abort may raise another exception which 'hides'
                # the original one, therefore we log it here.
                log.exception('Error while rebasing shadow repo during merge.')
                if 'unresolved conflicts' in safe_str(e):
                    unresolved = self._remote.get_unresolved_files()
                    log.debug('unresolved files: %s', unresolved)

                # Cleanup any rebase leftovers
                self._remote.invalidate_vcs_cache()
                self._remote.rebase(abort=True)
                self._remote.invalidate_vcs_cache()
                self._remote.update(clean=True)
                if unresolved:
                    raise UnresolvedFilesInRepo(unresolved)
                else:
                    raise
        else:
            try:
                self._remote.merge(source_ref_commit_id)
                self._remote.invalidate_vcs_cache()
                self._remote.commit(
                    message=safe_str(merge_message),
                    username=safe_str('%s <%s>' % (user_name, user_email)))
                self._remote.invalidate_vcs_cache()
                return self._identify(), True
            except RepositoryError as e:
                # The merge-abort may raise another exception which 'hides'
                # the original one, therefore we log it here.
                log.exception('Error while merging shadow repo during merge.')
                if 'unresolved merge conflicts' in safe_str(e):
                    unresolved = self._remote.get_unresolved_files()
                    log.debug('unresolved files: %s', unresolved)

                # Cleanup any merge leftovers
                self._remote.update(clean=True)
                if unresolved:
                    raise UnresolvedFilesInRepo(unresolved)
                else:
                    raise
710 711
    def _local_close(self, target_ref, user_name, user_email,
                     source_ref, close_message=''):
        """
        Close the branch of the given source_revision

        Returns the commit id of the close and a boolean indicating if the
        commit needs to be pushed.

        :param close_message: optional custom message; a default one naming
            the source branch is used when empty
        """
        self._update(source_ref.commit_id)
        message = close_message or "Closing branch: `{}`".format(source_ref.name)
        try:
            self._remote.commit(
                message=safe_str(message),
                username=safe_str('%s <%s>' % (user_name, user_email)),
                close_branch=True)
            self._remote.invalidate_vcs_cache()
            return self._identify(), True
        except RepositoryError:
            # Cleanup any commit leftovers
            self._remote.update(clean=True)
            raise
732 733
733 734 def _is_the_same_branch(self, target_ref, source_ref):
734 735 return (
735 736 self._get_branch_name(target_ref) ==
736 737 self._get_branch_name(source_ref))
737 738
738 739 def _get_branch_name(self, ref):
739 740 if ref.type == 'branch':
740 741 return ref.name
741 742 return self._remote.ctx_branch(ref.commit_id)
742 743
    def _maybe_prepare_merge_workspace(
            self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
        """
        Ensure a shadow repository exists for the given merge workspace and
        return its path; the current repo is cloned on first use.
        """
        shadow_repository_path = self._get_shadow_repository_path(
            self.path, repo_id, workspace_id)
        if not os.path.exists(shadow_repository_path):
            self._local_clone(shadow_repository_path)
            log.debug(
                'Prepared shadow repository in %s', shadow_repository_path)

        return shadow_repository_path
753 754
    def _merge_repo(self, repo_id, workspace_id, target_ref,
                    source_repo, source_ref, merge_message,
                    merger_name, merger_email, dry_run=False,
                    use_rebase=False, close_branch=False):
        """
        Merge `source_ref` of `source_repo` into `target_ref` of this repo
        and return a ``MergeResponse``.

        The work happens inside a shadow repository: target and source are
        pulled into it, merged (or rebased) there, and only on success --
        and not in `dry_run` mode -- pushed back into this repository.
        """
        log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
                  'rebase' if use_rebase else 'merge', dry_run)
        if target_ref.commit_id not in self._heads():
            return MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
                metadata={'target_ref': target_ref})

        try:
            if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
                # multiple heads on the target branch: merging is ambiguous
                heads_all = self._heads(target_ref.name)
                max_heads = 10
                if len(heads_all) > max_heads:
                    heads = '\n,'.join(
                        heads_all[:max_heads] +
                        ['and {} more.'.format(len(heads_all)-max_heads)])
                else:
                    heads = '\n,'.join(heads_all)
                metadata = {
                    'target_ref': target_ref,
                    'source_ref': source_ref,
                    'heads': heads
                }
                return MergeResponse(
                    False, False, None,
                    MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
                    metadata=metadata)
        except CommitDoesNotExistError:
            log.exception('Failure when looking up branch heads on hg target')
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_TARGET_REF,
                metadata={'target_ref': target_ref})

        shadow_repository_path = self._maybe_prepare_merge_workspace(
            repo_id, workspace_id, target_ref, source_ref)
        shadow_repo = self.get_shadow_instance(shadow_repository_path)

        log.debug('Pulling in target reference %s', target_ref)
        self._validate_pull_reference(target_ref)
        shadow_repo._local_pull(self.path, target_ref)

        try:
            log.debug('Pulling in source reference %s', source_ref)
            source_repo._validate_pull_reference(source_ref)
            shadow_repo._local_pull(source_repo.path, source_ref)
        except CommitDoesNotExistError:
            log.exception('Failure when doing local pull on hg shadow repo')
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
                metadata={'source_ref': source_ref})

        merge_ref = None
        merge_commit_id = None
        close_commit_id = None
        merge_failure_reason = MergeFailureReason.NONE
        metadata = {}

        # enforce that close branch should be used only in case we source from
        # an actual Branch
        close_branch = close_branch and source_ref.type == 'branch'

        # don't allow to close branch if source and target are the same
        close_branch = close_branch and source_ref.name != target_ref.name

        needs_push_on_close = False
        if close_branch and not use_rebase and not dry_run:
            try:
                close_commit_id, needs_push_on_close = shadow_repo._local_close(
                    target_ref, merger_name, merger_email, source_ref)
                merge_possible = True
            except RepositoryError:
                log.exception('Failure when doing close branch on '
                              'shadow repo: %s', shadow_repo)
                merge_possible = False
                merge_failure_reason = MergeFailureReason.MERGE_FAILED
        else:
            merge_possible = True

        needs_push = False
        if merge_possible:

            try:
                merge_commit_id, needs_push = shadow_repo._local_merge(
                    target_ref, merge_message, merger_name, merger_email,
                    source_ref, use_rebase=use_rebase,
                    close_commit_id=close_commit_id, dry_run=dry_run)
                merge_possible = True

                # read the state of the close action, if it
                # maybe required a push
                needs_push = needs_push or needs_push_on_close

                # Set a bookmark pointing to the merge commit. This bookmark
                # may be used to easily identify the last successful merge
                # commit in the shadow repository.
                shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
                merge_ref = Reference('book', 'pr-merge', merge_commit_id)
            except SubrepoMergeError:
                log.exception(
                    'Subrepo merge error during local merge on hg shadow repo.')
                merge_possible = False
                merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
                needs_push = False
            except RepositoryError as e:
                log.exception('Failure when doing local merge on hg shadow repo')
                if isinstance(e, UnresolvedFilesInRepo):
                    # cap the conflict list so the response stays readable
                    all_conflicts = list(e.args[0])
                    max_conflicts = 20
                    if len(all_conflicts) > max_conflicts:
                        conflicts = all_conflicts[:max_conflicts] \
                                    + ['and {} more.'.format(len(all_conflicts)-max_conflicts)]
                    else:
                        conflicts = all_conflicts
                    metadata['unresolved_files'] = \
                        '\n* conflict: ' + \
                        ('\n * conflict: '.join(conflicts))

                merge_possible = False
                merge_failure_reason = MergeFailureReason.MERGE_FAILED
                needs_push = False

        if merge_possible and not dry_run:
            if needs_push:
                # In case the target is a bookmark, update it, so after pushing
                # the bookmarks is also updated in the target.
                if target_ref.type == 'book':
                    shadow_repo.bookmark(
                        target_ref.name, revision=merge_commit_id)
                try:
                    shadow_repo_with_hooks = self.get_shadow_instance(
                        shadow_repository_path,
                        enable_hooks=True)
                    # This is the actual merge action, we push from shadow
                    # into origin.
                    # Note: the push_branches option will push any new branch
                    # defined in the source repository to the target. This may
                    # be dangerous as branches are permanent in Mercurial.
                    # This feature was requested in issue #441.
                    shadow_repo_with_hooks._local_push(
                        merge_commit_id, self.path, push_branches=True,
                        enable_hooks=True)

                    # maybe we also need to push the close_commit_id
                    if close_commit_id:
                        shadow_repo_with_hooks._local_push(
                            close_commit_id, self.path, push_branches=True,
                            enable_hooks=True)
                    merge_succeeded = True
                except RepositoryError:
                    log.exception(
                        'Failure when doing local push from the shadow '
                        'repository to the target repository at %s.', self.path)
                    merge_succeeded = False
                    merge_failure_reason = MergeFailureReason.PUSH_FAILED
                    metadata['target'] = 'hg shadow repo'
                    metadata['merge_commit'] = merge_commit_id
            else:
                merge_succeeded = True
        else:
            merge_succeeded = False

        return MergeResponse(
            merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
            metadata=metadata)
922 923
    def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
        """
        Return a ``MercurialRepository`` instance for the shadow repo at the
        given path; hooks are stripped from the copied config unless
        `enable_hooks` is set.
        """
        config = self.config.copy()
        if not enable_hooks:
            config.clear_section('hooks')
        return MercurialRepository(shadow_repository_path, config, with_wire={"cache": cache})
928 929
    def _validate_pull_reference(self, reference):
        """
        Raise ``CommitDoesNotExistError`` unless `reference` is a known
        bookmark, branch, or resolvable commit id in this repository.
        """
        if not (reference.name in self.bookmarks or
                reference.name in self.branches or
                self.get_commit(reference.commit_id)):
            raise CommitDoesNotExistError(
                'Unknown branch, bookmark or commit id')
935 936
936 937 def _local_pull(self, repository_path, reference):
937 938 """
938 939 Fetch a branch, bookmark or commit from a local repository.
939 940 """
940 941 repository_path = os.path.abspath(repository_path)
941 942 if repository_path == self.path:
942 943 raise ValueError('Cannot pull from the same repository')
943 944
944 945 reference_type_to_option_name = {
945 946 'book': 'bookmark',
946 947 'branch': 'branch',
947 948 }
948 949 option_name = reference_type_to_option_name.get(
949 950 reference.type, 'revision')
950 951
951 952 if option_name == 'revision':
952 953 ref = reference.commit_id
953 954 else:
954 955 ref = reference.name
955 956
956 957 options = {option_name: [ref]}
957 958 self._remote.pull_cmd(repository_path, hooks=False, **options)
958 959 self._remote.invalidate_vcs_cache()
959 960
960 961 def bookmark(self, bookmark, revision=None):
961 962 if isinstance(bookmark, unicode):
962 963 bookmark = safe_str(bookmark)
963 964 self._remote.bookmark(bookmark, revision=revision)
964 965 self._remote.invalidate_vcs_cache()
965 966
    def get_path_permissions(self, username):
        """
        Build a path permission checker for `username` from the repo-local
        ``.hg/hgacl`` file, or return None when no ACL file exists.

        :raises RepositoryRequirementError: when the ACL file cannot be parsed
        """
        hgacl_file = os.path.join(self.path, '.hg/hgacl')

        def read_patterns(suffix):
            # look up the first matching option, user-specific entries take
            # precedence over the 'default' ones
            svalue = None
            for section, option in [
                    ('narrowacl', username + suffix),
                    ('narrowacl', 'default' + suffix),
                    ('narrowhgacl', username + suffix),
                    ('narrowhgacl', 'default' + suffix)
            ]:
                try:
                    svalue = hgacl.get(section, option)
                    break  # stop at the first value we find
                except configparser.NoOptionError:
                    pass
            if not svalue:
                return None
            result = ['/']
            for pattern in svalue.split():
                result.append(pattern)
                # literal (non-glob) patterns also match everything below them
                if '*' not in pattern and '?' not in pattern:
                    result.append(pattern + '/*')
            return result

        if os.path.exists(hgacl_file):
            try:
                hgacl = configparser.RawConfigParser()
                hgacl.read(hgacl_file)

                includes = read_patterns('.includes')
                excludes = read_patterns('.excludes')
                return BasePathPermissionChecker.create_from_patterns(
                    includes, excludes)
            except BaseException as e:
                msg = 'Cannot read ACL settings from {} on {}: {}'.format(
                    hgacl_file, self.name, e)
                raise exceptions.RepositoryRequirementError(msg)
        else:
            return None
1006 1007
1007 1008
class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
    """
    Collection generator whose entries are numeric revision indexes
    (as returned by revset queries) instead of commit ids.
    """

    def _commit_factory(self, commit_id):
        # `commit_id` here is actually a numeric revision index
        return self.repo.get_commit(
            commit_idx=commit_id, pre_load=self.pre_load)
@@ -1,876 +1,876 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Module holding everything related to vcs nodes, with vcs2 architecture.
23 23 """
24 24
25 25 import os
26 26 import stat
27 27
28 28 from zope.cachedescriptors.property import Lazy as LazyProperty
29 29
30 30 import rhodecode
31 31 from rhodecode.config.conf import LANGUAGES_EXTENSIONS_MAP
32 32 from rhodecode.lib.utils import safe_unicode, safe_str
33 33 from rhodecode.lib.utils2 import md5
34 34 from rhodecode.lib.vcs import path as vcspath
35 35 from rhodecode.lib.vcs.backends.base import EmptyCommit, FILEMODE_DEFAULT
36 36 from rhodecode.lib.vcs.conf.mtypes import get_mimetypes_db
37 37 from rhodecode.lib.vcs.exceptions import NodeError, RemovedFileNodeError
38 38
39 39 LARGEFILE_PREFIX = '.hglf'
40 40
41 41
class NodeKind:
    """Enumeration of vcs node kinds."""
    SUBMODULE = -1
    DIR = 1
    FILE = 2
    LARGEFILE = 3
47 47
48 48
class NodeState:
    """String constants describing a node's state within a commit."""
    ADDED = 'added'
    CHANGED = 'changed'
    NOT_CHANGED = 'not changed'
    REMOVED = 'removed'
54 54
55 55
class NodeGeneratorBase(object):
    """
    Base class for removed added and changed filenodes, it's a lazy generator
    class that will create filenodes only on iteration or call

    The len method doesn't need to create filenodes at all
    """

    def __init__(self, current_paths, cs):
        self.cs = cs
        self.current_paths = current_paths

    def __call__(self):
        # materialize all nodes at once
        return list(iter(self))

    def __getslice__(self, i, j):
        # NOTE: __getslice__ is a Python 2 slicing protocol, kept verbatim
        # for compatibility
        for node_path in self.current_paths[i:j]:
            yield self.cs.get_node(node_path)

    def __len__(self):
        return len(self.current_paths)

    def __iter__(self):
        for node_path in self.current_paths:
            yield self.cs.get_node(node_path)
81 81
82 82
class AddedFileNodesGenerator(NodeGeneratorBase):
    """
    Class holding added files for current commit.
    Inherits the lazy iteration behaviour from ``NodeGeneratorBase``.
    """
87 87
88 88
class ChangedFileNodesGenerator(NodeGeneratorBase):
    """
    Class holding changed files for current commit.
    Inherits the lazy iteration behaviour from ``NodeGeneratorBase``.
    """
93 93
94 94
class RemovedFileNodesGenerator(NodeGeneratorBase):
    """
    Class holding removed files for current commit
    """
    def __iter__(self):
        # removed files no longer exist in the commit, so yield placeholder
        # nodes instead of asking the commit for real file nodes
        for p in self.current_paths:
            yield RemovedFileNode(path=p)

    def __getslice__(self, i, j):
        # NOTE: Python 2 slicing protocol, unused on Python 3
        for p in self.current_paths[i:j]:
            yield RemovedFileNode(path=p)
106 106
107 107
class Node(object):
    """
    Simplest class representing file or directory on repository. SCM backends
    should use ``FileNode`` and ``DirNode`` subclasses rather than ``Node``
    directly.

    Node's ``path`` cannot start with slash as we operate on *relative* paths
    only. Moreover, every single node is identified by the ``path`` attribute,
    so it cannot end with slash, too. Otherwise, path could lead to mistakes.
    """
    RTLO_MARKER = "\u202E"  # RTLO marker allows swapping text, and certain
    # security attacks could be used with this
    commit = None

    def __init__(self, path, kind):
        self._validate_path(path)  # can throw exception if path is invalid
        self.path = safe_str(path.rstrip('/'))  # we store paths as str
        if path == '' and kind != NodeKind.DIR:
            raise NodeError("Only DirNode and its subclasses may be "
                            "initialized with empty path")
        self.kind = kind

        if self.is_root() and not self.is_dir():
            raise NodeError("Root node cannot be FILE kind")

    def _validate_path(self, path):
        if path.startswith('/'):
            raise NodeError(
                "Cannot initialize Node objects with slash at "
                "the beginning as only relative paths are supported. "
                "Got %s" % (path,))

    @LazyProperty
    def parent(self):
        """Parent node, fetched via the commit when one is attached."""
        parent_path = self.get_parent_path()
        if parent_path:
            if self.commit:
                return self.commit.get_node(parent_path)
            return DirNode(parent_path)
        return None

    @LazyProperty
    def unicode_path(self):
        return safe_unicode(self.path)

    @LazyProperty
    def has_rtlo(self):
        """Detects if a path has right-to-left-override marker"""
        return self.RTLO_MARKER in self.unicode_path

    @LazyProperty
    def unicode_path_safe(self):
        """
        Special SAFE representation of path without the right-to-left-override.
        This should be only used for "showing" the file, cannot be used for any
        urls etc.
        """
        return safe_unicode(self.path).replace(self.RTLO_MARKER, '')

    @LazyProperty
    def dir_path(self):
        """
        Returns name of the directory from full path of this vcs node. Empty
        string is returned if there's no directory in the path
        """
        _parts = self.path.rstrip('/').rsplit('/', 1)
        if len(_parts) == 2:
            return safe_unicode(_parts[0])
        return ''

    @LazyProperty
    def name(self):
        """
        Returns name of the node so if its path
        then only last part is returned.
        """
        return safe_unicode(self.path.rstrip('/').split('/')[-1])

    @property
    def kind(self):
        return self._kind

    @kind.setter
    def kind(self, kind):
        # kind is immutable once set; it determines file/dir semantics
        if hasattr(self, '_kind'):
            raise NodeError("Cannot change node's kind")
        else:
            self._kind = kind
            # Post setter check (path's trailing slash)
            if self.path.endswith('/'):
                raise NodeError("Node's path cannot end with slash")

    def __cmp__(self, other):
        """
        Comparator using name of the node, needed for quick list sorting.

        NOTE(review): ``__cmp__`` and the ``cmp`` builtin are Python 2 only;
        on Python 3 this method is never called by sorting and ``cmp`` would
        raise NameError -- confirm whether ordering support is still needed.
        """
        kind_cmp = cmp(self.kind, other.kind)
        if kind_cmp:
            if isinstance(self, SubModuleNode):
                # we make submodules equal to dirnode for "sorting" purposes
                return NodeKind.DIR
            return kind_cmp
        return cmp(self.name, other.name)

    def __eq__(self, other):
        for attr in ['name', 'path', 'kind']:
            if getattr(self, attr) != getattr(other, attr):
                return False
        if self.is_file():
            if self.content != other.content:
                return False
        else:
            # For DirNode's check without entering each dir
            self_nodes_paths = list(sorted(n.path for n in self.nodes))
            # BUG FIX: previously both sides iterated `self.nodes`, which made
            # directory comparison trivially succeed; compare against `other`.
            other_nodes_paths = list(sorted(n.path for n in other.nodes))
            if self_nodes_paths != other_nodes_paths:
                return False
        return True

    def __ne__(self, other):
        return not self.__eq__(other)

    def __repr__(self):
        return '<%s %r>' % (self.__class__.__name__, self.path)

    def __str__(self):
        return self.__repr__()

    def __unicode__(self):
        # NOTE: Python 2 protocol; harmless leftover on Python 3
        return self.name

    def get_parent_path(self):
        """
        Returns node's parent path or empty string if node is root.
        """
        if self.is_root():
            return ''
        return vcspath.dirname(self.path.rstrip('/')) + '/'

    def is_file(self):
        """
        Returns ``True`` if node's kind is ``NodeKind.FILE``, ``False``
        otherwise.
        """
        return self.kind == NodeKind.FILE

    def is_dir(self):
        """
        Returns ``True`` if node's kind is ``NodeKind.DIR``, ``False``
        otherwise.
        """
        return self.kind == NodeKind.DIR

    def is_root(self):
        """
        Returns ``True`` if node is a root node and ``False`` otherwise.
        """
        return self.kind == NodeKind.DIR and self.path == ''

    def is_submodule(self):
        """
        Returns ``True`` if node's kind is ``NodeKind.SUBMODULE``, ``False``
        otherwise.
        """
        return self.kind == NodeKind.SUBMODULE

    def is_largefile(self):
        """
        Returns ``True`` if node's kind is ``NodeKind.LARGEFILE``, ``False``
        otherwise
        """
        return self.kind == NodeKind.LARGEFILE

    def is_link(self):
        if self.commit:
            return self.commit.is_link(self.path)
        return False

    @LazyProperty
    def added(self):
        return self.state is NodeState.ADDED

    @LazyProperty
    def changed(self):
        return self.state is NodeState.CHANGED

    @LazyProperty
    def not_changed(self):
        return self.state is NodeState.NOT_CHANGED

    @LazyProperty
    def removed(self):
        return self.state is NodeState.REMOVED
302 302
303 303
304 304 class FileNode(Node):
305 305 """
306 306 Class representing file nodes.
307 307
308 308 :attribute: path: path to the node, relative to repository's root
309 309 :attribute: content: if given arbitrary sets content of the file
310 310 :attribute: commit: if given, first time content is accessed, callback
311 311 :attribute: mode: stat mode for a node. Default is `FILEMODE_DEFAULT`.
312 312 """
313 313 _filter_pre_load = []
314 314
    def __init__(self, path, content=None, commit=None, mode=None, pre_load=None):
        """
        Only one of ``content`` and ``commit`` may be given. Passing both
        would raise ``NodeError`` exception.

        :param path: relative path to the node
        :param content: content may be passed to constructor
        :param commit: if given, will use it to lazily fetch content
        :param mode: ST_MODE (i.e. 0100644)
        :param pre_load: optional list of attribute names to resolve eagerly
        """
        if content and commit:
            raise NodeError("Cannot use both content and commit")
        super(FileNode, self).__init__(path, kind=NodeKind.FILE)
        self.commit = commit
        self._content = content
        self._mode = mode or FILEMODE_DEFAULT

        self._set_bulk_properties(pre_load)
333 333
334 334 def _set_bulk_properties(self, pre_load):
335 335 if not pre_load:
336 336 return
337 337 pre_load = [entry for entry in pre_load
338 338 if entry not in self._filter_pre_load]
339 339 if not pre_load:
340 340 return
341 341
342 342 for attr_name in pre_load:
343 343 result = getattr(self, attr_name)
344 344 if callable(result):
345 345 result = result()
346 346 self.__dict__[attr_name] = result
347 347
348 348 @LazyProperty
349 349 def mode(self):
350 350 """
351 351 Returns lazily mode of the FileNode. If `commit` is not set, would
352 352 use value given at initialization or `FILEMODE_DEFAULT` (default).
353 353 """
354 354 if self.commit:
355 355 mode = self.commit.get_file_mode(self.path)
356 356 else:
357 357 mode = self._mode
358 358 return mode
359 359
360 360 @LazyProperty
361 361 def raw_bytes(self):
362 362 """
363 363 Returns lazily the raw bytes of the FileNode.
364 364 """
365 365 if self.commit:
366 366 if self._content is None:
367 367 self._content = self.commit.get_file_content(self.path)
368 368 content = self._content
369 369 else:
370 370 content = self._content
371 371 return content
372 372
373 373 def stream_bytes(self):
374 374 """
375 375 Returns an iterator that will stream the content of the file directly from
376 376 vcsserver without loading it to memory.
377 377 """
378 378 if self.commit:
379 379 return self.commit.get_file_content_streamed(self.path)
380 380 raise NodeError("Cannot retrieve stream_bytes without related commit attribute")
381 381
382 382 @LazyProperty
383 383 def md5(self):
384 384 """
385 385 Returns md5 of the file node.
386 386 """
387 387 return md5(self.raw_bytes)
388 388
389 389 def metadata_uncached(self):
390 390 """
391 391 Returns md5, binary flag of the file node, without any cache usage.
392 392 """
393 393
394 394 content = self.content_uncached()
395 395
396 396 is_binary = content and '\0' in content
397 397 size = 0
398 398 if content:
399 399 size = len(content)
400 400
401 401 return is_binary, md5(content), size, content
402 402
403 403 def content_uncached(self):
404 404 """
405 405 Returns lazily content of the FileNode. If possible, would try to
406 406 decode content from UTF-8.
407 407 """
408 408 if self.commit:
409 409 content = self.commit.get_file_content(self.path)
410 410 else:
411 411 content = self._content
412 412 return content
413 413
414 414 @LazyProperty
415 415 def content(self):
416 416 """
417 417 Returns lazily content of the FileNode. If possible, would try to
418 418 decode content from UTF-8.
419 419 """
420 420 content = self.raw_bytes
421 421
422 422 if self.is_binary:
423 423 return content
424 424 return safe_unicode(content)
425 425
426 426 @LazyProperty
427 427 def size(self):
428 428 if self.commit:
429 429 return self.commit.get_file_size(self.path)
430 430 raise NodeError(
431 431 "Cannot retrieve size of the file without related "
432 432 "commit attribute")
433 433
434 434 @LazyProperty
435 435 def message(self):
436 436 if self.commit:
437 437 return self.last_commit.message
438 438 raise NodeError(
439 439 "Cannot retrieve message of the file without related "
440 440 "commit attribute")
441 441
442 442 @LazyProperty
443 443 def last_commit(self):
444 444 if self.commit:
445 445 pre_load = ["author", "date", "message", "parents"]
446 446 return self.commit.get_path_commit(self.path, pre_load=pre_load)
447 447 raise NodeError(
448 448 "Cannot retrieve last commit of the file without "
449 449 "related commit attribute")
450 450
451 451 def get_mimetype(self):
452 452 """
453 453 Mimetype is calculated based on the file's content. If ``_mimetype``
454 454 attribute is available, it will be returned (backends which store
455 455 mimetypes or can easily recognize them, should set this private
456 456 attribute to indicate that type should *NOT* be calculated).
457 457 """
458 458
459 459 if hasattr(self, '_mimetype'):
460 460 if (isinstance(self._mimetype, (tuple, list,)) and
461 461 len(self._mimetype) == 2):
462 462 return self._mimetype
463 463 else:
464 464 raise NodeError('given _mimetype attribute must be an 2 '
465 465 'element list or tuple')
466 466
467 467 db = get_mimetypes_db()
468 468 mtype, encoding = db.guess_type(self.name)
469 469
470 470 if mtype is None:
471 471 if not self.is_largefile() and self.is_binary:
472 472 mtype = 'application/octet-stream'
473 473 encoding = None
474 474 else:
475 475 mtype = 'text/plain'
476 476 encoding = None
477 477
478 478 # try with pygments
479 479 try:
480 480 from pygments.lexers import get_lexer_for_filename
481 481 mt = get_lexer_for_filename(self.name).mimetypes
482 482 except Exception:
483 483 mt = None
484 484
485 485 if mt:
486 486 mtype = mt[0]
487 487
488 488 return mtype, encoding
489 489
490 490 @LazyProperty
491 491 def mimetype(self):
492 492 """
493 493 Wrapper around full mimetype info. It returns only type of fetched
494 494 mimetype without the encoding part. use get_mimetype function to fetch
495 495 full set of (type,encoding)
496 496 """
497 497 return self.get_mimetype()[0]
498 498
499 499 @LazyProperty
500 500 def mimetype_main(self):
501 501 return self.mimetype.split('/')[0]
502 502
503 503 @classmethod
504 504 def get_lexer(cls, filename, content=None):
505 505 from pygments import lexers
506 506
507 507 extension = filename.split('.')[-1]
508 508 lexer = None
509 509
510 510 try:
511 511 lexer = lexers.guess_lexer_for_filename(
512 512 filename, content, stripnl=False)
513 513 except lexers.ClassNotFound:
514 514 lexer = None
515 515
516 516 # try our EXTENSION_MAP
517 517 if not lexer:
518 518 try:
519 519 lexer_class = LANGUAGES_EXTENSIONS_MAP.get(extension)
520 520 if lexer_class:
521 521 lexer = lexers.get_lexer_by_name(lexer_class[0])
522 522 except lexers.ClassNotFound:
523 523 lexer = None
524 524
525 525 if not lexer:
526 526 lexer = lexers.TextLexer(stripnl=False)
527 527
528 528 return lexer
529 529
530 530 @LazyProperty
531 531 def lexer(self):
532 532 """
533 533 Returns pygment's lexer class. Would try to guess lexer taking file's
534 534 content, name and mimetype.
535 535 """
536 536 return self.get_lexer(self.name, self.content)
537 537
538 538 @LazyProperty
539 539 def lexer_alias(self):
540 540 """
541 541 Returns first alias of the lexer guessed for this file.
542 542 """
543 543 return self.lexer.aliases[0]
544 544
545 545 @LazyProperty
546 546 def history(self):
547 547 """
548 548 Returns a list of commit for this file in which the file was changed
549 549 """
550 550 if self.commit is None:
551 551 raise NodeError('Unable to get commit for this FileNode')
552 552 return self.commit.get_path_history(self.path)
553 553
554 554 @LazyProperty
555 555 def annotate(self):
556 556 """
557 557 Returns a list of three element tuples with lineno, commit and line
558 558 """
559 559 if self.commit is None:
560 560 raise NodeError('Unable to get commit for this FileNode')
561 561 pre_load = ["author", "date", "message", "parents"]
562 562 return self.commit.get_file_annotate(self.path, pre_load=pre_load)
563 563
564 564 @LazyProperty
565 565 def state(self):
566 566 if not self.commit:
567 567 raise NodeError(
568 568 "Cannot check state of the node if it's not "
569 569 "linked with commit")
570 570 elif self.path in (node.path for node in self.commit.added):
571 571 return NodeState.ADDED
572 572 elif self.path in (node.path for node in self.commit.changed):
573 573 return NodeState.CHANGED
574 574 else:
575 575 return NodeState.NOT_CHANGED
576 576
577 577 @LazyProperty
578 578 def is_binary(self):
579 579 """
580 580 Returns True if file has binary content.
581 581 """
582 582 if self.commit:
583 583 return self.commit.is_node_binary(self.path)
584 584 else:
585 585 raw_bytes = self._content
586 586 return raw_bytes and '\0' in raw_bytes
587 587
588 588 @LazyProperty
589 589 def extension(self):
590 590 """Returns filenode extension"""
591 591 return self.name.split('.')[-1]
592 592
593 593 @property
594 594 def is_executable(self):
595 595 """
596 596 Returns ``True`` if file has executable flag turned on.
597 597 """
598 598 return bool(self.mode & stat.S_IXUSR)
599 599
600 600 def get_largefile_node(self):
601 601 """
602 602 Try to return a Mercurial FileNode from this node. It does internal
603 603 checks inside largefile store, if that file exist there it will
604 604 create special instance of LargeFileNode which can get content from
605 605 LF store.
606 606 """
607 607 if self.commit:
608 608 return self.commit.get_largefile_node(self.path)
609 609
610 610 def count_lines(self, content, count_empty=False):
611 611
612 612 if count_empty:
613 613 all_lines = 0
614 614 empty_lines = 0
615 615 for line in content.splitlines(True):
616 616 if line == '\n':
617 617 empty_lines += 1
618 618 all_lines += 1
619 619
620 620 return all_lines, all_lines - empty_lines
621 621 else:
622 622 # fast method
623 623 empty_lines = all_lines = content.count('\n')
624 624 if all_lines == 0 and content:
625 625 # one-line without a newline
626 626 empty_lines = all_lines = 1
627 627
628 628 return all_lines, empty_lines
629 629
630 630 def lines(self, count_empty=False):
631 631 all_lines, empty_lines = 0, 0
632 632
633 633 if not self.is_binary:
634 634 content = self.content
635 635 all_lines, empty_lines = self.count_lines(content, count_empty=count_empty)
636 636 return all_lines, empty_lines
637 637
638 638 def __repr__(self):
639 639 return '<%s %r @ %s>' % (self.__class__.__name__, self.path,
640 640 getattr(self.commit, 'short_id', ''))
641 641
642 642
class RemovedFileNode(FileNode):
    """
    Dummy FileNode class - trying to access any public attribute except path,
    name, kind or state (or methods/attributes checking those two) would raise
    RemovedFileNodeError.
    """
    # Public attributes that stay accessible; everything else is blocked by
    # __getattribute__ below.
    ALLOWED_ATTRIBUTES = [
        'name', 'path', 'state', 'is_root', 'is_file', 'is_dir', 'kind',
        'added', 'changed', 'not_changed', 'removed'
    ]

    def __init__(self, path):
        """
        :param path: relative path to the node
        """
        super(RemovedFileNode, self).__init__(path=path)

    def __getattribute__(self, attr):
        # underscore-prefixed attributes are always allowed so internal
        # machinery (e.g. LazyProperty caching) keeps working
        if attr.startswith('_') or attr in RemovedFileNode.ALLOWED_ATTRIBUTES:
            return super(RemovedFileNode, self).__getattribute__(attr)
        raise RemovedFileNodeError(
            "Cannot access attribute %s on RemovedFileNode" % attr)

    @LazyProperty
    def state(self):
        # a removed file is, by definition, always in REMOVED state
        return NodeState.REMOVED
669 669
670 670
class DirNode(Node):
    """
    DirNode stores list of files and directories within this node.
    Nodes may be used standalone but within repository context they
    lazily fetch data within same repository's commit.
    """

    def __init__(self, path, nodes=(), commit=None):
        """
        Only one of ``nodes`` and ``commit`` may be given. Passing both
        would raise ``NodeError`` exception.

        :param path: relative path to the node
        :param nodes: content may be passed to constructor
        :param commit: if given, will use it to lazily fetch content
        :raises NodeError: when both ``nodes`` and ``commit`` are given
        """
        if nodes and commit:
            raise NodeError("Cannot use both nodes and commit")
        super(DirNode, self).__init__(path, NodeKind.DIR)
        self.commit = commit
        self._nodes = nodes

    @LazyProperty
    def content(self):
        # directories have no content; accessing it is a programming error
        raise NodeError(
            "%s represents a dir and has no `content` attribute" % self)

    @LazyProperty
    def nodes(self):
        """
        Sorted child nodes of this directory. As a side effect populates
        ``_nodes_dict`` (path -> node) used by :meth:`get_node`.
        """
        if self.commit:
            nodes = self.commit.get_nodes(self.path)
        else:
            nodes = self._nodes
        self._nodes_dict = dict((node.path, node) for node in nodes)
        return sorted(nodes)

    @LazyProperty
    def files(self):
        # only the file children, sorted
        return sorted((node for node in self.nodes if node.is_file()))

    @LazyProperty
    def dirs(self):
        # only the directory children, sorted
        return sorted((node for node in self.nodes if node.is_dir()))

    def __iter__(self):
        for node in self.nodes:
            yield node

    def get_node(self, path):
        """
        Returns node from within this particular ``DirNode``, so it is now
        allowed to fetch, i.e. node located at 'docs/api/index.rst' from node
        'docs'. In order to access deeper nodes one must fetch nodes between
        them first - this would work::

           docs = root.get_node('docs')
           docs.get_node('api').get_node('index.rst')

        :param: path - relative to the current node

        .. note::
           To access lazily (as in example above) node have to be initialized
           with related commit object - without it node is out of
           context and may know nothing about anything else than nearest
           (located at same level) nodes.

        :raises NodeError: when the path is empty, deeper access is attempted
            without a commit, or the node does not exist
        """
        try:
            path = path.rstrip('/')
            if path == '':
                raise NodeError("Cannot retrieve node without path")
            self.nodes  # access nodes first in order to set _nodes_dict
            paths = path.split('/')
            if len(paths) == 1:
                if not self.is_root():
                    path = '/'.join((self.path, paths[0]))
                else:
                    path = paths[0]
                return self._nodes_dict[path]
            elif len(paths) > 1:
                if self.commit is None:
                    raise NodeError("Cannot access deeper nodes without commit")
                else:
                    # recurse: resolve the first segment, then the rest
                    path1, path2 = paths[0], '/'.join(paths[1:])
                    return self.get_node(path1).get_node(path2)
            else:
                raise KeyError
        except KeyError:
            raise NodeError("Node does not exist at %s" % path)

    @LazyProperty
    def state(self):
        raise NodeError("Cannot access state of DirNode")

    @LazyProperty
    def size(self):
        # total size of all files under this directory (recursive walk)
        size = 0
        for root, dirs, files in self.commit.walk(self.path):
            for f in files:
                size += f.size

        return size

    @LazyProperty
    def last_commit(self):
        """
        Last commit that touched this directory's path.

        :raises NodeError: when node is not linked with a commit
        """
        if self.commit:
            pre_load = ["author", "date", "message", "parents"]
            return self.commit.get_path_commit(self.path, pre_load=pre_load)
        raise NodeError(
            "Cannot retrieve last commit of the file without "
            "related commit attribute")

    def __repr__(self):
        return '<%s %r @ %s>' % (self.__class__.__name__, self.path,
                                 getattr(self.commit, 'short_id', ''))
785 785
786 786
class RootNode(DirNode):
    """
    Special ``DirNode`` representing the top-level (root) directory of a
    repository; its path is always the empty string.
    """

    def __init__(self, nodes=(), commit=None):
        # root is simply a DirNode anchored at the empty path
        super(RootNode, self).__init__(path='', nodes=nodes, commit=commit)

    def __repr__(self):
        return '<{}>'.format(self.__class__.__name__)
797 797
798 798
class SubModuleNode(Node):
    """
    represents a SubModule of Git or SubRepo of Mercurial
    """
    # submodules are pointers, not real content
    is_binary = False
    size = 0

    def __init__(self, name, url=None, commit=None, alias=None):
        """
        :param name: path of the submodule within the repository
        :param url: linking URL; derived from the path when not given
        :param commit: commit id the submodule points at
        :param alias: backend alias (svn/git/hg)
        """
        # NOTE(review): intentionally does not call Node.__init__ — attributes
        # are set directly here; confirm this stays in sync with Node
        self.path = name
        self.kind = NodeKind.SUBMODULE
        self.alias = alias

        # we have to use EmptyCommit here since this can point to svn/git/hg
        # submodules we cannot get from repository
        self.commit = EmptyCommit(str(commit), alias=alias)
        self.url = url or self._extract_submodule_url()

    def __repr__(self):
        return '<%s %r @ %s>' % (self.__class__.__name__, self.path,
                                 getattr(self.commit, 'short_id', ''))

    def _extract_submodule_url(self):
        # TODO: find a way to parse gits submodule file and extract the
        # linking URL
        return self.path

    @LazyProperty
    def name(self):
        """
        Returns name of the node so if its path
        then only last part is returned.
        """
        org = safe_unicode(self.path.rstrip('/').split('/')[-1])
        return '%s @ %s' % (org, self.commit.short_id)
833 833
834 834
class LargeFileNode(FileNode):
    """
    FileNode backed by a file in the largefiles (LF) store. Unlike a regular
    FileNode, ``path`` here is an absolute filesystem path into the store and
    content is read directly from disk.
    """

    def __init__(self, path, url=None, commit=None, alias=None, org_path=None):
        """
        :param path: absolute filesystem path inside the largefile store
        :param org_path: original in-repository path, exposed as ``name``
        """
        # NOTE(review): intentionally skips FileNode.__init__ — path here is
        # system-absolute and content comes from disk, not a commit
        self.path = path
        self.org_path = org_path
        self.kind = NodeKind.LARGEFILE
        self.alias = alias
        self._content = ''

    def _validate_path(self, path):
        """
        we override check since the LargeFileNode path is system absolute
        """
        pass

    def __repr__(self):
        return '<%s %r>' % (self.__class__.__name__, self.path)

    @LazyProperty
    def size(self):
        # size straight from the filesystem
        return os.stat(self.path).st_size

    @LazyProperty
    def raw_bytes(self):
        # full file content read from the LF store
        with open(self.path, 'rb') as f:
            content = f.read()
        return content

    @LazyProperty
    def name(self):
        """
        Overwrites name to be the org lf path
        """
        return self.org_path

    def stream_bytes(self):
        # stream from disk in 16 KB chunks instead of loading into memory
        with open(self.path, 'rb') as stream:
            while True:
                data = stream.read(16 * 1024)
                if not data:
                    break
                yield data
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
General Comments 0
You need to be logged in to leave comments. Login now