py3: remove usage of basestring
Author: dan
Changeset: r3425:d577b778 (default branch)

The requested changes are too big and content was truncated.
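The diff below swaps every use of the Python 2-only `basestring` builtin for `pyramid.compat.string_types`, so the `isinstance()` checks keep working on Python 3. A minimal sketch of the pattern, assuming Pyramid 1.x (where `pyramid.compat` still ships; it was removed in Pyramid 2.0) and a purely illustrative `coerce_flag` helper:

.. code-block:: python

    # py2/py3-safe string check: string_types is (basestring,) on Python 2
    # and (str,) on Python 3, so isinstance() works under both interpreters.
    from pyramid import compat

    def coerce_flag(value):
        # API callers may send a real boolean or a string such as 'true'
        if isinstance(value, compat.string_types):
            return value.strip().lower() in ('1', 'true', 'yes', 'on')
        return bool(value)

    print(coerce_flag('True'))  # True
    print(coerce_flag(False))   # False

The same guard appears in each hunk below; only the value being inspected differs.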

@@ -1,563 +1,564 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 from pyramid import compat
22 23
23 24 from rhodecode.api import (
24 25 jsonrpc_method, JSONRPCError, JSONRPCForbidden, JSONRPCValidationError)
25 26 from rhodecode.api.utils import (
26 27 Optional, OAttr, has_superadmin_permission, get_user_or_error, store_update)
27 28 from rhodecode.lib import audit_logger
28 29 from rhodecode.lib.auth import AuthUser, PasswordGenerator
29 30 from rhodecode.lib.exceptions import DefaultUserException
30 31 from rhodecode.lib.utils2 import safe_int, str2bool
31 32 from rhodecode.model.db import Session, User, Repository
32 33 from rhodecode.model.user import UserModel
33 34 from rhodecode.model import validation_schema
34 35 from rhodecode.model.validation_schema.schemas import user_schema
35 36
36 37 log = logging.getLogger(__name__)
37 38
38 39
39 40 @jsonrpc_method()
40 41 def get_user(request, apiuser, userid=Optional(OAttr('apiuser'))):
41 42 """
42 43 Returns the information associated with a username or userid.
43 44
44 45 * If the ``userid`` is not set, this command returns the information
45 46 for the ``userid`` calling the method.
46 47
47 48 .. note::
48 49
49 50 Normal users may only run this command against their ``userid``. For
50 51 full privileges you must run this command using an |authtoken| with
51 52 admin rights.
52 53
53 54 :param apiuser: This is filled automatically from the |authtoken|.
54 55 :type apiuser: AuthUser
55 56 :param userid: Sets the userid for which data will be returned.
56 57 :type userid: Optional(str or int)
57 58
58 59 Example output:
59 60
60 61 .. code-block:: bash
61 62
62 63 {
63 64 "error": null,
64 65 "id": <id>,
65 66 "result": {
66 67 "active": true,
67 68 "admin": false,
68 69 "api_keys": [ list of keys ],
69 70 "auth_tokens": [ list of tokens with details ],
70 71 "email": "user@example.com",
71 72 "emails": [
72 73 "user@example.com"
73 74 ],
74 75 "extern_name": "rhodecode",
75 76 "extern_type": "rhodecode",
76 77 "firstname": "username",
77 78 "ip_addresses": [],
78 79 "language": null,
79 80 "last_login": "Timestamp",
80 81 "last_activity": "Timestamp",
81 82 "lastname": "surname",
82 83 "permissions": <deprecated>,
83 84 "permissions_summary": {
84 85 "global": [
85 86 "hg.inherit_default_perms.true",
86 87 "usergroup.read",
87 88 "hg.repogroup.create.false",
88 89 "hg.create.none",
89 90 "hg.password_reset.enabled",
90 91 "hg.extern_activate.manual",
91 92 "hg.create.write_on_repogroup.false",
92 93 "hg.usergroup.create.false",
93 94 "group.none",
94 95 "repository.none",
95 96 "hg.register.none",
96 97 "hg.fork.repository"
97 98 ],
98 99 "repositories": { "username/example": "repository.write"},
99 100 "repositories_groups": { "user-group/repo": "group.none" },
100 101 "user_groups": { "user_group_name": "usergroup.read" }
101 102 }
102 103 "user_id": 32,
103 104 "username": "username"
104 105 }
105 106 }
106 107 """
107 108
108 109 if not has_superadmin_permission(apiuser):
109 110 # make sure a normal user does not pass someone else's userid,
110 111 # they are not allowed to do that
111 112 if not isinstance(userid, Optional) and userid != apiuser.user_id:
112 113 raise JSONRPCError('userid is not the same as your user')
113 114
114 115 userid = Optional.extract(userid, evaluate_locals=locals())
115 116 userid = getattr(userid, 'user_id', userid)
116 117
117 118 user = get_user_or_error(userid)
118 119 data = user.get_api_data(include_secrets=True)
119 120 permissions = AuthUser(user_id=user.user_id).permissions
120 121 data['permissions'] = permissions # TODO(marcink): should be deprecated
121 122 data['permissions_summary'] = permissions
122 123 return data
123 124
124 125
125 126 @jsonrpc_method()
126 127 def get_users(request, apiuser):
127 128 """
128 129 Lists all users in the |RCE| user database.
129 130
130 131 This command can only be run using an |authtoken| with admin rights to
131 132 the specified repository.
132 133
133 134 This command takes the following options:
134 135
135 136 :param apiuser: This is filled automatically from the |authtoken|.
136 137 :type apiuser: AuthUser
137 138
138 139 Example output:
139 140
140 141 .. code-block:: bash
141 142
142 143 id : <id_given_in_input>
143 144 result: [<user_object>, ...]
144 145 error: null
145 146 """
146 147
147 148 if not has_superadmin_permission(apiuser):
148 149 raise JSONRPCForbidden()
149 150
150 151 result = []
151 152 users_list = User.query().order_by(User.username) \
152 153 .filter(User.username != User.DEFAULT_USER) \
153 154 .all()
154 155 for user in users_list:
155 156 result.append(user.get_api_data(include_secrets=True))
156 157 return result
157 158
158 159
159 160 @jsonrpc_method()
160 161 def create_user(request, apiuser, username, email, password=Optional(''),
161 162 firstname=Optional(''), lastname=Optional(''),
162 163 active=Optional(True), admin=Optional(False),
163 164 extern_name=Optional('rhodecode'),
164 165 extern_type=Optional('rhodecode'),
165 166 force_password_change=Optional(False),
166 167 create_personal_repo_group=Optional(None)):
167 168 """
168 169 Creates a new user and returns the new user object.
169 170
170 171 This command can only be run using an |authtoken| with admin rights to
171 172 the specified repository.
172 173
173 174 This command takes the following options:
174 175
175 176 :param apiuser: This is filled automatically from the |authtoken|.
176 177 :type apiuser: AuthUser
177 178 :param username: Set the new username.
178 179 :type username: str or int
179 180 :param email: Set the user email address.
180 181 :type email: str
181 182 :param password: Set the new user password.
182 183 :type password: Optional(str)
183 184 :param firstname: Set the new user firstname.
184 185 :type firstname: Optional(str)
185 186 :param lastname: Set the new user surname.
186 187 :type lastname: Optional(str)
187 188 :param active: Set the user as active.
188 189 :type active: Optional(``True`` | ``False``)
189 190 :param admin: Give the new user admin rights.
190 191 :type admin: Optional(``True`` | ``False``)
191 192 :param extern_name: Set the authentication plugin name.
192 193 Using LDAP this is filled with LDAP UID.
193 194 :type extern_name: Optional(str)
194 195 :param extern_type: Set the new user authentication plugin.
195 196 :type extern_type: Optional(str)
196 197 :param force_password_change: Force the new user to change password
197 198 on next login.
198 199 :type force_password_change: Optional(``True`` | ``False``)
199 200 :param create_personal_repo_group: Create personal repo group for this user
200 201 :type create_personal_repo_group: Optional(``True`` | ``False``)
201 202
202 203 Example output:
203 204
204 205 .. code-block:: bash
205 206
206 207 id : <id_given_in_input>
207 208 result: {
208 209 "msg" : "created new user `<username>`",
209 210 "user": <user_obj>
210 211 }
211 212 error: null
212 213
213 214 Example error output:
214 215
215 216 .. code-block:: bash
216 217
217 218 id : <id_given_in_input>
218 219 result : null
219 220 error : {
220 221 "user `<username>` already exist"
221 222 or
222 223 "email `<email>` already exist"
223 224 or
224 225 "failed to create user `<username>`"
225 226 }
226 227
227 228 """
228 229 if not has_superadmin_permission(apiuser):
229 230 raise JSONRPCForbidden()
230 231
231 232 if UserModel().get_by_username(username):
232 233 raise JSONRPCError("user `%s` already exist" % (username,))
233 234
234 235 if UserModel().get_by_email(email, case_insensitive=True):
235 236 raise JSONRPCError("email `%s` already exist" % (email,))
236 237
237 238 # generate a random password if we were actually given an
238 239 # extern_name and it's not rhodecode
239 240 if (not isinstance(extern_name, Optional) and
240 241 Optional.extract(extern_name) != 'rhodecode'):
241 242 # generate temporary password if user is external
242 243 password = PasswordGenerator().gen_password(length=16)
243 244 create_repo_group = Optional.extract(create_personal_repo_group)
244 if isinstance(create_repo_group, basestring):
245 if isinstance(create_repo_group, compat.string_types):
245 246 create_repo_group = str2bool(create_repo_group)
246 247
247 248 username = Optional.extract(username)
248 249 password = Optional.extract(password)
249 250 email = Optional.extract(email)
250 251 first_name = Optional.extract(firstname)
251 252 last_name = Optional.extract(lastname)
252 253 active = Optional.extract(active)
253 254 admin = Optional.extract(admin)
254 255 extern_type = Optional.extract(extern_type)
255 256 extern_name = Optional.extract(extern_name)
256 257
257 258 schema = user_schema.UserSchema().bind(
258 259 # user caller
259 260 user=apiuser)
260 261 try:
261 262 schema_data = schema.deserialize(dict(
262 263 username=username,
263 264 email=email,
264 265 password=password,
265 266 first_name=first_name,
266 267 last_name=last_name,
267 268 active=active,
268 269 admin=admin,
269 270 extern_type=extern_type,
270 271 extern_name=extern_name,
271 272 ))
272 273 except validation_schema.Invalid as err:
273 274 raise JSONRPCValidationError(colander_exc=err)
274 275
275 276 try:
276 277 user = UserModel().create_or_update(
277 278 username=schema_data['username'],
278 279 password=schema_data['password'],
279 280 email=schema_data['email'],
280 281 firstname=schema_data['first_name'],
281 282 lastname=schema_data['last_name'],
282 283 active=schema_data['active'],
283 284 admin=schema_data['admin'],
284 285 extern_type=schema_data['extern_type'],
285 286 extern_name=schema_data['extern_name'],
286 287 force_password_change=Optional.extract(force_password_change),
287 288 create_repo_group=create_repo_group
288 289 )
289 290 Session().flush()
290 291 creation_data = user.get_api_data()
291 292 audit_logger.store_api(
292 293 'user.create', action_data={'data': creation_data},
293 294 user=apiuser)
294 295
295 296 Session().commit()
296 297 return {
297 298 'msg': 'created new user `%s`' % username,
298 299 'user': user.get_api_data(include_secrets=True)
299 300 }
300 301 except Exception:
301 302 log.exception('Error occurred during creation of user')
302 303 raise JSONRPCError('failed to create user `%s`' % (username,))
303 304
304 305
305 306 @jsonrpc_method()
306 307 def update_user(request, apiuser, userid, username=Optional(None),
307 308 email=Optional(None), password=Optional(None),
308 309 firstname=Optional(None), lastname=Optional(None),
309 310 active=Optional(None), admin=Optional(None),
310 311 extern_type=Optional(None), extern_name=Optional(None), ):
311 312 """
312 313 Updates the details for the specified user, if that user exists.
313 314
314 315 This command can only be run using an |authtoken| with admin rights to
315 316 the specified repository.
316 317
317 318 This command takes the following options:
318 319
319 320 :param apiuser: This is filled automatically from |authtoken|.
320 321 :type apiuser: AuthUser
321 322 :param userid: Set the ``userid`` to update.
322 323 :type userid: str or int
323 324 :param username: Set the new username.
324 325 :type username: str or int
325 326 :param email: Set the new email.
326 327 :type email: str
327 328 :param password: Set the new password.
328 329 :type password: Optional(str)
329 330 :param firstname: Set the new first name.
330 331 :type firstname: Optional(str)
331 332 :param lastname: Set the new surname.
332 333 :type lastname: Optional(str)
333 334 :param active: Set the new user as active.
334 335 :type active: Optional(``True`` | ``False``)
335 336 :param admin: Give the user admin rights.
336 337 :type admin: Optional(``True`` | ``False``)
337 338 :param extern_name: Set the authentication plugin user name.
338 339 Using LDAP this is filled with LDAP UID.
339 340 :type extern_name: Optional(str)
340 341 :param extern_type: Set the authentication plugin type.
341 342 :type extern_type: Optional(str)
342 343
343 344
344 345 Example output:
345 346
346 347 .. code-block:: bash
347 348
348 349 id : <id_given_in_input>
349 350 result: {
350 351 "msg" : "updated user ID:<userid> <username>",
351 352 "user": <user_object>,
352 353 }
353 354 error: null
354 355
355 356 Example error output:
356 357
357 358 .. code-block:: bash
358 359
359 360 id : <id_given_in_input>
360 361 result : null
361 362 error : {
362 363 "failed to update user `<username>`"
363 364 }
364 365
365 366 """
366 367 if not has_superadmin_permission(apiuser):
367 368 raise JSONRPCForbidden()
368 369
369 370 user = get_user_or_error(userid)
370 371 old_data = user.get_api_data()
371 372 # only non optional arguments will be stored in updates
372 373 updates = {}
373 374
374 375 try:
375 376
376 377 store_update(updates, username, 'username')
377 378 store_update(updates, password, 'password')
378 379 store_update(updates, email, 'email')
379 380 store_update(updates, firstname, 'name')
380 381 store_update(updates, lastname, 'lastname')
381 382 store_update(updates, active, 'active')
382 383 store_update(updates, admin, 'admin')
383 384 store_update(updates, extern_name, 'extern_name')
384 385 store_update(updates, extern_type, 'extern_type')
385 386
386 387 user = UserModel().update_user(user, **updates)
387 388 audit_logger.store_api(
388 389 'user.edit', action_data={'old_data': old_data},
389 390 user=apiuser)
390 391 Session().commit()
391 392 return {
392 393 'msg': 'updated user ID:%s %s' % (user.user_id, user.username),
393 394 'user': user.get_api_data(include_secrets=True)
394 395 }
395 396 except DefaultUserException:
396 397 log.exception("Default user edit exception")
397 398 raise JSONRPCError('editing default user is forbidden')
398 399 except Exception:
399 400 log.exception("Error occurred during update of user")
400 401 raise JSONRPCError('failed to update user `%s`' % (userid,))
401 402
402 403
403 404 @jsonrpc_method()
404 405 def delete_user(request, apiuser, userid):
405 406 """
406 407 Deletes the specified user from the |RCE| user database.
407 408
408 409 This command can only be run using an |authtoken| with admin rights to
409 410 the specified repository.
410 411
411 412 .. important::
412 413
413 414 Ensure all open pull requests and open code review
414 415 requests to this user are closed.
415 416
416 417 Also ensure all repositories, or repository groups owned by this
417 418 user are reassigned before deletion.
418 419
419 420 This command takes the following options:
420 421
421 422 :param apiuser: This is filled automatically from the |authtoken|.
422 423 :type apiuser: AuthUser
423 424 :param userid: Set the user to delete.
424 425 :type userid: str or int
425 426
426 427 Example output:
427 428
428 429 .. code-block:: bash
429 430
430 431 id : <id_given_in_input>
431 432 result: {
432 433 "msg" : "deleted user ID:<userid> <username>",
433 434 "user": null
434 435 }
435 436 error: null
436 437
437 438 Example error output:
438 439
439 440 .. code-block:: bash
440 441
441 442 id : <id_given_in_input>
442 443 result : null
443 444 error : {
444 445 "failed to delete user ID:<userid> <username>"
445 446 }
446 447
447 448 """
448 449 if not has_superadmin_permission(apiuser):
449 450 raise JSONRPCForbidden()
450 451
451 452 user = get_user_or_error(userid)
452 453 old_data = user.get_api_data()
453 454 try:
454 455 UserModel().delete(userid)
455 456 audit_logger.store_api(
456 457 'user.delete', action_data={'old_data': old_data},
457 458 user=apiuser)
458 459
459 460 Session().commit()
460 461 return {
461 462 'msg': 'deleted user ID:%s %s' % (user.user_id, user.username),
462 463 'user': None
463 464 }
464 465 except Exception:
465 466 log.exception("Error occurred during deleting of user")
466 467 raise JSONRPCError(
467 468 'failed to delete user ID:%s %s' % (user.user_id, user.username))
468 469
469 470
470 471 @jsonrpc_method()
471 472 def get_user_locks(request, apiuser, userid=Optional(OAttr('apiuser'))):
472 473 """
473 474 Displays all repositories locked by the specified user.
474 475
475 476 * If this command is run by a non-admin user, it returns
476 477 a list of |repos| locked by that user.
477 478
478 479 This command takes the following options:
479 480
480 481 :param apiuser: This is filled automatically from the |authtoken|.
481 482 :type apiuser: AuthUser
482 483 :param userid: Sets the userid whose list of locked |repos| will be
483 484 displayed.
484 485 :type userid: Optional(str or int)
485 486
486 487 Example output:
487 488
488 489 .. code-block:: bash
489 490
490 491 id : <id_given_in_input>
491 492 result : {
492 493 [repo_object, repo_object,...]
493 494 }
494 495 error : null
495 496 """
496 497
497 498 include_secrets = False
498 499 if not has_superadmin_permission(apiuser):
499 500 # make sure a normal user does not pass someone else's userid,
500 501 # they are not allowed to do that
501 502 if not isinstance(userid, Optional) and userid != apiuser.user_id:
502 503 raise JSONRPCError('userid is not the same as your user')
503 504 else:
504 505 include_secrets = True
505 506
506 507 userid = Optional.extract(userid, evaluate_locals=locals())
507 508 userid = getattr(userid, 'user_id', userid)
508 509 user = get_user_or_error(userid)
509 510
510 511 ret = []
511 512
512 513 # show all locks
513 514 for r in Repository.getAll():
514 515 _user_id, _time, _reason = r.locked
515 516 if _user_id and _time:
516 517 _api_data = r.get_api_data(include_secrets=include_secrets)
517 518 # if we use user filter just show the locks for this user
518 519 if safe_int(_user_id) == user.user_id:
519 520 ret.append(_api_data)
520 521
521 522 return ret
522 523
523 524
524 525 @jsonrpc_method()
525 526 def get_user_audit_logs(request, apiuser, userid=Optional(OAttr('apiuser'))):
526 527 """
527 528 Fetches all action logs made by the specified user.
528 529
529 530 This command takes the following options:
530 531
531 532 :param apiuser: This is filled automatically from the |authtoken|.
532 533 :type apiuser: AuthUser
533 534 :param userid: Sets the userid whose list of locked |repos| will be
534 535 displayed.
535 536 :type userid: Optional(str or int)
536 537
537 538 Example output:
538 539
539 540 .. code-block:: bash
540 541
541 542 id : <id_given_in_input>
542 543 result : {
543 544 [action, action,...]
544 545 }
545 546 error : null
546 547 """
547 548
548 549 if not has_superadmin_permission(apiuser):
549 550 # make sure a normal user does not pass someone else's userid,
550 551 # they are not allowed to do that
551 552 if not isinstance(userid, Optional) and userid != apiuser.user_id:
552 553 raise JSONRPCError('userid is not the same as your user')
553 554
554 555 userid = Optional.extract(userid, evaluate_locals=locals())
555 556 userid = getattr(userid, 'user_id', userid)
556 557 user = get_user_or_error(userid)
557 558
558 559 ret = []
559 560
560 561 # show all user actions
561 562 for entry in UserModel().get_user_log(user, filter_term=None):
562 563 ret.append(entry)
563 564 return ret
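For context on the JSON-RPC methods documented above, a hedged usage sketch: the endpoint and envelope below follow the usual RhodeCode JSON-RPC convention (`/_admin/api` with `id`, `auth_token`, `method` and `args`); the host, token and user data are placeholders, and the string value for `create_personal_repo_group` deliberately exercises the `str2bool` coercion shown in `create_user`:

.. code-block:: python

    import json
    from urllib import request  # Python 3 stdlib

    payload = {
        'id': 1,
        'auth_token': 'SECRET_TOKEN',                 # placeholder token
        'method': 'create_user',
        'args': {
            'username': 'newuser',
            'email': 'newuser@example.com',
            # a string is accepted here; the view coerces it via str2bool()
            'create_personal_repo_group': 'true',
        },
    }
    req = request.Request(
        'https://rhodecode.example.com/_admin/api',   # placeholder host
        data=json.dumps(payload).encode('utf-8'),
        headers={'Content-Type': 'application/json'})
    with request.urlopen(req) as resp:
        answer = json.load(resp)
    print(answer['result'], answer['error'])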
@@ -1,686 +1,687 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import time
22 22 import logging
23 23 import operator
24 24
25 from pyramid import compat
25 26 from pyramid.httpexceptions import HTTPFound, HTTPForbidden, HTTPBadRequest
26 27
27 28 from rhodecode.lib import helpers as h, diffs
28 29 from rhodecode.lib.utils2 import (
29 30 StrictAttributeDict, safe_int, datetime_to_time, safe_unicode)
30 31 from rhodecode.lib.vcs.exceptions import RepositoryRequirementError
31 32 from rhodecode.model import repo
32 33 from rhodecode.model import repo_group
33 34 from rhodecode.model import user_group
34 35 from rhodecode.model import user
35 36 from rhodecode.model.db import User
36 37 from rhodecode.model.scm import ScmModel
37 38 from rhodecode.model.settings import VcsSettingsModel
38 39
39 40 log = logging.getLogger(__name__)
40 41
41 42
42 43 ADMIN_PREFIX = '/_admin'
43 44 STATIC_FILE_PREFIX = '/_static'
44 45
45 46 URL_NAME_REQUIREMENTS = {
46 47 # group names can have a slash in them, but they must not end with a slash
47 48 'group_name': r'.*?[^/]',
48 49 'repo_group_name': r'.*?[^/]',
49 50 # repo names can have a slash in them, but they must not end with a slash
50 51 'repo_name': r'.*?[^/]',
51 52 # file path eats up everything at the end
52 53 'f_path': r'.*',
53 54 # reference types
54 55 'source_ref_type': '(branch|book|tag|rev|\%\(source_ref_type\)s)',
55 56 'target_ref_type': '(branch|book|tag|rev|\%\(target_ref_type\)s)',
56 57 }
57 58
58 59
59 60 def add_route_with_slash(config, name, pattern, **kw):
60 61 config.add_route(name, pattern, **kw)
61 62 if not pattern.endswith('/'):
62 63 config.add_route(name + '_slash', pattern + '/', **kw)
63 64
64 65
65 66 def add_route_requirements(route_path, requirements=None):
66 67 """
67 68 Adds regex requirements to pyramid routes using a mapping dict
68 69 e.g::
69 70 add_route_requirements('{repo_name}/settings')
70 71 """
71 72 requirements = requirements or URL_NAME_REQUIREMENTS
72 73 for key, regex in requirements.items():
73 74 route_path = route_path.replace('{%s}' % key, '{%s:%s}' % (key, regex))
74 75 return route_path
75 76
76 77
77 78 def get_format_ref_id(repo):
78 79 """Returns a `repo` specific reference formatter function"""
79 80 if h.is_svn(repo):
80 81 return _format_ref_id_svn
81 82 else:
82 83 return _format_ref_id
83 84
84 85
85 86 def _format_ref_id(name, raw_id):
86 87 """Default formatting of a given reference `name`"""
87 88 return name
88 89
89 90
90 91 def _format_ref_id_svn(name, raw_id):
91 92 """Special way of formatting a reference for Subversion including path"""
92 93 return '%s@%s' % (name, raw_id)
93 94
94 95
95 96 class TemplateArgs(StrictAttributeDict):
96 97 pass
97 98
98 99
99 100 class BaseAppView(object):
100 101
101 102 def __init__(self, context, request):
102 103 self.request = request
103 104 self.context = context
104 105 self.session = request.session
105 106 if not hasattr(request, 'user'):
106 107 # NOTE(marcink): edge case, we ended up in a matched route
107 108 # but probably outside of web-app context, e.g. API CALL/VCS CALL
108 109 if hasattr(request, 'vcs_call') or hasattr(request, 'rpc_method'):
109 110 log.warning('Unable to process request `%s` in this scope', request)
110 111 raise HTTPBadRequest()
111 112
112 113 self._rhodecode_user = request.user # auth user
113 114 self._rhodecode_db_user = self._rhodecode_user.get_instance()
114 115 self._maybe_needs_password_change(
115 116 request.matched_route.name, self._rhodecode_db_user)
116 117
117 118 def _maybe_needs_password_change(self, view_name, user_obj):
118 119 log.debug('Checking if user %s needs password change on view %s',
119 120 user_obj, view_name)
120 121 skip_user_views = [
121 122 'logout', 'login',
122 123 'my_account_password', 'my_account_password_update'
123 124 ]
124 125
125 126 if not user_obj:
126 127 return
127 128
128 129 if user_obj.username == User.DEFAULT_USER:
129 130 return
130 131
131 132 now = time.time()
132 133 should_change = user_obj.user_data.get('force_password_change')
133 134 change_after = safe_int(should_change) or 0
134 135 if should_change and now > change_after:
135 136 log.debug('User %s requires password change', user_obj)
136 137 h.flash('You are required to change your password', 'warning',
137 138 ignore_duplicate=True)
138 139
139 140 if view_name not in skip_user_views:
140 141 raise HTTPFound(
141 142 self.request.route_path('my_account_password'))
142 143
143 144 def _log_creation_exception(self, e, repo_name):
144 145 _ = self.request.translate
145 146 reason = None
146 147 if len(e.args) == 2:
147 148 reason = e.args[1]
148 149
149 150 if reason == 'INVALID_CERTIFICATE':
150 151 log.exception(
151 152 'Exception creating a repository: invalid certificate')
152 153 msg = (_('Error creating repository %s: invalid certificate')
153 154 % repo_name)
154 155 else:
155 156 log.exception("Exception creating a repository")
156 157 msg = (_('Error creating repository %s')
157 158 % repo_name)
158 159 return msg
159 160
160 161 def _get_local_tmpl_context(self, include_app_defaults=True):
161 162 c = TemplateArgs()
162 163 c.auth_user = self.request.user
163 164 # TODO(marcink): migrate the usage of c.rhodecode_user to c.auth_user
164 165 c.rhodecode_user = self.request.user
165 166
166 167 if include_app_defaults:
167 168 from rhodecode.lib.base import attach_context_attributes
168 169 attach_context_attributes(c, self.request, self.request.user.user_id)
169 170
170 171 return c
171 172
172 173 def _get_template_context(self, tmpl_args, **kwargs):
173 174
174 175 local_tmpl_args = {
175 176 'defaults': {},
176 177 'errors': {},
177 178 'c': tmpl_args
178 179 }
179 180 local_tmpl_args.update(kwargs)
180 181 return local_tmpl_args
181 182
182 183 def load_default_context(self):
183 184 """
184 185 example:
185 186
186 187 def load_default_context(self):
187 188 c = self._get_local_tmpl_context()
188 189 c.custom_var = 'foobar'
189 190
190 191 return c
191 192 """
192 193 raise NotImplementedError('Needs implementation in view class')
193 194
194 195
195 196 class RepoAppView(BaseAppView):
196 197
197 198 def __init__(self, context, request):
198 199 super(RepoAppView, self).__init__(context, request)
199 200 self.db_repo = request.db_repo
200 201 self.db_repo_name = self.db_repo.repo_name
201 202 self.db_repo_pull_requests = ScmModel().get_pull_requests(self.db_repo)
202 203
203 204 def _handle_missing_requirements(self, error):
204 205 log.error(
205 206 'Requirements are missing for repository %s: %s',
206 207 self.db_repo_name, safe_unicode(error))
207 208
208 209 def _get_local_tmpl_context(self, include_app_defaults=True):
209 210 _ = self.request.translate
210 211 c = super(RepoAppView, self)._get_local_tmpl_context(
211 212 include_app_defaults=include_app_defaults)
212 213
213 214 # register common vars for this type of view
214 215 c.rhodecode_db_repo = self.db_repo
215 216 c.repo_name = self.db_repo_name
216 217 c.repository_pull_requests = self.db_repo_pull_requests
217 218 self.path_filter = PathFilter(None)
218 219
219 220 c.repository_requirements_missing = {}
220 221 try:
221 222 self.rhodecode_vcs_repo = self.db_repo.scm_instance()
222 223 if self.rhodecode_vcs_repo:
223 224 path_perms = self.rhodecode_vcs_repo.get_path_permissions(
224 225 c.auth_user.username)
225 226 self.path_filter = PathFilter(path_perms)
226 227 except RepositoryRequirementError as e:
227 228 c.repository_requirements_missing = {'error': str(e)}
228 229 self._handle_missing_requirements(e)
229 230 self.rhodecode_vcs_repo = None
230 231
231 232 c.path_filter = self.path_filter # used by atom_feed_entry.mako
232 233
233 234 if self.rhodecode_vcs_repo is None:
234 235 # unable to fetch this repo as vcs instance, report back to user
235 236 h.flash(_(
236 237 "The repository `%(repo_name)s` cannot be loaded from the filesystem. "
237 238 "Please check that it exists and is not damaged.") %
238 239 {'repo_name': c.repo_name},
239 240 category='error', ignore_duplicate=True)
240 241 if c.repository_requirements_missing:
241 242 route = self.request.matched_route.name
242 243 if route.startswith(('edit_repo', 'repo_summary')):
243 244 # allow summary and edit repo on missing requirements
244 245 return c
245 246
246 247 raise HTTPFound(
247 248 h.route_path('repo_summary', repo_name=self.db_repo_name))
248 249
249 250 else: # redirect if we don't show missing requirements
250 251 raise HTTPFound(h.route_path('home'))
251 252
252 253 c.has_origin_repo_read_perm = False
253 254 if self.db_repo.fork:
254 255 c.has_origin_repo_read_perm = h.HasRepoPermissionAny(
255 256 'repository.write', 'repository.read', 'repository.admin')(
256 257 self.db_repo.fork.repo_name, 'summary fork link')
257 258
258 259 return c
259 260
260 261 def _get_f_path_unchecked(self, matchdict, default=None):
261 262 """
262 263 Should only be used by redirects, everything else should call _get_f_path
263 264 """
264 265 f_path = matchdict.get('f_path')
265 266 if f_path:
266 267 # fix for multiple initial slashes that causes errors for GIT
267 268 return f_path.lstrip('/')
268 269
269 270 return default
270 271
271 272 def _get_f_path(self, matchdict, default=None):
272 273 f_path_match = self._get_f_path_unchecked(matchdict, default)
273 274 return self.path_filter.assert_path_permissions(f_path_match)
274 275
275 276 def _get_general_setting(self, target_repo, settings_key, default=False):
276 277 settings_model = VcsSettingsModel(repo=target_repo)
277 278 settings = settings_model.get_general_settings()
278 279 return settings.get(settings_key, default)
279 280
280 281
281 282 class PathFilter(object):
282 283
283 284 # Expects an instance of BasePathPermissionChecker or None
284 285 def __init__(self, permission_checker):
285 286 self.permission_checker = permission_checker
286 287
287 288 def assert_path_permissions(self, path):
288 289 if path and self.permission_checker and not self.permission_checker.has_access(path):
289 290 raise HTTPForbidden()
290 291 return path
291 292
292 293 def filter_patchset(self, patchset):
293 294 if not self.permission_checker or not patchset:
294 295 return patchset, False
295 296 had_filtered = False
296 297 filtered_patchset = []
297 298 for patch in patchset:
298 299 filename = patch.get('filename', None)
299 300 if not filename or self.permission_checker.has_access(filename):
300 301 filtered_patchset.append(patch)
301 302 else:
302 303 had_filtered = True
303 304 if had_filtered:
304 305 if isinstance(patchset, diffs.LimitedDiffContainer):
305 306 filtered_patchset = diffs.LimitedDiffContainer(patchset.diff_limit, patchset.cur_diff_size, filtered_patchset)
306 307 return filtered_patchset, True
307 308 else:
308 309 return patchset, False
309 310
310 311 def render_patchset_filtered(self, diffset, patchset, source_ref=None, target_ref=None):
311 312 filtered_patchset, has_hidden_changes = self.filter_patchset(patchset)
312 313 result = diffset.render_patchset(
313 314 filtered_patchset, source_ref=source_ref, target_ref=target_ref)
314 315 result.has_hidden_changes = has_hidden_changes
315 316 return result
316 317
317 318 def get_raw_patch(self, diff_processor):
318 319 if self.permission_checker is None:
319 320 return diff_processor.as_raw()
320 321 elif self.permission_checker.has_full_access:
321 322 return diff_processor.as_raw()
322 323 else:
323 324 return '# Repository has user-specific filters, raw patch generation is disabled.'
324 325
325 326 @property
326 327 def is_enabled(self):
327 328 return self.permission_checker is not None
328 329
329 330
330 331 class RepoGroupAppView(BaseAppView):
331 332 def __init__(self, context, request):
332 333 super(RepoGroupAppView, self).__init__(context, request)
333 334 self.db_repo_group = request.db_repo_group
334 335 self.db_repo_group_name = self.db_repo_group.group_name
335 336
336 337 def _revoke_perms_on_yourself(self, form_result):
337 338 _updates = filter(lambda u: self._rhodecode_user.user_id == int(u[0]),
338 339 form_result['perm_updates'])
339 340 _additions = filter(lambda u: self._rhodecode_user.user_id == int(u[0]),
340 341 form_result['perm_additions'])
341 342 _deletions = filter(lambda u: self._rhodecode_user.user_id == int(u[0]),
342 343 form_result['perm_deletions'])
343 344 admin_perm = 'group.admin'
344 345 if _updates and _updates[0][1] != admin_perm or \
345 346 _additions and _additions[0][1] != admin_perm or \
346 347 _deletions and _deletions[0][1] != admin_perm:
347 348 return True
348 349 return False
349 350
350 351
351 352 class UserGroupAppView(BaseAppView):
352 353 def __init__(self, context, request):
353 354 super(UserGroupAppView, self).__init__(context, request)
354 355 self.db_user_group = request.db_user_group
355 356 self.db_user_group_name = self.db_user_group.users_group_name
356 357
357 358
358 359 class UserAppView(BaseAppView):
359 360 def __init__(self, context, request):
360 361 super(UserAppView, self).__init__(context, request)
361 362 self.db_user = request.db_user
362 363 self.db_user_id = self.db_user.user_id
363 364
364 365 _ = self.request.translate
365 366 if not request.db_user_supports_default:
366 367 if self.db_user.username == User.DEFAULT_USER:
367 368 h.flash(_("Editing user `{}` is disabled.".format(
368 369 User.DEFAULT_USER)), category='warning')
369 370 raise HTTPFound(h.route_path('users'))
370 371
371 372
372 373 class DataGridAppView(object):
373 374 """
374 375 Common class to have re-usable grid rendering components
375 376 """
376 377
377 378 def _extract_ordering(self, request, column_map=None):
378 379 column_map = column_map or {}
379 380 column_index = safe_int(request.GET.get('order[0][column]'))
380 381 order_dir = request.GET.get(
381 382 'order[0][dir]', 'desc')
382 383 order_by = request.GET.get(
383 384 'columns[%s][data][sort]' % column_index, 'name_raw')
384 385
385 386 # translate datatable to DB columns
386 387 order_by = column_map.get(order_by) or order_by
387 388
388 389 search_q = request.GET.get('search[value]')
389 390 return search_q, order_by, order_dir
390 391
391 392 def _extract_chunk(self, request):
392 393 start = safe_int(request.GET.get('start'), 0)
393 394 length = safe_int(request.GET.get('length'), 25)
394 395 draw = safe_int(request.GET.get('draw'))
395 396 return draw, start, length
396 397
397 398 def _get_order_col(self, order_by, model):
398 if isinstance(order_by, basestring):
399 if isinstance(order_by, compat.string_types):
399 400 try:
400 401 return operator.attrgetter(order_by)(model)
401 402 except AttributeError:
402 403 return None
403 404 else:
404 405 return order_by
405 406
406 407
407 408 class BaseReferencesView(RepoAppView):
408 409 """
409 410 Base for reference view for branches, tags and bookmarks.
410 411 """
411 412 def load_default_context(self):
412 413 c = self._get_local_tmpl_context()
413 414
414 415
415 416 return c
416 417
417 418 def load_refs_context(self, ref_items, partials_template):
418 419 _render = self.request.get_partial_renderer(partials_template)
419 420 pre_load = ["author", "date", "message"]
420 421
421 422 is_svn = h.is_svn(self.rhodecode_vcs_repo)
422 423 is_hg = h.is_hg(self.rhodecode_vcs_repo)
423 424
424 425 format_ref_id = get_format_ref_id(self.rhodecode_vcs_repo)
425 426
426 427 closed_refs = {}
427 428 if is_hg:
428 429 closed_refs = self.rhodecode_vcs_repo.branches_closed
429 430
430 431 data = []
431 432 for ref_name, commit_id in ref_items:
432 433 commit = self.rhodecode_vcs_repo.get_commit(
433 434 commit_id=commit_id, pre_load=pre_load)
434 435 closed = ref_name in closed_refs
435 436
436 437 # TODO: johbo: Unify generation of reference links
437 438 use_commit_id = '/' in ref_name or is_svn
438 439
439 440 if use_commit_id:
440 441 files_url = h.route_path(
441 442 'repo_files',
442 443 repo_name=self.db_repo_name,
443 444 f_path=ref_name if is_svn else '',
444 445 commit_id=commit_id)
445 446
446 447 else:
447 448 files_url = h.route_path(
448 449 'repo_files',
449 450 repo_name=self.db_repo_name,
450 451 f_path=ref_name if is_svn else '',
451 452 commit_id=ref_name,
452 453 _query=dict(at=ref_name))
453 454
454 455 data.append({
455 456 "name": _render('name', ref_name, files_url, closed),
456 457 "name_raw": ref_name,
457 458 "date": _render('date', commit.date),
458 459 "date_raw": datetime_to_time(commit.date),
459 460 "author": _render('author', commit.author),
460 461 "commit": _render(
461 462 'commit', commit.message, commit.raw_id, commit.idx),
462 463 "commit_raw": commit.idx,
463 464 "compare": _render(
464 465 'compare', format_ref_id(ref_name, commit.raw_id)),
465 466 })
466 467
467 468 return data
468 469
469 470
470 471 class RepoRoutePredicate(object):
471 472 def __init__(self, val, config):
472 473 self.val = val
473 474
474 475 def text(self):
475 476 return 'repo_route = %s' % self.val
476 477
477 478 phash = text
478 479
479 480 def __call__(self, info, request):
480 481 if hasattr(request, 'vcs_call'):
481 482 # skip vcs calls
482 483 return
483 484
484 485 repo_name = info['match']['repo_name']
485 486 repo_model = repo.RepoModel()
486 487
487 488 by_name_match = repo_model.get_by_repo_name(repo_name, cache=False)
488 489
489 490 def redirect_if_creating(route_info, db_repo):
490 491 skip_views = ['edit_repo_advanced_delete']
491 492 route = route_info['route']
492 493 # we should skip delete view so we can actually "remove" repositories
493 494 # if they get stuck in creating state.
494 495 if route.name in skip_views:
495 496 return
496 497
497 498 if db_repo.repo_state in [repo.Repository.STATE_PENDING]:
498 499 repo_creating_url = request.route_path(
499 500 'repo_creating', repo_name=db_repo.repo_name)
500 501 raise HTTPFound(repo_creating_url)
501 502
502 503 if by_name_match:
503 504 # register this as request object we can re-use later
504 505 request.db_repo = by_name_match
505 506 redirect_if_creating(info, by_name_match)
506 507 return True
507 508
508 509 by_id_match = repo_model.get_repo_by_id(repo_name)
509 510 if by_id_match:
510 511 request.db_repo = by_id_match
511 512 redirect_if_creating(info, by_id_match)
512 513 return True
513 514
514 515 return False
515 516
516 517
517 518 class RepoForbidArchivedRoutePredicate(object):
518 519 def __init__(self, val, config):
519 520 self.val = val
520 521
521 522 def text(self):
522 523 return 'repo_forbid_archived = %s' % self.val
523 524
524 525 phash = text
525 526
526 527 def __call__(self, info, request):
527 528 _ = request.translate
528 529 rhodecode_db_repo = request.db_repo
529 530
530 531 log.debug(
531 532 '%s checking if archived flag for repo for %s',
532 533 self.__class__.__name__, rhodecode_db_repo.repo_name)
533 534
534 535 if rhodecode_db_repo.archived:
535 536 log.warning('Current view is not supported for archived repo:%s',
536 537 rhodecode_db_repo.repo_name)
537 538
538 539 h.flash(
539 540 h.literal(_('Action not supported for archived repository.')),
540 541 category='warning')
541 542 summary_url = request.route_path(
542 543 'repo_summary', repo_name=rhodecode_db_repo.repo_name)
543 544 raise HTTPFound(summary_url)
544 545 return True
545 546
546 547
547 548 class RepoTypeRoutePredicate(object):
548 549 def __init__(self, val, config):
549 550 self.val = val or ['hg', 'git', 'svn']
550 551
551 552 def text(self):
552 553 return 'repo_accepted_type = %s' % self.val
553 554
554 555 phash = text
555 556
556 557 def __call__(self, info, request):
557 558 if hasattr(request, 'vcs_call'):
558 559 # skip vcs calls
559 560 return
560 561
561 562 rhodecode_db_repo = request.db_repo
562 563
563 564 log.debug(
564 565 '%s checking repo type for %s in %s',
565 566 self.__class__.__name__, rhodecode_db_repo.repo_type, self.val)
566 567
567 568 if rhodecode_db_repo.repo_type in self.val:
568 569 return True
569 570 else:
570 571 log.warning('Current view is not supported for repo type:%s',
571 572 rhodecode_db_repo.repo_type)
572 573 return False
573 574
574 575
575 576 class RepoGroupRoutePredicate(object):
576 577 def __init__(self, val, config):
577 578 self.val = val
578 579
579 580 def text(self):
580 581 return 'repo_group_route = %s' % self.val
581 582
582 583 phash = text
583 584
584 585 def __call__(self, info, request):
585 586 if hasattr(request, 'vcs_call'):
586 587 # skip vcs calls
587 588 return
588 589
589 590 repo_group_name = info['match']['repo_group_name']
590 591 repo_group_model = repo_group.RepoGroupModel()
591 592 by_name_match = repo_group_model.get_by_group_name(repo_group_name, cache=False)
592 593
593 594 if by_name_match:
594 595 # register this as request object we can re-use later
595 596 request.db_repo_group = by_name_match
596 597 return True
597 598
598 599 return False
599 600
600 601
601 602 class UserGroupRoutePredicate(object):
602 603 def __init__(self, val, config):
603 604 self.val = val
604 605
605 606 def text(self):
606 607 return 'user_group_route = %s' % self.val
607 608
608 609 phash = text
609 610
610 611 def __call__(self, info, request):
611 612 if hasattr(request, 'vcs_call'):
612 613 # skip vcs calls
613 614 return
614 615
615 616 user_group_id = info['match']['user_group_id']
616 617 user_group_model = user_group.UserGroup()
617 618 by_id_match = user_group_model.get(user_group_id, cache=False)
618 619
619 620 if by_id_match:
620 621 # register this as request object we can re-use later
621 622 request.db_user_group = by_id_match
622 623 return True
623 624
624 625 return False
625 626
626 627
627 628 class UserRoutePredicateBase(object):
628 629 supports_default = None
629 630
630 631 def __init__(self, val, config):
631 632 self.val = val
632 633
633 634 def text(self):
634 635 raise NotImplementedError()
635 636
636 637 def __call__(self, info, request):
637 638 if hasattr(request, 'vcs_call'):
638 639 # skip vcs calls
639 640 return
640 641
641 642 user_id = info['match']['user_id']
642 643 user_model = user.User()
643 644 by_id_match = user_model.get(user_id, cache=False)
644 645
645 646 if by_id_match:
646 647 # register this as request object we can re-use later
647 648 request.db_user = by_id_match
648 649 request.db_user_supports_default = self.supports_default
649 650 return True
650 651
651 652 return False
652 653
653 654
654 655 class UserRoutePredicate(UserRoutePredicateBase):
655 656 supports_default = False
656 657
657 658 def text(self):
658 659 return 'user_route = %s' % self.val
659 660
660 661 phash = text
661 662
662 663
663 664 class UserRouteWithDefaultPredicate(UserRoutePredicateBase):
664 665 supports_default = True
665 666
666 667 def text(self):
667 668 return 'user_with_default_route = %s' % self.val
668 669
669 670 phash = text
670 671
671 672
672 673 def includeme(config):
673 674 config.add_route_predicate(
674 675 'repo_route', RepoRoutePredicate)
675 676 config.add_route_predicate(
676 677 'repo_accepted_types', RepoTypeRoutePredicate)
677 678 config.add_route_predicate(
678 679 'repo_forbid_when_archived', RepoForbidArchivedRoutePredicate)
679 680 config.add_route_predicate(
680 681 'repo_group_route', RepoGroupRoutePredicate)
681 682 config.add_route_predicate(
682 683 'user_group_route', UserGroupRoutePredicate)
683 684 config.add_route_predicate(
684 685 'user_route_with_default', UserRouteWithDefaultPredicate)
685 686 config.add_route_predicate(
686 687 'user_route', UserRoutePredicate)
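As an aside on `DataGridAppView._get_order_col` above: once `order_by` is known to be a string, `operator.attrgetter` resolves it, dotted paths included, against the model class. A self-contained sketch with stand-in classes (not the real RhodeCode models):

.. code-block:: python

    import operator

    from pyramid import compat

    class UserStub(object):
        username = 'users.username column'

    class PullRequestStub(object):
        author = UserStub
        title = 'pull_requests.title column'

    def get_order_col(order_by, model):
        # mirrors the guard above: only strings go through attrgetter
        if isinstance(order_by, compat.string_types):
            try:
                return operator.attrgetter(order_by)(model)
            except AttributeError:
                return None
        return order_by

    print(get_order_col('title', PullRequestStub))            # the title attribute
    print(get_order_col('author.username', PullRequestStub))  # dotted paths work too
    print(get_order_col('missing', PullRequestStub))          # None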
@@ -1,90 +1,90 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20 import os
21 21 import logging
22 import os
23 22 import shlex
23 from pyramid import compat
24 24
25 25 # Do not use `from rhodecode import events` here, it will be overridden by the
26 26 # events module in this package due to python's import mechanism.
27 27 from rhodecode.events import RepoGroupEvent
28 28 from rhodecode.subscribers import AsyncSubprocessSubscriber
29 29 from rhodecode.config.middleware import (
30 30 _bool_setting, _string_setting, _int_setting)
31 31
32 32 from .events import ModDavSvnConfigChange
33 33 from .subscribers import generate_config_subscriber
34 34 from . import config_keys
35 35
36 36
37 37 log = logging.getLogger(__name__)
38 38
39 39
40 40 def includeme(config):
41 41 settings = config.registry.settings
42 42 _sanitize_settings_and_apply_defaults(settings)
43 43
44 44 if settings[config_keys.generate_config]:
45 45 # Add subscriber to generate the Apache mod dav svn configuration on
46 46 # repository group events.
47 47 config.add_subscriber(generate_config_subscriber, RepoGroupEvent)
48 48
49 49 # If a reload command is set add a subscriber to execute it on
50 50 # configuration changes.
51 51 reload_cmd = shlex.split(settings[config_keys.reload_command])
52 52 if reload_cmd:
53 53 reload_timeout = settings[config_keys.reload_timeout] or None
54 54 reload_subscriber = AsyncSubprocessSubscriber(
55 55 cmd=reload_cmd, timeout=reload_timeout)
56 56 config.add_subscriber(reload_subscriber, ModDavSvnConfigChange)
57 57
58 58
59 59 def _sanitize_settings_and_apply_defaults(settings):
60 60 """
61 61 Set defaults, convert to python types and validate settings.
62 62 """
63 63 _bool_setting(settings, config_keys.generate_config, 'false')
64 64 _bool_setting(settings, config_keys.list_parent_path, 'true')
65 65 _int_setting(settings, config_keys.reload_timeout, 10)
66 66 _string_setting(settings, config_keys.config_file_path, '', lower=False)
67 67 _string_setting(settings, config_keys.location_root, '/', lower=False)
68 68 _string_setting(settings, config_keys.reload_command, '', lower=False)
69 69 _string_setting(settings, config_keys.template, '', lower=False)
70 70
71 71 # Convert negative timeout values to zero.
72 72 if settings[config_keys.reload_timeout] < 0:
73 73 settings[config_keys.reload_timeout] = 0
74 74
75 75 # Append path separator to location root.
76 76 settings[config_keys.location_root] = _append_path_sep(
77 77 settings[config_keys.location_root])
78 78
79 79 # Validate settings.
80 80 if settings[config_keys.generate_config]:
81 81 assert len(settings[config_keys.config_file_path]) > 0
82 82
83 83
84 84 def _append_path_sep(path):
85 85 """
86 86 Append the path separator if missing.
87 87 """
88 if isinstance(path, basestring) and not path.endswith(os.path.sep):
88 if isinstance(path, compat.string_types) and not path.endswith(os.path.sep):
89 89 path += os.path.sep
90 90 return path
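A quick behavioural sketch of `_append_path_sep` (a standalone copy, for illustration only): the separator is appended only when the value is a string that does not already end with `os.path.sep`, so non-string values such as `None` pass through untouched:

.. code-block:: python

    import os

    from pyramid import compat

    def append_path_sep(path):
        # standalone copy of the helper above
        if isinstance(path, compat.string_types) and not path.endswith(os.path.sep):
            path += os.path.sep
        return path

    print(append_path_sep('/srv/svn'))   # '/srv/svn/' on POSIX
    print(append_path_sep('/srv/svn/'))  # already terminated, unchanged
    print(append_path_sep(None))         # non-strings pass through unchanged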
@@ -1,339 +1,340 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 RhodeCode task modules, containing all tasks that are supposed to be run
23 23 by the celery daemon
24 24 """
25 25
26 26 import os
27 27 import time
28 28
29 from pyramid import compat
29 30 from pyramid_mailer.mailer import Mailer
30 31 from pyramid_mailer.message import Message
31 32
32 33 import rhodecode
33 34 from rhodecode.lib import audit_logger
34 35 from rhodecode.lib.celerylib import get_logger, async_task, RequestContextTask
35 36 from rhodecode.lib.hooks_base import log_create_repository
36 37 from rhodecode.lib.utils2 import safe_int, str2bool
37 38 from rhodecode.model.db import Session, IntegrityError, Repository, User, true
38 39
39 40
40 41 @async_task(ignore_result=True, base=RequestContextTask)
41 42 def send_email(recipients, subject, body='', html_body='', email_config=None):
42 43 """
43 44 Sends an email with defined parameters from the .ini files.
44 45
45 46 :param recipients: list of recipients; if this is empty, the defined email
46 47 address from the field 'email_to' is used instead
47 48 :param subject: subject of the mail
48 49 :param body: body of the mail
49 50 :param html_body: html version of body
50 51 """
51 52 log = get_logger(send_email)
52 53
53 54 email_config = email_config or rhodecode.CONFIG
54 55
55 56 mail_server = email_config.get('smtp_server') or None
56 57 if mail_server is None:
57 58 log.error("SMTP server information missing. Sending email failed. "
58 59 "Make sure that `smtp_server` variable is configured "
59 60 "inside the .ini file")
60 61 return False
61 62
62 63 subject = "%s %s" % (email_config.get('email_prefix', ''), subject)
63 64
64 65 if recipients:
65 if isinstance(recipients, basestring):
66 if isinstance(recipients, compat.string_types):
66 67 recipients = recipients.split(',')
67 68 else:
68 69 # if recipients are not defined we send to email_config + all admins
69 70 admins = []
70 71 for u in User.query().filter(User.admin == true()).all():
71 72 if u.email:
72 73 admins.append(u.email)
73 74 recipients = []
74 75 config_email = email_config.get('email_to')
75 76 if config_email:
76 77 recipients += [config_email]
77 78 recipients += admins
78 79
79 80 # translate our LEGACY config into the one that pyramid_mailer supports
80 81 email_conf = dict(
81 82 host=mail_server,
82 83 port=email_config.get('smtp_port', 25),
83 84 username=email_config.get('smtp_username'),
84 85 password=email_config.get('smtp_password'),
85 86
86 87 tls=str2bool(email_config.get('smtp_use_tls')),
87 88 ssl=str2bool(email_config.get('smtp_use_ssl')),
88 89
89 90 # SSL key file
90 91 # keyfile='',
91 92
92 93 # SSL certificate file
93 94 # certfile='',
94 95
95 96 # Location of maildir
96 97 # queue_path='',
97 98
98 99 default_sender=email_config.get('app_email_from', 'RhodeCode'),
99 100
100 101 debug=str2bool(email_config.get('smtp_debug')),
101 102 # /usr/sbin/sendmail Sendmail executable
102 103 # sendmail_app='',
103 104
104 105 # {sendmail_app} -t -i -f {sender} Template for sendmail execution
105 106 # sendmail_template='',
106 107 )
107 108
108 109 try:
109 110 mailer = Mailer(**email_conf)
110 111
111 112 message = Message(subject=subject,
112 113 sender=email_conf['default_sender'],
113 114 recipients=recipients,
114 115 body=body, html=html_body)
115 116 mailer.send_immediately(message)
116 117
117 118 except Exception:
118 119 log.exception('Mail sending failed')
119 120 return False
120 121 return True
121 122
122 123
123 124 @async_task(ignore_result=True, base=RequestContextTask)
124 125 def create_repo(form_data, cur_user):
125 126 from rhodecode.model.repo import RepoModel
126 127 from rhodecode.model.user import UserModel
127 128 from rhodecode.model.settings import SettingsModel
128 129
129 130 log = get_logger(create_repo)
130 131
131 132 cur_user = UserModel()._get_user(cur_user)
132 133 owner = cur_user
133 134
134 135 repo_name = form_data['repo_name']
135 136 repo_name_full = form_data['repo_name_full']
136 137 repo_type = form_data['repo_type']
137 138 description = form_data['repo_description']
138 139 private = form_data['repo_private']
139 140 clone_uri = form_data.get('clone_uri')
140 141 repo_group = safe_int(form_data['repo_group'])
141 142 landing_rev = form_data['repo_landing_rev']
142 143 copy_fork_permissions = form_data.get('copy_permissions')
143 144 copy_group_permissions = form_data.get('repo_copy_permissions')
144 145 fork_of = form_data.get('fork_parent_id')
145 146 state = form_data.get('repo_state', Repository.STATE_PENDING)
146 147
147 148 # repo creation defaults, private and repo_type are filled in form
148 149 defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
149 150 enable_statistics = form_data.get(
150 151 'enable_statistics', defs.get('repo_enable_statistics'))
151 152 enable_locking = form_data.get(
152 153 'enable_locking', defs.get('repo_enable_locking'))
153 154 enable_downloads = form_data.get(
154 155 'enable_downloads', defs.get('repo_enable_downloads'))
155 156
156 157 try:
157 158 RepoModel()._create_repo(
158 159 repo_name=repo_name_full,
159 160 repo_type=repo_type,
160 161 description=description,
161 162 owner=owner,
162 163 private=private,
163 164 clone_uri=clone_uri,
164 165 repo_group=repo_group,
165 166 landing_rev=landing_rev,
166 167 fork_of=fork_of,
167 168 copy_fork_permissions=copy_fork_permissions,
168 169 copy_group_permissions=copy_group_permissions,
169 170 enable_statistics=enable_statistics,
170 171 enable_locking=enable_locking,
171 172 enable_downloads=enable_downloads,
172 173 state=state
173 174 )
174 175 Session().commit()
175 176
176 177 # now create this repo on Filesystem
177 178 RepoModel()._create_filesystem_repo(
178 179 repo_name=repo_name,
179 180 repo_type=repo_type,
180 181 repo_group=RepoModel()._get_repo_group(repo_group),
181 182 clone_uri=clone_uri,
182 183 )
183 184 repo = Repository.get_by_repo_name(repo_name_full)
184 185 log_create_repository(created_by=owner.username, **repo.get_dict())
185 186
186 187 # update repo commit caches initially
187 188 repo.update_commit_cache()
188 189
189 190 # set new created state
190 191 repo.set_state(Repository.STATE_CREATED)
191 192 repo_id = repo.repo_id
192 193 repo_data = repo.get_api_data()
193 194
194 195 audit_logger.store(
195 196 'repo.create', action_data={'data': repo_data},
196 197 user=cur_user,
197 198 repo=audit_logger.RepoWrap(repo_name=repo_name, repo_id=repo_id))
198 199
199 200 Session().commit()
200 201 except Exception as e:
201 202 log.warning('Exception occurred when creating repository, '
202 203 'doing cleanup...', exc_info=True)
203 204 if isinstance(e, IntegrityError):
204 205 Session().rollback()
205 206
206 207 # rollback things manually !
207 208 repo = Repository.get_by_repo_name(repo_name_full)
208 209 if repo:
209 210 Repository.delete(repo.repo_id)
210 211 Session().commit()
211 212 RepoModel()._delete_filesystem_repo(repo)
212 213 log.info('Cleanup of repo %s finished', repo_name_full)
213 214 raise
214 215
215 216 return True
216 217
217 218
218 219 @async_task(ignore_result=True, base=RequestContextTask)
219 220 def create_repo_fork(form_data, cur_user):
220 221 """
221 222 Creates a fork of repository using internal VCS methods
222 223 """
223 224 from rhodecode.model.repo import RepoModel
224 225 from rhodecode.model.user import UserModel
225 226
226 227 log = get_logger(create_repo_fork)
227 228
228 229 cur_user = UserModel()._get_user(cur_user)
229 230 owner = cur_user
230 231
231 232 repo_name = form_data['repo_name'] # fork in this case
232 233 repo_name_full = form_data['repo_name_full']
233 234 repo_type = form_data['repo_type']
234 235 description = form_data['description']
235 236 private = form_data['private']
236 237 clone_uri = form_data.get('clone_uri')
237 238 repo_group = safe_int(form_data['repo_group'])
238 239 landing_rev = form_data['landing_rev']
239 240 copy_fork_permissions = form_data.get('copy_permissions')
240 241 fork_id = safe_int(form_data.get('fork_parent_id'))
241 242
242 243 try:
243 244 fork_of = RepoModel()._get_repo(fork_id)
244 245 RepoModel()._create_repo(
245 246 repo_name=repo_name_full,
246 247 repo_type=repo_type,
247 248 description=description,
248 249 owner=owner,
249 250 private=private,
250 251 clone_uri=clone_uri,
251 252 repo_group=repo_group,
252 253 landing_rev=landing_rev,
253 254 fork_of=fork_of,
254 255 copy_fork_permissions=copy_fork_permissions
255 256 )
256 257
257 258 Session().commit()
258 259
259 260 base_path = Repository.base_path()
260 261 source_repo_path = os.path.join(base_path, fork_of.repo_name)
261 262
262 263 # now create this repo on Filesystem
263 264 RepoModel()._create_filesystem_repo(
264 265 repo_name=repo_name,
265 266 repo_type=repo_type,
266 267 repo_group=RepoModel()._get_repo_group(repo_group),
267 268 clone_uri=source_repo_path,
268 269 )
269 270 repo = Repository.get_by_repo_name(repo_name_full)
270 271 log_create_repository(created_by=owner.username, **repo.get_dict())
271 272
272 273 # update repo commit caches initially
273 274 config = repo._config
274 275 config.set('extensions', 'largefiles', '')
275 276 repo.update_commit_cache(config=config)
276 277
277 278 # set new created state
278 279 repo.set_state(Repository.STATE_CREATED)
279 280
280 281 repo_id = repo.repo_id
281 282 repo_data = repo.get_api_data()
282 283 audit_logger.store(
283 284 'repo.fork', action_data={'data': repo_data},
284 285 user=cur_user,
285 286 repo=audit_logger.RepoWrap(repo_name=repo_name, repo_id=repo_id))
286 287
287 288 Session().commit()
288 289 except Exception as e:
289 290 log.warning('Exception occurred when forking repository, '
290 291 'doing cleanup...', exc_info=True)
291 292 if isinstance(e, IntegrityError):
292 293 Session().rollback()
293 294
294 295 # rollback things manually !
295 296 repo = Repository.get_by_repo_name(repo_name_full)
296 297 if repo:
297 298 Repository.delete(repo.repo_id)
298 299 Session().commit()
299 300 RepoModel()._delete_filesystem_repo(repo)
300 301 log.info('Cleanup of repo %s finished', repo_name_full)
301 302 raise
302 303
303 304 return True
304 305
305 306
306 307 @async_task(ignore_result=True)
307 308 def repo_maintenance(repoid):
308 309 from rhodecode.lib import repo_maintenance as repo_maintenance_lib
309 310 log = get_logger(repo_maintenance)
310 311 repo = Repository.get_by_id_or_repo_name(repoid)
311 312 if repo:
312 313 maintenance = repo_maintenance_lib.RepoMaintenance()
313 314 tasks = maintenance.get_tasks_for_repo(repo)
314 315 log.debug('Executing %s tasks on repo `%s`', tasks, repoid)
315 316 executed_types = maintenance.execute(repo)
316 317 log.debug('Got execution results %s', executed_types)
317 318 else:
318 319 log.debug('Repo `%s` not found or without a clone_url', repoid)
319 320
320 321
321 322 @async_task(ignore_result=True)
322 323 def check_for_update():
323 324 from rhodecode.model.update import UpdateModel
324 325 update_url = UpdateModel().get_update_url()
325 326 cur_ver = rhodecode.__version__
326 327
327 328 try:
328 329 data = UpdateModel().get_update_data(update_url)
329 330 latest = data['versions'][0]
330 331 UpdateModel().store_version(latest['version'])
331 332 except Exception:
332 333 pass
333 334
334 335
335 336 @async_task(ignore_result=False)
336 337 def beat_check(*args, **kwargs):
337 338 log = get_logger(beat_check)
338 339 log.info('Got args: %r and kwargs %r', args, kwargs)
339 340 return time.time()
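
An editorial aside: these are asynchronous tasks, and assuming the `async_task` decorator wraps them as ordinary Celery tasks (not something this diff confirms), callers would dispatch them to a worker rather than invoke them inline, along these lines:

    # hedged sketch, not part of this commit: standard Celery dispatch,
    # assuming the decorated functions expose the usual task API
    create_repo_fork.apply_async(args=(form_data, cur_user.user_id))
    repo_maintenance.apply_async(args=(repo.repo_id,))
    result = beat_check.apply_async(kwargs={'probe': 'scheduler'})
    print(result.get())   # beat_check returns time.time() from the worker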
@@ -1,775 +1,776 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22 import difflib
23 23 from itertools import groupby
24 24
25 25 from pygments import lex
26 26 from pygments.formatters.html import _get_ttype_class as pygment_token_class
27 27 from pygments.lexers.special import TextLexer, Token
28 28 from pygments.lexers import get_lexer_by_name
29 from pyramid import compat
29 30
30 31 from rhodecode.lib.helpers import (
31 32 get_lexer_for_filenode, html_escape, get_custom_lexer)
32 33 from rhodecode.lib.utils2 import AttributeDict, StrictAttributeDict, safe_unicode
33 34 from rhodecode.lib.vcs.nodes import FileNode
34 35 from rhodecode.lib.vcs.exceptions import VCSError, NodeDoesNotExistError
35 36 from rhodecode.lib.diff_match_patch import diff_match_patch
36 37 from rhodecode.lib.diffs import LimitedDiffContainer, DEL_FILENODE, BIN_FILENODE
37 38
38 39
39 40 plain_text_lexer = get_lexer_by_name(
40 41 'text', stripall=False, stripnl=False, ensurenl=False)
41 42
42 43
43 44 log = logging.getLogger(__name__)
44 45
45 46
46 47 def filenode_as_lines_tokens(filenode, lexer=None):
47 48 org_lexer = lexer
48 49 lexer = lexer or get_lexer_for_filenode(filenode)
49 50 log.debug('Generating file node pygment tokens for %s, %s, org_lexer:%s',
50 51 lexer, filenode, org_lexer)
51 52 tokens = tokenize_string(filenode.content, lexer)
52 53 lines = split_token_stream(tokens)
53 54 rv = list(lines)
54 55 return rv
55 56
56 57
57 58 def tokenize_string(content, lexer):
58 59 """
59 60 Use pygments to tokenize some content based on a lexer
60 61 ensuring all original new lines and whitespace is preserved
61 62 """
62 63
63 64 lexer.stripall = False
64 65 lexer.stripnl = False
65 66 lexer.ensurenl = False
66 67
67 68 if isinstance(lexer, TextLexer):
68 69 lexed = [(Token.Text, content)]
69 70 else:
70 71 lexed = lex(content, lexer)
71 72
72 73 for token_type, token_text in lexed:
73 74 yield pygment_token_class(token_type), token_text
74 75
75 76
76 77 def split_token_stream(tokens):
77 78 """
78 79 Take a list of (TokenType, text) tuples and split them by a string
79 80
80 81 split_token_stream([(TEXT, 'some\ntext'), (TEXT, 'more\n')])
81 82 [[(TEXT, 'some')], [(TEXT, 'text'), (TEXT, 'more')],
82 83 [(TEXT, '')]]
83 84 """
84 85
85 86 buffer = []
86 87 for token_class, token_text in tokens:
87 88 parts = token_text.split('\n')
88 89 for part in parts[:-1]:
89 90 buffer.append((token_class, part))
90 91 yield buffer
91 92 buffer = []
92 93
93 94 buffer.append((token_class, parts[-1]))
94 95
95 96 if buffer:
96 97 yield buffer
97 98
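
A minimal sketch of the tokenize/split pipeline that `filenode_as_lines_tokens` builds on (the lexer name and sample content are assumptions, not taken from this commit):

    lexer = get_lexer_by_name('python')
    tokens = tokenize_string(u'x = 1\ny = 2\n', lexer)
    lines = list(split_token_stream(tokens))
    # `lines` contains one entry per source line, each a list of
    # (css_class, text) tuples, so lines[0] holds the tokens of 'x = 1'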
98 99
99 100 def filenode_as_annotated_lines_tokens(filenode):
100 101 """
101 102 Take a file node and return a list of (annotation => lines) pairs; if no
102 103 annotation is found for a line, the annotation is None.
103 104
104 105 eg:
105 106
106 107 [
107 108 (annotation1, [
108 109 (1, line1_tokens_list),
109 110 (2, line2_tokens_list),
110 111 ]),
111 112 (annotation2, [
112 113 (3, line1_tokens_list),
113 114 ]),
114 115 (None, [
115 116 (4, line1_tokens_list),
116 117 ]),
117 118 (annotation1, [
118 119 (5, line1_tokens_list),
119 120 (6, line2_tokens_list),
120 121 ])
121 122 ]
122 123 """
123 124
124 125 commit_cache = {} # cache commit_getter lookups
125 126
126 127 def _get_annotation(commit_id, commit_getter):
127 128 if commit_id not in commit_cache:
128 129 commit_cache[commit_id] = commit_getter()
129 130 return commit_cache[commit_id]
130 131
131 132 annotation_lookup = {
132 133 line_no: _get_annotation(commit_id, commit_getter)
133 134 for line_no, commit_id, commit_getter, line_content
134 135 in filenode.annotate
135 136 }
136 137
137 138 annotations_lines = ((annotation_lookup.get(line_no), line_no, tokens)
138 139 for line_no, tokens
139 140 in enumerate(filenode_as_lines_tokens(filenode), 1))
140 141
141 142 grouped_annotations_lines = groupby(annotations_lines, lambda x: x[0])
142 143
143 144 for annotation, group in grouped_annotations_lines:
144 145 yield (
145 146 annotation, [(line_no, tokens)
146 147 for (_, line_no, tokens) in group]
147 148 )
148 149
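
A hedged consumption sketch for the generator above; the `raw_id` attribute on the annotation commit is an assumption based on how commit objects are used elsewhere in this codebase:

    for annotation, annotated_lines in filenode_as_annotated_lines_tokens(filenode):
        commit_id = annotation.raw_id if annotation else None
        for line_no, tokens in annotated_lines:
            html = render_tokenstream(tokens)  # per-line <span> markup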
149 150
150 151 def render_tokenstream(tokenstream):
151 152 result = []
152 153 for token_class, token_ops_texts in rollup_tokenstream(tokenstream):
153 154
154 155 if token_class:
155 156 result.append(u'<span class="%s">' % token_class)
156 157 else:
157 158 result.append(u'<span>')
158 159
159 160 for op_tag, token_text in token_ops_texts:
160 161
161 162 if op_tag:
162 163 result.append(u'<%s>' % op_tag)
163 164
164 165 escaped_text = html_escape(token_text)
165 166
166 167 # TODO: dan: investigate showing hidden characters like space/nl/tab
167 168 # escaped_text = escaped_text.replace(' ', '<sp> </sp>')
168 169 # escaped_text = escaped_text.replace('\n', '<nl>\n</nl>')
169 170 # escaped_text = escaped_text.replace('\t', '<tab>\t</tab>')
170 171
171 172 result.append(escaped_text)
172 173
173 174 if op_tag:
174 175 result.append(u'</%s>' % op_tag)
175 176
176 177 result.append(u'</span>')
177 178
178 179 html = ''.join(result)
179 180 return html
180 181
181 182
182 183 def rollup_tokenstream(tokenstream):
183 184 """
184 185 Group a token stream of the format:
185 186
186 187 ('class', 'op', 'text')
187 188 or
188 189 ('class', 'text')
189 190
190 191 into
191 192
192 193 [('class1',
193 194 [('op1', 'text'),
194 195 ('op2', 'text')]),
195 196 ('class2',
196 197 [('op3', 'text')])]
197 198
198 199 This is used to emit the minimal set of tags necessary when
199 200 rendering to html, e.g. for a token stream producing:
200 201
201 202 <span class="A"><ins>he</ins>llo</span>
202 203 vs
203 204 <span class="A"><ins>he</ins></span><span class="A">llo</span>
204 205
205 206 If a 2 tuple is passed in, the output op will be an empty string.
206 207
207 208 eg:
208 209
209 210 >>> rollup_tokenstream([('classA', '', 'h'),
210 211 ('classA', 'del', 'ell'),
211 212 ('classA', '', 'o'),
212 213 ('classB', '', ' '),
213 214 ('classA', '', 'the'),
214 215 ('classA', '', 're'),
215 216 ])
216 217
217 218 [('classA', [('', 'h'), ('del', 'ell'), ('', 'o')]),
218 219 ('classB', [('', ' ')]),
219 220 ('classA', [('', 'there')])]
220 221
221 222 """
222 223 if tokenstream and len(tokenstream[0]) == 2:
223 224 tokenstream = ((t[0], '', t[1]) for t in tokenstream)
224 225
225 226 result = []
226 227 for token_class, op_list in groupby(tokenstream, lambda t: t[0]):
227 228 ops = []
228 229 for token_op, token_text_list in groupby(op_list, lambda o: o[1]):
229 230 text_buffer = []
230 231 for t_class, t_op, t_text in token_text_list:
231 232 text_buffer.append(t_text)
232 233 ops.append((token_op, ''.join(text_buffer)))
233 234 result.append((token_class, ops))
234 235 return result
235 236
236 237
237 238 def tokens_diff(old_tokens, new_tokens, use_diff_match_patch=True):
238 239 """
239 240 Converts a list of (token_class, token_text) tuples to a list of
240 241 (token_class, token_op, token_text) tuples where token_op is one of
241 242 ('ins', 'del', '')
242 243
243 244 :param old_tokens: list of (token_class, token_text) tuples of old line
244 245 :param new_tokens: list of (token_class, token_text) tuples of new line
245 246 :param use_diff_match_patch: boolean, will use google's diff match patch
246 247 library which has options to 'smooth' out the character by character
247 248 differences making nicer ins/del blocks
248 249 """
249 250
250 251 old_tokens_result = []
251 252 new_tokens_result = []
252 253
253 254 similarity = difflib.SequenceMatcher(None,
254 255 ''.join(token_text for token_class, token_text in old_tokens),
255 256 ''.join(token_text for token_class, token_text in new_tokens)
256 257 ).ratio()
257 258
258 259 if similarity < 0.6: # return, the blocks are too different
259 260 for token_class, token_text in old_tokens:
260 261 old_tokens_result.append((token_class, '', token_text))
261 262 for token_class, token_text in new_tokens:
262 263 new_tokens_result.append((token_class, '', token_text))
263 264 return old_tokens_result, new_tokens_result, similarity
264 265
265 266 token_sequence_matcher = difflib.SequenceMatcher(None,
266 267 [x[1] for x in old_tokens],
267 268 [x[1] for x in new_tokens])
268 269
269 270 for tag, o1, o2, n1, n2 in token_sequence_matcher.get_opcodes():
270 271 # check the differences by token block types first to give a more
271 272 # nicer "block" level replacement vs character diffs
272 273
273 274 if tag == 'equal':
274 275 for token_class, token_text in old_tokens[o1:o2]:
275 276 old_tokens_result.append((token_class, '', token_text))
276 277 for token_class, token_text in new_tokens[n1:n2]:
277 278 new_tokens_result.append((token_class, '', token_text))
278 279 elif tag == 'delete':
279 280 for token_class, token_text in old_tokens[o1:o2]:
280 281 old_tokens_result.append((token_class, 'del', token_text))
281 282 elif tag == 'insert':
282 283 for token_class, token_text in new_tokens[n1:n2]:
283 284 new_tokens_result.append((token_class, 'ins', token_text))
284 285 elif tag == 'replace':
285 286 # if same type token blocks must be replaced, do a diff on the
286 287 # characters in the token blocks to show individual changes
287 288
288 289 old_char_tokens = []
289 290 new_char_tokens = []
290 291 for token_class, token_text in old_tokens[o1:o2]:
291 292 for char in token_text:
292 293 old_char_tokens.append((token_class, char))
293 294
294 295 for token_class, token_text in new_tokens[n1:n2]:
295 296 for char in token_text:
296 297 new_char_tokens.append((token_class, char))
297 298
298 299 old_string = ''.join([token_text for
299 300 token_class, token_text in old_char_tokens])
300 301 new_string = ''.join([token_text for
301 302 token_class, token_text in new_char_tokens])
302 303
303 304 char_sequence = difflib.SequenceMatcher(
304 305 None, old_string, new_string)
305 306 copcodes = char_sequence.get_opcodes()
306 307 obuffer, nbuffer = [], []
307 308
308 309 if use_diff_match_patch:
309 310 dmp = diff_match_patch()
310 311 dmp.Diff_EditCost = 11 # TODO: dan: extract this to a setting
311 312 reps = dmp.diff_main(old_string, new_string)
312 313 dmp.diff_cleanupEfficiency(reps)
313 314
314 315 a, b = 0, 0
315 316 for op, rep in reps:
316 317 l = len(rep)
317 318 if op == 0:
318 319 for i, c in enumerate(rep):
319 320 obuffer.append((old_char_tokens[a+i][0], '', c))
320 321 nbuffer.append((new_char_tokens[b+i][0], '', c))
321 322 a += l
322 323 b += l
323 324 elif op == -1:
324 325 for i, c in enumerate(rep):
325 326 obuffer.append((old_char_tokens[a+i][0], 'del', c))
326 327 a += l
327 328 elif op == 1:
328 329 for i, c in enumerate(rep):
329 330 nbuffer.append((new_char_tokens[b+i][0], 'ins', c))
330 331 b += l
331 332 else:
332 333 for ctag, co1, co2, cn1, cn2 in copcodes:
333 334 if ctag == 'equal':
334 335 for token_class, token_text in old_char_tokens[co1:co2]:
335 336 obuffer.append((token_class, '', token_text))
336 337 for token_class, token_text in new_char_tokens[cn1:cn2]:
337 338 nbuffer.append((token_class, '', token_text))
338 339 elif ctag == 'delete':
339 340 for token_class, token_text in old_char_tokens[co1:co2]:
340 341 obuffer.append((token_class, 'del', token_text))
341 342 elif ctag == 'insert':
342 343 for token_class, token_text in new_char_tokens[cn1:cn2]:
343 344 nbuffer.append((token_class, 'ins', token_text))
344 345 elif ctag == 'replace':
345 346 for token_class, token_text in old_char_tokens[co1:co2]:
346 347 obuffer.append((token_class, 'del', token_text))
347 348 for token_class, token_text in new_char_tokens[cn1:cn2]:
348 349 nbuffer.append((token_class, 'ins', token_text))
349 350
350 351 old_tokens_result.extend(obuffer)
351 352 new_tokens_result.extend(nbuffer)
352 353
353 354 return old_tokens_result, new_tokens_result, similarity
354 355
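
For illustration only, with empty token classes and a one-character change; the exact cleanup that diff_match_patch applies is an assumption, so the expected values are indicative:

    old = [('', u'a'), ('', u'b'), ('', u'c')]
    new = [('', u'a'), ('', u'x'), ('', u'c')]
    old_res, new_res, similarity = tokens_diff(old, new)
    # old_res ~ [('', '', u'a'), ('', 'del', u'b'), ('', '', u'c')]
    # new_res ~ [('', '', u'a'), ('', 'ins', u'x'), ('', '', u'c')]
    # similarity is the difflib ratio of the joined strings (~0.67 here)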
355 356
356 357 def diffset_node_getter(commit):
357 358 def get_node(fname):
358 359 try:
359 360 return commit.get_node(fname)
360 361 except NodeDoesNotExistError:
361 362 return None
362 363
363 364 return get_node
364 365
365 366
366 367 class DiffSet(object):
367 368 """
368 369 An object for parsing the diff result from diffs.DiffProcessor and
369 370 adding highlighting, side by side/unified renderings and line diffs
370 371 """
371 372
372 373 HL_REAL = 'REAL' # highlights using original file, slow
373 374 HL_FAST = 'FAST' # highlights using just the line, fast but not correct
374 375 # in the case of multiline code
375 376 HL_NONE = 'NONE' # no highlighting, fastest
376 377
377 378 def __init__(self, highlight_mode=HL_REAL, repo_name=None,
378 379 source_repo_name=None,
379 380 source_node_getter=lambda filename: None,
380 381 target_repo_name=None,
381 382 target_node_getter=lambda filename: None,
382 383 source_nodes=None, target_nodes=None,
383 384 # files over this size will use fast highlighting
384 385 max_file_size_limit=150 * 1024,
385 386 ):
386 387
387 388 self.highlight_mode = highlight_mode
388 389 self.highlighted_filenodes = {}
389 390 self.source_node_getter = source_node_getter
390 391 self.target_node_getter = target_node_getter
391 392 self.source_nodes = source_nodes or {}
392 393 self.target_nodes = target_nodes or {}
393 394 self.repo_name = repo_name
394 395 self.target_repo_name = target_repo_name or repo_name
395 396 self.source_repo_name = source_repo_name or repo_name
396 397 self.max_file_size_limit = max_file_size_limit
397 398
398 399 def render_patchset(self, patchset, source_ref=None, target_ref=None):
399 400 diffset = AttributeDict(dict(
400 401 lines_added=0,
401 402 lines_deleted=0,
402 403 changed_files=0,
403 404 files=[],
404 405 file_stats={},
405 406 limited_diff=isinstance(patchset, LimitedDiffContainer),
406 407 repo_name=self.repo_name,
407 408 target_repo_name=self.target_repo_name,
408 409 source_repo_name=self.source_repo_name,
409 410 source_ref=source_ref,
410 411 target_ref=target_ref,
411 412 ))
412 413 for patch in patchset:
413 414 diffset.file_stats[patch['filename']] = patch['stats']
414 415 filediff = self.render_patch(patch)
415 416 filediff.diffset = StrictAttributeDict(dict(
416 417 source_ref=diffset.source_ref,
417 418 target_ref=diffset.target_ref,
418 419 repo_name=diffset.repo_name,
419 420 source_repo_name=diffset.source_repo_name,
420 421 target_repo_name=diffset.target_repo_name,
421 422 ))
422 423 diffset.files.append(filediff)
423 424 diffset.changed_files += 1
424 425 if not patch['stats']['binary']:
425 426 diffset.lines_added += patch['stats']['added']
426 427 diffset.lines_deleted += patch['stats']['deleted']
427 428
428 429 return diffset
429 430
430 431 _lexer_cache = {}
431 432
432 433 def _get_lexer_for_filename(self, filename, filenode=None):
433 434 # cached because we might need to call it twice for source/target
434 435 if filename not in self._lexer_cache:
435 436 if filenode:
436 437 lexer = filenode.lexer
437 438 extension = filenode.extension
438 439 else:
439 440 lexer = FileNode.get_lexer(filename=filename)
440 441 extension = filename.split('.')[-1]
441 442
442 443 lexer = get_custom_lexer(extension) or lexer
443 444 self._lexer_cache[filename] = lexer
444 445 return self._lexer_cache[filename]
445 446
446 447 def render_patch(self, patch):
447 448 log.debug('rendering diff for %r', patch['filename'])
448 449
449 450 source_filename = patch['original_filename']
450 451 target_filename = patch['filename']
451 452
452 453 source_lexer = plain_text_lexer
453 454 target_lexer = plain_text_lexer
454 455
455 456 if not patch['stats']['binary']:
456 457 node_hl_mode = self.HL_NONE if patch['chunks'] == [] else None
457 458 hl_mode = node_hl_mode or self.highlight_mode
458 459
459 460 if hl_mode == self.HL_REAL:
460 461 if (source_filename and patch['operation'] in ('D', 'M')
461 462 and source_filename not in self.source_nodes):
462 463 self.source_nodes[source_filename] = (
463 464 self.source_node_getter(source_filename))
464 465
465 466 if (target_filename and patch['operation'] in ('A', 'M')
466 467 and target_filename not in self.target_nodes):
467 468 self.target_nodes[target_filename] = (
468 469 self.target_node_getter(target_filename))
469 470
470 471 elif hl_mode == self.HL_FAST:
471 472 source_lexer = self._get_lexer_for_filename(source_filename)
472 473 target_lexer = self._get_lexer_for_filename(target_filename)
473 474
474 475 source_file = self.source_nodes.get(source_filename, source_filename)
475 476 target_file = self.target_nodes.get(target_filename, target_filename)
476 477 raw_id_uid = ''
477 478 if self.source_nodes.get(source_filename):
478 479 raw_id_uid = self.source_nodes[source_filename].commit.raw_id
479 480
480 481 if not raw_id_uid and self.target_nodes.get(target_filename):
481 482 # in case this is a new file we only have it in target
482 483 raw_id_uid = self.target_nodes[target_filename].commit.raw_id
483 484
484 485 source_filenode, target_filenode = None, None
485 486
486 487 # TODO: dan: FileNode.lexer works on the content of the file - which
487 488 # can be slow - issue #4289 explains a lexer clean up - which once
488 489 # done can allow caching a lexer for a filenode to avoid the file lookup
489 490 if isinstance(source_file, FileNode):
490 491 source_filenode = source_file
491 492 #source_lexer = source_file.lexer
492 493 source_lexer = self._get_lexer_for_filename(source_filename)
493 494 source_file.lexer = source_lexer
494 495
495 496 if isinstance(target_file, FileNode):
496 497 target_filenode = target_file
497 498 #target_lexer = target_file.lexer
498 499 target_lexer = self._get_lexer_for_filename(target_filename)
499 500 target_file.lexer = target_lexer
500 501
501 502 source_file_path, target_file_path = None, None
502 503
503 504 if source_filename != '/dev/null':
504 505 source_file_path = source_filename
505 506 if target_filename != '/dev/null':
506 507 target_file_path = target_filename
507 508
508 509 source_file_type = source_lexer.name
509 510 target_file_type = target_lexer.name
510 511
511 512 filediff = AttributeDict({
512 513 'source_file_path': source_file_path,
513 514 'target_file_path': target_file_path,
514 515 'source_filenode': source_filenode,
515 516 'target_filenode': target_filenode,
516 517 'source_file_type': target_file_type,
517 518 'target_file_type': source_file_type,
518 519 'patch': {'filename': patch['filename'], 'stats': patch['stats']},
519 520 'operation': patch['operation'],
520 521 'source_mode': patch['stats']['old_mode'],
521 522 'target_mode': patch['stats']['new_mode'],
522 523 'limited_diff': patch['is_limited_diff'],
523 524 'hunks': [],
524 525 'hunk_ops': None,
525 526 'diffset': self,
526 527 'raw_id': raw_id_uid,
527 528 })
528 529
529 530 file_chunks = patch['chunks'][1:]
530 531 for hunk in file_chunks:
531 532 hunkbit = self.parse_hunk(hunk, source_file, target_file)
532 533 hunkbit.source_file_path = source_file_path
533 534 hunkbit.target_file_path = target_file_path
534 535 filediff.hunks.append(hunkbit)
535 536
536 537 # Simulate hunk on OPS type line which doesn't really contain any diff
537 538 # this allows commenting on those
538 539 if not file_chunks:
539 540 actions = []
540 541 for op_id, op_text in filediff.patch['stats']['ops'].items():
541 542 if op_id == DEL_FILENODE:
542 543 actions.append(u'file was removed')
543 544 elif op_id == BIN_FILENODE:
544 545 actions.append(u'binary diff hidden')
545 546 else:
546 547 actions.append(safe_unicode(op_text))
547 548 action_line = u'NO CONTENT: ' + \
548 549 u', '.join(actions) or u'UNDEFINED_ACTION'
549 550
550 551 hunk_ops = {'source_length': 0, 'source_start': 0,
551 552 'lines': [
552 553 {'new_lineno': 0, 'old_lineno': 1,
553 554 'action': 'unmod-no-hl', 'line': action_line}
554 555 ],
555 556 'section_header': u'', 'target_start': 1, 'target_length': 1}
556 557
557 558 hunkbit = self.parse_hunk(hunk_ops, source_file, target_file)
558 559 hunkbit.source_file_path = source_file_path
559 560 hunkbit.target_file_path = target_file_path
560 561 filediff.hunk_ops = hunkbit
561 562 return filediff
562 563
563 564 def parse_hunk(self, hunk, source_file, target_file):
564 565 result = AttributeDict(dict(
565 566 source_start=hunk['source_start'],
566 567 source_length=hunk['source_length'],
567 568 target_start=hunk['target_start'],
568 569 target_length=hunk['target_length'],
569 570 section_header=hunk['section_header'],
570 571 lines=[],
571 572 ))
572 573 before, after = [], []
573 574
574 575 for line in hunk['lines']:
575 576 if line['action'] in ['unmod', 'unmod-no-hl']:
576 577 no_hl = line['action'] == 'unmod-no-hl'
577 578 result.lines.extend(
578 579 self.parse_lines(before, after, source_file, target_file, no_hl=no_hl))
579 580 after.append(line)
580 581 before.append(line)
581 582 elif line['action'] == 'add':
582 583 after.append(line)
583 584 elif line['action'] == 'del':
584 585 before.append(line)
585 586 elif line['action'] == 'old-no-nl':
586 587 before.append(line)
587 588 elif line['action'] == 'new-no-nl':
588 589 after.append(line)
589 590
590 591 all_actions = [x['action'] for x in after] + [x['action'] for x in before]
591 592 no_hl = {x for x in all_actions} == {'unmod-no-hl'}
592 593 result.lines.extend(
593 594 self.parse_lines(before, after, source_file, target_file, no_hl=no_hl))
594 595 # NOTE(marcink): we must keep list() call here so we can cache the result...
595 596 result.unified = list(self.as_unified(result.lines))
596 597 result.sideside = result.lines
597 598
598 599 return result
599 600
600 601 def parse_lines(self, before_lines, after_lines, source_file, target_file,
601 602 no_hl=False):
602 603 # TODO: dan: investigate doing the diff comparison and fast highlighting
603 604 # on the entire before and after buffered block lines rather than by
604 605 # line, this means we can get better 'fast' highlighting if the context
605 606 # allows it - eg.
606 607 # line 4: """
607 608 # line 5: this gets highlighted as a string
608 609 # line 6: """
609 610
610 611 lines = []
611 612
612 613 before_newline = AttributeDict()
613 614 after_newline = AttributeDict()
614 615 if before_lines and before_lines[-1]['action'] == 'old-no-nl':
615 616 before_newline_line = before_lines.pop(-1)
616 617 before_newline.content = '\n {}'.format(
617 618 render_tokenstream(
618 619 [(x[0], '', x[1])
619 620 for x in [('nonl', before_newline_line['line'])]]))
620 621
621 622 if after_lines and after_lines[-1]['action'] == 'new-no-nl':
622 623 after_newline_line = after_lines.pop(-1)
623 624 after_newline.content = '\n {}'.format(
624 625 render_tokenstream(
625 626 [(x[0], '', x[1])
626 627 for x in [('nonl', after_newline_line['line'])]]))
627 628
628 629 while before_lines or after_lines:
629 630 before, after = None, None
630 631 before_tokens, after_tokens = None, None
631 632
632 633 if before_lines:
633 634 before = before_lines.pop(0)
634 635 if after_lines:
635 636 after = after_lines.pop(0)
636 637
637 638 original = AttributeDict()
638 639 modified = AttributeDict()
639 640
640 641 if before:
641 642 if before['action'] == 'old-no-nl':
642 643 before_tokens = [('nonl', before['line'])]
643 644 else:
644 645 before_tokens = self.get_line_tokens(
645 646 line_text=before['line'], line_number=before['old_lineno'],
646 647 input_file=source_file, no_hl=no_hl)
647 648 original.lineno = before['old_lineno']
648 649 original.content = before['line']
649 650 original.action = self.action_to_op(before['action'])
650 651
651 652 original.get_comment_args = (
652 653 source_file, 'o', before['old_lineno'])
653 654
654 655 if after:
655 656 if after['action'] == 'new-no-nl':
656 657 after_tokens = [('nonl', after['line'])]
657 658 else:
658 659 after_tokens = self.get_line_tokens(
659 660 line_text=after['line'], line_number=after['new_lineno'],
660 661 input_file=target_file, no_hl=no_hl)
661 662 modified.lineno = after['new_lineno']
662 663 modified.content = after['line']
663 664 modified.action = self.action_to_op(after['action'])
664 665
665 666 modified.get_comment_args = (target_file, 'n', after['new_lineno'])
666 667
667 668 # diff the lines
668 669 if before_tokens and after_tokens:
669 670 o_tokens, m_tokens, similarity = tokens_diff(
670 671 before_tokens, after_tokens)
671 672 original.content = render_tokenstream(o_tokens)
672 673 modified.content = render_tokenstream(m_tokens)
673 674 elif before_tokens:
674 675 original.content = render_tokenstream(
675 676 [(x[0], '', x[1]) for x in before_tokens])
676 677 elif after_tokens:
677 678 modified.content = render_tokenstream(
678 679 [(x[0], '', x[1]) for x in after_tokens])
679 680
680 681 if not before_lines and before_newline:
681 682 original.content += before_newline.content
682 683 before_newline = None
683 684 if not after_lines and after_newline:
684 685 modified.content += after_newline.content
685 686 after_newline = None
686 687
687 688 lines.append(AttributeDict({
688 689 'original': original,
689 690 'modified': modified,
690 691 }))
691 692
692 693 return lines
693 694
694 695 def get_line_tokens(self, line_text, line_number, input_file=None, no_hl=False):
695 696 filenode = None
696 697 filename = None
697 698
698 if isinstance(input_file, basestring):
699 if isinstance(input_file, compat.string_types):
699 700 filename = input_file
700 701 elif isinstance(input_file, FileNode):
701 702 filenode = input_file
702 703 filename = input_file.unicode_path
703 704
704 705 hl_mode = self.HL_NONE if no_hl else self.highlight_mode
705 706 if hl_mode == self.HL_REAL and filenode:
706 707 lexer = self._get_lexer_for_filename(filename)
707 708 file_size_allowed = input_file.size < self.max_file_size_limit
708 709 if line_number and file_size_allowed:
709 710 return self.get_tokenized_filenode_line(
710 711 input_file, line_number, lexer)
711 712
712 713 if hl_mode in (self.HL_REAL, self.HL_FAST) and filename:
713 714 lexer = self._get_lexer_for_filename(filename)
714 715 return list(tokenize_string(line_text, lexer))
715 716
716 717 return list(tokenize_string(line_text, plain_text_lexer))
717 718
718 719 def get_tokenized_filenode_line(self, filenode, line_number, lexer=None):
719 720
720 721 if filenode not in self.highlighted_filenodes:
721 722 tokenized_lines = filenode_as_lines_tokens(filenode, lexer)
722 723 self.highlighted_filenodes[filenode] = tokenized_lines
723 724 return self.highlighted_filenodes[filenode][line_number - 1]
724 725
725 726 def action_to_op(self, action):
726 727 return {
727 728 'add': '+',
728 729 'del': '-',
729 730 'unmod': ' ',
730 731 'unmod-no-hl': ' ',
731 732 'old-no-nl': ' ',
732 733 'new-no-nl': ' ',
733 734 }.get(action, action)
734 735
735 736 def as_unified(self, lines):
736 737 """
737 738 Return a generator that yields the lines of a diff in unified order
738 739 """
739 740 def generator():
740 741 buf = []
741 742 for line in lines:
742 743
743 744 if buf and not line.original or line.original.action == ' ':
744 745 for b in buf:
745 746 yield b
746 747 buf = []
747 748
748 749 if line.original:
749 750 if line.original.action == ' ':
750 751 yield (line.original.lineno, line.modified.lineno,
751 752 line.original.action, line.original.content,
752 753 line.original.get_comment_args)
753 754 continue
754 755
755 756 if line.original.action == '-':
756 757 yield (line.original.lineno, None,
757 758 line.original.action, line.original.content,
758 759 line.original.get_comment_args)
759 760
760 761 if line.modified.action == '+':
761 762 buf.append((
762 763 None, line.modified.lineno,
763 764 line.modified.action, line.modified.content,
764 765 line.modified.get_comment_args))
765 766 continue
766 767
767 768 if line.modified:
768 769 yield (None, line.modified.lineno,
769 770 line.modified.action, line.modified.content,
770 771 line.modified.get_comment_args)
771 772
772 773 for b in buf:
773 774 yield b
774 775
775 776 return generator()
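
For reference, each item yielded by `as_unified` is a 5-tuple; a hedged consumption sketch (the `filediff`/`hunk` names are placeholders):

    for hunk in filediff.hunks:
        for old_no, new_no, action, content, comment_args in hunk.unified:
            # action is one of '+', '-', ' '; content is rendered HTML
            print(old_no, new_no, action)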
@@ -1,30 +1,32 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 from pyramid import compat
22
21 23
22 24 def strip_whitespace(value):
23 25 """
24 26 Removes leading/trailing whitespace, newlines, and tabs from the value.
25 27 Implements the `colander.interface.Preparer` interface.
26 28 """
27 if isinstance(value, basestring):
29 if isinstance(value, compat.string_types):
28 30 return value.strip(' \t\n\r')
29 31 else:
30 32 return value
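
This is the pattern used throughout the commit: `pyramid.compat.string_types` is a tuple of the native string base classes (roughly `(basestring,)` on Python 2 and `(str,)` on Python 3), so the isinstance checks keep their old behaviour on both interpreters. A quick sanity check of the preparer:

    >>> strip_whitespace(u'  hello\t\r\n')
    u'hello'
    >>> strip_whitespace(42)   # non-string values pass through untouched
    42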
@@ -1,665 +1,666 b''
1 1 """
2 2 Schema module providing common schema operations.
3 3 """
4 4 import warnings
5 5
6 6 from UserDict import DictMixin
7 7
8 8 import sqlalchemy
9 9
10 10 from sqlalchemy.schema import ForeignKeyConstraint
11 11 from sqlalchemy.schema import UniqueConstraint
12 from pyramid import compat
12 13
13 14 from rhodecode.lib.dbmigrate.migrate.exceptions import *
14 15 from rhodecode.lib.dbmigrate.migrate.changeset import SQLA_07, SQLA_08
15 16 from rhodecode.lib.dbmigrate.migrate.changeset import util
16 17 from rhodecode.lib.dbmigrate.migrate.changeset.databases.visitor import (
17 18 get_engine_visitor, run_single_visitor)
18 19
19 20
20 21 __all__ = [
21 22 'create_column',
22 23 'drop_column',
23 24 'alter_column',
24 25 'rename_table',
25 26 'rename_index',
26 27 'ChangesetTable',
27 28 'ChangesetColumn',
28 29 'ChangesetIndex',
29 30 'ChangesetDefaultClause',
30 31 'ColumnDelta',
31 32 ]
32 33
33 34 def create_column(column, table=None, *p, **kw):
34 35 """Create a column, given the table.
35 36
36 37 API to :meth:`ChangesetColumn.create`.
37 38 """
38 39 if table is not None:
39 40 return table.create_column(column, *p, **kw)
40 41 return column.create(*p, **kw)
41 42
42 43
43 44 def drop_column(column, table=None, *p, **kw):
44 45 """Drop a column, given the table.
45 46
46 47 API to :meth:`ChangesetColumn.drop`.
47 48 """
48 49 if table is not None:
49 50 return table.drop_column(column, *p, **kw)
50 51 return column.drop(*p, **kw)
51 52
52 53
53 54 def rename_table(table, name, engine=None, **kw):
54 55 """Rename a table.
55 56
56 57 If Table instance is given, engine is not used.
57 58
58 59 API to :meth:`ChangesetTable.rename`.
59 60
60 61 :param table: Table to be renamed.
61 62 :param name: New name for Table.
62 63 :param engine: Engine instance.
63 64 :type table: string or Table instance
64 65 :type name: string
65 66 :type engine: obj
66 67 """
67 68 table = _to_table(table, engine)
68 69 table.rename(name, **kw)
69 70
70 71
71 72 def rename_index(index, name, table=None, engine=None, **kw):
72 73 """Rename an index.
73 74
74 75 If Index instance is given,
75 76 table and engine are not used.
76 77
77 78 API to :meth:`ChangesetIndex.rename`.
78 79
79 80 :param index: Index to be renamed.
80 81 :param name: New name for index.
81 82 :param table: Table to which the Index refers.
82 83 :param engine: Engine instance.
83 84 :type index: string or Index instance
84 85 :type name: string
85 86 :type table: string or Table instance
86 87 :type engine: obj
87 88 """
88 89 index = _to_index(index, table, engine)
89 90 index.rename(name, **kw)
90 91
91 92
92 93 def alter_column(*p, **k):
93 94 """Alter a column.
94 95
95 96 This is a helper function that creates a :class:`ColumnDelta` and
96 97 runs it.
97 98
98 99 :argument column:
99 100 The name of the column to be altered or a
100 101 :class:`ChangesetColumn` column representing it.
101 102
102 103 :param table:
103 104 A :class:`~sqlalchemy.schema.Table` or table name
104 105 for the table where the column will be changed.
105 106
106 107 :param engine:
107 108 The :class:`~sqlalchemy.engine.base.Engine` to use for table
108 109 reflection and schema alterations.
109 110
110 111 :returns: A :class:`ColumnDelta` instance representing the change.
111 112
112 113
113 114 """
114 115
115 116 if 'table' not in k and isinstance(p[0], sqlalchemy.Column):
116 117 k['table'] = p[0].table
117 118 if 'engine' not in k:
118 119 k['engine'] = k['table'].bind
119 120
120 121 # deprecation
121 122 if len(p) >= 2 and isinstance(p[1], sqlalchemy.Column):
122 123 warnings.warn(
123 124 "Passing a Column object to alter_column is deprecated."
124 125 " Just pass in keyword parameters instead.",
125 126 MigrateDeprecationWarning
126 127 )
127 128 engine = k['engine']
128 129
129 130 # enough tests seem to break when metadata is always altered
130 131 # that this crutch has to be left in until they can be sorted
131 132 # out
132 133 k['alter_metadata']=True
133 134
134 135 delta = ColumnDelta(*p, **k)
135 136
136 137 visitorcallable = get_engine_visitor(engine, 'schemachanger')
137 138 engine._run_visitor(visitorcallable, delta)
138 139
139 140 return delta
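
A hedged usage sketch based on the docstring above; the table, column, and engine names are made up:

    # reflect an existing table bound to an engine, then rename and
    # widen one of its columns in a single alteration
    meta = sqlalchemy.MetaData(bind=migrate_engine)
    users = sqlalchemy.Table('users', meta, autoload=True)
    alter_column('fullname', name='full_name',
                 type=sqlalchemy.String(255), table=users)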
140 141
141 142
142 143 def _to_table(table, engine=None):
143 144 """Return if instance of Table, else construct new with metadata"""
144 145 if isinstance(table, sqlalchemy.Table):
145 146 return table
146 147
147 148 # Given: table name, maybe an engine
148 149 meta = sqlalchemy.MetaData()
149 150 if engine is not None:
150 151 meta.bind = engine
151 152 return sqlalchemy.Table(table, meta)
152 153
153 154
154 155 def _to_index(index, table=None, engine=None):
155 156 """Return if instance of Index, else construct new with metadata"""
156 157 if isinstance(index, sqlalchemy.Index):
157 158 return index
158 159
159 160 # Given: index name; table name required
160 161 table = _to_table(table, engine)
161 162 ret = sqlalchemy.Index(index)
162 163 ret.table = table
163 164 return ret
164 165
165 166
166 167 class ColumnDelta(DictMixin, sqlalchemy.schema.SchemaItem):
167 168 """Extracts the differences between two columns/column-parameters
168 169
169 170 May receive parameters arranged in several different ways:
170 171
171 172 * **current_column, new_column, \*p, \*\*kw**
172 173 Additional parameters can be specified to override column
173 174 differences.
174 175
175 176 * **current_column, \*p, \*\*kw**
176 177 Additional parameters alter current_column. Table name is extracted
177 178 from current_column object.
178 179 Name is changed to current_column.name from current_name,
179 180 if current_name is specified.
180 181
181 182 * **current_col_name, \*p, \*\*kw**
182 183 Table kw must be specified.
183 184
184 185 :param table: Table to which the current Column should be bound.\
185 186 If table name is given, reflection will be used.
186 187 :type table: string or Table instance
187 188
188 189 :param metadata: A :class:`MetaData` instance to store
189 190 reflected table names
190 191
191 192 :param engine: When reflecting tables, either engine or metadata must \
192 193 be specified to acquire engine object.
193 194 :type engine: :class:`Engine` instance
194 195 :returns: :class:`ColumnDelta` instance that provides an interface to the altered attributes of \
195 196 `result_column` through a :func:`dict`-like object.
196 197
197 198 * :class:`ColumnDelta`.result_column is altered column with new attributes
198 199
199 200 * :class:`ColumnDelta`.current_name is current name of column in db
200 201
201 202
202 203 """
203 204
204 205 # Column attributes that can be altered
205 206 diff_keys = ('name', 'type', 'primary_key', 'nullable',
206 207 'server_onupdate', 'server_default', 'autoincrement')
207 208 diffs = dict()
208 209 __visit_name__ = 'column'
209 210
210 211 def __init__(self, *p, **kw):
211 212 # 'alter_metadata' is not a public api. It exists purely
212 213 # as a crutch until the tests that fail when 'alter_metadata'
213 214 # behaviour always happens can be sorted out
214 215 self.alter_metadata = kw.pop("alter_metadata", False)
215 216
216 217 self.meta = kw.pop("metadata", None)
217 218 self.engine = kw.pop("engine", None)
218 219
219 220 # Things are initialized differently depending on how many column
220 221 # parameters are given. Figure out how many and call the appropriate
221 222 # method.
222 223 if len(p) >= 1 and isinstance(p[0], sqlalchemy.Column):
223 224 # At least one column specified
224 225 if len(p) >= 2 and isinstance(p[1], sqlalchemy.Column):
225 226 # Two columns specified
226 227 diffs = self.compare_2_columns(*p, **kw)
227 228 else:
228 229 # Exactly one column specified
229 230 diffs = self.compare_1_column(*p, **kw)
230 231 else:
231 232 # Zero columns specified
232 if not len(p) or not isinstance(p[0], basestring):
233 if not len(p) or not isinstance(p[0], compat.string_types):
233 234 raise ValueError("First argument must be column name")
234 235 diffs = self.compare_parameters(*p, **kw)
235 236
236 237 self.apply_diffs(diffs)
237 238
238 239 def __repr__(self):
239 240 return '<ColumnDelta altermetadata=%r, %s>' % (
240 241 self.alter_metadata,
241 242 super(ColumnDelta, self).__repr__()
242 243 )
243 244
244 245 def __getitem__(self, key):
245 246 if key not in self.keys():
246 247 raise KeyError("No such diff key, available: %s" % self.diffs )
247 248 return getattr(self.result_column, key)
248 249
249 250 def __setitem__(self, key, value):
250 251 if key not in self.keys():
251 252 raise KeyError("No such diff key, available: %s" % self.diffs )
252 253 setattr(self.result_column, key, value)
253 254
254 255 def __delitem__(self, key):
255 256 raise NotImplementedError
256 257
257 258 def __len__(self):
258 259 raise NotImplementedError
259 260
260 261 def __iter__(self):
261 262 raise NotImplementedError
262 263
263 264 def keys(self):
264 265 return self.diffs.keys()
265 266
266 267 def compare_parameters(self, current_name, *p, **k):
267 268 """Compares Column objects with reflection"""
268 269 self.table = k.pop('table')
269 270 self.result_column = self._table.c.get(current_name)
270 271 if len(p):
271 272 k = self._extract_parameters(p, k, self.result_column)
272 273 return k
273 274
274 275 def compare_1_column(self, col, *p, **k):
275 276 """Compares one Column object"""
276 277 self.table = k.pop('table', None)
277 278 if self.table is None:
278 279 self.table = col.table
279 280 self.result_column = col
280 281 if len(p):
281 282 k = self._extract_parameters(p, k, self.result_column)
282 283 return k
283 284
284 285 def compare_2_columns(self, old_col, new_col, *p, **k):
285 286 """Compares two Column objects"""
286 287 self.process_column(new_col)
287 288 self.table = k.pop('table', None)
288 289 # we cannot use bool() on table in SA06
289 290 if self.table is None:
290 291 self.table = old_col.table
291 292 if self.table is None:
292 293 new_col.table
293 294 self.result_column = old_col
294 295
295 296 # set differences
296 297 # leave out some stuff for later comp
297 298 for key in (set(self.diff_keys) - set(('type',))):
298 299 val = getattr(new_col, key, None)
299 300 if getattr(self.result_column, key, None) != val:
300 301 k.setdefault(key, val)
301 302
302 303 # inspect types
303 304 if not self.are_column_types_eq(self.result_column.type, new_col.type):
304 305 k.setdefault('type', new_col.type)
305 306
306 307 if len(p):
307 308 k = self._extract_parameters(p, k, self.result_column)
308 309 return k
309 310
310 311 def apply_diffs(self, diffs):
311 312 """Populate dict and column object with new values"""
312 313 self.diffs = diffs
313 314 for key in self.diff_keys:
314 315 if key in diffs:
315 316 setattr(self.result_column, key, diffs[key])
316 317
317 318 self.process_column(self.result_column)
318 319
319 320 # create an instance of class type if not yet
320 321 if 'type' in diffs and callable(self.result_column.type):
321 322 self.result_column.type = self.result_column.type()
322 323
323 324 # add column to the table
324 325 if self.table is not None and self.alter_metadata:
325 326 self.result_column.add_to_table(self.table)
326 327
327 328 def are_column_types_eq(self, old_type, new_type):
328 329 """Compares two types to be equal"""
329 330 ret = old_type.__class__ == new_type.__class__
330 331
331 332 # String length is a special case
332 333 if ret and isinstance(new_type, sqlalchemy.types.String):
333 334 ret = (getattr(old_type, 'length', None) == \
334 335 getattr(new_type, 'length', None))
335 336 return ret
336 337
337 338 def _extract_parameters(self, p, k, column):
338 339 """Extracts data from p and modifies diffs"""
339 340 p = list(p)
340 341 while len(p):
341 if isinstance(p[0], basestring):
342 if isinstance(p[0], compat.string_types):
342 343 k.setdefault('name', p.pop(0))
343 344 elif isinstance(p[0], sqlalchemy.types.TypeEngine):
344 345 k.setdefault('type', p.pop(0))
345 346 elif callable(p[0]):
346 347 p[0] = p[0]()
347 348 else:
348 349 break
349 350
350 351 if len(p):
351 352 new_col = column.copy_fixed()
352 353 new_col._init_items(*p)
353 354 k = self.compare_2_columns(column, new_col, **k)
354 355 return k
355 356
356 357 def process_column(self, column):
357 358 """Processes default values for column"""
358 359 # XXX: this is a snippet from SA processing of positional parameters
359 360 toinit = list()
360 361
361 362 if column.server_default is not None:
362 363 if isinstance(column.server_default, sqlalchemy.FetchedValue):
363 364 toinit.append(column.server_default)
364 365 else:
365 366 toinit.append(sqlalchemy.DefaultClause(column.server_default))
366 367 if column.server_onupdate is not None:
367 368 if isinstance(column.server_onupdate, FetchedValue):
368 369 toinit.append(column.server_default)
369 370 else:
370 371 toinit.append(sqlalchemy.DefaultClause(column.server_onupdate,
371 372 for_update=True))
372 373 if toinit:
373 374 column._init_items(*toinit)
374 375
375 376 def _get_table(self):
376 377 return getattr(self, '_table', None)
377 378
378 379 def _set_table(self, table):
379 if isinstance(table, basestring):
380 if isinstance(table, compat.string_types):
380 381 if self.alter_metadata:
381 382 if not self.meta:
382 383 raise ValueError("metadata must be specified for table"
383 384 " reflection when using alter_metadata")
384 385 meta = self.meta
385 386 if self.engine:
386 387 meta.bind = self.engine
387 388 else:
388 389 if not self.engine and not self.meta:
389 390 raise ValueError("engine or metadata must be specified"
390 391 " to reflect tables")
391 392 if not self.engine:
392 393 self.engine = self.meta.bind
393 394 meta = sqlalchemy.MetaData(bind=self.engine)
394 395 self._table = sqlalchemy.Table(table, meta, autoload=True)
395 396 elif isinstance(table, sqlalchemy.Table):
396 397 self._table = table
397 398 if not self.alter_metadata:
398 399 self._table.meta = sqlalchemy.MetaData(bind=self._table.bind)
399 400 def _get_result_column(self):
400 401 return getattr(self, '_result_column', None)
401 402
402 403 def _set_result_column(self, column):
403 404 """Set Column to Table based on alter_metadata evaluation."""
404 405 self.process_column(column)
405 406 if not hasattr(self, 'current_name'):
406 407 self.current_name = column.name
407 408 if self.alter_metadata:
408 409 self._result_column = column
409 410 else:
410 411 self._result_column = column.copy_fixed()
411 412
412 413 table = property(_get_table, _set_table)
413 414 result_column = property(_get_result_column, _set_result_column)
414 415
415 416
416 417 class ChangesetTable(object):
417 418 """Changeset extensions to SQLAlchemy tables."""
418 419
419 420 def create_column(self, column, *p, **kw):
420 421 """Creates a column.
421 422
422 423 The column parameter may be a column definition or the name of
423 424 a column in this table.
424 425
425 426 API to :meth:`ChangesetColumn.create`
426 427
427 428 :param column: Column to be created
428 429 :type column: Column instance or string
429 430 """
430 431 if not isinstance(column, sqlalchemy.Column):
431 432 # It's a column name
432 433 column = getattr(self.c, str(column))
433 434 column.create(table=self, *p, **kw)
434 435
435 436 def drop_column(self, column, *p, **kw):
436 437 """Drop a column, given its name or definition.
437 438
438 439 API to :meth:`ChangesetColumn.drop`
439 440
441 442 :param column: Column to be dropped
441 442 :type column: Column instance or string
442 443 """
443 444 if not isinstance(column, sqlalchemy.Column):
444 445 # It's a column name
445 446 try:
446 447 column = getattr(self.c, str(column))
447 448 except AttributeError:
448 449 # That column isn't part of the table. We don't need
449 450 # its entire definition to drop the column, just its
450 451 # name, so create a dummy column with the same name.
451 452 column = sqlalchemy.Column(str(column), sqlalchemy.Integer())
452 453 column.drop(table=self, *p, **kw)
453 454
454 455 def rename(self, name, connection=None, **kwargs):
455 456 """Rename this table.
456 457
457 458 :param name: New name of the table.
458 459 :type name: string
459 460 :param connection: reuse connection instead of creating a new one.
460 461 :type connection: :class:`sqlalchemy.engine.base.Connection` instance
461 462 """
462 463 engine = self.bind
463 464 self.new_name = name
464 465 visitorcallable = get_engine_visitor(engine, 'schemachanger')
465 466 run_single_visitor(engine, visitorcallable, self, connection, **kwargs)
466 467
467 468 # Fix metadata registration
468 469 self.name = name
469 470 self.deregister()
470 471 self._set_parent(self.metadata)
471 472
472 473 def _meta_key(self):
473 474 """Get the meta key for this table."""
474 475 return sqlalchemy.schema._get_table_key(self.name, self.schema)
475 476
476 477 def deregister(self):
477 478 """Remove this table from its metadata"""
478 479 if SQLA_07:
479 480 self.metadata._remove_table(self.name, self.schema)
480 481 else:
481 482 key = self._meta_key()
482 483 meta = self.metadata
483 484 if key in meta.tables:
484 485 del meta.tables[key]
485 486
486 487
487 488 class ChangesetColumn(object):
488 489 """Changeset extensions to SQLAlchemy columns."""
489 490
490 491 def alter(self, *p, **k):
491 492 """Makes a call to :func:`alter_column` for the column this
492 493 method is called on.
493 494 """
494 495 if 'table' not in k:
495 496 k['table'] = self.table
496 497 if 'engine' not in k:
497 498 k['engine'] = k['table'].bind
498 499 return alter_column(self, *p, **k)
499 500
500 501 def create(self, table=None, index_name=None, unique_name=None,
501 502 primary_key_name=None, populate_default=True, connection=None, **kwargs):
502 503 """Create this column in the database.
503 504
504 505 Assumes the given table exists. ``ALTER TABLE ADD COLUMN``,
505 506 for most databases.
506 507
507 508 :param table: Table instance to create on.
508 509 :param index_name: Creates :class:`ChangesetIndex` on this column.
509 510 :param unique_name: Creates :class:\
510 511 `~migrate.changeset.constraint.UniqueConstraint` on this column.
511 512 :param primary_key_name: Creates :class:\
512 513 `~migrate.changeset.constraint.PrimaryKeyConstraint` on this column.
513 514 :param populate_default: If True, created column will be \
514 515 populated with defaults
515 516 :param connection: reuse connection instead of creating a new one.
516 517 :type table: Table instance
517 518 :type index_name: string
518 519 :type unique_name: string
519 520 :type primary_key_name: string
520 521 :type populate_default: bool
521 522 :type connection: :class:`sqlalchemy.engine.base.Connection` instance
522 523
523 524 :returns: self
524 525 """
525 526 self.populate_default = populate_default
526 527 self.index_name = index_name
527 528 self.unique_name = unique_name
528 529 self.primary_key_name = primary_key_name
529 530 for cons in ('index_name', 'unique_name', 'primary_key_name'):
530 531 self._check_sanity_constraints(cons)
531 532
532 533 self.add_to_table(table)
533 534 engine = self.table.bind
534 535 visitorcallable = get_engine_visitor(engine, 'columngenerator')
535 536 engine._run_visitor(visitorcallable, self, connection, **kwargs)
536 537
537 538 # TODO: reuse existing connection
538 539 if self.populate_default and self.default is not None:
539 540 stmt = table.update().values({self: engine._execute_default(self.default)})
540 541 engine.execute(stmt)
541 542
542 543 return self
543 544
544 545 def drop(self, table=None, connection=None, **kwargs):
545 546 """Drop this column from the database, leaving its table intact.
546 547
547 548 ``ALTER TABLE DROP COLUMN``, for most databases.
548 549
549 550 :param connection: reuse connection instead of creating a new one.
550 551 :type connection: :class:`sqlalchemy.engine.base.Connection` instance
551 552 """
552 553 if table is not None:
553 554 self.table = table
554 555 engine = self.table.bind
555 556 visitorcallable = get_engine_visitor(engine, 'columndropper')
556 557 engine._run_visitor(visitorcallable, self, connection, **kwargs)
557 558 self.remove_from_table(self.table, unset_table=False)
558 559 self.table = None
559 560 return self
560 561
561 562 def add_to_table(self, table):
562 563 if table is not None and self.table is None:
563 564 if SQLA_07:
564 565 table.append_column(self)
565 566 else:
566 567 self._set_parent(table)
567 568
568 569 def _col_name_in_constraint(self,cons,name):
569 570 return False
570 571
571 572 def remove_from_table(self, table, unset_table=True):
572 573 # TODO: remove primary keys, constraints, etc
573 574 if unset_table:
574 575 self.table = None
575 576
576 577 to_drop = set()
577 578 for index in table.indexes:
578 579 columns = []
579 580 for col in index.columns:
580 581 if col.name!=self.name:
581 582 columns.append(col)
582 583 if columns:
583 584 index.columns = columns
584 585 if SQLA_08:
585 586 index.expressions = columns
586 587 else:
587 588 to_drop.add(index)
588 589 table.indexes = table.indexes - to_drop
589 590
590 591 to_drop = set()
591 592 for cons in table.constraints:
592 593 # TODO: deal with other types of constraint
593 594 if isinstance(cons,(ForeignKeyConstraint,
594 595 UniqueConstraint)):
595 596 for col_name in cons.columns:
596 if not isinstance(col_name,basestring):
597 if not isinstance(col_name, compat.string_types):
597 598 col_name = col_name.name
598 599 if self.name==col_name:
599 600 to_drop.add(cons)
600 601 table.constraints = table.constraints - to_drop
601 602
602 603 if table.c.contains_column(self):
603 604 if SQLA_07:
604 605 table._columns.remove(self)
605 606 else:
606 607 table.c.remove(self)
607 608
608 609 # TODO: this is fixed in 0.6
609 610 def copy_fixed(self, **kw):
610 611 """Create a copy of this ``Column``, with all attributes."""
611 612 q = util.safe_quote(self)
612 613 return sqlalchemy.Column(self.name, self.type, self.default,
613 614 key=self.key,
614 615 primary_key=self.primary_key,
615 616 nullable=self.nullable,
616 617 quote=q,
617 618 index=self.index,
618 619 unique=self.unique,
619 620 onupdate=self.onupdate,
620 621 autoincrement=self.autoincrement,
621 622 server_default=self.server_default,
622 623 server_onupdate=self.server_onupdate,
623 624 *[c.copy(**kw) for c in self.constraints])
624 625
625 626 def _check_sanity_constraints(self, name):
626 627 """Check if constraints names are correct"""
627 628 obj = getattr(self, name)
628 629 if (getattr(self, name[:-5]) and not obj):
629 630 raise InvalidConstraintError("Column.create() accepts index_name,"
630 631 " primary_key_name and unique_name to generate constraints")
631 if not isinstance(obj, basestring) and obj is not None:
632 if not isinstance(obj, compat.string_types) and obj is not None:
632 633 raise InvalidConstraintError(
633 634 "%s argument for column must be constraint name" % name)
634 635
635 636
636 637 class ChangesetIndex(object):
637 638 """Changeset extensions to SQLAlchemy Indexes."""
638 639
639 640 __visit_name__ = 'index'
640 641
641 642 def rename(self, name, connection=None, **kwargs):
642 643 """Change the name of an index.
643 644
644 645 :param name: New name of the Index.
645 646 :type name: string
646 647 :param connection: reuse connection instead of creating a new one.
647 648 :type connection: :class:`sqlalchemy.engine.base.Connection` instance
648 649 """
649 650 engine = self.table.bind
650 651 self.new_name = name
651 652 visitorcallable = get_engine_visitor(engine, 'schemachanger')
652 653 engine._run_visitor(visitorcallable, self, connection, **kwargs)
653 654 self.name = name
654 655
655 656
656 657 class ChangesetDefaultClause(object):
657 658 """Implements comparison between :class:`DefaultClause` instances"""
658 659
659 660 def __eq__(self, other):
660 661 if isinstance(other, self.__class__):
661 662 if self.arg == other.arg:
662 663 return True
663 664
664 665 def __ne__(self, other):
665 666 return not self.__eq__(other)
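For reference, the compat.string_types checks introduced above follow the usual six-style convention: a tuple that is safe to hand to isinstance() on both interpreters. A minimal sketch of that shim, written here only as an approximation of what pyramid.compat provides, not its literal source:

import sys

PY3 = sys.version_info[0] >= 3

if PY3:
    string_types = (str,)            # Python 3 has no basestring
else:
    string_types = (basestring,)     # noqa: F821 -- this name exists on Python 2 only

# call sites such as the ones patched above stay identical under both interpreters
print(isinstance(u'col_name', string_types))   # True on 2 and 3
print(isinstance(b'raw bytes', string_types))  # True on 2 (str is a basestring), False on 3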
@@ -1,221 +1,222 b''
1 1 """
2 2 Database schema version management.
3 3 """
4 4 import sys
5 5 import logging
6 6
7 7 from sqlalchemy import (Table, Column, MetaData, String, Text, Integer,
8 8 create_engine)
9 9 from sqlalchemy.sql import and_
10 10 from sqlalchemy import exc as sa_exceptions
11 11 from sqlalchemy.sql import bindparam
12 from pyramid import compat
12 13
13 14 from rhodecode.lib.dbmigrate.migrate import exceptions
14 15 from rhodecode.lib.dbmigrate.migrate.changeset import SQLA_07
15 16 from rhodecode.lib.dbmigrate.migrate.versioning import genmodel, schemadiff
16 17 from rhodecode.lib.dbmigrate.migrate.versioning.repository import Repository
17 18 from rhodecode.lib.dbmigrate.migrate.versioning.util import load_model
18 19 from rhodecode.lib.dbmigrate.migrate.versioning.version import VerNum
19 20
20 21
21 22 log = logging.getLogger(__name__)
22 23
23 24
24 25 class ControlledSchema(object):
25 26 """A database under version control"""
26 27
27 28 def __init__(self, engine, repository):
28 if isinstance(repository, basestring):
29 if isinstance(repository, compat.string_types):
29 30 repository = Repository(repository)
30 31 self.engine = engine
31 32 self.repository = repository
32 33 self.meta = MetaData(engine)
33 34 self.load()
34 35
35 36 def __eq__(self, other):
36 37 """Compare two schemas by repositories and versions"""
37 38 return (self.repository is other.repository \
38 39 and self.version == other.version)
39 40
40 41 def load(self):
41 42 """Load controlled schema version info from DB"""
42 43 tname = self.repository.version_table
43 44 try:
44 45 if not hasattr(self, 'table') or self.table is None:
45 46 self.table = Table(tname, self.meta, autoload=True)
46 47
47 48 result = self.engine.execute(self.table.select(
48 49 self.table.c.repository_id == str(self.repository.id)))
49 50
50 51 data = list(result)[0]
51 52 except:
52 53 cls, exc, tb = sys.exc_info()
53 54 raise exceptions.DatabaseNotControlledError, exc.__str__(), tb
54 55
55 56 self.version = data['version']
56 57 return data
57 58
58 59 def drop(self):
59 60 """
60 61 Remove version control from a database.
61 62 """
62 63 if SQLA_07:
63 64 try:
64 65 self.table.drop()
65 66 except sa_exceptions.DatabaseError:
66 67 raise exceptions.DatabaseNotControlledError(str(self.table))
67 68 else:
68 69 try:
69 70 self.table.drop()
70 71 except (sa_exceptions.SQLError):
71 72 raise exceptions.DatabaseNotControlledError(str(self.table))
72 73
73 74 def changeset(self, version=None):
74 75 """API to Changeset creation.
75 76
76 77 Uses self.version for start version and engine.name
77 78 to get database name.
78 79 """
79 80 database = self.engine.name
80 81 start_ver = self.version
81 82 changeset = self.repository.changeset(database, start_ver, version)
82 83 return changeset
83 84
84 85 def runchange(self, ver, change, step):
85 86 startver = ver
86 87 endver = ver + step
87 88 # Current database version must be correct! Don't run if corrupt!
88 89 if self.version != startver:
89 90 raise exceptions.InvalidVersionError("%s is not %s" % \
90 91 (self.version, startver))
91 92 # Run the change
92 93 change.run(self.engine, step)
93 94
94 95 # Update/refresh database version
95 96 self.update_repository_table(startver, endver)
96 97 self.load()
97 98
98 99 def update_repository_table(self, startver, endver):
99 100 """Update version_table with new information"""
100 101 update = self.table.update(and_(self.table.c.version == int(startver),
101 102 self.table.c.repository_id == str(self.repository.id)))
102 103 self.engine.execute(update, version=int(endver))
103 104
104 105 def upgrade(self, version=None):
105 106 """
106 107 Upgrade (or downgrade) to a specified version, or latest version.
107 108 """
108 109 changeset = self.changeset(version)
109 110 for ver, change in changeset:
110 111 self.runchange(ver, change, changeset.step)
111 112
112 113 def update_db_from_model(self, model):
113 114 """
114 115 Modify the database to match the structure of the current Python model.
115 116 """
116 117 model = load_model(model)
117 118
118 119 diff = schemadiff.getDiffOfModelAgainstDatabase(
119 120 model, self.engine, excludeTables=[self.repository.version_table]
120 121 )
121 122 genmodel.ModelGenerator(diff,self.engine).runB2A()
122 123
123 124 self.update_repository_table(self.version, int(self.repository.latest))
124 125
125 126 self.load()
126 127
127 128 @classmethod
128 129 def create(cls, engine, repository, version=None):
129 130 """
130 131 Declare a database to be under a repository's version control.
131 132
132 133 :raises: :exc:`DatabaseAlreadyControlledError`
133 134 :returns: :class:`ControlledSchema`
134 135 """
135 136 # Confirm that the version # is valid: positive, integer,
136 137 # exists in repos
137 if isinstance(repository, basestring):
138 if isinstance(repository, compat.string_types):
138 139 repository = Repository(repository)
139 140 version = cls._validate_version(repository, version)
140 141 table = cls._create_table_version(engine, repository, version)
141 142 # TODO: history table
142 143 # Load repository information and return
143 144 return cls(engine, repository)
144 145
145 146 @classmethod
146 147 def _validate_version(cls, repository, version):
147 148 """
148 149 Ensures this is a valid version number for this repository.
149 150
150 151 :raises: :exc:`InvalidVersionError` if invalid
151 152 :return: valid version number
152 153 """
153 154 if version is None:
154 155 version = 0
155 156 try:
156 157 version = VerNum(version) # raises valueerror
157 158 if version < 0 or version > repository.latest:
158 159 raise ValueError()
159 160 except ValueError:
160 161 raise exceptions.InvalidVersionError(version)
161 162 return version
162 163
163 164 @classmethod
164 165 def _create_table_version(cls, engine, repository, version):
165 166 """
166 167 Creates the versioning table in a database.
167 168
168 169 :raises: :exc:`DatabaseAlreadyControlledError`
169 170 """
170 171 # Create tables
171 172 tname = repository.version_table
172 173 meta = MetaData(engine)
173 174
174 175 table = Table(
175 176 tname, meta,
176 177 Column('repository_id', String(250), primary_key=True),
177 178 Column('repository_path', Text),
178 179 Column('version', Integer), )
179 180
180 181 # there can be multiple repositories/schemas in the same db
181 182 if not table.exists():
182 183 table.create()
183 184
184 185 # test for existing repository_id
185 186 s = table.select(table.c.repository_id == bindparam("repository_id"))
186 187 result = engine.execute(s, repository_id=repository.id)
187 188 if result.fetchone():
188 189 raise exceptions.DatabaseAlreadyControlledError
189 190
190 191 # Insert data
191 192 engine.execute(table.insert().values(
192 193 repository_id=repository.id,
193 194 repository_path=repository.path,
194 195 version=int(version)))
195 196 return table
196 197
197 198 @classmethod
198 199 def compare_model_to_db(cls, engine, model, repository):
199 200 """
200 201 Compare the current model against the current database.
201 202 """
202 if isinstance(repository, basestring):
203 if isinstance(repository, compat.string_types):
203 204 repository = Repository(repository)
204 205 model = load_model(model)
205 206
206 207 diff = schemadiff.getDiffOfModelAgainstDatabase(
207 208 model, engine, excludeTables=[repository.version_table])
208 209 return diff
209 210
210 211 @classmethod
211 212 def create_model(cls, engine, repository, declarative=False):
212 213 """
213 214 Dump the current database as a Python model.
214 215 """
215 if isinstance(repository, basestring):
216 if isinstance(repository, compat.string_types):
216 217 repository = Repository(repository)
217 218
218 219 diff = schemadiff.getDiffOfModelAgainstDatabase(
219 220 MetaData(), engine, excludeTables=[repository.version_table]
220 221 )
221 222 return genmodel.ModelGenerator(diff, engine, declarative).genBDefinition()
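The compat.string_types guards in ControlledSchema.__init__, create(), compare_model_to_db() and create_model() all serve the same purpose: callers may pass either a repository path or an already constructed Repository object. A hypothetical call pattern, with a placeholder database URL and repository path, and an assumed import location for the class shown above:

from sqlalchemy import create_engine
# assumed module path for the class defined in this file
from rhodecode.lib.dbmigrate.migrate.versioning.schema import ControlledSchema

engine = create_engine('sqlite:///example.db')        # placeholder URL

# a plain string is wrapped in Repository() internally
schema = ControlledSchema.create(engine, '/path/to/migrate_repository')
schema.upgrade()        # apply every pending changeset up to the latest version
print(schema.version)   # version now recorded in the repository's version table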
@@ -1,159 +1,160 b''
1 1 #!/usr/bin/env python
2 2 # -*- coding: utf-8 -*-
3 3
4 4 import shutil
5 5 import warnings
6 6 import logging
7 7 import inspect
8 8 from StringIO import StringIO
9 9
10 from pyramid import compat
10 11 from rhodecode.lib.dbmigrate import migrate
11 12 from rhodecode.lib.dbmigrate.migrate.versioning import genmodel, schemadiff
12 13 from rhodecode.lib.dbmigrate.migrate.versioning.config import operations
13 14 from rhodecode.lib.dbmigrate.migrate.versioning.template import Template
14 15 from rhodecode.lib.dbmigrate.migrate.versioning.script import base
15 16 from rhodecode.lib.dbmigrate.migrate.versioning.util import import_path, load_model, with_engine
16 17 from rhodecode.lib.dbmigrate.migrate.exceptions import MigrateDeprecationWarning, InvalidScriptError, ScriptError
17 18
18 19 log = logging.getLogger(__name__)
19 20 __all__ = ['PythonScript']
20 21
21 22
22 23 class PythonScript(base.BaseScript):
23 24 """Base for Python scripts"""
24 25
25 26 @classmethod
26 27 def create(cls, path, **opts):
27 28 """Create an empty migration script at specified path
28 29
29 30 :returns: :class:`PythonScript instance <migrate.versioning.script.py.PythonScript>`"""
30 31 cls.require_notfound(path)
31 32
32 33 src = Template(opts.pop('templates_path', None)).get_script(theme=opts.pop('templates_theme', None))
33 34 shutil.copy(src, path)
34 35
35 36 return cls(path)
36 37
37 38 @classmethod
38 39 def make_update_script_for_model(cls, engine, oldmodel,
39 40 model, repository, **opts):
40 41 """Create a migration script based on difference between two SA models.
41 42
42 43 :param repository: path to migrate repository
43 44 :param oldmodel: dotted.module.name:SAClass or SAClass object
44 45 :param model: dotted.module.name:SAClass or SAClass object
45 46 :param engine: SQLAlchemy engine
46 47 :type repository: string or :class:`Repository instance <migrate.versioning.repository.Repository>`
47 48 :type oldmodel: string or Class
48 49 :type model: string or Class
49 50 :type engine: Engine instance
50 51 :returns: Upgrade / Downgrade script
51 52 :rtype: string
52 53 """
53 54
54 if isinstance(repository, basestring):
55 if isinstance(repository, compat.string_types):
55 56 # oh dear, an import cycle!
56 57 from rhodecode.lib.dbmigrate.migrate.versioning.repository import Repository
57 58 repository = Repository(repository)
58 59
59 60 oldmodel = load_model(oldmodel)
60 61 model = load_model(model)
61 62
62 63 # Compute differences.
63 64 diff = schemadiff.getDiffOfModelAgainstModel(
64 65 model,
65 66 oldmodel,
66 67 excludeTables=[repository.version_table])
67 68 # TODO: diff can be False (there is no difference?)
68 69 decls, upgradeCommands, downgradeCommands = \
69 70 genmodel.ModelGenerator(diff,engine).genB2AMigration()
70 71
71 72 # Store differences into file.
72 73 src = Template(opts.pop('templates_path', None)).get_script(opts.pop('templates_theme', None))
73 74 with open(src) as f:
74 75 contents = f.read()
75 76
76 77 # generate source
77 78 search = 'def upgrade(migrate_engine):'
78 79 contents = contents.replace(search, '\n\n'.join((decls, search)), 1)
79 80 if upgradeCommands:
80 81 contents = contents.replace(' pass', upgradeCommands, 1)
81 82 if downgradeCommands:
82 83 contents = contents.replace(' pass', downgradeCommands, 1)
83 84 return contents
84 85
85 86 @classmethod
86 87 def verify_module(cls, path):
87 88 """Ensure path is a valid script
88 89
89 90 :param path: Script location
90 91 :type path: string
91 92 :raises: :exc:`InvalidScriptError <migrate.exceptions.InvalidScriptError>`
92 93 :returns: Python module
93 94 """
94 95 # Try to import and get the upgrade() func
95 96 module = import_path(path)
96 97 try:
97 98 assert callable(module.upgrade)
98 99 except Exception as e:
99 100 raise InvalidScriptError(path + ': %s' % str(e))
100 101 return module
101 102
102 103 def preview_sql(self, url, step, **args):
103 104 """Mocks SQLAlchemy Engine to store all executed calls in a string
104 105 and runs :meth:`PythonScript.run <migrate.versioning.script.py.PythonScript.run>`
105 106
106 107 :returns: SQL file
107 108 """
108 109 buf = StringIO()
109 110 args['engine_arg_strategy'] = 'mock'
110 111 args['engine_arg_executor'] = lambda s, p = '': buf.write(str(s) + p)
111 112
112 113 @with_engine
113 114 def go(url, step, **kw):
114 115 engine = kw.pop('engine')
115 116 self.run(engine, step)
116 117 return buf.getvalue()
117 118
118 119 return go(url, step, **args)
119 120
120 121 def run(self, engine, step):
121 122 """Core method of Script file.
122 123 Executes :func:`upgrade` or :func:`downgrade` functions
123 124
124 125 :param engine: SQLAlchemy Engine
125 126 :param step: Operation to run
126 127 :type engine: Engine instance
127 128 :type step: int
128 129 """
129 130 if step > 0:
130 131 op = 'upgrade'
131 132 elif step < 0:
132 133 op = 'downgrade'
133 134 else:
134 135 raise ScriptError("%d is not a valid step" % step)
135 136
136 137 funcname = base.operations[op]
137 138 script_func = self._func(funcname)
138 139
139 140 # check for old way of using engine
140 141 if not inspect.getargspec(script_func)[0]:
141 142 raise TypeError("upgrade/downgrade functions must accept engine"
142 143 " parameter (since version 0.5.4)")
143 144
144 145 script_func(engine)
145 146
146 147 @property
147 148 def module(self):
148 149 """Calls :meth:`migrate.versioning.script.py.verify_module`
149 150 and returns it.
150 151 """
151 152 if not hasattr(self, '_module'):
152 153 self._module = self.verify_module(self.path)
153 154 return self._module
154 155
155 156 def _func(self, funcname):
156 157 if not hasattr(self.module, funcname):
157 158 msg = "Function '%s' is not defined in this script"
158 159 raise ScriptError(msg % funcname)
159 160 return getattr(self.module, funcname)
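PythonScript.make_update_script_for_model shows the same pattern: the repository argument may be a path string or a Repository instance, and the method returns the rendered migration script as text. A hypothetical invocation, with made-up model names and paths used purely for illustration:

from sqlalchemy import create_engine

engine = create_engine('sqlite:///example.db')     # placeholder URL

source = PythonScript.make_update_script_for_model(
    engine,
    oldmodel='myapp.model_old:meta',               # dotted.module:attribute form
    model='myapp.model_new:meta',
    repository='/path/to/migrate_repository')      # string or Repository both work

print(source)   # upgrade()/downgrade() source, ready to be saved as the next version file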
@@ -1,179 +1,181 b''
1 1 #!/usr/bin/env python
2 2 # -*- coding: utf-8 -*-
3 3 """.. currentmodule:: migrate.versioning.util"""
4 4
5 5 import warnings
6 6 import logging
7 7 from decorator import decorator
8 8 from pkg_resources import EntryPoint
9 9
10 10 from sqlalchemy import create_engine
11 11 from sqlalchemy.engine import Engine
12 12 from sqlalchemy.pool import StaticPool
13 13
14 from pyramid import compat
14 15 from rhodecode.lib.dbmigrate.migrate import exceptions
15 16 from rhodecode.lib.dbmigrate.migrate.versioning.util.keyedinstance import KeyedInstance
16 17 from rhodecode.lib.dbmigrate.migrate.versioning.util.importpath import import_path
17 18
18 19
19 20 log = logging.getLogger(__name__)
20 21
22
21 23 def load_model(dotted_name):
22 24 """Import module and use module-level variable".
23 25
24 26 :param dotted_name: path to model in form of string: ``some.python.module:Class``
25 27
26 28 .. versionchanged:: 0.5.4
27 29
28 30 """
29 if isinstance(dotted_name, basestring):
31 if isinstance(dotted_name, compat.string_types):
30 32 if ':' not in dotted_name:
31 33 # backwards compatibility
32 34 warnings.warn('model should be in form of module.model:User '
33 35 'and not module.model.User', exceptions.MigrateDeprecationWarning)
34 36 dotted_name = ':'.join(dotted_name.rsplit('.', 1))
35 37 return EntryPoint.parse('x=%s' % dotted_name).load(False)
36 38 else:
37 39 # Assume it's already loaded.
38 40 return dotted_name
39 41
40 42 def asbool(obj):
41 43 """Do everything to use object as bool"""
42 if isinstance(obj, basestring):
44 if isinstance(obj, compat.string_types):
43 45 obj = obj.strip().lower()
44 46 if obj in ['true', 'yes', 'on', 'y', 't', '1']:
45 47 return True
46 48 elif obj in ['false', 'no', 'off', 'n', 'f', '0']:
47 49 return False
48 50 else:
49 51 raise ValueError("String is not true/false: %r" % obj)
50 52 if obj in (True, False):
51 53 return bool(obj)
52 54 else:
53 55 raise ValueError("String is not true/false: %r" % obj)
54 56
55 57 def guess_obj_type(obj):
56 58 """Do everything to guess object type from string
57 59
58 60 Tries to convert to `int`, `bool` and finally returns the value unchanged if neither succeeds.
59 61
60 62 .. versionadded: 0.5.4
61 63 """
62 64
63 65 result = None
64 66
65 67 try:
66 68 result = int(obj)
67 69 except:
68 70 pass
69 71
70 72 if result is None:
71 73 try:
72 74 result = asbool(obj)
73 75 except:
74 76 pass
75 77
76 78 if result is not None:
77 79 return result
78 80 else:
79 81 return obj
80 82
81 83 @decorator
82 84 def catch_known_errors(f, *a, **kw):
83 85 """Decorator that catches known api errors
84 86
85 87 .. versionadded: 0.5.4
86 88 """
87 89
88 90 try:
89 91 return f(*a, **kw)
90 92 except exceptions.PathFoundError as e:
91 93 raise exceptions.KnownError("The path %s already exists" % e.args[0])
92 94
93 95 def construct_engine(engine, **opts):
94 96 """.. versionadded:: 0.5.4
95 97
96 98 Constructs and returns SQLAlchemy engine.
97 99
98 100 Currently, there are 2 ways to pass create_engine options to :mod:`migrate.versioning.api` functions:
99 101
100 102 :param engine: connection string or an existing engine
101 103 :param engine_dict: python dictionary of options to pass to `create_engine`
102 104 :param engine_arg_*: keyword parameters to pass to `create_engine` (evaluated with :func:`migrate.versioning.util.guess_obj_type`)
103 105 :type engine_dict: dict
104 106 :type engine: string or Engine instance
105 107 :type engine_arg_*: string
106 108 :returns: SQLAlchemy Engine
107 109
108 110 .. note::
109 111
110 112 keyword parameters override ``engine_dict`` values.
111 113
112 114 """
113 115 if isinstance(engine, Engine):
114 116 return engine
115 elif not isinstance(engine, basestring):
117 elif not isinstance(engine, compat.string_types):
116 118 raise ValueError("you need to pass either an existing engine or a database uri")
117 119
118 120 # get options for create_engine
119 121 if opts.get('engine_dict') and isinstance(opts['engine_dict'], dict):
120 122 kwargs = opts['engine_dict']
121 123 else:
122 124 kwargs = {}
123 125
124 126 # DEPRECATED: handle echo the old way
125 127 echo = asbool(opts.get('echo', False))
126 128 if echo:
127 129 warnings.warn('echo=True parameter is deprecated, pass '
128 130 'engine_arg_echo=True or engine_dict={"echo": True}',
129 131 exceptions.MigrateDeprecationWarning)
130 132 kwargs['echo'] = echo
131 133
132 134 # parse keyword arguments
133 135 for key, value in opts.iteritems():
134 136 if key.startswith('engine_arg_'):
135 137 kwargs[key[11:]] = guess_obj_type(value)
136 138
137 139 log.debug('Constructing engine')
138 140 # TODO: return create_engine(engine, poolclass=StaticPool, **kwargs)
139 141 # seems like 0.5.x branch does not work with engine.dispose and staticpool
140 142 return create_engine(engine, **kwargs)
141 143
142 144 @decorator
143 145 def with_engine(f, *a, **kw):
144 146 """Decorator for :mod:`migrate.versioning.api` functions
145 147 to safely close resources after function usage.
146 148
147 149 Passes engine parameters to :func:`construct_engine` and
148 150 resulting parameter is available as kw['engine'].
149 151
150 152 Engine is disposed after wrapped function is executed.
151 153
152 154 .. versionadded: 0.6.0
153 155 """
154 156 url = a[0]
155 157 engine = construct_engine(url, **kw)
156 158
157 159 try:
158 160 kw['engine'] = engine
159 161 return f(*a, **kw)
160 162 finally:
161 163 if isinstance(engine, Engine) and engine is not url:
162 164 log.debug('Disposing SQLAlchemy engine %s', engine)
163 165 engine.dispose()
164 166
165 167
166 168 class Memoize:
167 169 """Memoize(fn) - an instance which acts like fn but memoizes its arguments
168 170 Will only work on functions with non-mutable arguments
169 171
170 172 ActiveState Code 52201
171 173 """
172 174 def __init__(self, fn):
173 175 self.fn = fn
174 176 self.memo = {}
175 177
176 178 def __call__(self, *args):
177 179 if args not in self.memo:
178 180 self.memo[args] = self.fn(*args)
179 181 return self.memo[args]
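construct_engine() accepts either a ready Engine or a URL, with create_engine options supplied in two interchangeable spellings, as its docstring above describes. A short sketch of both forms against a placeholder SQLite URL (the import path is assumed from the currentmodule directive):

from rhodecode.lib.dbmigrate.migrate.versioning.util import construct_engine

# dict form: passed through to create_engine() unchanged
engine = construct_engine('sqlite:///example.db', engine_dict={'echo': True})
engine.dispose()

# keyword form: engine_arg_* values arrive as strings and are coerced by
# guess_obj_type(), so 'true' ends up as the boolean True here
engine = construct_engine('sqlite:///example.db', engine_arg_echo='true')
engine.dispose()   # callers not going through @with_engine clean up themselves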
@@ -1,1043 +1,1044 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22 import logging
23 23 import datetime
24 24 import traceback
25 25 from datetime import date
26 26
27 27 from sqlalchemy import *
28 28 from sqlalchemy.ext.hybrid import hybrid_property
29 29 from sqlalchemy.orm import relationship, joinedload, class_mapper, validates
30 30 from beaker.cache import cache_region, region_invalidate
31 from pyramid import compat
31 32
32 33 from rhodecode.lib.vcs import get_backend
33 34 from rhodecode.lib.vcs.utils.helpers import get_scm
34 35 from rhodecode.lib.vcs.exceptions import VCSError
35 36 from zope.cachedescriptors.property import Lazy as LazyProperty
36 37 from rhodecode.lib.auth import generate_auth_token
37 38 from rhodecode.lib.utils2 import str2bool, safe_str, get_commit_safe, safe_unicode
38 39 from rhodecode.lib.exceptions import UserGroupAssignedException
39 40 from rhodecode.lib.ext_json import json
40 41
41 42 from rhodecode.model.meta import Base, Session
42 43 from rhodecode.lib.caching_query import FromCache
43 44
44 45
45 46 log = logging.getLogger(__name__)
46 47
47 48 #==============================================================================
48 49 # BASE CLASSES
49 50 #==============================================================================
50 51
51 52 class ModelSerializer(json.JSONEncoder):
52 53 """
53 54 Simple Serializer for JSON,
54 55
55 56 usage::
56 57
57 58 to make object customized for serialization implement a __json__
58 59 method that will return a dict for serialization into json
59 60
60 61 example::
61 62
62 63 class Task(object):
63 64
64 65 def __init__(self, name, value):
65 66 self.name = name
66 67 self.value = value
67 68
68 69 def __json__(self):
69 70 return dict(name=self.name,
70 71 value=self.value)
71 72
72 73 """
73 74
74 75 def default(self, obj):
75 76
76 77 if hasattr(obj, '__json__'):
77 78 return obj.__json__()
78 79 else:
79 80 return json.JSONEncoder.default(self, obj)
80 81
81 82 class BaseModel(object):
82 83 """Base Model for all classess
83 84
84 85 """
85 86
86 87 @classmethod
87 88 def _get_keys(cls):
88 89 """return column names for this model """
89 90 return class_mapper(cls).c.keys()
90 91
91 92 def get_dict(self):
92 93 """return dict with keys and values corresponding
93 94 to this model data """
94 95
95 96 d = {}
96 97 for k in self._get_keys():
97 98 d[k] = getattr(self, k)
98 99 return d
99 100
100 101 def get_appstruct(self):
101 102 """return list with keys and values tupples corresponding
102 103 to this model data """
103 104
104 105 l = []
105 106 for k in self._get_keys():
106 107 l.append((k, getattr(self, k),))
107 108 return l
108 109
109 110 def populate_obj(self, populate_dict):
110 111 """populate model with data from given populate_dict"""
111 112
112 113 for k in self._get_keys():
113 114 if k in populate_dict:
114 115 setattr(self, k, populate_dict[k])
115 116
116 117 @classmethod
117 118 def query(cls):
118 119 return Session.query(cls)
119 120
120 121 @classmethod
121 122 def get(cls, id_):
122 123 if id_:
123 124 return cls.query().get(id_)
124 125
125 126 @classmethod
126 127 def getAll(cls):
127 128 return cls.query().all()
128 129
129 130 @classmethod
130 131 def delete(cls, id_):
131 132 obj = cls.query().get(id_)
132 133 Session.delete(obj)
133 134 Session.commit()
134 135
135 136
136 137 class RhodeCodeSetting(Base, BaseModel):
137 138 __tablename__ = 'rhodecode_settings'
138 139 __table_args__ = (UniqueConstraint('app_settings_name'), {'extend_existing':True})
139 140 app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
140 141 app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
141 142 _app_settings_value = Column("app_settings_value", String(255), nullable=True, unique=None, default=None)
142 143
143 144 def __init__(self, k='', v=''):
144 145 self.app_settings_name = k
145 146 self.app_settings_value = v
146 147
147 148
148 149 @validates('_app_settings_value')
149 150 def validate_settings_value(self, key, val):
150 151 assert type(val) == unicode
151 152 return val
152 153
153 154 @hybrid_property
154 155 def app_settings_value(self):
155 156 v = self._app_settings_value
156 157 if v == 'ldap_active':
157 158 v = str2bool(v)
158 159 return v
159 160
160 161 @app_settings_value.setter
161 162 def app_settings_value(self, val):
162 163 """
163 164 Setter that will always make sure we use unicode in app_settings_value
164 165
165 166 :param val:
166 167 """
167 168 self._app_settings_value = safe_unicode(val)
168 169
169 170 def __repr__(self):
170 171 return "<%s('%s:%s')>" % (self.__class__.__name__,
171 172 self.app_settings_name, self.app_settings_value)
172 173
173 174
174 175 @classmethod
175 176 def get_by_name(cls, ldap_key):
176 177 return cls.query()\
177 178 .filter(cls.app_settings_name == ldap_key).scalar()
178 179
179 180 @classmethod
180 181 def get_app_settings(cls, cache=False):
181 182
182 183 ret = cls.query()
183 184
184 185 if cache:
185 186 ret = ret.options(FromCache("sql_cache_short", "get_hg_settings"))
186 187
187 188 if not ret:
188 189 raise Exception('Could not get application settings !')
189 190 settings = {}
190 191 for each in ret:
191 192 settings['rhodecode_' + each.app_settings_name] = \
192 193 each.app_settings_value
193 194
194 195 return settings
195 196
196 197 @classmethod
197 198 def get_ldap_settings(cls, cache=False):
198 199 ret = cls.query()\
199 200 .filter(cls.app_settings_name.startswith('ldap_')).all()
200 201 fd = {}
201 202 for row in ret:
202 203 fd.update({row.app_settings_name:row.app_settings_value})
203 204
204 205 return fd
205 206
206 207
207 208 class RhodeCodeUi(Base, BaseModel):
208 209 __tablename__ = 'rhodecode_ui'
209 210 __table_args__ = (UniqueConstraint('ui_key'), {'extend_existing':True})
210 211
211 212 HOOK_REPO_SIZE = 'changegroup.repo_size'
212 213 HOOK_PUSH = 'pretxnchangegroup.push_logger'
213 214 HOOK_PULL = 'preoutgoing.pull_logger'
214 215
215 216 ui_id = Column("ui_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
216 217 ui_section = Column("ui_section", String(255), nullable=True, unique=None, default=None)
217 218 ui_key = Column("ui_key", String(255), nullable=True, unique=None, default=None)
218 219 ui_value = Column("ui_value", String(255), nullable=True, unique=None, default=None)
219 220 ui_active = Column("ui_active", Boolean(), nullable=True, unique=None, default=True)
220 221
221 222
222 223 @classmethod
223 224 def get_by_key(cls, key):
224 225 return cls.query().filter(cls.ui_key == key)
225 226
226 227
227 228 @classmethod
228 229 def get_builtin_hooks(cls):
229 230 q = cls.query()
230 231 q = q.filter(cls.ui_key.in_([cls.HOOK_REPO_SIZE,
231 232 cls.HOOK_PUSH, cls.HOOK_PULL]))
232 233 return q.all()
233 234
234 235 @classmethod
235 236 def get_custom_hooks(cls):
236 237 q = cls.query()
237 238 q = q.filter(~cls.ui_key.in_([cls.HOOK_REPO_SIZE,
238 239 cls.HOOK_PUSH, cls.HOOK_PULL]))
239 240 q = q.filter(cls.ui_section == 'hooks')
240 241 return q.all()
241 242
242 243 @classmethod
243 244 def create_or_update_hook(cls, key, val):
244 245 new_ui = cls.get_by_key(key).scalar() or cls()
245 246 new_ui.ui_section = 'hooks'
246 247 new_ui.ui_active = True
247 248 new_ui.ui_key = key
248 249 new_ui.ui_value = val
249 250
250 251 Session.add(new_ui)
251 252 Session.commit()
252 253
253 254
254 255 class User(Base, BaseModel):
255 256 __tablename__ = 'users'
256 257 __table_args__ = (UniqueConstraint('username'), UniqueConstraint('email'), {'extend_existing':True})
257 258 user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
258 259 username = Column("username", String(255), nullable=True, unique=None, default=None)
259 260 password = Column("password", String(255), nullable=True, unique=None, default=None)
260 261 active = Column("active", Boolean(), nullable=True, unique=None, default=None)
261 262 admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
262 263 name = Column("name", String(255), nullable=True, unique=None, default=None)
263 264 lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
264 265 email = Column("email", String(255), nullable=True, unique=None, default=None)
265 266 last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
266 267 ldap_dn = Column("ldap_dn", String(255), nullable=True, unique=None, default=None)
267 268 api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
268 269
269 270 user_log = relationship('UserLog', cascade='all')
270 271 user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')
271 272
272 273 repositories = relationship('Repository')
273 274 user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
274 275 repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
275 276
276 277 group_member = relationship('UserGroupMember', cascade='all')
277 278
278 279 @property
279 280 def full_contact(self):
280 281 return '%s %s <%s>' % (self.name, self.lastname, self.email)
281 282
282 283 @property
283 284 def short_contact(self):
284 285 return '%s %s' % (self.name, self.lastname)
285 286
286 287 @property
287 288 def is_admin(self):
288 289 return self.admin
289 290
290 291 def __repr__(self):
291 292 try:
292 293 return "<%s('id:%s:%s')>" % (self.__class__.__name__,
293 294 self.user_id, self.username)
294 295 except:
295 296 return self.__class__.__name__
296 297
297 298 @classmethod
298 299 def get_by_username(cls, username, case_insensitive=False):
299 300 if case_insensitive:
300 301 return Session.query(cls).filter(cls.username.ilike(username)).scalar()
301 302 else:
302 303 return Session.query(cls).filter(cls.username == username).scalar()
303 304
304 305 @classmethod
305 306 def get_by_auth_token(cls, auth_token):
306 307 return cls.query().filter(cls.api_key == auth_token).one()
307 308
308 309 def update_lastlogin(self):
309 310 """Update user lastlogin"""
310 311
311 312 self.last_login = datetime.datetime.now()
312 313 Session.add(self)
313 314 Session.commit()
314 315 log.debug('updated user %s lastlogin', self.username)
315 316
316 317 @classmethod
317 318 def create(cls, form_data):
318 319 from rhodecode.lib.auth import get_crypt_password
319 320
320 321 try:
321 322 new_user = cls()
322 323 for k, v in form_data.items():
323 324 if k == 'password':
324 325 v = get_crypt_password(v)
325 326 setattr(new_user, k, v)
326 327
327 328 new_user.api_key = generate_auth_token(form_data['username'])
328 329 Session.add(new_user)
329 330 Session.commit()
330 331 return new_user
331 332 except:
332 333 log.error(traceback.format_exc())
333 334 Session.rollback()
334 335 raise
335 336
336 337 class UserLog(Base, BaseModel):
337 338 __tablename__ = 'user_logs'
338 339 __table_args__ = {'extend_existing':True}
339 340 user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
340 341 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
341 342 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
342 343 repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
343 344 user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
344 345 action = Column("action", String(1200000), nullable=True, unique=None, default=None)
345 346 action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
346 347
347 348 @property
348 349 def action_as_day(self):
349 350 return date(*self.action_date.timetuple()[:3])
350 351
351 352 user = relationship('User')
352 353 repository = relationship('Repository')
353 354
354 355
355 356 class UserGroup(Base, BaseModel):
356 357 __tablename__ = 'users_groups'
357 358 __table_args__ = {'extend_existing':True}
358 359
359 360 users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
360 361 users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
361 362 users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
362 363
363 364 members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
364 365
365 366 def __repr__(self):
366 367 return '<userGroup(%s)>' % (self.users_group_name)
367 368
368 369 @classmethod
369 370 def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
370 371 if case_insensitive:
371 372 gr = cls.query()\
372 373 .filter(cls.users_group_name.ilike(group_name))
373 374 else:
374 375 gr = cls.query()\
375 376 .filter(cls.users_group_name == group_name)
376 377 if cache:
377 378 gr = gr.options(FromCache("sql_cache_short",
378 379 "get_user_%s" % group_name))
379 380 return gr.scalar()
380 381
381 382 @classmethod
382 383 def get(cls, users_group_id, cache=False):
383 384 users_group = cls.query()
384 385 if cache:
385 386 users_group = users_group.options(FromCache("sql_cache_short",
386 387 "get_users_group_%s" % users_group_id))
387 388 return users_group.get(users_group_id)
388 389
389 390 @classmethod
390 391 def create(cls, form_data):
391 392 try:
392 393 new_user_group = cls()
393 394 for k, v in form_data.items():
394 395 setattr(new_user_group, k, v)
395 396
396 397 Session.add(new_user_group)
397 398 Session.commit()
398 399 return new_user_group
399 400 except:
400 401 log.error(traceback.format_exc())
401 402 Session.rollback()
402 403 raise
403 404
404 405 @classmethod
405 406 def update(cls, users_group_id, form_data):
406 407
407 408 try:
408 409 users_group = cls.get(users_group_id, cache=False)
409 410
410 411 for k, v in form_data.items():
411 412 if k == 'users_group_members':
412 413 users_group.members = []
413 414 Session.flush()
414 415 members_list = []
415 416 if v:
416 v = [v] if isinstance(v, basestring) else v
417 v = [v] if isinstance(v, compat.string_types) else v
417 418 for u_id in set(v):
418 419 member = UserGroupMember(users_group_id, u_id)
419 420 members_list.append(member)
420 421 setattr(users_group, 'members', members_list)
421 422 setattr(users_group, k, v)
422 423
423 424 Session.add(users_group)
424 425 Session.commit()
425 426 except:
426 427 log.error(traceback.format_exc())
427 428 Session.rollback()
428 429 raise
429 430
430 431 @classmethod
431 432 def delete(cls, user_group_id):
432 433 try:
433 434
434 435 # check if this group is not assigned to repo
435 436 assigned_groups = UserGroupRepoToPerm.query()\
436 437 .filter(UserGroupRepoToPerm.users_group_id ==
437 438 user_group_id).all()
438 439
439 440 if assigned_groups:
440 441 raise UserGroupAssignedException(
441 442 'UserGroup assigned to %s' % assigned_groups)
442 443
443 444 users_group = cls.get(user_group_id, cache=False)
444 445 Session.delete(users_group)
445 446 Session.commit()
446 447 except:
447 448 log.error(traceback.format_exc())
448 449 Session.rollback()
449 450 raise
450 451
451 452 class UserGroupMember(Base, BaseModel):
452 453 __tablename__ = 'users_groups_members'
453 454 __table_args__ = {'extend_existing':True}
454 455
455 456 users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
456 457 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
457 458 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
458 459
459 460 user = relationship('User', lazy='joined')
460 461 users_group = relationship('UserGroup')
461 462
462 463 def __init__(self, gr_id='', u_id=''):
463 464 self.users_group_id = gr_id
464 465 self.user_id = u_id
465 466
466 467 @staticmethod
467 468 def add_user_to_group(group, user):
468 469 ugm = UserGroupMember()
469 470 ugm.users_group = group
470 471 ugm.user = user
471 472 Session.add(ugm)
472 473 Session.commit()
473 474 return ugm
474 475
475 476 class Repository(Base, BaseModel):
476 477 __tablename__ = 'repositories'
477 478 __table_args__ = (UniqueConstraint('repo_name'), {'extend_existing':True},)
478 479
479 480 repo_id = Column("repo_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
480 481 repo_name = Column("repo_name", String(255), nullable=False, unique=True, default=None)
481 482 clone_uri = Column("clone_uri", String(255), nullable=True, unique=False, default=None)
482 483 repo_type = Column("repo_type", String(255), nullable=False, unique=False, default='hg')
483 484 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
484 485 private = Column("private", Boolean(), nullable=True, unique=None, default=None)
485 486 enable_statistics = Column("statistics", Boolean(), nullable=True, unique=None, default=True)
486 487 enable_downloads = Column("downloads", Boolean(), nullable=True, unique=None, default=True)
487 488 description = Column("description", String(10000), nullable=True, unique=None, default=None)
488 489 created_on = Column('created_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
489 490
490 491 fork_id = Column("fork_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=False, default=None)
491 492 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=False, default=None)
492 493
493 494
494 495 user = relationship('User')
495 496 fork = relationship('Repository', remote_side=repo_id)
496 497 group = relationship('RepoGroup')
497 498 repo_to_perm = relationship('UserRepoToPerm', cascade='all', order_by='UserRepoToPerm.repo_to_perm_id')
498 499 users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
499 500 stats = relationship('Statistics', cascade='all', uselist=False)
500 501
501 502 followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id', cascade='all')
502 503
503 504 logs = relationship('UserLog', cascade='all')
504 505
505 506 def __repr__(self):
506 507 return "<%s('%s:%s')>" % (self.__class__.__name__,
507 508 self.repo_id, self.repo_name)
508 509
509 510 @classmethod
510 511 def url_sep(cls):
511 512 return '/'
512 513
513 514 @classmethod
514 515 def get_by_repo_name(cls, repo_name):
515 516 q = Session.query(cls).filter(cls.repo_name == repo_name)
516 517 q = q.options(joinedload(Repository.fork))\
517 518 .options(joinedload(Repository.user))\
518 519 .options(joinedload(Repository.group))
519 520 return q.one()
520 521
521 522 @classmethod
522 523 def get_repo_forks(cls, repo_id):
523 524 return cls.query().filter(Repository.fork_id == repo_id)
524 525
525 526 @classmethod
526 527 def base_path(cls):
527 528 """
528 529 Returns base path where all repos are stored
529 530
530 531 :param cls:
531 532 """
532 533 q = Session.query(RhodeCodeUi).filter(RhodeCodeUi.ui_key ==
533 534 cls.url_sep())
534 535 q.options(FromCache("sql_cache_short", "repository_repo_path"))
535 536 return q.one().ui_value
536 537
537 538 @property
538 539 def just_name(self):
539 540 return self.repo_name.split(Repository.url_sep())[-1]
540 541
541 542 @property
542 543 def groups_with_parents(self):
543 544 groups = []
544 545 if self.group is None:
545 546 return groups
546 547
547 548 cur_gr = self.group
548 549 groups.insert(0, cur_gr)
549 550 while 1:
550 551 gr = getattr(cur_gr, 'parent_group', None)
551 552 cur_gr = cur_gr.parent_group
552 553 if gr is None:
553 554 break
554 555 groups.insert(0, gr)
555 556
556 557 return groups
557 558
558 559 @property
559 560 def groups_and_repo(self):
560 561 return self.groups_with_parents, self.just_name
561 562
562 563 @LazyProperty
563 564 def repo_path(self):
564 565 """
565 566 Returns base full path for that repository, i.e. where it actually
566 567 exists on a filesystem
567 568 """
568 569 q = Session.query(RhodeCodeUi).filter(RhodeCodeUi.ui_key ==
569 570 Repository.url_sep())
570 571 q.options(FromCache("sql_cache_short", "repository_repo_path"))
571 572 return q.one().ui_value
572 573
573 574 @property
574 575 def repo_full_path(self):
575 576 p = [self.repo_path]
576 577 # we need to split the name by / since this is how we store the
577 578 # names in the database, but that eventually needs to be converted
578 579 # into a valid system path
579 580 p += self.repo_name.split(Repository.url_sep())
580 581 return os.path.join(*p)
581 582
582 583 def get_new_name(self, repo_name):
583 584 """
584 585 returns new full repository name based on assigned group and new name
585 586
586 587 :param repo_name:
587 588 """
588 589 path_prefix = self.group.full_path_splitted if self.group else []
589 590 return Repository.url_sep().join(path_prefix + [repo_name])
590 591
591 592 @property
592 593 def _config(self):
593 594 """
594 595 Returns db based config object.
595 596 """
596 597 from rhodecode.lib.utils import make_db_config
597 598 return make_db_config(clear_session=False)
598 599
599 600 @classmethod
600 601 def is_valid(cls, repo_name):
601 602 """
602 603 returns True if given repo name is a valid filesystem repository
603 604
604 605 :param cls:
605 606 :param repo_name:
606 607 """
607 608 from rhodecode.lib.utils import is_valid_repo
608 609
609 610 return is_valid_repo(repo_name, cls.base_path())
610 611
611 612
612 613 #==========================================================================
613 614 # SCM PROPERTIES
614 615 #==========================================================================
615 616
616 617 def get_commit(self, rev):
617 618 return get_commit_safe(self.scm_instance, rev)
618 619
619 620 @property
620 621 def tip(self):
621 622 return self.get_commit('tip')
622 623
623 624 @property
624 625 def author(self):
625 626 return self.tip.author
626 627
627 628 @property
628 629 def last_change(self):
629 630 return self.scm_instance.last_change
630 631
631 632 #==========================================================================
632 633 # SCM CACHE INSTANCE
633 634 #==========================================================================
634 635
635 636 @property
636 637 def invalidate(self):
637 638 return CacheInvalidation.invalidate(self.repo_name)
638 639
639 640 def set_invalidate(self):
640 641 """
641 642 set a cache for invalidation for this instance
642 643 """
643 644 CacheInvalidation.set_invalidate(self.repo_name)
644 645
645 646 @LazyProperty
646 647 def scm_instance(self):
647 648 return self.__get_instance()
648 649
649 650 @property
650 651 def scm_instance_cached(self):
651 652 return self.__get_instance()
652 653
653 654 def __get_instance(self):
654 655
655 656 repo_full_path = self.repo_full_path
656 657
657 658 try:
658 659 alias = get_scm(repo_full_path)[0]
659 660 log.debug('Creating instance of %s repository', alias)
660 661 backend = get_backend(alias)
661 662 except VCSError:
662 663 log.error(traceback.format_exc())
663 664 log.error('Perhaps this repository is in db and not in '
664 665 'filesystem run rescan repositories with '
665 666 '"destroy old data " option from admin panel')
666 667 return
667 668
668 669 if alias == 'hg':
669 670
670 671 repo = backend(safe_str(repo_full_path), create=False,
671 672 config=self._config)
672 673
673 674 else:
674 675 repo = backend(repo_full_path, create=False)
675 676
676 677 return repo
677 678
678 679
679 680 class Group(Base, BaseModel):
680 681 __tablename__ = 'groups'
681 682 __table_args__ = (UniqueConstraint('group_name', 'group_parent_id'),
682 683 CheckConstraint('group_id != group_parent_id'), {'extend_existing':True},)
683 684 __mapper_args__ = {'order_by':'group_name'}
684 685
685 686 group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
686 687 group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
687 688 group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
688 689 group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
689 690
690 691 parent_group = relationship('Group', remote_side=group_id)
691 692
692 693 def __init__(self, group_name='', parent_group=None):
693 694 self.group_name = group_name
694 695 self.parent_group = parent_group
695 696
696 697 def __repr__(self):
697 698 return "<%s('%s:%s')>" % (self.__class__.__name__, self.group_id,
698 699 self.group_name)
699 700
700 701 @classmethod
701 702 def url_sep(cls):
702 703 return '/'
703 704
704 705 @classmethod
705 706 def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
706 707 if case_insensitive:
707 708 gr = cls.query()\
708 709 .filter(cls.group_name.ilike(group_name))
709 710 else:
710 711 gr = cls.query()\
711 712 .filter(cls.group_name == group_name)
712 713 if cache:
713 714 gr = gr.options(FromCache("sql_cache_short",
714 715 "get_group_%s" % group_name))
715 716 return gr.scalar()
716 717
717 718 @property
718 719 def parents(self):
719 720 parents_recursion_limit = 5
720 721 groups = []
721 722 if self.parent_group is None:
722 723 return groups
723 724 cur_gr = self.parent_group
724 725 groups.insert(0, cur_gr)
725 726 cnt = 0
726 727 while 1:
727 728 cnt += 1
728 729 gr = getattr(cur_gr, 'parent_group', None)
729 730 cur_gr = cur_gr.parent_group
730 731 if gr is None:
731 732 break
732 733 if cnt == parents_recursion_limit:
733 734 # this will prevent accidental infinite loops
734 735 log.error('group nested more than %s',
735 736 parents_recursion_limit)
736 737 break
737 738
738 739 groups.insert(0, gr)
739 740 return groups
740 741
741 742 @property
742 743 def children(self):
743 744 return Group.query().filter(Group.parent_group == self)
744 745
745 746 @property
746 747 def name(self):
747 748 return self.group_name.split(Group.url_sep())[-1]
748 749
749 750 @property
750 751 def full_path(self):
751 752 return self.group_name
752 753
753 754 @property
754 755 def full_path_splitted(self):
755 756 return self.group_name.split(Group.url_sep())
756 757
757 758 @property
758 759 def repositories(self):
759 760 return Repository.query().filter(Repository.group == self)
760 761
761 762 @property
762 763 def repositories_recursive_count(self):
763 764 cnt = self.repositories.count()
764 765
765 766 def children_count(group):
766 767 cnt = 0
767 768 for child in group.children:
768 769 cnt += child.repositories.count()
769 770 cnt += children_count(child)
770 771 return cnt
771 772
772 773 return cnt + children_count(self)
773 774
774 775
775 776 def get_new_name(self, group_name):
776 777 """
777 778 returns new full group name based on parent and new name
778 779
779 780 :param group_name:
780 781 """
781 782 path_prefix = (self.parent_group.full_path_splitted if
782 783 self.parent_group else [])
783 784 return Group.url_sep().join(path_prefix + [group_name])
784 785
785 786
786 787 class Permission(Base, BaseModel):
787 788 __tablename__ = 'permissions'
788 789 __table_args__ = {'extend_existing':True}
789 790 permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
790 791 permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
791 792 permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)
792 793
793 794 def __repr__(self):
794 795 return "<%s('%s:%s')>" % (self.__class__.__name__,
795 796 self.permission_id, self.permission_name)
796 797
797 798 @classmethod
798 799 def get_by_key(cls, key):
799 800 return cls.query().filter(cls.permission_name == key).scalar()
800 801
801 802 class UserRepoToPerm(Base, BaseModel):
802 803 __tablename__ = 'repo_to_perm'
803 804 __table_args__ = (UniqueConstraint('user_id', 'repository_id'), {'extend_existing':True})
804 805 repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
805 806 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
806 807 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
807 808 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
808 809
809 810 user = relationship('User')
810 811 permission = relationship('Permission')
811 812 repository = relationship('Repository')
812 813
813 814 class UserToPerm(Base, BaseModel):
814 815 __tablename__ = 'user_to_perm'
815 816 __table_args__ = (UniqueConstraint('user_id', 'permission_id'), {'extend_existing':True})
816 817 user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
817 818 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
818 819 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
819 820
820 821 user = relationship('User')
821 822 permission = relationship('Permission')
822 823
823 824 @classmethod
824 825 def has_perm(cls, user_id, perm):
825 826 if not isinstance(perm, Permission):
826 827 raise Exception('perm needs to be an instance of Permission class')
827 828
828 829 return cls.query().filter(cls.user_id == user_id)\
829 830 .filter(cls.permission == perm).scalar() is not None
830 831
831 832 @classmethod
832 833 def grant_perm(cls, user_id, perm):
833 834 if not isinstance(perm, Permission):
834 835 raise Exception('perm needs to be an instance of Permission class')
835 836
836 837 new = cls()
837 838 new.user_id = user_id
838 839 new.permission = perm
839 840 try:
840 841 Session.add(new)
841 842 Session.commit()
842 843 except:
843 844 Session.rollback()
844 845
845 846
846 847 @classmethod
847 848 def revoke_perm(cls, user_id, perm):
848 849 if not isinstance(perm, Permission):
849 850 raise Exception('perm needs to be an instance of Permission class')
850 851
851 852 try:
852 853 cls.query().filter(cls.user_id == user_id) \
853 854 .filter(cls.permission == perm).delete()
854 855 Session.commit()
855 856 except:
856 857 Session.rollback()
857 858
858 859 class UserGroupRepoToPerm(Base, BaseModel):
859 860 __tablename__ = 'users_group_repo_to_perm'
860 861 __table_args__ = (UniqueConstraint('repository_id', 'users_group_id', 'permission_id'), {'extend_existing':True})
861 862 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
862 863 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
863 864 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
864 865 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
865 866
866 867 users_group = relationship('UserGroup')
867 868 permission = relationship('Permission')
868 869 repository = relationship('Repository')
869 870
870 871 def __repr__(self):
871 872 return '<userGroup:%s => %s >' % (self.users_group, self.repository)
872 873
873 874 class UserGroupToPerm(Base, BaseModel):
874 875 __tablename__ = 'users_group_to_perm'
875 876 __table_args__ = {'extend_existing':True}
876 877 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
877 878 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
878 879 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
879 880
880 881 users_group = relationship('UserGroup')
881 882 permission = relationship('Permission')
882 883
883 884
884 885 @classmethod
885 886 def has_perm(cls, users_group_id, perm):
886 887 if not isinstance(perm, Permission):
887 888 raise Exception('perm needs to be an instance of Permission class')
888 889
889 890 return cls.query().filter(cls.users_group_id ==
890 891 users_group_id)\
891 892 .filter(cls.permission == perm)\
892 893 .scalar() is not None
893 894
894 895 @classmethod
895 896 def grant_perm(cls, users_group_id, perm):
896 897 if not isinstance(perm, Permission):
897 898 raise Exception('perm needs to be an instance of Permission class')
898 899
899 900 new = cls()
900 901 new.users_group_id = users_group_id
901 902 new.permission = perm
902 903 try:
903 904 Session.add(new)
904 905 Session.commit()
905 906 except:
906 907 Session.rollback()
907 908
908 909
909 910 @classmethod
910 911 def revoke_perm(cls, users_group_id, perm):
911 912 if not isinstance(perm, Permission):
912 913 raise Exception('perm needs to be an instance of Permission class')
913 914
914 915 try:
915 916 cls.query().filter(cls.users_group_id == users_group_id) \
916 917 .filter(cls.permission == perm).delete()
917 918 Session.commit()
918 919 except:
919 920 Session.rollback()
920 921
921 922
922 923 class UserRepoGroupToPerm(Base, BaseModel):
923 924 __tablename__ = 'group_to_perm'
924 925 __table_args__ = (UniqueConstraint('group_id', 'permission_id'), {'extend_existing':True})
925 926
926 927 group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
927 928 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
928 929 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
929 930 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
930 931
931 932 user = relationship('User')
932 933 permission = relationship('Permission')
933 934 group = relationship('RepoGroup')
934 935
935 936 class Statistics(Base, BaseModel):
936 937 __tablename__ = 'statistics'
937 938 __table_args__ = (UniqueConstraint('repository_id'), {'extend_existing':True})
938 939 stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
939 940 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
940 941 stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
941 942 commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)#JSON data
942 943 commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)#JSON data
943 944 languages = Column("languages", LargeBinary(1000000), nullable=False)#JSON data
944 945
945 946 repository = relationship('Repository', single_parent=True)
946 947
947 948 class UserFollowing(Base, BaseModel):
948 949 __tablename__ = 'user_followings'
949 950 __table_args__ = (UniqueConstraint('user_id', 'follows_repository_id'),
950 951 UniqueConstraint('user_id', 'follows_user_id')
951 952 , {'extend_existing':True})
952 953
953 954 user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
954 955 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
955 956 follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
956 957 follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
957 958 follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
958 959
959 960 user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
960 961
961 962 follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
962 963 follows_repository = relationship('Repository', order_by='Repository.repo_name')
963 964
964 965
965 966 @classmethod
966 967 def get_repo_followers(cls, repo_id):
967 968 return cls.query().filter(cls.follows_repo_id == repo_id)
968 969
969 970 class CacheInvalidation(Base, BaseModel):
970 971 __tablename__ = 'cache_invalidation'
971 972 __table_args__ = (UniqueConstraint('cache_key'), {'extend_existing':True})
972 973 cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
973 974 cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
974 975 cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
975 976 cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
976 977
977 978
978 979 def __init__(self, cache_key, cache_args=''):
979 980 self.cache_key = cache_key
980 981 self.cache_args = cache_args
981 982 self.cache_active = False
982 983
983 984 def __repr__(self):
984 985 return "<%s('%s:%s')>" % (self.__class__.__name__,
985 986 self.cache_id, self.cache_key)
986 987
987 988 @classmethod
988 989 def invalidate(cls, key):
989 990 """
990 991 Returns the Invalidation object if the given key should be invalidated,
991 992 None otherwise. `cache_active = False` means that this cache
992 993 state is not valid and needs to be invalidated.
993 994
994 995 :param key:
995 996 """
996 997 return cls.query()\
997 998 .filter(CacheInvalidation.cache_key == key)\
998 999 .filter(CacheInvalidation.cache_active == False)\
999 1000 .scalar()
1000 1001
1001 1002 @classmethod
1002 1003 def set_invalidate(cls, key):
1003 1004 """
1004 1005 Mark this Cache key for invalidation
1005 1006
1006 1007 :param key:
1007 1008 """
1008 1009
1009 1010 log.debug('marking %s for invalidation', key)
1010 1011 inv_obj = Session.query(cls)\
1011 1012 .filter(cls.cache_key == key).scalar()
1012 1013 if inv_obj:
1013 1014 inv_obj.cache_active = False
1014 1015 else:
1015 1016 log.debug('cache key not found in invalidation db -> creating one')
1016 1017 inv_obj = CacheInvalidation(key)
1017 1018
1018 1019 try:
1019 1020 Session.add(inv_obj)
1020 1021 Session.commit()
1021 1022 except Exception:
1022 1023 log.error(traceback.format_exc())
1023 1024 Session.rollback()
1024 1025
1025 1026 @classmethod
1026 1027 def set_valid(cls, key):
1027 1028 """
1028 1029 Mark this cache key as active and currently cached
1029 1030
1030 1031 :param key:
1031 1032 """
1032 1033 inv_obj = Session.query(CacheInvalidation)\
1033 1034 .filter(CacheInvalidation.cache_key == key).scalar()
1034 1035 inv_obj.cache_active = True
1035 1036 Session.add(inv_obj)
1036 1037 Session.commit()
1037 1038
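# Illustrative note (editorial sketch, not part of the original schema): the
# intended lifecycle of a cache key with the class above; `key` and
# rebuild_expensive_data() are hypothetical names.
#
#     key = 'hypothetical_cache_key'
#     CacheInvalidation.set_invalidate(key)   # mark the key as stale
#     if CacheInvalidation.invalidate(key):   # non-None only while stale
#         rebuild_expensive_data()            # recompute the cached value
#         CacheInvalidation.set_valid(key)    # mark the key as fresh again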
1038 1039 class DbMigrateVersion(Base, BaseModel):
1039 1040 __tablename__ = 'db_migrate_version'
1040 1041 __table_args__ = {'extend_existing':True}
1041 1042 repository_id = Column('repository_id', String(250), primary_key=True)
1042 1043 repository_path = Column('repository_path', Text)
1043 1044 version = Column('version', Integer)
@@ -1,4334 +1,4334 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Database Models for RhodeCode Enterprise
23 23 """
24 24
25 25 import re
26 26 import os
27 27 import time
28 28 import hashlib
29 29 import logging
30 30 import datetime
31 31 import warnings
32 32 import ipaddress
33 33 import functools
34 34 import traceback
35 35 import collections
36 36
37 37 from sqlalchemy import (
38 38 or_, and_, not_, func, TypeDecorator, event,
39 39 Index, Sequence, UniqueConstraint, ForeignKey, CheckConstraint, Column,
40 40 Boolean, String, Unicode, UnicodeText, DateTime, Integer, LargeBinary,
41 41 Text, Float, PickleType)
42 42 from sqlalchemy.sql.expression import true, false
43 43 from sqlalchemy.sql.functions import coalesce, count # pragma: no cover
44 44 from sqlalchemy.orm import (
45 45 relationship, joinedload, class_mapper, validates, aliased)
46 46 from sqlalchemy.ext.declarative import declared_attr
47 47 from sqlalchemy.ext.hybrid import hybrid_property
48 48 from sqlalchemy.exc import IntegrityError # pragma: no cover
49 49 from sqlalchemy.dialects.mysql import LONGTEXT
50 50 from beaker.cache import cache_region
51 51 from zope.cachedescriptors.property import Lazy as LazyProperty
52
52 from pyramid import compat
53 53 from pyramid.threadlocal import get_current_request
54 54
55 55 from rhodecode.translation import _
56 56 from rhodecode.lib.vcs import get_vcs_instance
57 57 from rhodecode.lib.vcs.backends.base import EmptyCommit, Reference
58 58 from rhodecode.lib.utils2 import (
59 59 str2bool, safe_str, get_commit_safe, safe_unicode, md5_safe,
60 60 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict,
61 61 glob2re, StrictAttributeDict, cleaned_uri)
62 62 from rhodecode.lib.jsonalchemy import MutationObj, MutationList, JsonType, \
63 63 JsonRaw
64 64 from rhodecode.lib.ext_json import json
65 65 from rhodecode.lib.caching_query import FromCache
66 66 from rhodecode.lib.encrypt import AESCipher
67 67
68 68 from rhodecode.model.meta import Base, Session
69 69
70 70 URL_SEP = '/'
71 71 log = logging.getLogger(__name__)
72 72
73 73 # =============================================================================
74 74 # BASE CLASSES
75 75 # =============================================================================
76 76
77 77 # this is propagated from .ini file rhodecode.encrypted_values.secret or
78 78 # beaker.session.secret if first is not set.
79 79 # and initialized at environment.py
80 80 ENCRYPTION_KEY = None
81 81
82 82 # used to sort permissions by types, '#' used here is not allowed to be in
83 83 # usernames, and it's very early in sorted string.printable table.
84 84 PERMISSION_TYPE_SORT = {
85 85 'admin': '####',
86 86 'write': '###',
87 87 'read': '##',
88 88 'none': '#',
89 89 }
90 90
91 91
92 92 def display_user_sort(obj):
93 93 """
94 94 Sort function used to sort permissions in .permissions() function of
95 95 Repository, RepoGroup, UserGroup. It also puts the default user in front
96 96 of all other resources.
97 97 """
98 98
99 99 if obj.username == User.DEFAULT_USER:
100 100 return '#####'
101 101 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
102 102 return prefix + obj.username
103 103
104 104
105 105 def display_user_group_sort(obj):
106 106 """
107 107 Sort function used to sort permissions in .permissions() function of
108 108 Repository, RepoGroup, UserGroup. It also puts the default user in front
109 109 of all other resources.
110 110 """
111 111
112 112 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
113 113 return prefix + obj.users_group_name
114 114
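# Illustrative note (editorial sketch, not part of the original module): the
# '#' prefixes above work because '#' sorts before every character allowed in
# usernames, so a key with more leading '#' characters compares smaller.
# With hypothetical permission rows, sorting therefore yields:
#
#     perm_rows = sorted(perm_rows, key=display_user_sort)
#     # default user ('#####'), then admin ('####' + name),
#     # write ('###' + name), read ('##' + name), none ('#' + name)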
115 115
116 116 def _hash_key(k):
117 117 return md5_safe(k)
118 118
119 119
120 120 def in_filter_generator(qry, items, limit=500):
121 121 """
122 122 Splits IN() into multiple with OR
123 123 e.g.::
124 124 cnt = Repository.query().filter(
125 125 or_(
126 126 *in_filter_generator(Repository.repo_id, range(100000))
127 127 )).count()
128 128 """
129 129 if not items:
130 130 # empty list will cause empty query which might cause security issues
131 131 # this can lead to hidden unpleasant results
132 132 items = [-1]
133 133
134 134 parts = []
135 135 for chunk in xrange(0, len(items), limit):
136 136 parts.append(
137 137 qry.in_(items[chunk: chunk + limit])
138 138 )
139 139
140 140 return parts
141 141
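# Illustrative note (editorial sketch, not part of the original module): how
# the generator above is typically consumed; `repo_ids` is a hypothetical list.
#
#     repo_ids = list(range(100000))
#     cnt = Repository.query().filter(
#         or_(*in_filter_generator(Repository.repo_id, repo_ids))
#     ).count()
#
# Each chunk of at most `limit` ids becomes a single IN() clause and the
# chunks are OR-ed together, keeping every IN() list below database limits.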
142 142
143 143 class EncryptedTextValue(TypeDecorator):
144 144 """
145 145 Special column for encrypted long text data, use like::
146 146
147 147 value = Column("encrypted_value", EncryptedTextValue(), nullable=False)
148 148
149 149 This column is intelligent: if the value is in unencrypted form it returns
150 150 the unencrypted form, but on save it always encrypts.
151 151 """
152 152 impl = Text
153 153
154 154 def process_bind_param(self, value, dialect):
155 155 if not value:
156 156 return value
157 157 if value.startswith('enc$aes$') or value.startswith('enc$aes_hmac$'):
158 158 # protect against double encrypting if someone manually
159 159 # starts doing it
160 160 raise ValueError('value needs to be in unencrypted format, ie. '
161 161 'not starting with enc$aes')
162 162 return 'enc$aes_hmac$%s' % AESCipher(
163 163 ENCRYPTION_KEY, hmac=True).encrypt(value)
164 164
165 165 def process_result_value(self, value, dialect):
166 166 import rhodecode
167 167
168 168 if not value:
169 169 return value
170 170
171 171 parts = value.split('$', 3)
172 172 if not len(parts) == 3:
173 173 # probably not encrypted values
174 174 return value
175 175 else:
176 176 if parts[0] != 'enc':
177 177 # parts ok but without our header ?
178 178 return value
179 179 enc_strict_mode = str2bool(rhodecode.CONFIG.get(
180 180 'rhodecode.encrypted_values.strict') or True)
181 181 # at that stage we know it's our encryption
182 182 if parts[1] == 'aes':
183 183 decrypted_data = AESCipher(ENCRYPTION_KEY).decrypt(parts[2])
184 184 elif parts[1] == 'aes_hmac':
185 185 decrypted_data = AESCipher(
186 186 ENCRYPTION_KEY, hmac=True,
187 187 strict_verification=enc_strict_mode).decrypt(parts[2])
188 188 else:
189 189 raise ValueError(
190 190 'Encryption type part is wrong, must be `aes` '
191 191 'or `aes_hmac`, got `%s` instead' % (parts[1]))
192 192 return decrypted_data
193 193
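# Illustrative note (editorial sketch, not part of the original module): how a
# model might declare an encrypted column with the type above; the class and
# column names are hypothetical.
#
#     class HypotheticalSecret(Base, BaseModel):
#         __tablename__ = 'hypothetical_secrets'
#         secret_id = Column('secret_id', Integer(), primary_key=True)
#         secret_value = Column('secret_value', EncryptedTextValue(), nullable=True)
#
# On save process_bind_param() stores 'enc$aes_hmac$<ciphertext>'; on load
# process_result_value() returns the decrypted plaintext transparently.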
194 194
195 195 class BaseModel(object):
196 196 """
197 197 Base Model for all classes
198 198 """
199 199
200 200 @classmethod
201 201 def _get_keys(cls):
202 202 """return column names for this model """
203 203 return class_mapper(cls).c.keys()
204 204
205 205 def get_dict(self):
206 206 """
207 207 return dict with keys and values corresponding
208 208 to this model data """
209 209
210 210 d = {}
211 211 for k in self._get_keys():
212 212 d[k] = getattr(self, k)
213 213
214 214 # also use __json__() if present to get additional fields
215 215 _json_attr = getattr(self, '__json__', None)
216 216 if _json_attr:
217 217 # update with attributes from __json__
218 218 if callable(_json_attr):
219 219 _json_attr = _json_attr()
220 220 for k, val in _json_attr.iteritems():
221 221 d[k] = val
222 222 return d
223 223
224 224 def get_appstruct(self):
225 225 """return list with keys and values tuples corresponding
226 226 to this model data """
227 227
228 228 lst = []
229 229 for k in self._get_keys():
230 230 lst.append((k, getattr(self, k),))
231 231 return lst
232 232
233 233 def populate_obj(self, populate_dict):
234 234 """populate model with data from given populate_dict"""
235 235
236 236 for k in self._get_keys():
237 237 if k in populate_dict:
238 238 setattr(self, k, populate_dict[k])
239 239
240 240 @classmethod
241 241 def query(cls):
242 242 return Session().query(cls)
243 243
244 244 @classmethod
245 245 def get(cls, id_):
246 246 if id_:
247 247 return cls.query().get(id_)
248 248
249 249 @classmethod
250 250 def get_or_404(cls, id_):
251 251 from pyramid.httpexceptions import HTTPNotFound
252 252
253 253 try:
254 254 id_ = int(id_)
255 255 except (TypeError, ValueError):
256 256 raise HTTPNotFound()
257 257
258 258 res = cls.query().get(id_)
259 259 if not res:
260 260 raise HTTPNotFound()
261 261 return res
262 262
263 263 @classmethod
264 264 def getAll(cls):
265 265 # deprecated and left for backward compatibility
266 266 return cls.get_all()
267 267
268 268 @classmethod
269 269 def get_all(cls):
270 270 return cls.query().all()
271 271
272 272 @classmethod
273 273 def delete(cls, id_):
274 274 obj = cls.query().get(id_)
275 275 Session().delete(obj)
276 276
277 277 @classmethod
278 278 def identity_cache(cls, session, attr_name, value):
279 279 exist_in_session = []
280 280 for (item_cls, pkey), instance in session.identity_map.items():
281 281 if cls == item_cls and getattr(instance, attr_name) == value:
282 282 exist_in_session.append(instance)
283 283 if exist_in_session:
284 284 if len(exist_in_session) == 1:
285 285 return exist_in_session[0]
286 286 log.exception(
287 287 'multiple objects with attr %s and '
288 288 'value %s found with same name: %r',
289 289 attr_name, value, exist_in_session)
290 290
291 291 def __repr__(self):
292 292 if hasattr(self, '__unicode__'):
293 293 # python repr needs to return str
294 294 try:
295 295 return safe_str(self.__unicode__())
296 296 except UnicodeDecodeError:
297 297 pass
298 298 return '<DB:%s>' % (self.__class__.__name__)
299 299
300 300
301 301 class RhodeCodeSetting(Base, BaseModel):
302 302 __tablename__ = 'rhodecode_settings'
303 303 __table_args__ = (
304 304 UniqueConstraint('app_settings_name'),
305 305 {'extend_existing': True, 'mysql_engine': 'InnoDB',
306 306 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
307 307 )
308 308
309 309 SETTINGS_TYPES = {
310 310 'str': safe_str,
311 311 'int': safe_int,
312 312 'unicode': safe_unicode,
313 313 'bool': str2bool,
314 314 'list': functools.partial(aslist, sep=',')
315 315 }
316 316 DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
317 317 GLOBAL_CONF_KEY = 'app_settings'
318 318
319 319 app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
320 320 app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
321 321 _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
322 322 _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)
323 323
324 324 def __init__(self, key='', val='', type='unicode'):
325 325 self.app_settings_name = key
326 326 self.app_settings_type = type
327 327 self.app_settings_value = val
328 328
329 329 @validates('_app_settings_value')
330 330 def validate_settings_value(self, key, val):
331 331 assert type(val) == unicode
332 332 return val
333 333
334 334 @hybrid_property
335 335 def app_settings_value(self):
336 336 v = self._app_settings_value
337 337 _type = self.app_settings_type
338 338 if _type:
339 339 _type = self.app_settings_type.split('.')[0]
340 340 # decode the encrypted value
341 341 if 'encrypted' in self.app_settings_type:
342 342 cipher = EncryptedTextValue()
343 343 v = safe_unicode(cipher.process_result_value(v, None))
344 344
345 345 converter = self.SETTINGS_TYPES.get(_type) or \
346 346 self.SETTINGS_TYPES['unicode']
347 347 return converter(v)
348 348
349 349 @app_settings_value.setter
350 350 def app_settings_value(self, val):
351 351 """
352 352 Setter that will always make sure we use unicode in app_settings_value
353 353
354 354 :param val:
355 355 """
356 356 val = safe_unicode(val)
357 357 # encode the encrypted value
358 358 if 'encrypted' in self.app_settings_type:
359 359 cipher = EncryptedTextValue()
360 360 val = safe_unicode(cipher.process_bind_param(val, None))
361 361 self._app_settings_value = val
362 362
363 363 @hybrid_property
364 364 def app_settings_type(self):
365 365 return self._app_settings_type
366 366
367 367 @app_settings_type.setter
368 368 def app_settings_type(self, val):
369 369 if val.split('.')[0] not in self.SETTINGS_TYPES:
370 370 raise Exception('type must be one of %s got %s'
371 371 % (self.SETTINGS_TYPES.keys(), val))
372 372 self._app_settings_type = val
373 373
374 374 def __unicode__(self):
375 375 return u"<%s('%s:%s[%s]')>" % (
376 376 self.__class__.__name__,
377 377 self.app_settings_name, self.app_settings_value,
378 378 self.app_settings_type
379 379 )
380 380
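# Illustrative note (editorial sketch, not part of the original module): how
# the SETTINGS_TYPES converters above round-trip a typed setting; the key name
# is hypothetical.
#
#     setting = RhodeCodeSetting('hypothetical_flag', 'True', type='bool')
#     setting.app_settings_value   # -> True, converted via str2bool()
#
# The raw column always stores unicode; the hybrid property applies the
# converter selected by app_settings_type when the value is read back.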
381 381
382 382 class RhodeCodeUi(Base, BaseModel):
383 383 __tablename__ = 'rhodecode_ui'
384 384 __table_args__ = (
385 385 UniqueConstraint('ui_key'),
386 386 {'extend_existing': True, 'mysql_engine': 'InnoDB',
387 387 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
388 388 )
389 389
390 390 HOOK_REPO_SIZE = 'changegroup.repo_size'
391 391 # HG
392 392 HOOK_PRE_PULL = 'preoutgoing.pre_pull'
393 393 HOOK_PULL = 'outgoing.pull_logger'
394 394 HOOK_PRE_PUSH = 'prechangegroup.pre_push'
395 395 HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push'
396 396 HOOK_PUSH = 'changegroup.push_logger'
397 397 HOOK_PUSH_KEY = 'pushkey.key_push'
398 398
399 399 # TODO: johbo: Unify way how hooks are configured for git and hg,
400 400 # git part is currently hardcoded.
401 401
402 402 # SVN PATTERNS
403 403 SVN_BRANCH_ID = 'vcs_svn_branch'
404 404 SVN_TAG_ID = 'vcs_svn_tag'
405 405
406 406 ui_id = Column(
407 407 "ui_id", Integer(), nullable=False, unique=True, default=None,
408 408 primary_key=True)
409 409 ui_section = Column(
410 410 "ui_section", String(255), nullable=True, unique=None, default=None)
411 411 ui_key = Column(
412 412 "ui_key", String(255), nullable=True, unique=None, default=None)
413 413 ui_value = Column(
414 414 "ui_value", String(255), nullable=True, unique=None, default=None)
415 415 ui_active = Column(
416 416 "ui_active", Boolean(), nullable=True, unique=None, default=True)
417 417
418 418 def __repr__(self):
419 419 return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.ui_section,
420 420 self.ui_key, self.ui_value)
421 421
422 422
423 423 class RepoRhodeCodeSetting(Base, BaseModel):
424 424 __tablename__ = 'repo_rhodecode_settings'
425 425 __table_args__ = (
426 426 UniqueConstraint(
427 427 'app_settings_name', 'repository_id',
428 428 name='uq_repo_rhodecode_setting_name_repo_id'),
429 429 {'extend_existing': True, 'mysql_engine': 'InnoDB',
430 430 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
431 431 )
432 432
433 433 repository_id = Column(
434 434 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
435 435 nullable=False)
436 436 app_settings_id = Column(
437 437 "app_settings_id", Integer(), nullable=False, unique=True,
438 438 default=None, primary_key=True)
439 439 app_settings_name = Column(
440 440 "app_settings_name", String(255), nullable=True, unique=None,
441 441 default=None)
442 442 _app_settings_value = Column(
443 443 "app_settings_value", String(4096), nullable=True, unique=None,
444 444 default=None)
445 445 _app_settings_type = Column(
446 446 "app_settings_type", String(255), nullable=True, unique=None,
447 447 default=None)
448 448
449 449 repository = relationship('Repository')
450 450
451 451 def __init__(self, repository_id, key='', val='', type='unicode'):
452 452 self.repository_id = repository_id
453 453 self.app_settings_name = key
454 454 self.app_settings_type = type
455 455 self.app_settings_value = val
456 456
457 457 @validates('_app_settings_value')
458 458 def validate_settings_value(self, key, val):
459 459 assert type(val) == unicode
460 460 return val
461 461
462 462 @hybrid_property
463 463 def app_settings_value(self):
464 464 v = self._app_settings_value
465 465 type_ = self.app_settings_type
466 466 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
467 467 converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
468 468 return converter(v)
469 469
470 470 @app_settings_value.setter
471 471 def app_settings_value(self, val):
472 472 """
473 473 Setter that will always make sure we use unicode in app_settings_value
474 474
475 475 :param val:
476 476 """
477 477 self._app_settings_value = safe_unicode(val)
478 478
479 479 @hybrid_property
480 480 def app_settings_type(self):
481 481 return self._app_settings_type
482 482
483 483 @app_settings_type.setter
484 484 def app_settings_type(self, val):
485 485 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
486 486 if val not in SETTINGS_TYPES:
487 487 raise Exception('type must be one of %s got %s'
488 488 % (SETTINGS_TYPES.keys(), val))
489 489 self._app_settings_type = val
490 490
491 491 def __unicode__(self):
492 492 return u"<%s('%s:%s:%s[%s]')>" % (
493 493 self.__class__.__name__, self.repository.repo_name,
494 494 self.app_settings_name, self.app_settings_value,
495 495 self.app_settings_type
496 496 )
497 497
498 498
499 499 class RepoRhodeCodeUi(Base, BaseModel):
500 500 __tablename__ = 'repo_rhodecode_ui'
501 501 __table_args__ = (
502 502 UniqueConstraint(
503 503 'repository_id', 'ui_section', 'ui_key',
504 504 name='uq_repo_rhodecode_ui_repository_id_section_key'),
505 505 {'extend_existing': True, 'mysql_engine': 'InnoDB',
506 506 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
507 507 )
508 508
509 509 repository_id = Column(
510 510 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
511 511 nullable=False)
512 512 ui_id = Column(
513 513 "ui_id", Integer(), nullable=False, unique=True, default=None,
514 514 primary_key=True)
515 515 ui_section = Column(
516 516 "ui_section", String(255), nullable=True, unique=None, default=None)
517 517 ui_key = Column(
518 518 "ui_key", String(255), nullable=True, unique=None, default=None)
519 519 ui_value = Column(
520 520 "ui_value", String(255), nullable=True, unique=None, default=None)
521 521 ui_active = Column(
522 522 "ui_active", Boolean(), nullable=True, unique=None, default=True)
523 523
524 524 repository = relationship('Repository')
525 525
526 526 def __repr__(self):
527 527 return '<%s[%s:%s]%s=>%s]>' % (
528 528 self.__class__.__name__, self.repository.repo_name,
529 529 self.ui_section, self.ui_key, self.ui_value)
530 530
531 531
532 532 class User(Base, BaseModel):
533 533 __tablename__ = 'users'
534 534 __table_args__ = (
535 535 UniqueConstraint('username'), UniqueConstraint('email'),
536 536 Index('u_username_idx', 'username'),
537 537 Index('u_email_idx', 'email'),
538 538 {'extend_existing': True, 'mysql_engine': 'InnoDB',
539 539 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
540 540 )
541 541 DEFAULT_USER = 'default'
542 542 DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org'
543 543 DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
544 544
545 545 user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
546 546 username = Column("username", String(255), nullable=True, unique=None, default=None)
547 547 password = Column("password", String(255), nullable=True, unique=None, default=None)
548 548 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
549 549 admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
550 550 name = Column("firstname", String(255), nullable=True, unique=None, default=None)
551 551 lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
552 552 _email = Column("email", String(255), nullable=True, unique=None, default=None)
553 553 last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
554 554 last_activity = Column('last_activity', DateTime(timezone=False), nullable=True, unique=None, default=None)
555 555
556 556 extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None)
557 557 extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None)
558 558 _api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
559 559 inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
560 560 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
561 561 _user_data = Column("user_data", LargeBinary(), nullable=True) # JSON data
562 562
563 563 user_log = relationship('UserLog')
564 564 user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')
565 565
566 566 repositories = relationship('Repository')
567 567 repository_groups = relationship('RepoGroup')
568 568 user_groups = relationship('UserGroup')
569 569
570 570 user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
571 571 followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')
572 572
573 573 repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
574 574 repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all')
575 575 user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all')
576 576
577 577 group_member = relationship('UserGroupMember', cascade='all')
578 578
579 579 notifications = relationship('UserNotification', cascade='all')
580 580 # notifications assigned to this user
581 581 user_created_notifications = relationship('Notification', cascade='all')
582 582 # comments created by this user
583 583 user_comments = relationship('ChangesetComment', cascade='all')
584 584 # user profile extra info
585 585 user_emails = relationship('UserEmailMap', cascade='all')
586 586 user_ip_map = relationship('UserIpMap', cascade='all')
587 587 user_auth_tokens = relationship('UserApiKeys', cascade='all')
588 588 user_ssh_keys = relationship('UserSshKeys', cascade='all')
589 589
590 590 # gists
591 591 user_gists = relationship('Gist', cascade='all')
592 592 # user pull requests
593 593 user_pull_requests = relationship('PullRequest', cascade='all')
594 594 # external identities
595 595 extenal_identities = relationship(
596 596 'ExternalIdentity',
597 597 primaryjoin="User.user_id==ExternalIdentity.local_user_id",
598 598 cascade='all')
599 599 # review rules
600 600 user_review_rules = relationship('RepoReviewRuleUser', cascade='all')
601 601
602 602 def __unicode__(self):
603 603 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
604 604 self.user_id, self.username)
605 605
606 606 @hybrid_property
607 607 def email(self):
608 608 return self._email
609 609
610 610 @email.setter
611 611 def email(self, val):
612 612 self._email = val.lower() if val else None
613 613
614 614 @hybrid_property
615 615 def first_name(self):
616 616 from rhodecode.lib import helpers as h
617 617 if self.name:
618 618 return h.escape(self.name)
619 619 return self.name
620 620
621 621 @hybrid_property
622 622 def last_name(self):
623 623 from rhodecode.lib import helpers as h
624 624 if self.lastname:
625 625 return h.escape(self.lastname)
626 626 return self.lastname
627 627
628 628 @hybrid_property
629 629 def api_key(self):
630 630 """
631 631 Fetch if exist an auth-token with role ALL connected to this user
632 632 """
633 633 user_auth_token = UserApiKeys.query()\
634 634 .filter(UserApiKeys.user_id == self.user_id)\
635 635 .filter(or_(UserApiKeys.expires == -1,
636 636 UserApiKeys.expires >= time.time()))\
637 637 .filter(UserApiKeys.role == UserApiKeys.ROLE_ALL).first()
638 638 if user_auth_token:
639 639 user_auth_token = user_auth_token.api_key
640 640
641 641 return user_auth_token
642 642
643 643 @api_key.setter
644 644 def api_key(self, val):
645 645 # don't allow setting the API key; this is deprecated for now
646 646 self._api_key = None
647 647
648 648 @property
649 649 def reviewer_pull_requests(self):
650 650 return PullRequestReviewers.query() \
651 651 .options(joinedload(PullRequestReviewers.pull_request)) \
652 652 .filter(PullRequestReviewers.user_id == self.user_id) \
653 653 .all()
654 654
655 655 @property
656 656 def firstname(self):
657 657 # alias for future
658 658 return self.name
659 659
660 660 @property
661 661 def emails(self):
662 662 other = UserEmailMap.query()\
663 663 .filter(UserEmailMap.user == self) \
664 664 .order_by(UserEmailMap.email_id.asc()) \
665 665 .all()
666 666 return [self.email] + [x.email for x in other]
667 667
668 668 @property
669 669 def auth_tokens(self):
670 670 auth_tokens = self.get_auth_tokens()
671 671 return [x.api_key for x in auth_tokens]
672 672
673 673 def get_auth_tokens(self):
674 674 return UserApiKeys.query()\
675 675 .filter(UserApiKeys.user == self)\
676 676 .order_by(UserApiKeys.user_api_key_id.asc())\
677 677 .all()
678 678
679 679 @property
680 680 def feed_token(self):
681 681 return self.get_feed_token()
682 682
683 683 def get_feed_token(self):
684 684 feed_tokens = UserApiKeys.query()\
685 685 .filter(UserApiKeys.user == self)\
686 686 .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)\
687 687 .all()
688 688 if feed_tokens:
689 689 return feed_tokens[0].api_key
690 690 return 'NO_FEED_TOKEN_AVAILABLE'
691 691
692 692 @classmethod
693 693 def get(cls, user_id, cache=False):
694 694 if not user_id:
695 695 return
696 696
697 697 user = cls.query()
698 698 if cache:
699 699 user = user.options(
700 700 FromCache("sql_cache_short", "get_users_%s" % user_id))
701 701 return user.get(user_id)
702 702
703 703 @classmethod
704 704 def extra_valid_auth_tokens(cls, user, role=None):
705 705 tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\
706 706 .filter(or_(UserApiKeys.expires == -1,
707 707 UserApiKeys.expires >= time.time()))
708 708 if role:
709 709 tokens = tokens.filter(or_(UserApiKeys.role == role,
710 710 UserApiKeys.role == UserApiKeys.ROLE_ALL))
711 711 return tokens.all()
712 712
713 713 def authenticate_by_token(self, auth_token, roles=None, scope_repo_id=None):
714 714 from rhodecode.lib import auth
715 715
716 716 log.debug('Trying to authenticate user: %s via auth-token, '
717 717 'and roles: %s', self, roles)
718 718
719 719 if not auth_token:
720 720 return False
721 721
722 722 crypto_backend = auth.crypto_backend()
723 723
724 724 roles = (roles or []) + [UserApiKeys.ROLE_ALL]
725 725 tokens_q = UserApiKeys.query()\
726 726 .filter(UserApiKeys.user_id == self.user_id)\
727 727 .filter(or_(UserApiKeys.expires == -1,
728 728 UserApiKeys.expires >= time.time()))
729 729
730 730 tokens_q = tokens_q.filter(UserApiKeys.role.in_(roles))
731 731
732 732 plain_tokens = []
733 733 hash_tokens = []
734 734
735 735 for token in tokens_q.all():
736 736 # verify scope first
737 737 if token.repo_id:
738 738 # token has a scope, we need to verify it
739 739 if scope_repo_id != token.repo_id:
740 740 log.debug(
741 741 'Scope mismatch: token has a set repo scope: %s, '
742 742 'and calling scope is:%s, skipping further checks',
743 743 token.repo, scope_repo_id)
744 744 # token has a scope, and it doesn't match, skip token
745 745 continue
746 746
747 747 if token.api_key.startswith(crypto_backend.ENC_PREF):
748 748 hash_tokens.append(token.api_key)
749 749 else:
750 750 plain_tokens.append(token.api_key)
751 751
752 752 is_plain_match = auth_token in plain_tokens
753 753 if is_plain_match:
754 754 return True
755 755
756 756 for hashed in hash_tokens:
757 757 # TODO(marcink): this is expensive to calculate, but most secure
758 758 match = crypto_backend.hash_check(auth_token, hashed)
759 759 if match:
760 760 return True
761 761
762 762 return False
763 763
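# Illustrative note (editorial sketch, not part of the original module): how a
# caller might verify a token against this user; `token` and `repo` are
# hypothetical. ROLE_ALL is always accepted in addition to any roles passed in.
#
#     ok = user.authenticate_by_token(
#         token, roles=[UserApiKeys.ROLE_VCS], scope_repo_id=repo.repo_id)
#
# Plain-text tokens are matched by simple membership; hashed tokens (those
# starting with the crypto backend's ENC_PREF) are verified with hash_check(),
# which is more expensive to compute but, per the note above, most secure.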
764 764 @property
765 765 def ip_addresses(self):
766 766 ret = UserIpMap.query().filter(UserIpMap.user == self).all()
767 767 return [x.ip_addr for x in ret]
768 768
769 769 @property
770 770 def username_and_name(self):
771 771 return '%s (%s %s)' % (self.username, self.first_name, self.last_name)
772 772
773 773 @property
774 774 def username_or_name_or_email(self):
775 775 full_name = self.full_name if self.full_name != ' ' else None
776 776 return self.username or full_name or self.email
777 777
778 778 @property
779 779 def full_name(self):
780 780 return '%s %s' % (self.first_name, self.last_name)
781 781
782 782 @property
783 783 def full_name_or_username(self):
784 784 return ('%s %s' % (self.first_name, self.last_name)
785 785 if (self.first_name and self.last_name) else self.username)
786 786
787 787 @property
788 788 def full_contact(self):
789 789 return '%s %s <%s>' % (self.first_name, self.last_name, self.email)
790 790
791 791 @property
792 792 def short_contact(self):
793 793 return '%s %s' % (self.first_name, self.last_name)
794 794
795 795 @property
796 796 def is_admin(self):
797 797 return self.admin
798 798
799 799 def AuthUser(self, **kwargs):
800 800 """
801 801 Returns instance of AuthUser for this user
802 802 """
803 803 from rhodecode.lib.auth import AuthUser
804 804 return AuthUser(user_id=self.user_id, username=self.username, **kwargs)
805 805
806 806 @hybrid_property
807 807 def user_data(self):
808 808 if not self._user_data:
809 809 return {}
810 810
811 811 try:
812 812 return json.loads(self._user_data)
813 813 except TypeError:
814 814 return {}
815 815
816 816 @user_data.setter
817 817 def user_data(self, val):
818 818 if not isinstance(val, dict):
819 819 raise Exception('user_data must be dict, got %s' % type(val))
820 820 try:
821 821 self._user_data = json.dumps(val)
822 822 except Exception:
823 823 log.error(traceback.format_exc())
824 824
825 825 @classmethod
826 826 def get_by_username(cls, username, case_insensitive=False,
827 827 cache=False, identity_cache=False):
828 828 session = Session()
829 829
830 830 if case_insensitive:
831 831 q = cls.query().filter(
832 832 func.lower(cls.username) == func.lower(username))
833 833 else:
834 834 q = cls.query().filter(cls.username == username)
835 835
836 836 if cache:
837 837 if identity_cache:
838 838 val = cls.identity_cache(session, 'username', username)
839 839 if val:
840 840 return val
841 841 else:
842 842 cache_key = "get_user_by_name_%s" % _hash_key(username)
843 843 q = q.options(
844 844 FromCache("sql_cache_short", cache_key))
845 845
846 846 return q.scalar()
847 847
848 848 @classmethod
849 849 def get_by_auth_token(cls, auth_token, cache=False):
850 850 q = UserApiKeys.query()\
851 851 .filter(UserApiKeys.api_key == auth_token)\
852 852 .filter(or_(UserApiKeys.expires == -1,
853 853 UserApiKeys.expires >= time.time()))
854 854 if cache:
855 855 q = q.options(
856 856 FromCache("sql_cache_short", "get_auth_token_%s" % auth_token))
857 857
858 858 match = q.first()
859 859 if match:
860 860 return match.user
861 861
862 862 @classmethod
863 863 def get_by_email(cls, email, case_insensitive=False, cache=False):
864 864
865 865 if case_insensitive:
866 866 q = cls.query().filter(func.lower(cls.email) == func.lower(email))
867 867
868 868 else:
869 869 q = cls.query().filter(cls.email == email)
870 870
871 871 email_key = _hash_key(email)
872 872 if cache:
873 873 q = q.options(
874 874 FromCache("sql_cache_short", "get_email_key_%s" % email_key))
875 875
876 876 ret = q.scalar()
877 877 if ret is None:
878 878 q = UserEmailMap.query()
879 879 # try fetching in alternate email map
880 880 if case_insensitive:
881 881 q = q.filter(func.lower(UserEmailMap.email) == func.lower(email))
882 882 else:
883 883 q = q.filter(UserEmailMap.email == email)
884 884 q = q.options(joinedload(UserEmailMap.user))
885 885 if cache:
886 886 q = q.options(
887 887 FromCache("sql_cache_short", "get_email_map_key_%s" % email_key))
888 888 ret = getattr(q.scalar(), 'user', None)
889 889
890 890 return ret
891 891
892 892 @classmethod
893 893 def get_from_cs_author(cls, author):
894 894 """
895 895 Tries to get User objects out of commit author string
896 896
897 897 :param author:
898 898 """
899 899 from rhodecode.lib.helpers import email, author_name
900 900 # Valid email in the attribute passed, see if they're in the system
901 901 _email = email(author)
902 902 if _email:
903 903 user = cls.get_by_email(_email, case_insensitive=True)
904 904 if user:
905 905 return user
906 906 # Maybe we can match by username?
907 907 _author = author_name(author)
908 908 user = cls.get_by_username(_author, case_insensitive=True)
909 909 if user:
910 910 return user
911 911
912 912 def update_userdata(self, **kwargs):
913 913 usr = self
914 914 old = usr.user_data
915 915 old.update(**kwargs)
916 916 usr.user_data = old
917 917 Session().add(usr)
918 918 log.debug('updated userdata with %s', kwargs)
919 919
920 920 def update_lastlogin(self):
921 921 """Update user lastlogin"""
922 922 self.last_login = datetime.datetime.now()
923 923 Session().add(self)
924 924 log.debug('updated user %s lastlogin', self.username)
925 925
926 926 def update_lastactivity(self):
927 927 """Update user lastactivity"""
928 928 self.last_activity = datetime.datetime.now()
929 929 Session().add(self)
930 930 log.debug('updated user `%s` last activity', self.username)
931 931
932 932 def update_password(self, new_password):
933 933 from rhodecode.lib.auth import get_crypt_password
934 934
935 935 self.password = get_crypt_password(new_password)
936 936 Session().add(self)
937 937
938 938 @classmethod
939 939 def get_first_super_admin(cls):
940 940 user = User.query().filter(User.admin == true()).first()
941 941 if user is None:
942 942 raise Exception('FATAL: Missing administrative account!')
943 943 return user
944 944
945 945 @classmethod
946 946 def get_all_super_admins(cls):
947 947 """
948 948 Returns all admin accounts sorted by username
949 949 """
950 950 return User.query().filter(User.admin == true())\
951 951 .order_by(User.username.asc()).all()
952 952
953 953 @classmethod
954 954 def get_default_user(cls, cache=False, refresh=False):
955 955 user = User.get_by_username(User.DEFAULT_USER, cache=cache)
956 956 if user is None:
957 957 raise Exception('FATAL: Missing default account!')
958 958 if refresh:
959 959 # The default user might be based on outdated state which
960 960 # has been loaded from the cache.
961 961 # A call to refresh() ensures that the
962 962 # latest state from the database is used.
963 963 Session().refresh(user)
964 964 return user
965 965
966 966 def _get_default_perms(self, user, suffix=''):
967 967 from rhodecode.model.permission import PermissionModel
968 968 return PermissionModel().get_default_perms(user.user_perms, suffix)
969 969
970 970 def get_default_perms(self, suffix=''):
971 971 return self._get_default_perms(self, suffix)
972 972
973 973 def get_api_data(self, include_secrets=False, details='full'):
974 974 """
975 975 Common function for generating user related data for API
976 976
977 977 :param include_secrets: By default secrets in the API data will be replaced
978 978 by a placeholder value to prevent exposing this data by accident. In case
979 979 this data shall be exposed, set this flag to ``True``.
980 980
981 981 :param details: can be 'basic' or 'full'. 'basic' gives only a subset of
982 982 the available user information: user_id, name and emails.
983 983 """
984 984 user = self
985 985 user_data = self.user_data
986 986 data = {
987 987 'user_id': user.user_id,
988 988 'username': user.username,
989 989 'firstname': user.name,
990 990 'lastname': user.lastname,
991 991 'email': user.email,
992 992 'emails': user.emails,
993 993 }
994 994 if details == 'basic':
995 995 return data
996 996
997 997 auth_token_length = 40
998 998 auth_token_replacement = '*' * auth_token_length
999 999
1000 1000 extras = {
1001 1001 'auth_tokens': [auth_token_replacement],
1002 1002 'active': user.active,
1003 1003 'admin': user.admin,
1004 1004 'extern_type': user.extern_type,
1005 1005 'extern_name': user.extern_name,
1006 1006 'last_login': user.last_login,
1007 1007 'last_activity': user.last_activity,
1008 1008 'ip_addresses': user.ip_addresses,
1009 1009 'language': user_data.get('language')
1010 1010 }
1011 1011 data.update(extras)
1012 1012
1013 1013 if include_secrets:
1014 1014 data['auth_tokens'] = user.auth_tokens
1015 1015 return data
1016 1016
1017 1017 def __json__(self):
1018 1018 data = {
1019 1019 'full_name': self.full_name,
1020 1020 'full_name_or_username': self.full_name_or_username,
1021 1021 'short_contact': self.short_contact,
1022 1022 'full_contact': self.full_contact,
1023 1023 }
1024 1024 data.update(self.get_api_data())
1025 1025 return data
1026 1026
1027 1027
1028 1028 class UserApiKeys(Base, BaseModel):
1029 1029 __tablename__ = 'user_api_keys'
1030 1030 __table_args__ = (
1031 1031 Index('uak_api_key_idx', 'api_key', unique=True),
1032 1032 Index('uak_api_key_expires_idx', 'api_key', 'expires'),
1033 1033 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1034 1034 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
1035 1035 )
1036 1036 __mapper_args__ = {}
1037 1037
1038 1038 # ApiKey role
1039 1039 ROLE_ALL = 'token_role_all'
1040 1040 ROLE_HTTP = 'token_role_http'
1041 1041 ROLE_VCS = 'token_role_vcs'
1042 1042 ROLE_API = 'token_role_api'
1043 1043 ROLE_FEED = 'token_role_feed'
1044 1044 ROLE_PASSWORD_RESET = 'token_password_reset'
1045 1045
1046 1046 ROLES = [ROLE_ALL, ROLE_HTTP, ROLE_VCS, ROLE_API, ROLE_FEED]
1047 1047
1048 1048 user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1049 1049 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1050 1050 api_key = Column("api_key", String(255), nullable=False, unique=True)
1051 1051 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1052 1052 expires = Column('expires', Float(53), nullable=False)
1053 1053 role = Column('role', String(255), nullable=True)
1054 1054 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1055 1055
1056 1056 # scope columns
1057 1057 repo_id = Column(
1058 1058 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
1059 1059 nullable=True, unique=None, default=None)
1060 1060 repo = relationship('Repository', lazy='joined')
1061 1061
1062 1062 repo_group_id = Column(
1063 1063 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
1064 1064 nullable=True, unique=None, default=None)
1065 1065 repo_group = relationship('RepoGroup', lazy='joined')
1066 1066
1067 1067 user = relationship('User', lazy='joined')
1068 1068
1069 1069 def __unicode__(self):
1070 1070 return u"<%s('%s')>" % (self.__class__.__name__, self.role)
1071 1071
1072 1072 def __json__(self):
1073 1073 data = {
1074 1074 'auth_token': self.api_key,
1075 1075 'role': self.role,
1076 1076 'scope': self.scope_humanized,
1077 1077 'expired': self.expired
1078 1078 }
1079 1079 return data
1080 1080
1081 1081 def get_api_data(self, include_secrets=False):
1082 1082 data = self.__json__()
1083 1083 if include_secrets:
1084 1084 return data
1085 1085 else:
1086 1086 data['auth_token'] = self.token_obfuscated
1087 1087 return data
1088 1088
1089 1089 @hybrid_property
1090 1090 def description_safe(self):
1091 1091 from rhodecode.lib import helpers as h
1092 1092 return h.escape(self.description)
1093 1093
1094 1094 @property
1095 1095 def expired(self):
1096 1096 if self.expires == -1:
1097 1097 return False
1098 1098 return time.time() > self.expires
1099 1099
1100 1100 @classmethod
1101 1101 def _get_role_name(cls, role):
1102 1102 return {
1103 1103 cls.ROLE_ALL: _('all'),
1104 1104 cls.ROLE_HTTP: _('http/web interface'),
1105 1105 cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'),
1106 1106 cls.ROLE_API: _('api calls'),
1107 1107 cls.ROLE_FEED: _('feed access'),
1108 1108 }.get(role, role)
1109 1109
1110 1110 @property
1111 1111 def role_humanized(self):
1112 1112 return self._get_role_name(self.role)
1113 1113
1114 1114 def _get_scope(self):
1115 1115 if self.repo:
1116 1116 return repr(self.repo)
1117 1117 if self.repo_group:
1118 1118 return repr(self.repo_group) + ' (recursive)'
1119 1119 return 'global'
1120 1120
1121 1121 @property
1122 1122 def scope_humanized(self):
1123 1123 return self._get_scope()
1124 1124
1125 1125 @property
1126 1126 def token_obfuscated(self):
1127 1127 if self.api_key:
1128 1128 return self.api_key[:4] + "****"
1129 1129
1130 1130
1131 1131 class UserEmailMap(Base, BaseModel):
1132 1132 __tablename__ = 'user_email_map'
1133 1133 __table_args__ = (
1134 1134 Index('uem_email_idx', 'email'),
1135 1135 UniqueConstraint('email'),
1136 1136 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1137 1137 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
1138 1138 )
1139 1139 __mapper_args__ = {}
1140 1140
1141 1141 email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1142 1142 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1143 1143 _email = Column("email", String(255), nullable=True, unique=False, default=None)
1144 1144 user = relationship('User', lazy='joined')
1145 1145
1146 1146 @validates('_email')
1147 1147 def validate_email(self, key, email):
1148 1148 # check if this email is not main one
1149 1149 main_email = Session().query(User).filter(User.email == email).scalar()
1150 1150 if main_email is not None:
1151 1151 raise AttributeError('email %s is already present in the user table' % email)
1152 1152 return email
1153 1153
1154 1154 @hybrid_property
1155 1155 def email(self):
1156 1156 return self._email
1157 1157
1158 1158 @email.setter
1159 1159 def email(self, val):
1160 1160 self._email = val.lower() if val else None
1161 1161
1162 1162
1163 1163 class UserIpMap(Base, BaseModel):
1164 1164 __tablename__ = 'user_ip_map'
1165 1165 __table_args__ = (
1166 1166 UniqueConstraint('user_id', 'ip_addr'),
1167 1167 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1168 1168 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
1169 1169 )
1170 1170 __mapper_args__ = {}
1171 1171
1172 1172 ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1173 1173 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1174 1174 ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
1175 1175 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
1176 1176 description = Column("description", String(10000), nullable=True, unique=None, default=None)
1177 1177 user = relationship('User', lazy='joined')
1178 1178
1179 1179 @hybrid_property
1180 1180 def description_safe(self):
1181 1181 from rhodecode.lib import helpers as h
1182 1182 return h.escape(self.description)
1183 1183
1184 1184 @classmethod
1185 1185 def _get_ip_range(cls, ip_addr):
1186 1186 net = ipaddress.ip_network(safe_unicode(ip_addr), strict=False)
1187 1187 return [str(net.network_address), str(net.broadcast_address)]
1188 1188
1189 1189 def __json__(self):
1190 1190 return {
1191 1191 'ip_addr': self.ip_addr,
1192 1192 'ip_range': self._get_ip_range(self.ip_addr),
1193 1193 }
1194 1194
1195 1195 def __unicode__(self):
1196 1196 return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__,
1197 1197 self.user_id, self.ip_addr)
1198 1198
1199 1199
1200 1200 class UserSshKeys(Base, BaseModel):
1201 1201 __tablename__ = 'user_ssh_keys'
1202 1202 __table_args__ = (
1203 1203 Index('usk_ssh_key_fingerprint_idx', 'ssh_key_fingerprint'),
1204 1204
1205 1205 UniqueConstraint('ssh_key_fingerprint'),
1206 1206
1207 1207 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1208 1208 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
1209 1209 )
1210 1210 __mapper_args__ = {}
1211 1211
1212 1212 ssh_key_id = Column('ssh_key_id', Integer(), nullable=False, unique=True, default=None, primary_key=True)
1213 1213 ssh_key_data = Column('ssh_key_data', String(10240), nullable=False, unique=None, default=None)
1214 1214 ssh_key_fingerprint = Column('ssh_key_fingerprint', String(255), nullable=False, unique=None, default=None)
1215 1215
1216 1216 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1217 1217
1218 1218 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1219 1219 accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True, default=None)
1220 1220 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1221 1221
1222 1222 user = relationship('User', lazy='joined')
1223 1223
1224 1224 def __json__(self):
1225 1225 data = {
1226 1226 'ssh_fingerprint': self.ssh_key_fingerprint,
1227 1227 'description': self.description,
1228 1228 'created_on': self.created_on
1229 1229 }
1230 1230 return data
1231 1231
1232 1232 def get_api_data(self):
1233 1233 data = self.__json__()
1234 1234 return data
1235 1235
1236 1236
1237 1237 class UserLog(Base, BaseModel):
1238 1238 __tablename__ = 'user_logs'
1239 1239 __table_args__ = (
1240 1240 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1241 1241 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1242 1242 )
1243 1243 VERSION_1 = 'v1'
1244 1244 VERSION_2 = 'v2'
1245 1245 VERSIONS = [VERSION_1, VERSION_2]
1246 1246
1247 1247 user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1248 1248 user_id = Column("user_id", Integer(), ForeignKey('users.user_id',ondelete='SET NULL'), nullable=True, unique=None, default=None)
1249 1249 username = Column("username", String(255), nullable=True, unique=None, default=None)
1250 1250 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id', ondelete='SET NULL'), nullable=True, unique=None, default=None)
1251 1251 repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
1252 1252 user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
1253 1253 action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
1254 1254 action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
1255 1255
1256 1256 version = Column("version", String(255), nullable=True, default=VERSION_1)
1257 1257 user_data = Column('user_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1258 1258 action_data = Column('action_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1259 1259
1260 1260 def __unicode__(self):
1261 1261 return u"<%s('id:%s:%s')>" % (
1262 1262 self.__class__.__name__, self.repository_name, self.action)
1263 1263
1264 1264 def __json__(self):
1265 1265 return {
1266 1266 'user_id': self.user_id,
1267 1267 'username': self.username,
1268 1268 'repository_id': self.repository_id,
1269 1269 'repository_name': self.repository_name,
1270 1270 'user_ip': self.user_ip,
1271 1271 'action_date': self.action_date,
1272 1272 'action': self.action,
1273 1273 }
1274 1274
1275 1275 @hybrid_property
1276 1276 def entry_id(self):
1277 1277 return self.user_log_id
1278 1278
1279 1279 @property
1280 1280 def action_as_day(self):
1281 1281 return datetime.date(*self.action_date.timetuple()[:3])
1282 1282
1283 1283 user = relationship('User')
1284 1284 repository = relationship('Repository', cascade='')
1285 1285
1286 1286
1287 1287 class UserGroup(Base, BaseModel):
1288 1288 __tablename__ = 'users_groups'
1289 1289 __table_args__ = (
1290 1290 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1291 1291 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1292 1292 )
1293 1293
1294 1294 users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1295 1295 users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
1296 1296 user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
1297 1297 users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
1298 1298 inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
1299 1299 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
1300 1300 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1301 1301 _group_data = Column("group_data", LargeBinary(), nullable=True) # JSON data
1302 1302
1303 1303 members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
1304 1304 users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
1305 1305 users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1306 1306 users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
1307 1307 user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all')
1308 1308 user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm ', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')
1309 1309
1310 1310 user_group_review_rules = relationship('RepoReviewRuleUserGroup', cascade='all')
1311 1311 user = relationship('User', primaryjoin="User.user_id==UserGroup.user_id")
1312 1312
1313 1313 @classmethod
1314 1314 def _load_group_data(cls, column):
1315 1315 if not column:
1316 1316 return {}
1317 1317
1318 1318 try:
1319 1319 return json.loads(column) or {}
1320 1320 except TypeError:
1321 1321 return {}
1322 1322
1323 1323 @hybrid_property
1324 1324 def description_safe(self):
1325 1325 from rhodecode.lib import helpers as h
1326 1326 return h.escape(self.description)
1327 1327
1328 1328 @hybrid_property
1329 1329 def group_data(self):
1330 1330 return self._load_group_data(self._group_data)
1331 1331
1332 1332 @group_data.expression
1333 1333 def group_data(self, **kwargs):
1334 1334 return self._group_data
1335 1335
1336 1336 @group_data.setter
1337 1337 def group_data(self, val):
1338 1338 try:
1339 1339 self._group_data = json.dumps(val)
1340 1340 except Exception:
1341 1341 log.error(traceback.format_exc())
1342 1342
1343 1343 def __unicode__(self):
1344 1344 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
1345 1345 self.users_group_id,
1346 1346 self.users_group_name)
1347 1347
1348 1348 @classmethod
1349 1349 def get_by_group_name(cls, group_name, cache=False,
1350 1350 case_insensitive=False):
1351 1351 if case_insensitive:
1352 1352 q = cls.query().filter(func.lower(cls.users_group_name) ==
1353 1353 func.lower(group_name))
1354 1354
1355 1355 else:
1356 1356 q = cls.query().filter(cls.users_group_name == group_name)
1357 1357 if cache:
1358 1358 q = q.options(
1359 1359 FromCache("sql_cache_short", "get_group_%s" % _hash_key(group_name)))
1360 1360 return q.scalar()
1361 1361
1362 1362 @classmethod
1363 1363 def get(cls, user_group_id, cache=False):
1364 1364 if not user_group_id:
1365 1365 return
1366 1366
1367 1367 user_group = cls.query()
1368 1368 if cache:
1369 1369 user_group = user_group.options(
1370 1370 FromCache("sql_cache_short", "get_users_group_%s" % user_group_id))
1371 1371 return user_group.get(user_group_id)
1372 1372
1373 1373 def permissions(self, with_admins=True, with_owner=True):
1374 1374 q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
1375 1375 q = q.options(joinedload(UserUserGroupToPerm.user_group),
1376 1376 joinedload(UserUserGroupToPerm.user),
1377 1377 joinedload(UserUserGroupToPerm.permission),)
1378 1378
1379 1379 # get owners, admins and their permissions. We re-write the objects coming
1380 1380 # from sqlalchemy into plain AttributeDict copies, because the sqlalchemy
1381 1381 # session holds a global reference and changing one object would propagate
1382 1382 # to all others. This means that if an admin is also the owner, setting
1383 1383 # admin_row on one record would otherwise change both objects
1384 1384 perm_rows = []
1385 1385 for _usr in q.all():
1386 1386 usr = AttributeDict(_usr.user.get_dict())
1387 1387 usr.permission = _usr.permission.permission_name
1388 1388 perm_rows.append(usr)
1389 1389
1390 1390 # filter the perm rows by 'default' first and then sort them by
1391 1391 # admin, write, read, none permission and then alphabetically within
1392 1392 # each group
1393 1393 perm_rows = sorted(perm_rows, key=display_user_sort)
1394 1394
1395 1395 _admin_perm = 'usergroup.admin'
1396 1396 owner_row = []
1397 1397 if with_owner:
1398 1398 usr = AttributeDict(self.user.get_dict())
1399 1399 usr.owner_row = True
1400 1400 usr.permission = _admin_perm
1401 1401 owner_row.append(usr)
1402 1402
1403 1403 super_admin_rows = []
1404 1404 if with_admins:
1405 1405 for usr in User.get_all_super_admins():
1406 1406 # if this admin is also owner, don't double the record
1407 1407 if usr.user_id == owner_row[0].user_id:
1408 1408 owner_row[0].admin_row = True
1409 1409 else:
1410 1410 usr = AttributeDict(usr.get_dict())
1411 1411 usr.admin_row = True
1412 1412 usr.permission = _admin_perm
1413 1413 super_admin_rows.append(usr)
1414 1414
1415 1415 return super_admin_rows + owner_row + perm_rows
1416 1416
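The detached-copy trick used in `permissions()` (and repeated for `Repository` and `RepoGroup` further down) can be shown in isolation. The `AttributeDict` below is only a stand-in for the helper used by the code above, included so the sketch is self-contained:

.. code-block:: python

    class AttributeDict(dict):
        # minimal stand-in: dict items readable/writable as attributes
        def __getattr__(self, name):
            try:
                return self[name]
            except KeyError:
                raise AttributeError(name)

        def __setattr__(self, name, value):
            self[name] = value

    # pretend both rows describe the same user coming from the ORM session
    shared = {'username': 'admin', 'user_id': 2}
    owner_row = AttributeDict(dict(shared))   # independent copy
    admin_row = AttributeDict(dict(shared))   # independent copy
    admin_row.admin_row = True                # does not leak into owner_row
    assert 'admin_row' not in owner_row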
1417 1417 def permission_user_groups(self):
1418 1418 q = UserGroupUserGroupToPerm.query().filter(UserGroupUserGroupToPerm.target_user_group == self)
1419 1419 q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
1420 1420 joinedload(UserGroupUserGroupToPerm.target_user_group),
1421 1421 joinedload(UserGroupUserGroupToPerm.permission),)
1422 1422
1423 1423 perm_rows = []
1424 1424 for _user_group in q.all():
1425 1425 usr = AttributeDict(_user_group.user_group.get_dict())
1426 1426 usr.permission = _user_group.permission.permission_name
1427 1427 perm_rows.append(usr)
1428 1428
1429 1429 perm_rows = sorted(perm_rows, key=display_user_group_sort)
1430 1430 return perm_rows
1431 1431
1432 1432 def _get_default_perms(self, user_group, suffix=''):
1433 1433 from rhodecode.model.permission import PermissionModel
1434 1434 return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)
1435 1435
1436 1436 def get_default_perms(self, suffix=''):
1437 1437 return self._get_default_perms(self, suffix)
1438 1438
1439 1439 def get_api_data(self, with_group_members=True, include_secrets=False):
1440 1440 """
1441 1441 :param include_secrets: See :meth:`User.get_api_data`, this parameter is
1442 1442 basically forwarded.
1443 1443
1444 1444 """
1445 1445 user_group = self
1446 1446 data = {
1447 1447 'users_group_id': user_group.users_group_id,
1448 1448 'group_name': user_group.users_group_name,
1449 1449 'group_description': user_group.user_group_description,
1450 1450 'active': user_group.users_group_active,
1451 1451 'owner': user_group.user.username,
1452 1452 'owner_email': user_group.user.email,
1453 1453 }
1454 1454
1455 1455 if with_group_members:
1456 1456 users = []
1457 1457 for user in user_group.members:
1458 1458 user = user.user
1459 1459 users.append(user.get_api_data(include_secrets=include_secrets))
1460 1460 data['users'] = users
1461 1461
1462 1462 return data
1463 1463
1464 1464
1465 1465 class UserGroupMember(Base, BaseModel):
1466 1466 __tablename__ = 'users_groups_members'
1467 1467 __table_args__ = (
1468 1468 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1469 1469 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1470 1470 )
1471 1471
1472 1472 users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1473 1473 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
1474 1474 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
1475 1475
1476 1476 user = relationship('User', lazy='joined')
1477 1477 users_group = relationship('UserGroup')
1478 1478
1479 1479 def __init__(self, gr_id='', u_id=''):
1480 1480 self.users_group_id = gr_id
1481 1481 self.user_id = u_id
1482 1482
1483 1483
1484 1484 class RepositoryField(Base, BaseModel):
1485 1485 __tablename__ = 'repositories_fields'
1486 1486 __table_args__ = (
1487 1487 UniqueConstraint('repository_id', 'field_key'), # no-multi field
1488 1488 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1489 1489 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1490 1490 )
1491 1491 PREFIX = 'ex_' # prefix used in form to not conflict with already existing fields
1492 1492
1493 1493 repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1494 1494 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
1495 1495 field_key = Column("field_key", String(250))
1496 1496 field_label = Column("field_label", String(1024), nullable=False)
1497 1497 field_value = Column("field_value", String(10000), nullable=False)
1498 1498 field_desc = Column("field_desc", String(1024), nullable=False)
1499 1499 field_type = Column("field_type", String(255), nullable=False, unique=None)
1500 1500 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1501 1501
1502 1502 repository = relationship('Repository')
1503 1503
1504 1504 @property
1505 1505 def field_key_prefixed(self):
1506 1506 return 'ex_%s' % self.field_key
1507 1507
1508 1508 @classmethod
1509 1509 def un_prefix_key(cls, key):
1510 1510 if key.startswith(cls.PREFIX):
1511 1511 return key[len(cls.PREFIX):]
1512 1512 return key
1513 1513
1514 1514 @classmethod
1515 1515 def get_by_key_name(cls, key, repo):
1516 1516 row = cls.query()\
1517 1517 .filter(cls.repository == repo)\
1518 1518 .filter(cls.field_key == key).scalar()
1519 1519 return row
1520 1520
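The `ex_` prefix handling in `RepositoryField` is symmetric: `field_key_prefixed` adds the prefix for form field names and `un_prefix_key` strips it again. A tiny illustrative round trip, assuming a plain string key:

.. code-block:: python

    PREFIX = 'ex_'

    def prefixed(key):
        return '%s%s' % (PREFIX, key)

    def un_prefix(key):
        # strip the prefix only when it is actually present
        return key[len(PREFIX):] if key.startswith(PREFIX) else key

    assert un_prefix(prefixed('ticket_system')) == 'ticket_system'
    assert un_prefix('ticket_system') == 'ticket_system'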
1521 1521
1522 1522 class Repository(Base, BaseModel):
1523 1523 __tablename__ = 'repositories'
1524 1524 __table_args__ = (
1525 1525 Index('r_repo_name_idx', 'repo_name', mysql_length=255),
1526 1526 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1527 1527 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1528 1528 )
1529 1529 DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
1530 1530 DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
1531 1531
1532 1532 STATE_CREATED = 'repo_state_created'
1533 1533 STATE_PENDING = 'repo_state_pending'
1534 1534 STATE_ERROR = 'repo_state_error'
1535 1535
1536 1536 LOCK_AUTOMATIC = 'lock_auto'
1537 1537 LOCK_API = 'lock_api'
1538 1538 LOCK_WEB = 'lock_web'
1539 1539 LOCK_PULL = 'lock_pull'
1540 1540
1541 1541 NAME_SEP = URL_SEP
1542 1542
1543 1543 repo_id = Column(
1544 1544 "repo_id", Integer(), nullable=False, unique=True, default=None,
1545 1545 primary_key=True)
1546 1546 _repo_name = Column(
1547 1547 "repo_name", Text(), nullable=False, default=None)
1548 1548 _repo_name_hash = Column(
1549 1549 "repo_name_hash", String(255), nullable=False, unique=True)
1550 1550 repo_state = Column("repo_state", String(255), nullable=True)
1551 1551
1552 1552 clone_uri = Column(
1553 1553 "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
1554 1554 default=None)
1555 1555 repo_type = Column(
1556 1556 "repo_type", String(255), nullable=False, unique=False, default=None)
1557 1557 user_id = Column(
1558 1558 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
1559 1559 unique=False, default=None)
1560 1560 private = Column(
1561 1561 "private", Boolean(), nullable=True, unique=None, default=None)
1562 1562 enable_statistics = Column(
1563 1563 "statistics", Boolean(), nullable=True, unique=None, default=True)
1564 1564 enable_downloads = Column(
1565 1565 "downloads", Boolean(), nullable=True, unique=None, default=True)
1566 1566 description = Column(
1567 1567 "description", String(10000), nullable=True, unique=None, default=None)
1568 1568 created_on = Column(
1569 1569 'created_on', DateTime(timezone=False), nullable=True, unique=None,
1570 1570 default=datetime.datetime.now)
1571 1571 updated_on = Column(
1572 1572 'updated_on', DateTime(timezone=False), nullable=True, unique=None,
1573 1573 default=datetime.datetime.now)
1574 1574 _landing_revision = Column(
1575 1575 "landing_revision", String(255), nullable=False, unique=False,
1576 1576 default=None)
1577 1577 enable_locking = Column(
1578 1578 "enable_locking", Boolean(), nullable=False, unique=None,
1579 1579 default=False)
1580 1580 _locked = Column(
1581 1581 "locked", String(255), nullable=True, unique=False, default=None)
1582 1582 _changeset_cache = Column(
1583 1583 "changeset_cache", LargeBinary(), nullable=True) # JSON data
1584 1584
1585 1585 fork_id = Column(
1586 1586 "fork_id", Integer(), ForeignKey('repositories.repo_id'),
1587 1587 nullable=True, unique=False, default=None)
1588 1588 group_id = Column(
1589 1589 "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
1590 1590 unique=False, default=None)
1591 1591
1592 1592 user = relationship('User', lazy='joined')
1593 1593 fork = relationship('Repository', remote_side=repo_id, lazy='joined')
1594 1594 group = relationship('RepoGroup', lazy='joined')
1595 1595 repo_to_perm = relationship(
1596 1596 'UserRepoToPerm', cascade='all',
1597 1597 order_by='UserRepoToPerm.repo_to_perm_id')
1598 1598 users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1599 1599 stats = relationship('Statistics', cascade='all', uselist=False)
1600 1600
1601 1601 followers = relationship(
1602 1602 'UserFollowing',
1603 1603 primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
1604 1604 cascade='all')
1605 1605 extra_fields = relationship(
1606 1606 'RepositoryField', cascade="all, delete, delete-orphan")
1607 1607 logs = relationship('UserLog')
1608 1608 comments = relationship(
1609 1609 'ChangesetComment', cascade="all, delete, delete-orphan")
1610 1610 pull_requests_source = relationship(
1611 1611 'PullRequest',
1612 1612 primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
1613 1613 cascade="all, delete, delete-orphan")
1614 1614 pull_requests_target = relationship(
1615 1615 'PullRequest',
1616 1616 primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
1617 1617 cascade="all, delete, delete-orphan")
1618 1618 ui = relationship('RepoRhodeCodeUi', cascade="all")
1619 1619 settings = relationship('RepoRhodeCodeSetting', cascade="all")
1620 1620 integrations = relationship('Integration',
1621 1621 cascade="all, delete, delete-orphan")
1622 1622
1623 1623 def __unicode__(self):
1624 1624 return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
1625 1625 safe_unicode(self.repo_name))
1626 1626
1627 1627 @hybrid_property
1628 1628 def description_safe(self):
1629 1629 from rhodecode.lib import helpers as h
1630 1630 return h.escape(self.description)
1631 1631
1632 1632 @hybrid_property
1633 1633 def landing_rev(self):
1634 1634 # should always return [rev_type, rev]
1635 1635 if self._landing_revision:
1636 1636 _rev_info = self._landing_revision.split(':')
1637 1637 if len(_rev_info) < 2:
1638 1638 _rev_info.insert(0, 'rev')
1639 1639 return [_rev_info[0], _rev_info[1]]
1640 1640 return [None, None]
1641 1641
1642 1642 @landing_rev.setter
1643 1643 def landing_rev(self, val):
1644 1644 if ':' not in val:
1645 1645 raise ValueError('value must be delimited with `:` and consist '
1646 1646 'of <rev_type>:<rev>, got %s instead' % val)
1647 1647 self._landing_revision = val
1648 1648
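The landing revision is persisted as a single `<rev_type>:<rev>` string; the getter splits it back into a two-element list and tolerates legacy values without a type prefix. A standalone sketch of the same parse/format logic (illustrative only):

.. code-block:: python

    def format_landing_rev(rev_type, rev):
        return '%s:%s' % (rev_type, rev)

    def parse_landing_rev(value):
        # mirrors the getter: a missing type prefix defaults to 'rev'
        if not value:
            return [None, None]
        parts = value.split(':')
        if len(parts) < 2:
            parts.insert(0, 'rev')
        return [parts[0], parts[1]]

    assert parse_landing_rev('branch:default') == ['branch', 'default']
    assert parse_landing_rev('tip') == ['rev', 'tip']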
1649 1649 @hybrid_property
1650 1650 def locked(self):
1651 1651 if self._locked:
1652 1652 user_id, timelocked, reason = self._locked.split(':')
1653 1653 lock_values = int(user_id), timelocked, reason
1654 1654 else:
1655 1655 lock_values = [None, None, None]
1656 1656 return lock_values
1657 1657
1658 1658 @locked.setter
1659 1659 def locked(self, val):
1660 1660 if val and isinstance(val, (list, tuple)):
1661 1661 self._locked = ':'.join(map(str, val))
1662 1662 else:
1663 1663 self._locked = None
1664 1664
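Similarly, the lock state is flattened into a `user_id:lock_time:reason` string by the setter and unpacked by the getter. A minimal round trip, using the `lock_api` reason defined on the class above (sketch only):

.. code-block:: python

    import time

    def pack_lock(user_id, lock_time=None, reason='lock_auto'):
        # mirrors the setter: join the three values with ':'
        return ':'.join(map(str, [user_id, lock_time or time.time(), reason]))

    def unpack_lock(value):
        # mirrors the getter: (int user_id, time string, reason) or all None
        if not value:
            return [None, None, None]
        user_id, timelocked, reason = value.split(':')
        return int(user_id), timelocked, reason

    packed = pack_lock(2, 1546300800, 'lock_api')
    assert unpack_lock(packed) == (2, '1546300800', 'lock_api')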
1665 1665 @hybrid_property
1666 1666 def changeset_cache(self):
1667 1667 from rhodecode.lib.vcs.backends.base import EmptyCommit
1668 1668 dummy = EmptyCommit().__json__()
1669 1669 if not self._changeset_cache:
1670 1670 return dummy
1671 1671 try:
1672 1672 return json.loads(self._changeset_cache)
1673 1673 except TypeError:
1674 1674 return dummy
1675 1675 except Exception:
1676 1676 log.error(traceback.format_exc())
1677 1677 return dummy
1678 1678
1679 1679 @changeset_cache.setter
1680 1680 def changeset_cache(self, val):
1681 1681 try:
1682 1682 self._changeset_cache = json.dumps(val)
1683 1683 except Exception:
1684 1684 log.error(traceback.format_exc())
1685 1685
1686 1686 @hybrid_property
1687 1687 def repo_name(self):
1688 1688 return self._repo_name
1689 1689
1690 1690 @repo_name.setter
1691 1691 def repo_name(self, value):
1692 1692 self._repo_name = value
1693 1693 self._repo_name_hash = hashlib.sha1(safe_str(value)).hexdigest()
1694 1694
1695 1695 @classmethod
1696 1696 def normalize_repo_name(cls, repo_name):
1697 1697 """
1698 1698 Normalizes an OS-specific repo_name to the format stored internally in the
1699 1699 database, using URL_SEP
1700 1700
1701 1701 :param cls:
1702 1702 :param repo_name:
1703 1703 """
1704 1704 return cls.NAME_SEP.join(repo_name.split(os.sep))
1705 1705
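Because repository names are stored with URL_SEP (the forward slash) regardless of the operating system, the normalisation boils down to a split on `os.sep` and a re-join. Illustrative only; on POSIX both forms are already identical:

.. code-block:: python

    import os

    NAME_SEP = '/'  # assumption: URL_SEP is the forward slash

    def normalize_repo_name(repo_name):
        return NAME_SEP.join(repo_name.split(os.sep))

    # on Windows (os.sep == '\\') this turns r'group\repo' into 'group/repo';
    # on POSIX the name comes back unchanged
    print(normalize_repo_name(os.sep.join(['group', 'repo'])))  # -> group/repo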
1706 1706 @classmethod
1707 1707 def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
1708 1708 session = Session()
1709 1709 q = session.query(cls).filter(cls.repo_name == repo_name)
1710 1710
1711 1711 if cache:
1712 1712 if identity_cache:
1713 1713 val = cls.identity_cache(session, 'repo_name', repo_name)
1714 1714 if val:
1715 1715 return val
1716 1716 else:
1717 1717 cache_key = "get_repo_by_name_%s" % _hash_key(repo_name)
1718 1718 q = q.options(
1719 1719 FromCache("sql_cache_short", cache_key))
1720 1720
1721 1721 return q.scalar()
1722 1722
1723 1723 @classmethod
1724 1724 def get_by_full_path(cls, repo_full_path):
1725 1725 repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
1726 1726 repo_name = cls.normalize_repo_name(repo_name)
1727 1727 return cls.get_by_repo_name(repo_name.strip(URL_SEP))
1728 1728
1729 1729 @classmethod
1730 1730 def get_repo_forks(cls, repo_id):
1731 1731 return cls.query().filter(Repository.fork_id == repo_id)
1732 1732
1733 1733 @classmethod
1734 1734 def base_path(cls):
1735 1735 """
1736 1736 Returns the base path where all repos are stored
1737 1737
1738 1738 :param cls:
1739 1739 """
1740 1740 q = Session().query(RhodeCodeUi)\
1741 1741 .filter(RhodeCodeUi.ui_key == cls.NAME_SEP)
1742 1742 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1743 1743 return q.one().ui_value
1744 1744
1745 1745 @classmethod
1746 1746 def is_valid(cls, repo_name):
1747 1747 """
1748 1748 returns True if given repo name is a valid filesystem repository
1749 1749
1750 1750 :param cls:
1751 1751 :param repo_name:
1752 1752 """
1753 1753 from rhodecode.lib.utils import is_valid_repo
1754 1754
1755 1755 return is_valid_repo(repo_name, cls.base_path())
1756 1756
1757 1757 @classmethod
1758 1758 def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
1759 1759 case_insensitive=True):
1760 1760 q = Repository.query()
1761 1761
1762 1762 if not isinstance(user_id, Optional):
1763 1763 q = q.filter(Repository.user_id == user_id)
1764 1764
1765 1765 if not isinstance(group_id, Optional):
1766 1766 q = q.filter(Repository.group_id == group_id)
1767 1767
1768 1768 if case_insensitive:
1769 1769 q = q.order_by(func.lower(Repository.repo_name))
1770 1770 else:
1771 1771 q = q.order_by(Repository.repo_name)
1772 1772 return q.all()
1773 1773
1774 1774 @property
1775 1775 def forks(self):
1776 1776 """
1777 1777 Return forks of this repo
1778 1778 """
1779 1779 return Repository.get_repo_forks(self.repo_id)
1780 1780
1781 1781 @property
1782 1782 def parent(self):
1783 1783 """
1784 1784 Returns fork parent
1785 1785 """
1786 1786 return self.fork
1787 1787
1788 1788 @property
1789 1789 def just_name(self):
1790 1790 return self.repo_name.split(self.NAME_SEP)[-1]
1791 1791
1792 1792 @property
1793 1793 def groups_with_parents(self):
1794 1794 groups = []
1795 1795 if self.group is None:
1796 1796 return groups
1797 1797
1798 1798 cur_gr = self.group
1799 1799 groups.insert(0, cur_gr)
1800 1800 while 1:
1801 1801 gr = getattr(cur_gr, 'parent_group', None)
1802 1802 cur_gr = cur_gr.parent_group
1803 1803 if gr is None:
1804 1804 break
1805 1805 groups.insert(0, gr)
1806 1806
1807 1807 return groups
1808 1808
1809 1809 @property
1810 1810 def groups_and_repo(self):
1811 1811 return self.groups_with_parents, self
1812 1812
1813 1813 @LazyProperty
1814 1814 def repo_path(self):
1815 1815 """
1816 1816 Returns the base full path for this repository, i.e. where it actually
1817 1817 exists on the filesystem
1818 1818 """
1819 1819 q = Session().query(RhodeCodeUi).filter(
1820 1820 RhodeCodeUi.ui_key == self.NAME_SEP)
1821 1821 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1822 1822 return q.one().ui_value
1823 1823
1824 1824 @property
1825 1825 def repo_full_path(self):
1826 1826 p = [self.repo_path]
1827 1827 # we need to split the name by / since this is how we store the
1828 1828 # names in the database, but that eventually needs to be converted
1829 1829 # into a valid system path
1830 1830 p += self.repo_name.split(self.NAME_SEP)
1831 1831 return os.path.join(*map(safe_unicode, p))
1832 1832
1833 1833 @property
1834 1834 def cache_keys(self):
1835 1835 """
1836 1836 Returns associated cache keys for that repo
1837 1837 """
1838 1838 return CacheKey.query()\
1839 1839 .filter(CacheKey.cache_args == self.repo_name)\
1840 1840 .order_by(CacheKey.cache_key)\
1841 1841 .all()
1842 1842
1843 1843 def get_new_name(self, repo_name):
1844 1844 """
1845 1845 returns the new full repository name based on the assigned group and the new name
1846 1846 
1847 1847 :param repo_name:
1848 1848 """
1849 1849 path_prefix = self.group.full_path_splitted if self.group else []
1850 1850 return self.NAME_SEP.join(path_prefix + [repo_name])
1851 1851
1852 1852 @property
1853 1853 def _config(self):
1854 1854 """
1855 1855 Returns db based config object.
1856 1856 """
1857 1857 from rhodecode.lib.utils import make_db_config
1858 1858 return make_db_config(clear_session=False, repo=self)
1859 1859
1860 1860 def permissions(self, with_admins=True, with_owner=True):
1861 1861 q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
1862 1862 q = q.options(joinedload(UserRepoToPerm.repository),
1863 1863 joinedload(UserRepoToPerm.user),
1864 1864 joinedload(UserRepoToPerm.permission),)
1865 1865
1866 1866 # get owners, admins and their permissions. We re-write the objects coming
1867 1867 # from sqlalchemy into plain AttributeDict copies, because the sqlalchemy
1868 1868 # session holds a global reference and changing one object would propagate
1869 1869 # to all others. This means that if an admin is also the owner, setting
1870 1870 # admin_row on one record would otherwise change both objects
1871 1871 perm_rows = []
1872 1872 for _usr in q.all():
1873 1873 usr = AttributeDict(_usr.user.get_dict())
1874 1874 usr.permission = _usr.permission.permission_name
1875 1875 perm_rows.append(usr)
1876 1876
1877 1877 # filter the perm rows by 'default' first and then sort them by
1878 1878 # admin, write, read, none permission and then alphabetically within
1879 1879 # each group
1880 1880 perm_rows = sorted(perm_rows, key=display_user_sort)
1881 1881
1882 1882 _admin_perm = 'repository.admin'
1883 1883 owner_row = []
1884 1884 if with_owner:
1885 1885 usr = AttributeDict(self.user.get_dict())
1886 1886 usr.owner_row = True
1887 1887 usr.permission = _admin_perm
1888 1888 owner_row.append(usr)
1889 1889
1890 1890 super_admin_rows = []
1891 1891 if with_admins:
1892 1892 for usr in User.get_all_super_admins():
1893 1893 # if this admin is also owner, don't double the record
1894 1894 if usr.user_id == owner_row[0].user_id:
1895 1895 owner_row[0].admin_row = True
1896 1896 else:
1897 1897 usr = AttributeDict(usr.get_dict())
1898 1898 usr.admin_row = True
1899 1899 usr.permission = _admin_perm
1900 1900 super_admin_rows.append(usr)
1901 1901
1902 1902 return super_admin_rows + owner_row + perm_rows
1903 1903
1904 1904 def permission_user_groups(self):
1905 1905 q = UserGroupRepoToPerm.query().filter(
1906 1906 UserGroupRepoToPerm.repository == self)
1907 1907 q = q.options(joinedload(UserGroupRepoToPerm.repository),
1908 1908 joinedload(UserGroupRepoToPerm.users_group),
1909 1909 joinedload(UserGroupRepoToPerm.permission),)
1910 1910
1911 1911 perm_rows = []
1912 1912 for _user_group in q.all():
1913 1913 usr = AttributeDict(_user_group.users_group.get_dict())
1914 1914 usr.permission = _user_group.permission.permission_name
1915 1915 perm_rows.append(usr)
1916 1916
1917 1917 perm_rows = sorted(perm_rows, key=display_user_group_sort)
1918 1918 return perm_rows
1919 1919
1920 1920 def get_api_data(self, include_secrets=False):
1921 1921 """
1922 1922 Common function for generating repo api data
1923 1923
1924 1924 :param include_secrets: See :meth:`User.get_api_data`.
1925 1925
1926 1926 """
1927 1927 # TODO: mikhail: Here there is an anti-pattern, we probably need to
1928 1928 # move this methods on models level.
1929 1929 from rhodecode.model.settings import SettingsModel
1930 1930 from rhodecode.model.repo import RepoModel
1931 1931
1932 1932 repo = self
1933 1933 _user_id, _time, _reason = self.locked
1934 1934
1935 1935 data = {
1936 1936 'repo_id': repo.repo_id,
1937 1937 'repo_name': repo.repo_name,
1938 1938 'repo_type': repo.repo_type,
1939 1939 'clone_uri': repo.clone_uri or '',
1940 1940 'url': RepoModel().get_url(self),
1941 1941 'private': repo.private,
1942 1942 'created_on': repo.created_on,
1943 1943 'description': repo.description_safe,
1944 1944 'landing_rev': repo.landing_rev,
1945 1945 'owner': repo.user.username,
1946 1946 'fork_of': repo.fork.repo_name if repo.fork else None,
1947 1947 'fork_of_id': repo.fork.repo_id if repo.fork else None,
1948 1948 'enable_statistics': repo.enable_statistics,
1949 1949 'enable_locking': repo.enable_locking,
1950 1950 'enable_downloads': repo.enable_downloads,
1951 1951 'last_changeset': repo.changeset_cache,
1952 1952 'locked_by': User.get(_user_id).get_api_data(
1953 1953 include_secrets=include_secrets) if _user_id else None,
1954 1954 'locked_date': time_to_datetime(_time) if _time else None,
1955 1955 'lock_reason': _reason if _reason else None,
1956 1956 }
1957 1957
1958 1958 # TODO: mikhail: should be per-repo settings here
1959 1959 rc_config = SettingsModel().get_all_settings()
1960 1960 repository_fields = str2bool(
1961 1961 rc_config.get('rhodecode_repository_fields'))
1962 1962 if repository_fields:
1963 1963 for f in self.extra_fields:
1964 1964 data[f.field_key_prefixed] = f.field_value
1965 1965
1966 1966 return data
1967 1967
1968 1968 @classmethod
1969 1969 def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
1970 1970 if not lock_time:
1971 1971 lock_time = time.time()
1972 1972 if not lock_reason:
1973 1973 lock_reason = cls.LOCK_AUTOMATIC
1974 1974 repo.locked = [user_id, lock_time, lock_reason]
1975 1975 Session().add(repo)
1976 1976 Session().commit()
1977 1977
1978 1978 @classmethod
1979 1979 def unlock(cls, repo):
1980 1980 repo.locked = None
1981 1981 Session().add(repo)
1982 1982 Session().commit()
1983 1983
1984 1984 @classmethod
1985 1985 def getlock(cls, repo):
1986 1986 return repo.locked
1987 1987
1988 1988 def is_user_lock(self, user_id):
1989 1989 if self.locked[0]:
1990 1990 lock_user_id = safe_int(self.locked[0])
1991 1991 user_id = safe_int(user_id)
1992 1992 # both are ints, and they are equal
1993 1993 return all([lock_user_id, user_id]) and lock_user_id == user_id
1994 1994
1995 1995 return False
1996 1996
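The lock-ownership check reduces to "both ids convert to non-empty integers and are equal". A standalone version of that comparison, with a local `safe_int` mirroring the helper used above (sketch, not a drop-in replacement):

.. code-block:: python

    def safe_int(val, default=None):
        # best-effort int conversion, as used across the codebase
        try:
            return int(val)
        except (TypeError, ValueError):
            return default

    def is_same_lock_user(lock_user_id, user_id):
        lock_user_id = safe_int(lock_user_id)
        user_id = safe_int(user_id)
        return all([lock_user_id, user_id]) and lock_user_id == user_id

    assert is_same_lock_user('2', 2) is True
    assert is_same_lock_user(None, 2) is False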
1997 1997 def get_locking_state(self, action, user_id, only_when_enabled=True):
1998 1998 """
1999 1999 Checks locking on this repository. If locking is enabled and a lock is
2000 2000 present, returns a tuple of (make_lock, locked, locked_by).
2001 2001 make_lock can have 3 states: None (do nothing), True (make a lock) and
2002 2002 False (release the lock). This value is later propagated to hooks, which
2003 2003 do the locking. Think of it as a signal telling the hooks what to do.
2004 2004
2005 2005 """
2006 2006 # TODO: johbo: This is part of the business logic and should be moved
2007 2007 # into the RepositoryModel.
2008 2008
2009 2009 if action not in ('push', 'pull'):
2010 2010 raise ValueError("Invalid action value: %s" % repr(action))
2011 2011
2012 2012 # defines if locked error should be thrown to user
2013 2013 currently_locked = False
2014 2014 # defines if new lock should be made, tri-state
2015 2015 make_lock = None
2016 2016 repo = self
2017 2017 user = User.get(user_id)
2018 2018
2019 2019 lock_info = repo.locked
2020 2020
2021 2021 if repo and (repo.enable_locking or not only_when_enabled):
2022 2022 if action == 'push':
2023 2023 # check if it's already locked! if it is, compare users
2024 2024 locked_by_user_id = lock_info[0]
2025 2025 if user.user_id == locked_by_user_id:
2026 2026 log.debug(
2027 2027 'Got `push` action from user %s, now unlocking', user)
2028 2028 # unlock if we have push from user who locked
2029 2029 make_lock = False
2030 2030 else:
2031 2031 # we're not the same user who locked, ban with
2032 2032 # code defined in settings (default is 423 HTTP Locked) !
2033 2033 log.debug('Repo %s is currently locked by %s', repo, user)
2034 2034 currently_locked = True
2035 2035 elif action == 'pull':
2036 2036 # [0] user [1] date
2037 2037 if lock_info[0] and lock_info[1]:
2038 2038 log.debug('Repo %s is currently locked by %s', repo, user)
2039 2039 currently_locked = True
2040 2040 else:
2041 2041 log.debug('Setting lock on repo %s by %s', repo, user)
2042 2042 make_lock = True
2043 2043
2044 2044 else:
2045 2045 log.debug('Repository %s does not have locking enabled', repo)
2046 2046
2047 2047 log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
2048 2048 make_lock, currently_locked, lock_info)
2049 2049
2050 2050 from rhodecode.lib.auth import HasRepoPermissionAny
2051 2051 perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
2052 2052 if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
2053 2053 # if we don't have at least write permission we cannot make a lock
2054 2054 log.debug('lock state reset back to FALSE due to lack '
2055 2055 'of at least write permission')
2056 2056 make_lock = False
2057 2057
2058 2058 return make_lock, currently_locked, lock_info
2059 2059
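Condensed to its decision table, the logic above behaves roughly as follows for a repository that has locking enabled (an illustrative sketch of the branches, not a drop-in replacement):

.. code-block:: python

    def locking_decision(action, lock_user_id, lock_time, current_user_id):
        """Return (make_lock, currently_locked) for an enabled-locking repo."""
        make_lock, currently_locked = None, False
        if action == 'push':
            if current_user_id == lock_user_id:
                make_lock = False          # pushing user owns the lock -> release
            else:
                currently_locked = True    # someone else holds the lock -> reject
        elif action == 'pull':
            if lock_user_id and lock_time:
                currently_locked = True    # already locked
            else:
                make_lock = True           # first pull takes the lock
        return make_lock, currently_locked

    assert locking_decision('pull', None, None, 2) == (True, False)
    assert locking_decision('push', 2, 1546300800, 2) == (False, False)
    assert locking_decision('push', 3, 1546300800, 2) == (None, True)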
2060 2060 @property
2061 2061 def last_db_change(self):
2062 2062 return self.updated_on
2063 2063
2064 2064 @property
2065 2065 def clone_uri_hidden(self):
2066 2066 clone_uri = self.clone_uri
2067 2067 if clone_uri:
2068 2068 import urlobject
2069 2069 url_obj = urlobject.URLObject(cleaned_uri(clone_uri))
2070 2070 if url_obj.password:
2071 2071 clone_uri = url_obj.with_password('*****')
2072 2072 return clone_uri
2073 2073
2074 2074 def clone_url(self, **override):
2075 2075 from rhodecode.model.settings import SettingsModel
2076 2076
2077 2077 uri_tmpl = None
2078 2078 if 'with_id' in override:
2079 2079 uri_tmpl = self.DEFAULT_CLONE_URI_ID
2080 2080 del override['with_id']
2081 2081
2082 2082 if 'uri_tmpl' in override:
2083 2083 uri_tmpl = override['uri_tmpl']
2084 2084 del override['uri_tmpl']
2085 2085
2086 2086 # the template was not overridden via **override
2087 2087 if not uri_tmpl:
2088 2088 rc_config = SettingsModel().get_all_settings(cache=True)
2089 2089 uri_tmpl = rc_config.get(
2090 2090 'rhodecode_clone_uri_tmpl') or self.DEFAULT_CLONE_URI
2091 2091
2092 2092 request = get_current_request()
2093 2093 return get_clone_url(request=request,
2094 2094 uri_tmpl=uri_tmpl,
2095 2095 repo_name=self.repo_name,
2096 2096 repo_id=self.repo_id, **override)
2097 2097
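The clone URL is produced from a small template with `{scheme}`, `{user}`, `{netloc}` and `{repo}` / `{repoid}` placeholders (see DEFAULT_CLONE_URI above); `get_clone_url` fills them in from the current request and settings. A heavily reduced, hypothetical rendering with plain string formatting:

.. code-block:: python

    DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'

    def render_clone_url(uri_tmpl, **fields):
        # naive stand-in for get_clone_url(); the real helper also handles
        # optional placeholders and request/system defaults
        return uri_tmpl.format(**fields)

    print(render_clone_url(
        DEFAULT_CLONE_URI,
        scheme='https', user='jane', netloc='code.example.com',
        repo='group/my-repo'))
    # -> https://jane@code.example.com/group/my-repo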
2098 2098 def set_state(self, state):
2099 2099 self.repo_state = state
2100 2100 Session().add(self)
2101 2101 #==========================================================================
2102 2102 # SCM PROPERTIES
2103 2103 #==========================================================================
2104 2104
2105 2105 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
2106 2106 return get_commit_safe(
2107 2107 self.scm_instance(), commit_id, commit_idx, pre_load=pre_load)
2108 2108
2109 2109 def get_changeset(self, rev=None, pre_load=None):
2110 2110 warnings.warn("Use get_commit", DeprecationWarning)
2111 2111 commit_id = None
2112 2112 commit_idx = None
2113 if isinstance(rev, basestring):
2113 if isinstance(rev, compat.string_types):
2114 2114 commit_id = rev
2115 2115 else:
2116 2116 commit_idx = rev
2117 2117 return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
2118 2118 pre_load=pre_load)
2119 2119
2120 2120 def get_landing_commit(self):
2121 2121 """
2122 2122 Returns the landing commit or, if that doesn't exist, the tip
2123 2123 """
2124 2124 _rev_type, _rev = self.landing_rev
2125 2125 commit = self.get_commit(_rev)
2126 2126 if isinstance(commit, EmptyCommit):
2127 2127 return self.get_commit()
2128 2128 return commit
2129 2129
2130 2130 def update_commit_cache(self, cs_cache=None, config=None):
2131 2131 """
2132 2132 Update cache of last changeset for repository, keys should be::
2133 2133
2134 2134 short_id
2135 2135 raw_id
2136 2136 revision
2137 2137 parents
2138 2138 message
2139 2139 date
2140 2140 author
2141 2141
2142 2142 :param cs_cache:
2143 2143 """
2144 2144 from rhodecode.lib.vcs.backends.base import BaseChangeset
2145 2145 if cs_cache is None:
2146 2146 # use no-cache version here
2147 2147 scm_repo = self.scm_instance(cache=False, config=config)
2148 2148 if scm_repo:
2149 2149 cs_cache = scm_repo.get_commit(
2150 2150 pre_load=["author", "date", "message", "parents"])
2151 2151 else:
2152 2152 cs_cache = EmptyCommit()
2153 2153
2154 2154 if isinstance(cs_cache, BaseChangeset):
2155 2155 cs_cache = cs_cache.__json__()
2156 2156
2157 2157 def is_outdated(new_cs_cache):
2158 2158 if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
2159 2159 new_cs_cache['revision'] != self.changeset_cache['revision']):
2160 2160 return True
2161 2161 return False
2162 2162
2163 2163 # check if we maybe already have the latest cached revision
2164 2164 if is_outdated(cs_cache) or not self.changeset_cache:
2165 2165 _default = datetime.datetime.fromtimestamp(0)
2166 2166 last_change = cs_cache.get('date') or _default
2167 2167 log.debug('updated repo %s with new cs cache %s',
2168 2168 self.repo_name, cs_cache)
2169 2169 self.updated_on = last_change
2170 2170 self.changeset_cache = cs_cache
2171 2171 Session().add(self)
2172 2172 Session().commit()
2173 2173 else:
2174 2174 log.debug('Skipping update_commit_cache for repo:`%s` '
2175 2175 'commit already with latest changes', self.repo_name)
2176 2176
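The cache is refreshed only when the stored raw_id/revision pair differs from the newly computed one. The comparison itself is simple enough to show standalone (sketch, with plain dicts standing in for the commit cache):

.. code-block:: python

    def is_outdated(cached, new):
        # mirrors the inner is_outdated(): different raw_id or revision
        return (new['raw_id'] != cached['raw_id'] or
                new['revision'] != cached['revision'])

    cached = {'raw_id': 'abc123', 'revision': 41}
    assert is_outdated(cached, {'raw_id': 'def456', 'revision': 42}) is True
    assert is_outdated(cached, {'raw_id': 'abc123', 'revision': 41}) is False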
2177 2177 @property
2178 2178 def tip(self):
2179 2179 return self.get_commit('tip')
2180 2180
2181 2181 @property
2182 2182 def author(self):
2183 2183 return self.tip.author
2184 2184
2185 2185 @property
2186 2186 def last_change(self):
2187 2187 return self.scm_instance().last_change
2188 2188
2189 2189 def get_comments(self, revisions=None):
2190 2190 """
2191 2191 Returns comments for this repository grouped by revisions
2192 2192
2193 2193 :param revisions: filter query by revisions only
2194 2194 """
2195 2195 cmts = ChangesetComment.query()\
2196 2196 .filter(ChangesetComment.repo == self)
2197 2197 if revisions:
2198 2198 cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
2199 2199 grouped = collections.defaultdict(list)
2200 2200 for cmt in cmts.all():
2201 2201 grouped[cmt.revision].append(cmt)
2202 2202 return grouped
2203 2203
2204 2204 def statuses(self, revisions=None):
2205 2205 """
2206 2206 Returns statuses for this repository
2207 2207
2208 2208 :param revisions: list of revisions to get statuses for
2209 2209 """
2210 2210 statuses = ChangesetStatus.query()\
2211 2211 .filter(ChangesetStatus.repo == self)\
2212 2212 .filter(ChangesetStatus.version == 0)
2213 2213
2214 2214 if revisions:
2215 2215 # Try doing the filtering in chunks to avoid hitting limits
2216 2216 size = 500
2217 2217 status_results = []
2218 2218 for chunk in xrange(0, len(revisions), size):
2219 2219 status_results += statuses.filter(
2220 2220 ChangesetStatus.revision.in_(
2221 2221 revisions[chunk: chunk+size])
2222 2222 ).all()
2223 2223 else:
2224 2224 status_results = statuses.all()
2225 2225
2226 2226 grouped = {}
2227 2227
2228 2228 # maybe we have an open new pull request without a status?
2229 2229 stat = ChangesetStatus.STATUS_UNDER_REVIEW
2230 2230 status_lbl = ChangesetStatus.get_status_lbl(stat)
2231 2231 for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
2232 2232 for rev in pr.revisions:
2233 2233 pr_id = pr.pull_request_id
2234 2234 pr_repo = pr.target_repo.repo_name
2235 2235 grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
2236 2236
2237 2237 for stat in status_results:
2238 2238 pr_id = pr_repo = None
2239 2239 if stat.pull_request:
2240 2240 pr_id = stat.pull_request.pull_request_id
2241 2241 pr_repo = stat.pull_request.target_repo.repo_name
2242 2242 grouped[stat.revision] = [str(stat.status), stat.status_lbl,
2243 2243 pr_id, pr_repo]
2244 2244 return grouped
2245 2245
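Filtering by revisions is done in chunks of 500 to stay under database limits on the size of an IN clause. The chunking pattern on its own (illustrative, with a plain list instead of a query):

.. code-block:: python

    def chunked(items, size=500):
        # yield consecutive slices of at most `size` elements
        for start in range(0, len(items), size):
            yield items[start:start + size]

    revisions = ['rev%d' % i for i in range(1200)]
    assert [len(c) for c in chunked(revisions)] == [500, 500, 200]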
2246 2246 # ==========================================================================
2247 2247 # SCM CACHE INSTANCE
2248 2248 # ==========================================================================
2249 2249
2250 2250 def scm_instance(self, **kwargs):
2251 2251 import rhodecode
2252 2252
2253 2253 # Passing a config will not hit the cache; currently this is only used
2254 2254 # for repo2dbmapper
2255 2255 config = kwargs.pop('config', None)
2256 2256 cache = kwargs.pop('cache', None)
2257 2257 full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache'))
2258 2258 # if cache is NOT defined use the global default, else we have full
2259 2259 # control over the cache behaviour
2260 2260 if cache is None and full_cache and not config:
2261 2261 return self._get_instance_cached()
2262 2262 return self._get_instance(cache=bool(cache), config=config)
2263 2263
2264 2264 def _get_instance_cached(self):
2265 2265 return self._get_instance()
2266 2266
2267 2267 def _get_instance(self, cache=True, config=None):
2268 2268 config = config or self._config
2269 2269 custom_wire = {
2270 2270 'cache': cache # controls the vcs.remote cache
2271 2271 }
2272 2272 repo = get_vcs_instance(
2273 2273 repo_path=safe_str(self.repo_full_path),
2274 2274 config=config,
2275 2275 with_wire=custom_wire,
2276 2276 create=False,
2277 2277 _vcs_alias=self.repo_type)
2278 2278
2279 2279 return repo
2280 2280
2281 2281 def __json__(self):
2282 2282 return {'landing_rev': self.landing_rev}
2283 2283
2284 2284 def get_dict(self):
2285 2285
2286 2286 # Since we transformed `repo_name` to a hybrid property, we need to
2287 2287 # keep compatibility with the code which uses `repo_name` field.
2288 2288
2289 2289 result = super(Repository, self).get_dict()
2290 2290 result['repo_name'] = result.pop('_repo_name', None)
2291 2291 return result
2292 2292
2293 2293
2294 2294 class RepoGroup(Base, BaseModel):
2295 2295 __tablename__ = 'groups'
2296 2296 __table_args__ = (
2297 2297 UniqueConstraint('group_name', 'group_parent_id'),
2298 2298 CheckConstraint('group_id != group_parent_id'),
2299 2299 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2300 2300 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
2301 2301 )
2302 2302 __mapper_args__ = {'order_by': 'group_name'}
2303 2303
2304 2304 CHOICES_SEPARATOR = '/' # used to generate select2 choices for nested groups
2305 2305
2306 2306 group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2307 2307 group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
2308 2308 group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
2309 2309 group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
2310 2310 enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
2311 2311 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
2312 2312 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2313 2313 updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
2314 2314 personal = Column('personal', Boolean(), nullable=True, unique=None, default=None)
2315 2315
2316 2316 repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
2317 2317 users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
2318 2318 parent_group = relationship('RepoGroup', remote_side=group_id)
2319 2319 user = relationship('User')
2320 2320 integrations = relationship('Integration',
2321 2321 cascade="all, delete, delete-orphan")
2322 2322
2323 2323 def __init__(self, group_name='', parent_group=None):
2324 2324 self.group_name = group_name
2325 2325 self.parent_group = parent_group
2326 2326
2327 2327 def __unicode__(self):
2328 2328 return u"<%s('id:%s:%s')>" % (
2329 2329 self.__class__.__name__, self.group_id, self.group_name)
2330 2330
2331 2331 @hybrid_property
2332 2332 def description_safe(self):
2333 2333 from rhodecode.lib import helpers as h
2334 2334 return h.escape(self.group_description)
2335 2335
2336 2336 @classmethod
2337 2337 def _generate_choice(cls, repo_group):
2338 2338 from webhelpers.html import literal as _literal
2339 2339 _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k))
2340 2340 return repo_group.group_id, _name(repo_group.full_path_splitted)
2341 2341
2342 2342 @classmethod
2343 2343 def groups_choices(cls, groups=None, show_empty_group=True):
2344 2344 if not groups:
2345 2345 groups = cls.query().all()
2346 2346
2347 2347 repo_groups = []
2348 2348 if show_empty_group:
2349 2349 repo_groups = [(-1, u'-- %s --' % _('No parent'))]
2350 2350
2351 2351 repo_groups.extend([cls._generate_choice(x) for x in groups])
2352 2352
2353 2353 repo_groups = sorted(
2354 2354 repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0])
2355 2355 return repo_groups
2356 2356
2357 2357 @classmethod
2358 2358 def url_sep(cls):
2359 2359 return URL_SEP
2360 2360
2361 2361 @classmethod
2362 2362 def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
2363 2363 if case_insensitive:
2364 2364 gr = cls.query().filter(func.lower(cls.group_name)
2365 2365 == func.lower(group_name))
2366 2366 else:
2367 2367 gr = cls.query().filter(cls.group_name == group_name)
2368 2368 if cache:
2369 2369 name_key = _hash_key(group_name)
2370 2370 gr = gr.options(
2371 2371 FromCache("sql_cache_short", "get_group_%s" % name_key))
2372 2372 return gr.scalar()
2373 2373
2374 2374 @classmethod
2375 2375 def get_user_personal_repo_group(cls, user_id):
2376 2376 user = User.get(user_id)
2377 2377 if user.username == User.DEFAULT_USER:
2378 2378 return None
2379 2379
2380 2380 return cls.query()\
2381 2381 .filter(cls.personal == true()) \
2382 2382 .filter(cls.user == user).scalar()
2383 2383
2384 2384 @classmethod
2385 2385 def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None),
2386 2386 case_insensitive=True):
2387 2387 q = RepoGroup.query()
2388 2388
2389 2389 if not isinstance(user_id, Optional):
2390 2390 q = q.filter(RepoGroup.user_id == user_id)
2391 2391
2392 2392 if not isinstance(group_id, Optional):
2393 2393 q = q.filter(RepoGroup.group_parent_id == group_id)
2394 2394
2395 2395 if case_insensitive:
2396 2396 q = q.order_by(func.lower(RepoGroup.group_name))
2397 2397 else:
2398 2398 q = q.order_by(RepoGroup.group_name)
2399 2399 return q.all()
2400 2400
2401 2401 @property
2402 2402 def parents(self):
2403 2403 parents_recursion_limit = 10
2404 2404 groups = []
2405 2405 if self.parent_group is None:
2406 2406 return groups
2407 2407 cur_gr = self.parent_group
2408 2408 groups.insert(0, cur_gr)
2409 2409 cnt = 0
2410 2410 while 1:
2411 2411 cnt += 1
2412 2412 gr = getattr(cur_gr, 'parent_group', None)
2413 2413 cur_gr = cur_gr.parent_group
2414 2414 if gr is None:
2415 2415 break
2416 2416 if cnt == parents_recursion_limit:
2417 2417 # this will prevent accidental infinite loops
2418 2418 log.error('more than %s parents found for group %s, stopping '
2419 2419 'recursive parent fetching', parents_recursion_limit, self)
2420 2420 break
2421 2421
2422 2422 groups.insert(0, gr)
2423 2423 return groups
2424 2424
2425 2425 @property
2426 2426 def last_db_change(self):
2427 2427 return self.updated_on
2428 2428
2429 2429 @property
2430 2430 def children(self):
2431 2431 return RepoGroup.query().filter(RepoGroup.parent_group == self)
2432 2432
2433 2433 @property
2434 2434 def name(self):
2435 2435 return self.group_name.split(RepoGroup.url_sep())[-1]
2436 2436
2437 2437 @property
2438 2438 def full_path(self):
2439 2439 return self.group_name
2440 2440
2441 2441 @property
2442 2442 def full_path_splitted(self):
2443 2443 return self.group_name.split(RepoGroup.url_sep())
2444 2444
2445 2445 @property
2446 2446 def repositories(self):
2447 2447 return Repository.query()\
2448 2448 .filter(Repository.group == self)\
2449 2449 .order_by(Repository.repo_name)
2450 2450
2451 2451 @property
2452 2452 def repositories_recursive_count(self):
2453 2453 cnt = self.repositories.count()
2454 2454
2455 2455 def children_count(group):
2456 2456 cnt = 0
2457 2457 for child in group.children:
2458 2458 cnt += child.repositories.count()
2459 2459 cnt += children_count(child)
2460 2460 return cnt
2461 2461
2462 2462 return cnt + children_count(self)
2463 2463
2464 2464 def _recursive_objects(self, include_repos=True):
2465 2465 all_ = []
2466 2466
2467 2467 def _get_members(root_gr):
2468 2468 if include_repos:
2469 2469 for r in root_gr.repositories:
2470 2470 all_.append(r)
2471 2471 childs = root_gr.children.all()
2472 2472 if childs:
2473 2473 for gr in childs:
2474 2474 all_.append(gr)
2475 2475 _get_members(gr)
2476 2476
2477 2477 _get_members(self)
2478 2478 return [self] + all_
2479 2479
2480 2480 def recursive_groups_and_repos(self):
2481 2481 """
2482 2482 Recursively returns all groups, with the repositories in those groups
2483 2483 """
2484 2484 return self._recursive_objects()
2485 2485
2486 2486 def recursive_groups(self):
2487 2487 """
2488 2488 Returns all child groups of this group, including children of children
2489 2489 """
2490 2490 return self._recursive_objects(include_repos=False)
2491 2491
2492 2492 def get_new_name(self, group_name):
2493 2493 """
2494 2494 returns new full group name based on parent and new name
2495 2495
2496 2496 :param group_name:
2497 2497 """
2498 2498 path_prefix = (self.parent_group.full_path_splitted if
2499 2499 self.parent_group else [])
2500 2500 return RepoGroup.url_sep().join(path_prefix + [group_name])
2501 2501
2502 2502 def permissions(self, with_admins=True, with_owner=True):
2503 2503 q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self)
2504 2504 q = q.options(joinedload(UserRepoGroupToPerm.group),
2505 2505 joinedload(UserRepoGroupToPerm.user),
2506 2506 joinedload(UserRepoGroupToPerm.permission),)
2507 2507
2508 2508 # get owners, admins and their permissions. We re-write the objects coming
2509 2509 # from sqlalchemy into plain AttributeDict copies, because the sqlalchemy
2510 2510 # session holds a global reference and changing one object would propagate
2511 2511 # to all others. This means that if an admin is also the owner, setting
2512 2512 # admin_row on one record would otherwise change both objects
2513 2513 perm_rows = []
2514 2514 for _usr in q.all():
2515 2515 usr = AttributeDict(_usr.user.get_dict())
2516 2516 usr.permission = _usr.permission.permission_name
2517 2517 perm_rows.append(usr)
2518 2518
2519 2519 # filter the perm rows by 'default' first and then sort them by
2520 2520 # admin, write, read, none permission and then alphabetically within
2521 2521 # each group
2522 2522 perm_rows = sorted(perm_rows, key=display_user_sort)
2523 2523
2524 2524 _admin_perm = 'group.admin'
2525 2525 owner_row = []
2526 2526 if with_owner:
2527 2527 usr = AttributeDict(self.user.get_dict())
2528 2528 usr.owner_row = True
2529 2529 usr.permission = _admin_perm
2530 2530 owner_row.append(usr)
2531 2531
2532 2532 super_admin_rows = []
2533 2533 if with_admins:
2534 2534 for usr in User.get_all_super_admins():
2535 2535 # if this admin is also owner, don't double the record
2536 2536 if usr.user_id == owner_row[0].user_id:
2537 2537 owner_row[0].admin_row = True
2538 2538 else:
2539 2539 usr = AttributeDict(usr.get_dict())
2540 2540 usr.admin_row = True
2541 2541 usr.permission = _admin_perm
2542 2542 super_admin_rows.append(usr)
2543 2543
2544 2544 return super_admin_rows + owner_row + perm_rows
2545 2545
2546 2546 def permission_user_groups(self):
2547 2547 q = UserGroupRepoGroupToPerm.query().filter(UserGroupRepoGroupToPerm.group == self)
2548 2548 q = q.options(joinedload(UserGroupRepoGroupToPerm.group),
2549 2549 joinedload(UserGroupRepoGroupToPerm.users_group),
2550 2550 joinedload(UserGroupRepoGroupToPerm.permission),)
2551 2551
2552 2552 perm_rows = []
2553 2553 for _user_group in q.all():
2554 2554 usr = AttributeDict(_user_group.users_group.get_dict())
2555 2555 usr.permission = _user_group.permission.permission_name
2556 2556 perm_rows.append(usr)
2557 2557
2558 2558 perm_rows = sorted(perm_rows, key=display_user_group_sort)
2559 2559 return perm_rows
2560 2560
2561 2561 def get_api_data(self):
2562 2562 """
2563 2563 Common function for generating api data
2564 2564
2565 2565 """
2566 2566 group = self
2567 2567 data = {
2568 2568 'group_id': group.group_id,
2569 2569 'group_name': group.group_name,
2570 2570 'group_description': group.description_safe,
2571 2571 'parent_group': group.parent_group.group_name if group.parent_group else None,
2572 2572 'repositories': [x.repo_name for x in group.repositories],
2573 2573 'owner': group.user.username,
2574 2574 }
2575 2575 return data
2576 2576
2577 2577
2578 2578 class Permission(Base, BaseModel):
2579 2579 __tablename__ = 'permissions'
2580 2580 __table_args__ = (
2581 2581 Index('p_perm_name_idx', 'permission_name'),
2582 2582 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2583 2583 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
2584 2584 )
2585 2585 PERMS = [
2586 2586 ('hg.admin', _('RhodeCode Super Administrator')),
2587 2587
2588 2588 ('repository.none', _('Repository no access')),
2589 2589 ('repository.read', _('Repository read access')),
2590 2590 ('repository.write', _('Repository write access')),
2591 2591 ('repository.admin', _('Repository admin access')),
2592 2592
2593 2593 ('group.none', _('Repository group no access')),
2594 2594 ('group.read', _('Repository group read access')),
2595 2595 ('group.write', _('Repository group write access')),
2596 2596 ('group.admin', _('Repository group admin access')),
2597 2597
2598 2598 ('usergroup.none', _('User group no access')),
2599 2599 ('usergroup.read', _('User group read access')),
2600 2600 ('usergroup.write', _('User group write access')),
2601 2601 ('usergroup.admin', _('User group admin access')),
2602 2602
2603 2603 ('hg.repogroup.create.false', _('Repository Group creation disabled')),
2604 2604 ('hg.repogroup.create.true', _('Repository Group creation enabled')),
2605 2605
2606 2606 ('hg.usergroup.create.false', _('User Group creation disabled')),
2607 2607 ('hg.usergroup.create.true', _('User Group creation enabled')),
2608 2608
2609 2609 ('hg.create.none', _('Repository creation disabled')),
2610 2610 ('hg.create.repository', _('Repository creation enabled')),
2611 2611 ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
2612 2612 ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),
2613 2613
2614 2614 ('hg.fork.none', _('Repository forking disabled')),
2615 2615 ('hg.fork.repository', _('Repository forking enabled')),
2616 2616
2617 2617 ('hg.register.none', _('Registration disabled')),
2618 2618 ('hg.register.manual_activate', _('User Registration with manual account activation')),
2619 2619 ('hg.register.auto_activate', _('User Registration with automatic account activation')),
2620 2620
2621 2621 ('hg.password_reset.enabled', _('Password reset enabled')),
2622 2622 ('hg.password_reset.hidden', _('Password reset hidden')),
2623 2623 ('hg.password_reset.disabled', _('Password reset disabled')),
2624 2624
2625 2625 ('hg.extern_activate.manual', _('Manual activation of external account')),
2626 2626 ('hg.extern_activate.auto', _('Automatic activation of external account')),
2627 2627
2628 2628 ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')),
2629 2629 ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')),
2630 2630 ]
2631 2631
2632 2632 # definition of system default permissions for DEFAULT user
2633 2633 DEFAULT_USER_PERMISSIONS = [
2634 2634 'repository.read',
2635 2635 'group.read',
2636 2636 'usergroup.read',
2637 2637 'hg.create.repository',
2638 2638 'hg.repogroup.create.false',
2639 2639 'hg.usergroup.create.false',
2640 2640 'hg.create.write_on_repogroup.true',
2641 2641 'hg.fork.repository',
2642 2642 'hg.register.manual_activate',
2643 2643 'hg.password_reset.enabled',
2644 2644 'hg.extern_activate.auto',
2645 2645 'hg.inherit_default_perms.true',
2646 2646 ]
2647 2647
2648 2648 # Weight defines which permissions are more important:
2649 2649 # the higher the number, the more important the permission
2650 2650 # (so a higher-weight permission wins over a lower-weight one).
2651 2651 PERM_WEIGHTS = {
2652 2652 'repository.none': 0,
2653 2653 'repository.read': 1,
2654 2654 'repository.write': 3,
2655 2655 'repository.admin': 4,
2656 2656
2657 2657 'group.none': 0,
2658 2658 'group.read': 1,
2659 2659 'group.write': 3,
2660 2660 'group.admin': 4,
2661 2661
2662 2662 'usergroup.none': 0,
2663 2663 'usergroup.read': 1,
2664 2664 'usergroup.write': 3,
2665 2665 'usergroup.admin': 4,
2666 2666
2667 2667 'hg.repogroup.create.false': 0,
2668 2668 'hg.repogroup.create.true': 1,
2669 2669
2670 2670 'hg.usergroup.create.false': 0,
2671 2671 'hg.usergroup.create.true': 1,
2672 2672
2673 2673 'hg.fork.none': 0,
2674 2674 'hg.fork.repository': 1,
2675 2675 'hg.create.none': 0,
2676 2676 'hg.create.repository': 1
2677 2677 }
2678 2678
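With the weights above, picking the strongest of several applicable permissions is a simple max-by-weight lookup. The sketch below only illustrates that idea; the actual resolution logic lives in the permission calculation code and may apply different strategies:

.. code-block:: python

    PERM_WEIGHTS = {
        'repository.none': 0,
        'repository.read': 1,
        'repository.write': 3,
        'repository.admin': 4,
    }

    def strongest(perms):
        # pick the permission with the highest weight
        return max(perms, key=lambda p: PERM_WEIGHTS[p])

    assert strongest(['repository.read', 'repository.write']) == 'repository.write'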
2679 2679 permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2680 2680 permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
2681 2681 permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)
2682 2682
2683 2683 def __unicode__(self):
2684 2684 return u"<%s('%s:%s')>" % (
2685 2685 self.__class__.__name__, self.permission_id, self.permission_name
2686 2686 )
2687 2687
2688 2688 @classmethod
2689 2689 def get_by_key(cls, key):
2690 2690 return cls.query().filter(cls.permission_name == key).scalar()
2691 2691
2692 2692 @classmethod
2693 2693 def get_default_repo_perms(cls, user_id, repo_id=None):
2694 2694 q = Session().query(UserRepoToPerm, Repository, Permission)\
2695 2695 .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\
2696 2696 .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
2697 2697 .filter(UserRepoToPerm.user_id == user_id)
2698 2698 if repo_id:
2699 2699 q = q.filter(UserRepoToPerm.repository_id == repo_id)
2700 2700 return q.all()
2701 2701
2702 2702 @classmethod
2703 2703 def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None):
2704 2704 q = Session().query(UserGroupRepoToPerm, Repository, Permission)\
2705 2705 .join(
2706 2706 Permission,
2707 2707 UserGroupRepoToPerm.permission_id == Permission.permission_id)\
2708 2708 .join(
2709 2709 Repository,
2710 2710 UserGroupRepoToPerm.repository_id == Repository.repo_id)\
2711 2711 .join(
2712 2712 UserGroup,
2713 2713 UserGroupRepoToPerm.users_group_id ==
2714 2714 UserGroup.users_group_id)\
2715 2715 .join(
2716 2716 UserGroupMember,
2717 2717 UserGroupRepoToPerm.users_group_id ==
2718 2718 UserGroupMember.users_group_id)\
2719 2719 .filter(
2720 2720 UserGroupMember.user_id == user_id,
2721 2721 UserGroup.users_group_active == true())
2722 2722 if repo_id:
2723 2723 q = q.filter(UserGroupRepoToPerm.repository_id == repo_id)
2724 2724 return q.all()
2725 2725
2726 2726 @classmethod
2727 2727 def get_default_group_perms(cls, user_id, repo_group_id=None):
2728 2728 q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\
2729 2729 .join((Permission, UserRepoGroupToPerm.permission_id == Permission.permission_id))\
2730 2730 .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id))\
2731 2731 .filter(UserRepoGroupToPerm.user_id == user_id)
2732 2732 if repo_group_id:
2733 2733 q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id)
2734 2734 return q.all()
2735 2735
2736 2736 @classmethod
2737 2737 def get_default_group_perms_from_user_group(
2738 2738 cls, user_id, repo_group_id=None):
2739 2739 q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\
2740 2740 .join(
2741 2741 Permission,
2742 2742 UserGroupRepoGroupToPerm.permission_id ==
2743 2743 Permission.permission_id)\
2744 2744 .join(
2745 2745 RepoGroup,
2746 2746 UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\
2747 2747 .join(
2748 2748 UserGroup,
2749 2749 UserGroupRepoGroupToPerm.users_group_id ==
2750 2750 UserGroup.users_group_id)\
2751 2751 .join(
2752 2752 UserGroupMember,
2753 2753 UserGroupRepoGroupToPerm.users_group_id ==
2754 2754 UserGroupMember.users_group_id)\
2755 2755 .filter(
2756 2756 UserGroupMember.user_id == user_id,
2757 2757 UserGroup.users_group_active == true())
2758 2758 if repo_group_id:
2759 2759 q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id)
2760 2760 return q.all()
2761 2761
2762 2762 @classmethod
2763 2763 def get_default_user_group_perms(cls, user_id, user_group_id=None):
2764 2764 q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\
2765 2765 .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\
2766 2766 .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
2767 2767 .filter(UserUserGroupToPerm.user_id == user_id)
2768 2768 if user_group_id:
2769 2769 q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id)
2770 2770 return q.all()
2771 2771
2772 2772 @classmethod
2773 2773 def get_default_user_group_perms_from_user_group(
2774 2774 cls, user_id, user_group_id=None):
2775 2775 TargetUserGroup = aliased(UserGroup, name='target_user_group')
2776 2776 q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
2777 2777 .join(
2778 2778 Permission,
2779 2779 UserGroupUserGroupToPerm.permission_id ==
2780 2780 Permission.permission_id)\
2781 2781 .join(
2782 2782 TargetUserGroup,
2783 2783 UserGroupUserGroupToPerm.target_user_group_id ==
2784 2784 TargetUserGroup.users_group_id)\
2785 2785 .join(
2786 2786 UserGroup,
2787 2787 UserGroupUserGroupToPerm.user_group_id ==
2788 2788 UserGroup.users_group_id)\
2789 2789 .join(
2790 2790 UserGroupMember,
2791 2791 UserGroupUserGroupToPerm.user_group_id ==
2792 2792 UserGroupMember.users_group_id)\
2793 2793 .filter(
2794 2794 UserGroupMember.user_id == user_id,
2795 2795 UserGroup.users_group_active == true())
2796 2796 if user_group_id:
2797 2797 q = q.filter(
2798 2798 UserGroupUserGroupToPerm.user_group_id == user_group_id)
2799 2799
2800 2800 return q.all()
2801 2801
2802 2802
2803 2803 class UserRepoToPerm(Base, BaseModel):
2804 2804 __tablename__ = 'repo_to_perm'
2805 2805 __table_args__ = (
2806 2806 UniqueConstraint('user_id', 'repository_id', 'permission_id'),
2807 2807 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2808 2808 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2809 2809 )
2810 2810 repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2811 2811 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2812 2812 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2813 2813 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
2814 2814
2815 2815 user = relationship('User')
2816 2816 repository = relationship('Repository')
2817 2817 permission = relationship('Permission')
2818 2818
2819 2819 @classmethod
2820 2820 def create(cls, user, repository, permission):
2821 2821 n = cls()
2822 2822 n.user = user
2823 2823 n.repository = repository
2824 2824 n.permission = permission
2825 2825 Session().add(n)
2826 2826 return n
2827 2827
2828 2828 def __unicode__(self):
2829 2829 return u'<%s => %s >' % (self.user, self.repository)
2830 2830
2831 2831
2832 2832 class UserUserGroupToPerm(Base, BaseModel):
2833 2833 __tablename__ = 'user_user_group_to_perm'
2834 2834 __table_args__ = (
2835 2835 UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
2836 2836 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2837 2837 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2838 2838 )
2839 2839 user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2840 2840 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2841 2841 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2842 2842 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2843 2843
2844 2844 user = relationship('User')
2845 2845 user_group = relationship('UserGroup')
2846 2846 permission = relationship('Permission')
2847 2847
2848 2848 @classmethod
2849 2849 def create(cls, user, user_group, permission):
2850 2850 n = cls()
2851 2851 n.user = user
2852 2852 n.user_group = user_group
2853 2853 n.permission = permission
2854 2854 Session().add(n)
2855 2855 return n
2856 2856
2857 2857 def __unicode__(self):
2858 2858 return u'<%s => %s >' % (self.user, self.user_group)
2859 2859
2860 2860
2861 2861 class UserToPerm(Base, BaseModel):
2862 2862 __tablename__ = 'user_to_perm'
2863 2863 __table_args__ = (
2864 2864 UniqueConstraint('user_id', 'permission_id'),
2865 2865 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2866 2866 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2867 2867 )
2868 2868 user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2869 2869 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2870 2870 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2871 2871
2872 2872 user = relationship('User')
2873 2873 permission = relationship('Permission', lazy='joined')
2874 2874
2875 2875 def __unicode__(self):
2876 2876 return u'<%s => %s >' % (self.user, self.permission)
2877 2877
2878 2878
2879 2879 class UserGroupRepoToPerm(Base, BaseModel):
2880 2880 __tablename__ = 'users_group_repo_to_perm'
2881 2881 __table_args__ = (
2882 2882 UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
2883 2883 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2884 2884 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2885 2885 )
2886 2886 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2887 2887 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2888 2888 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2889 2889 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
2890 2890
2891 2891 users_group = relationship('UserGroup')
2892 2892 permission = relationship('Permission')
2893 2893 repository = relationship('Repository')
2894 2894
2895 2895 @classmethod
2896 2896 def create(cls, users_group, repository, permission):
2897 2897 n = cls()
2898 2898 n.users_group = users_group
2899 2899 n.repository = repository
2900 2900 n.permission = permission
2901 2901 Session().add(n)
2902 2902 return n
2903 2903
2904 2904 def __unicode__(self):
2905 2905 return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)
2906 2906
2907 2907
2908 2908 class UserGroupUserGroupToPerm(Base, BaseModel):
2909 2909 __tablename__ = 'user_group_user_group_to_perm'
2910 2910 __table_args__ = (
2911 2911 UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
2912 2912 CheckConstraint('target_user_group_id != user_group_id'),
2913 2913 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2914 2914 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2915 2915 )
2916 2916 user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2917 2917 target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2918 2918 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2919 2919 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2920 2920
2921 2921 target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
2922 2922 user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
2923 2923 permission = relationship('Permission')
2924 2924
2925 2925 @classmethod
2926 2926 def create(cls, target_user_group, user_group, permission):
2927 2927 n = cls()
2928 2928 n.target_user_group = target_user_group
2929 2929 n.user_group = user_group
2930 2930 n.permission = permission
2931 2931 Session().add(n)
2932 2932 return n
2933 2933
2934 2934 def __unicode__(self):
2935 2935 return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)
2936 2936
2937 2937
2938 2938 class UserGroupToPerm(Base, BaseModel):
2939 2939 __tablename__ = 'users_group_to_perm'
2940 2940 __table_args__ = (
2941 2941 UniqueConstraint('users_group_id', 'permission_id',),
2942 2942 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2943 2943 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2944 2944 )
2945 2945 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2946 2946 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2947 2947 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2948 2948
2949 2949 users_group = relationship('UserGroup')
2950 2950 permission = relationship('Permission')
2951 2951
2952 2952
2953 2953 class UserRepoGroupToPerm(Base, BaseModel):
2954 2954 __tablename__ = 'user_repo_group_to_perm'
2955 2955 __table_args__ = (
2956 2956 UniqueConstraint('user_id', 'group_id', 'permission_id'),
2957 2957 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2958 2958 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2959 2959 )
2960 2960
2961 2961 group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2962 2962 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2963 2963 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
2964 2964 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2965 2965
2966 2966 user = relationship('User')
2967 2967 group = relationship('RepoGroup')
2968 2968 permission = relationship('Permission')
2969 2969
2970 2970 @classmethod
2971 2971 def create(cls, user, repository_group, permission):
2972 2972 n = cls()
2973 2973 n.user = user
2974 2974 n.group = repository_group
2975 2975 n.permission = permission
2976 2976 Session().add(n)
2977 2977 return n
2978 2978
2979 2979
2980 2980 class UserGroupRepoGroupToPerm(Base, BaseModel):
2981 2981 __tablename__ = 'users_group_repo_group_to_perm'
2982 2982 __table_args__ = (
2983 2983 UniqueConstraint('users_group_id', 'group_id'),
2984 2984 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2985 2985 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2986 2986 )
2987 2987
2988 2988 users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2989 2989 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2990 2990 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
2991 2991 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2992 2992
2993 2993 users_group = relationship('UserGroup')
2994 2994 permission = relationship('Permission')
2995 2995 group = relationship('RepoGroup')
2996 2996
2997 2997 @classmethod
2998 2998 def create(cls, user_group, repository_group, permission):
2999 2999 n = cls()
3000 3000 n.users_group = user_group
3001 3001 n.group = repository_group
3002 3002 n.permission = permission
3003 3003 Session().add(n)
3004 3004 return n
3005 3005
3006 3006 def __unicode__(self):
3007 3007 return u'<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group)
3008 3008
3009 3009
3010 3010 class Statistics(Base, BaseModel):
3011 3011 __tablename__ = 'statistics'
3012 3012 __table_args__ = (
3013 3013 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3014 3014 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3015 3015 )
3016 3016 stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3017 3017 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
3018 3018 stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
3019 3019 commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)#JSON data
3020 3020 commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)#JSON data
3021 3021 languages = Column("languages", LargeBinary(1000000), nullable=False)#JSON data
3022 3022
3023 3023 repository = relationship('Repository', single_parent=True)
3024 3024
3025 3025
3026 3026 class UserFollowing(Base, BaseModel):
3027 3027 __tablename__ = 'user_followings'
3028 3028 __table_args__ = (
3029 3029 UniqueConstraint('user_id', 'follows_repository_id'),
3030 3030 UniqueConstraint('user_id', 'follows_user_id'),
3031 3031 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3032 3032 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3033 3033 )
3034 3034
3035 3035 user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3036 3036 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3037 3037 follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
3038 3038 follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
3039 3039 follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
3040 3040
3041 3041 user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
3042 3042
3043 3043 follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
3044 3044 follows_repository = relationship('Repository', order_by='Repository.repo_name')
3045 3045
3046 3046 @classmethod
3047 3047 def get_repo_followers(cls, repo_id):
3048 3048 return cls.query().filter(cls.follows_repo_id == repo_id)
3049 3049
3050 3050
3051 3051 class CacheKey(Base, BaseModel):
3052 3052 __tablename__ = 'cache_invalidation'
3053 3053 __table_args__ = (
3054 3054 UniqueConstraint('cache_key'),
3055 3055 Index('key_idx', 'cache_key'),
3056 3056 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3057 3057 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3058 3058 )
3059 3059 CACHE_TYPE_ATOM = 'ATOM'
3060 3060 CACHE_TYPE_RSS = 'RSS'
3061 3061 CACHE_TYPE_README = 'README'
3062 3062
3063 3063 cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3064 3064 cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
3065 3065 cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
3066 3066 cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
3067 3067
3068 3068 def __init__(self, cache_key, cache_args=''):
3069 3069 self.cache_key = cache_key
3070 3070 self.cache_args = cache_args
3071 3071 self.cache_active = False
3072 3072
3073 3073 def __unicode__(self):
3074 3074 return u"<%s('%s:%s[%s]')>" % (
3075 3075 self.__class__.__name__,
3076 3076 self.cache_id, self.cache_key, self.cache_active)
3077 3077
3078 3078 def _cache_key_partition(self):
3079 3079 prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
3080 3080 return prefix, repo_name, suffix
3081 3081
3082 3082 def get_prefix(self):
3083 3083 """
3084 3084 Try to extract the prefix from an existing cache key. The key may
3085 3085 consist of a prefix, repo_name, and suffix.
3086 3086 """
3087 3087 # this returns prefix, repo_name, suffix
3088 3088 return self._cache_key_partition()[0]
3089 3089
3090 3090 def get_suffix(self):
3091 3091 """
3092 3092 Get the suffix that might have been used in _get_cache_key to
3093 3093 generate self.cache_key. Only used for informational purposes
3094 3094 in repo_edit.mako.
3095 3095 """
3096 3096 # prefix, repo_name, suffix
3097 3097 return self._cache_key_partition()[2]
3098 3098
3099 3099 @classmethod
3100 3100 def delete_all_cache(cls):
3101 3101 """
3102 3102 Delete all cache keys from database.
3103 3103 Should only be run when all instances are down and all entries
3104 3104 thus stale.
3105 3105 """
3106 3106 cls.query().delete()
3107 3107 Session().commit()
3108 3108
3109 3109 @classmethod
3110 3110 def get_cache_key(cls, repo_name, cache_type):
3111 3111 """
3112 3112
3113 3113 Generate a cache key for this RhodeCode instance.
3114 3114 The prefix will most likely be the process id, or an explicitly set
3115 3115 instance_id from the .ini file.
3116 3116 """
3117 3117 import rhodecode
3118 3118 prefix = safe_unicode(rhodecode.CONFIG.get('instance_id') or '')
3119 3119
3120 3120 repo_as_unicode = safe_unicode(repo_name)
3121 3121 key = u'{}_{}'.format(repo_as_unicode, cache_type) \
3122 3122 if cache_type else repo_as_unicode
3123 3123
3124 3124 return u'{}{}'.format(prefix, key)
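# Minimal usage sketch (hypothetical values, assuming ``instance_id = inst1``
# is set in the .ini file):
#
#   CacheKey.get_cache_key('group/repo', CacheKey.CACHE_TYPE_README)
#   # -> u'inst1group/repo_README'
#   CacheKey.get_cache_key('group/repo', None)
#   # -> u'inst1group/repo'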
3125 3125
3126 3126 @classmethod
3127 3127 def set_invalidate(cls, repo_name, delete=False):
3128 3128 """
3129 3129 Mark all caches of a repo as invalid in the database.
3130 3130 """
3131 3131
3132 3132 try:
3133 3133 qry = Session().query(cls).filter(cls.cache_args == repo_name)
3134 3134 if delete:
3135 3135 log.debug('cache objects deleted for repo %s',
3136 3136 safe_str(repo_name))
3137 3137 qry.delete()
3138 3138 else:
3139 3139 log.debug('cache objects marked as invalid for repo %s',
3140 3140 safe_str(repo_name))
3141 3141 qry.update({"cache_active": False})
3142 3142
3143 3143 Session().commit()
3144 3144 except Exception:
3145 3145 log.exception(
3146 3146 'Cache key invalidation failed for repository %s',
3147 3147 safe_str(repo_name))
3148 3148 Session().rollback()
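# Minimal usage sketch:
#
#   CacheKey.set_invalidate('group/repo')               # mark keys as inactive
#   CacheKey.set_invalidate('group/repo', delete=True)  # remove the keys entirely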
3149 3149
3150 3150 @classmethod
3151 3151 def get_active_cache(cls, cache_key):
3152 3152 inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
3153 3153 if inv_obj:
3154 3154 return inv_obj
3155 3155 return None
3156 3156
3157 3157
3158 3158 class ChangesetComment(Base, BaseModel):
3159 3159 __tablename__ = 'changeset_comments'
3160 3160 __table_args__ = (
3161 3161 Index('cc_revision_idx', 'revision'),
3162 3162 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3163 3163 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3164 3164 )
3165 3165
3166 3166 COMMENT_OUTDATED = u'comment_outdated'
3167 3167 COMMENT_TYPE_NOTE = u'note'
3168 3168 COMMENT_TYPE_TODO = u'todo'
3169 3169 COMMENT_TYPES = [COMMENT_TYPE_NOTE, COMMENT_TYPE_TODO]
3170 3170
3171 3171 comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
3172 3172 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
3173 3173 revision = Column('revision', String(40), nullable=True)
3174 3174 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
3175 3175 pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
3176 3176 line_no = Column('line_no', Unicode(10), nullable=True)
3177 3177 hl_lines = Column('hl_lines', Unicode(512), nullable=True)
3178 3178 f_path = Column('f_path', Unicode(1000), nullable=True)
3179 3179 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
3180 3180 text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
3181 3181 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3182 3182 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3183 3183 renderer = Column('renderer', Unicode(64), nullable=True)
3184 3184 display_state = Column('display_state', Unicode(128), nullable=True)
3185 3185
3186 3186 comment_type = Column('comment_type', Unicode(128), nullable=True, default=COMMENT_TYPE_NOTE)
3187 3187 resolved_comment_id = Column('resolved_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=True)
3188 3188 resolved_comment = relationship('ChangesetComment', remote_side=comment_id, backref='resolved_by')
3189 3189 author = relationship('User', lazy='joined')
3190 3190 repo = relationship('Repository')
3191 3191 status_change = relationship('ChangesetStatus', cascade="all, delete, delete-orphan", lazy='joined')
3192 3192 pull_request = relationship('PullRequest', lazy='joined')
3193 3193 pull_request_version = relationship('PullRequestVersion')
3194 3194
3195 3195 @classmethod
3196 3196 def get_users(cls, revision=None, pull_request_id=None):
3197 3197 """
3198 3198 Returns the users associated with this ChangesetComment, i.e. those
3199 3199 who actually commented.
3200 3200
3201 3201 :param cls:
3202 3202 :param revision:
3203 3203 """
3204 3204 q = Session().query(User)\
3205 3205 .join(ChangesetComment.author)
3206 3206 if revision:
3207 3207 q = q.filter(cls.revision == revision)
3208 3208 elif pull_request_id:
3209 3209 q = q.filter(cls.pull_request_id == pull_request_id)
3210 3210 return q.all()
3211 3211
3212 3212 @classmethod
3213 3213 def get_index_from_version(cls, pr_version, versions):
3214 3214 num_versions = [x.pull_request_version_id for x in versions]
3215 3215 try:
3216 3216 return num_versions.index(pr_version) + 1
3217 3217 except (IndexError, ValueError):
3218 3218 return
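# Minimal sketch: for versions with pull_request_version_id values
# [10, 11, 12], pr_version=11 yields index 2 (1-based); an unknown id
# returns None.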
3219 3219
3220 3220 @property
3221 3221 def outdated(self):
3222 3222 return self.display_state == self.COMMENT_OUTDATED
3223 3223
3224 3224 def outdated_at_version(self, version):
3225 3225 """
3226 3226 Checks if the comment is outdated for the given pull request version.
3227 3227 """
3228 3228 return self.outdated and self.pull_request_version_id != version
3229 3229
3230 3230 def older_than_version(self, version):
3231 3231 """
3232 3232 Checks if comment is made from previous version than given
3233 3233 """
3234 3234 if version is None:
3235 3235 return self.pull_request_version_id is not None
3236 3236
3237 3237 return self.pull_request_version_id < version
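# Minimal sketch: with pull_request_version_id == 2, older_than_version(3)
# is True and older_than_version(2) is False; older_than_version(None) is
# True only for comments attached to some version.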
3238 3238
3239 3239 @property
3240 3240 def resolved(self):
3241 3241 return self.resolved_by[0] if self.resolved_by else None
3242 3242
3243 3243 @property
3244 3244 def is_todo(self):
3245 3245 return self.comment_type == self.COMMENT_TYPE_TODO
3246 3246
3247 3247 @property
3248 3248 def is_inline(self):
3249 3249 return self.line_no and self.f_path
3250 3250
3251 3251 def get_index_version(self, versions):
3252 3252 return self.get_index_from_version(
3253 3253 self.pull_request_version_id, versions)
3254 3254
3255 3255 def __repr__(self):
3256 3256 if self.comment_id:
3257 3257 return '<DB:Comment #%s>' % self.comment_id
3258 3258 else:
3259 3259 return '<DB:Comment at %#x>' % id(self)
3260 3260
3261 3261 def get_api_data(self):
3262 3262 comment = self
3263 3263 data = {
3264 3264 'comment_id': comment.comment_id,
3265 3265 'comment_type': comment.comment_type,
3266 3266 'comment_text': comment.text,
3267 3267 'comment_status': comment.status_change,
3268 3268 'comment_f_path': comment.f_path,
3269 3269 'comment_lineno': comment.line_no,
3270 3270 'comment_author': comment.author,
3271 3271 'comment_created_on': comment.created_on
3272 3272 }
3273 3273 return data
3274 3274
3275 3275 def __json__(self):
3276 3276 data = dict()
3277 3277 data.update(self.get_api_data())
3278 3278 return data
3279 3279
3280 3280
3281 3281 class ChangesetStatus(Base, BaseModel):
3282 3282 __tablename__ = 'changeset_statuses'
3283 3283 __table_args__ = (
3284 3284 Index('cs_revision_idx', 'revision'),
3285 3285 Index('cs_version_idx', 'version'),
3286 3286 UniqueConstraint('repo_id', 'revision', 'version'),
3287 3287 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3288 3288 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3289 3289 )
3290 3290 STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
3291 3291 STATUS_APPROVED = 'approved'
3292 3292 STATUS_REJECTED = 'rejected'
3293 3293 STATUS_UNDER_REVIEW = 'under_review'
3294 3294
3295 3295 STATUSES = [
3296 3296 (STATUS_NOT_REVIEWED, _("Not Reviewed")), # (no icon) and default
3297 3297 (STATUS_APPROVED, _("Approved")),
3298 3298 (STATUS_REJECTED, _("Rejected")),
3299 3299 (STATUS_UNDER_REVIEW, _("Under Review")),
3300 3300 ]
3301 3301
3302 3302 changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
3303 3303 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
3304 3304 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
3305 3305 revision = Column('revision', String(40), nullable=False)
3306 3306 status = Column('status', String(128), nullable=False, default=DEFAULT)
3307 3307 changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
3308 3308 modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
3309 3309 version = Column('version', Integer(), nullable=False, default=0)
3310 3310 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
3311 3311
3312 3312 author = relationship('User', lazy='joined')
3313 3313 repo = relationship('Repository')
3314 3314 comment = relationship('ChangesetComment', lazy='joined')
3315 3315 pull_request = relationship('PullRequest', lazy='joined')
3316 3316
3317 3317 def __unicode__(self):
3318 3318 return u"<%s('%s[v%s]:%s')>" % (
3319 3319 self.__class__.__name__,
3320 3320 self.status, self.version, self.author
3321 3321 )
3322 3322
3323 3323 @classmethod
3324 3324 def get_status_lbl(cls, value):
3325 3325 return dict(cls.STATUSES).get(value)
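# Minimal sketch:
#
#   ChangesetStatus.get_status_lbl('approved')  # -> translated 'Approved'
#   ChangesetStatus.get_status_lbl('bogus')     # -> None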
3326 3326
3327 3327 @property
3328 3328 def status_lbl(self):
3329 3329 return ChangesetStatus.get_status_lbl(self.status)
3330 3330
3331 3331 def get_api_data(self):
3332 3332 status = self
3333 3333 data = {
3334 3334 'status_id': status.changeset_status_id,
3335 3335 'status': status.status,
3336 3336 }
3337 3337 return data
3338 3338
3339 3339 def __json__(self):
3340 3340 data = dict()
3341 3341 data.update(self.get_api_data())
3342 3342 return data
3343 3343
3344 3344
3345 3345 class _PullRequestBase(BaseModel):
3346 3346 """
3347 3347 Common attributes of pull request and version entries.
3348 3348 """
3349 3349
3350 3350 # .status values
3351 3351 STATUS_NEW = u'new'
3352 3352 STATUS_OPEN = u'open'
3353 3353 STATUS_CLOSED = u'closed'
3354 3354
3355 3355 title = Column('title', Unicode(255), nullable=True)
3356 3356 description = Column(
3357 3357 'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
3358 3358 nullable=True)
3359 3359 # new/open/closed status of pull request (not approve/reject/etc)
3360 3360 status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
3361 3361 created_on = Column(
3362 3362 'created_on', DateTime(timezone=False), nullable=False,
3363 3363 default=datetime.datetime.now)
3364 3364 updated_on = Column(
3365 3365 'updated_on', DateTime(timezone=False), nullable=False,
3366 3366 default=datetime.datetime.now)
3367 3367
3368 3368 @declared_attr
3369 3369 def user_id(cls):
3370 3370 return Column(
3371 3371 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
3372 3372 unique=None)
3373 3373
3374 3374 # 500 revisions max
3375 3375 _revisions = Column(
3376 3376 'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))
3377 3377
3378 3378 @declared_attr
3379 3379 def source_repo_id(cls):
3380 3380 # TODO: dan: rename column to source_repo_id
3381 3381 return Column(
3382 3382 'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3383 3383 nullable=False)
3384 3384
3385 3385 source_ref = Column('org_ref', Unicode(255), nullable=False)
3386 3386
3387 3387 @declared_attr
3388 3388 def target_repo_id(cls):
3389 3389 # TODO: dan: rename column to target_repo_id
3390 3390 return Column(
3391 3391 'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3392 3392 nullable=False)
3393 3393
3394 3394 target_ref = Column('other_ref', Unicode(255), nullable=False)
3395 3395 _shadow_merge_ref = Column('shadow_merge_ref', Unicode(255), nullable=True)
3396 3396
3397 3397 # TODO: dan: rename column to last_merge_source_rev
3398 3398 _last_merge_source_rev = Column(
3399 3399 'last_merge_org_rev', String(40), nullable=True)
3400 3400 # TODO: dan: rename column to last_merge_target_rev
3401 3401 _last_merge_target_rev = Column(
3402 3402 'last_merge_other_rev', String(40), nullable=True)
3403 3403 _last_merge_status = Column('merge_status', Integer(), nullable=True)
3404 3404 merge_rev = Column('merge_rev', String(40), nullable=True)
3405 3405
3406 3406 reviewer_data = Column(
3407 3407 'reviewer_data_json', MutationObj.as_mutable(
3408 3408 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
3409 3409
3410 3410 @property
3411 3411 def reviewer_data_json(self):
3412 3412 return json.dumps(self.reviewer_data)
3413 3413
3414 3414 @hybrid_property
3415 3415 def description_safe(self):
3416 3416 from rhodecode.lib import helpers as h
3417 3417 return h.escape(self.description)
3418 3418
3419 3419 @hybrid_property
3420 3420 def revisions(self):
3421 3421 return self._revisions.split(':') if self._revisions else []
3422 3422
3423 3423 @revisions.setter
3424 3424 def revisions(self, val):
3425 3425 self._revisions = ':'.join(val)
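# Minimal sketch of the round trip through the colon-joined column:
#
#   pr.revisions = ['abc123', 'def456']   # stored as u'abc123:def456'
#   pr.revisions                          # -> ['abc123', 'def456']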
3426 3426
3427 3427 @hybrid_property
3428 3428 def last_merge_status(self):
3429 3429 return safe_int(self._last_merge_status)
3430 3430
3431 3431 @last_merge_status.setter
3432 3432 def last_merge_status(self, val):
3433 3433 self._last_merge_status = val
3434 3434
3435 3435 @declared_attr
3436 3436 def author(cls):
3437 3437 return relationship('User', lazy='joined')
3438 3438
3439 3439 @declared_attr
3440 3440 def source_repo(cls):
3441 3441 return relationship(
3442 3442 'Repository',
3443 3443 primaryjoin='%s.source_repo_id==Repository.repo_id' % cls.__name__)
3444 3444
3445 3445 @property
3446 3446 def source_ref_parts(self):
3447 3447 return self.unicode_to_reference(self.source_ref)
3448 3448
3449 3449 @declared_attr
3450 3450 def target_repo(cls):
3451 3451 return relationship(
3452 3452 'Repository',
3453 3453 primaryjoin='%s.target_repo_id==Repository.repo_id' % cls.__name__)
3454 3454
3455 3455 @property
3456 3456 def target_ref_parts(self):
3457 3457 return self.unicode_to_reference(self.target_ref)
3458 3458
3459 3459 @property
3460 3460 def shadow_merge_ref(self):
3461 3461 return self.unicode_to_reference(self._shadow_merge_ref)
3462 3462
3463 3463 @shadow_merge_ref.setter
3464 3464 def shadow_merge_ref(self, ref):
3465 3465 self._shadow_merge_ref = self.reference_to_unicode(ref)
3466 3466
3467 3467 def unicode_to_reference(self, raw):
3468 3468 """
3469 3469 Convert a unicode (or string) to a reference object.
3470 3470 If the given value evaluates to False, it returns None.
3471 3471 """
3472 3472 if raw:
3473 3473 refs = raw.split(':')
3474 3474 return Reference(*refs)
3475 3475 else:
3476 3476 return None
3477 3477
3478 3478 def reference_to_unicode(self, ref):
3479 3479 """
3480 3480 Convert a reference object to unicode.
3481 3481 If reference is None it returns None.
3482 3482 """
3483 3483 if ref:
3484 3484 return u':'.join(ref)
3485 3485 else:
3486 3486 return None
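# Minimal round-trip sketch, assuming Reference is the (type, name, commit_id)
# named tuple used elsewhere in this module:
#
#   ref = pr.unicode_to_reference(u'branch:default:deadbeef')
#   # -> Reference(type=u'branch', name=u'default', commit_id=u'deadbeef')
#   pr.reference_to_unicode(ref)
#   # -> u'branch:default:deadbeef'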
3487 3487
3488 3488 def get_api_data(self, with_merge_state=True):
3489 3489 from rhodecode.model.pull_request import PullRequestModel
3490 3490
3491 3491 pull_request = self
3492 3492 if with_merge_state:
3493 3493 merge_status = PullRequestModel().merge_status(pull_request)
3494 3494 merge_state = {
3495 3495 'status': merge_status[0],
3496 3496 'message': safe_unicode(merge_status[1]),
3497 3497 }
3498 3498 else:
3499 3499 merge_state = {'status': 'not_available',
3500 3500 'message': 'not_available'}
3501 3501
3502 3502 merge_data = {
3503 3503 'clone_url': PullRequestModel().get_shadow_clone_url(pull_request),
3504 3504 'reference': (
3505 3505 pull_request.shadow_merge_ref._asdict()
3506 3506 if pull_request.shadow_merge_ref else None),
3507 3507 }
3508 3508
3509 3509 data = {
3510 3510 'pull_request_id': pull_request.pull_request_id,
3511 3511 'url': PullRequestModel().get_url(pull_request),
3512 3512 'title': pull_request.title,
3513 3513 'description': pull_request.description,
3514 3514 'status': pull_request.status,
3515 3515 'created_on': pull_request.created_on,
3516 3516 'updated_on': pull_request.updated_on,
3517 3517 'commit_ids': pull_request.revisions,
3518 3518 'review_status': pull_request.calculated_review_status(),
3519 3519 'mergeable': merge_state,
3520 3520 'source': {
3521 3521 'clone_url': pull_request.source_repo.clone_url(),
3522 3522 'repository': pull_request.source_repo.repo_name,
3523 3523 'reference': {
3524 3524 'name': pull_request.source_ref_parts.name,
3525 3525 'type': pull_request.source_ref_parts.type,
3526 3526 'commit_id': pull_request.source_ref_parts.commit_id,
3527 3527 },
3528 3528 },
3529 3529 'target': {
3530 3530 'clone_url': pull_request.target_repo.clone_url(),
3531 3531 'repository': pull_request.target_repo.repo_name,
3532 3532 'reference': {
3533 3533 'name': pull_request.target_ref_parts.name,
3534 3534 'type': pull_request.target_ref_parts.type,
3535 3535 'commit_id': pull_request.target_ref_parts.commit_id,
3536 3536 },
3537 3537 },
3538 3538 'merge': merge_data,
3539 3539 'author': pull_request.author.get_api_data(include_secrets=False,
3540 3540 details='basic'),
3541 3541 'reviewers': [
3542 3542 {
3543 3543 'user': reviewer.get_api_data(include_secrets=False,
3544 3544 details='basic'),
3545 3545 'reasons': reasons,
3546 3546 'review_status': st[0][1].status if st else 'not_reviewed',
3547 3547 }
3548 3548 for reviewer, reasons, mandatory, st in
3549 3549 pull_request.reviewers_statuses()
3550 3550 ]
3551 3551 }
3552 3552
3553 3553 return data
3554 3554
3555 3555
3556 3556 class PullRequest(Base, _PullRequestBase):
3557 3557 __tablename__ = 'pull_requests'
3558 3558 __table_args__ = (
3559 3559 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3560 3560 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3561 3561 )
3562 3562
3563 3563 pull_request_id = Column(
3564 3564 'pull_request_id', Integer(), nullable=False, primary_key=True)
3565 3565
3566 3566 def __repr__(self):
3567 3567 if self.pull_request_id:
3568 3568 return '<DB:PullRequest #%s>' % self.pull_request_id
3569 3569 else:
3570 3570 return '<DB:PullRequest at %#x>' % id(self)
3571 3571
3572 3572 reviewers = relationship('PullRequestReviewers',
3573 3573 cascade="all, delete, delete-orphan")
3574 3574 statuses = relationship('ChangesetStatus',
3575 3575 cascade="all, delete, delete-orphan")
3576 3576 comments = relationship('ChangesetComment',
3577 3577 cascade="all, delete, delete-orphan")
3578 3578 versions = relationship('PullRequestVersion',
3579 3579 cascade="all, delete, delete-orphan",
3580 3580 lazy='dynamic')
3581 3581
3582 3582 @classmethod
3583 3583 def get_pr_display_object(cls, pull_request_obj, org_pull_request_obj,
3584 3584 internal_methods=None):
3585 3585
3586 3586 class PullRequestDisplay(object):
3587 3587 """
3588 3588 Special object wrapper for showing PullRequest data via Versions.
3589 3589 It mimics the PR object as closely as possible. This is a read-only
3590 3590 object used just for display.
3591 3591 """
3592 3592
3593 3593 def __init__(self, attrs, internal=None):
3594 3594 self.attrs = attrs
3595 3595 # internal attributes have priority over the ones given via attrs
3596 3596 self.internal = internal or ['versions']
3597 3597
3598 3598 def __getattr__(self, item):
3599 3599 if item in self.internal:
3600 3600 return getattr(self, item)
3601 3601 try:
3602 3602 return self.attrs[item]
3603 3603 except KeyError:
3604 3604 raise AttributeError(
3605 3605 '%s object has no attribute %s' % (self, item))
3606 3606
3607 3607 def __repr__(self):
3608 3608 return '<DB:PullRequestDisplay #%s>' % self.attrs.get('pull_request_id')
3609 3609
3610 3610 def versions(self):
3611 3611 return pull_request_obj.versions.order_by(
3612 3612 PullRequestVersion.pull_request_version_id).all()
3613 3613
3614 3614 def is_closed(self):
3615 3615 return pull_request_obj.is_closed()
3616 3616
3617 3617 @property
3618 3618 def pull_request_version_id(self):
3619 3619 return getattr(pull_request_obj, 'pull_request_version_id', None)
3620 3620
3621 3621 attrs = StrictAttributeDict(pull_request_obj.get_api_data())
3622 3622
3623 3623 attrs.author = StrictAttributeDict(
3624 3624 pull_request_obj.author.get_api_data())
3625 3625 if pull_request_obj.target_repo:
3626 3626 attrs.target_repo = StrictAttributeDict(
3627 3627 pull_request_obj.target_repo.get_api_data())
3628 3628 attrs.target_repo.clone_url = pull_request_obj.target_repo.clone_url
3629 3629
3630 3630 if pull_request_obj.source_repo:
3631 3631 attrs.source_repo = StrictAttributeDict(
3632 3632 pull_request_obj.source_repo.get_api_data())
3633 3633 attrs.source_repo.clone_url = pull_request_obj.source_repo.clone_url
3634 3634
3635 3635 attrs.source_ref_parts = pull_request_obj.source_ref_parts
3636 3636 attrs.target_ref_parts = pull_request_obj.target_ref_parts
3637 3637 attrs.revisions = pull_request_obj.revisions
3638 3638
3639 3639 attrs.shadow_merge_ref = org_pull_request_obj.shadow_merge_ref
3640 3640 attrs.reviewer_data = org_pull_request_obj.reviewer_data
3641 3641 attrs.reviewer_data_json = org_pull_request_obj.reviewer_data_json
3642 3642
3643 3643 return PullRequestDisplay(attrs, internal=internal_methods)
3644 3644
3645 3645 def is_closed(self):
3646 3646 return self.status == self.STATUS_CLOSED
3647 3647
3648 3648 def __json__(self):
3649 3649 return {
3650 3650 'revisions': self.revisions,
3651 3651 }
3652 3652
3653 3653 def calculated_review_status(self):
3654 3654 from rhodecode.model.changeset_status import ChangesetStatusModel
3655 3655 return ChangesetStatusModel().calculated_review_status(self)
3656 3656
3657 3657 def reviewers_statuses(self):
3658 3658 from rhodecode.model.changeset_status import ChangesetStatusModel
3659 3659 return ChangesetStatusModel().reviewers_statuses(self)
3660 3660
3661 3661 @property
3662 3662 def workspace_id(self):
3663 3663 from rhodecode.model.pull_request import PullRequestModel
3664 3664 return PullRequestModel()._workspace_id(self)
3665 3665
3666 3666 def get_shadow_repo(self):
3667 3667 workspace_id = self.workspace_id
3668 3668 vcs_obj = self.target_repo.scm_instance()
3669 3669 shadow_repository_path = vcs_obj._get_shadow_repository_path(
3670 3670 workspace_id)
3671 3671 return vcs_obj._get_shadow_instance(shadow_repository_path)
3672 3672
3673 3673
3674 3674 class PullRequestVersion(Base, _PullRequestBase):
3675 3675 __tablename__ = 'pull_request_versions'
3676 3676 __table_args__ = (
3677 3677 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3678 3678 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3679 3679 )
3680 3680
3681 3681 pull_request_version_id = Column(
3682 3682 'pull_request_version_id', Integer(), nullable=False, primary_key=True)
3683 3683 pull_request_id = Column(
3684 3684 'pull_request_id', Integer(),
3685 3685 ForeignKey('pull_requests.pull_request_id'), nullable=False)
3686 3686 pull_request = relationship('PullRequest')
3687 3687
3688 3688 def __repr__(self):
3689 3689 if self.pull_request_version_id:
3690 3690 return '<DB:PullRequestVersion #%s>' % self.pull_request_version_id
3691 3691 else:
3692 3692 return '<DB:PullRequestVersion at %#x>' % id(self)
3693 3693
3694 3694 @property
3695 3695 def reviewers(self):
3696 3696 return self.pull_request.reviewers
3697 3697
3698 3698 @property
3699 3699 def versions(self):
3700 3700 return self.pull_request.versions
3701 3701
3702 3702 def is_closed(self):
3703 3703 # calculate from original
3704 3704 return self.pull_request.status == self.STATUS_CLOSED
3705 3705
3706 3706 def calculated_review_status(self):
3707 3707 return self.pull_request.calculated_review_status()
3708 3708
3709 3709 def reviewers_statuses(self):
3710 3710 return self.pull_request.reviewers_statuses()
3711 3711
3712 3712
3713 3713 class PullRequestReviewers(Base, BaseModel):
3714 3714 __tablename__ = 'pull_request_reviewers'
3715 3715 __table_args__ = (
3716 3716 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3717 3717 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3718 3718 )
3719 3719
3720 3720 @hybrid_property
3721 3721 def reasons(self):
3722 3722 if not self._reasons:
3723 3723 return []
3724 3724 return self._reasons
3725 3725
3726 3726 @reasons.setter
3727 3727 def reasons(self, val):
3728 3728 val = val or []
3729 if any(not isinstance(x, basestring) for x in val):
3729 if any(not isinstance(x, compat.string_types) for x in val):
3730 3730 raise Exception('invalid reasons type, must be list of strings')
3731 3731 self._reasons = val
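# Minimal sketch of the validation above; compat.string_types accepts both
# str and unicode on Python 2 and plain str on Python 3:
#
#   reviewer.reasons = [u'default reviewer', 'changed file author']  # ok
#   reviewer.reasons = [42]  # raises Exception('invalid reasons type, ...')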
3732 3732
3733 3733 pull_requests_reviewers_id = Column(
3734 3734 'pull_requests_reviewers_id', Integer(), nullable=False,
3735 3735 primary_key=True)
3736 3736 pull_request_id = Column(
3737 3737 "pull_request_id", Integer(),
3738 3738 ForeignKey('pull_requests.pull_request_id'), nullable=False)
3739 3739 user_id = Column(
3740 3740 "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
3741 3741 _reasons = Column(
3742 3742 'reason', MutationList.as_mutable(
3743 3743 JsonType('list', dialect_map=dict(mysql=UnicodeText(16384)))))
3744 3744 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
3745 3745 user = relationship('User')
3746 3746 pull_request = relationship('PullRequest')
3747 3747
3748 3748
3749 3749 class Notification(Base, BaseModel):
3750 3750 __tablename__ = 'notifications'
3751 3751 __table_args__ = (
3752 3752 Index('notification_type_idx', 'type'),
3753 3753 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3754 3754 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3755 3755 )
3756 3756
3757 3757 TYPE_CHANGESET_COMMENT = u'cs_comment'
3758 3758 TYPE_MESSAGE = u'message'
3759 3759 TYPE_MENTION = u'mention'
3760 3760 TYPE_REGISTRATION = u'registration'
3761 3761 TYPE_PULL_REQUEST = u'pull_request'
3762 3762 TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
3763 3763
3764 3764 notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
3765 3765 subject = Column('subject', Unicode(512), nullable=True)
3766 3766 body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
3767 3767 created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
3768 3768 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3769 3769 type_ = Column('type', Unicode(255))
3770 3770
3771 3771 created_by_user = relationship('User')
3772 3772 notifications_to_users = relationship('UserNotification', lazy='joined',
3773 3773 cascade="all, delete, delete-orphan")
3774 3774
3775 3775 @property
3776 3776 def recipients(self):
3777 3777 return [x.user for x in UserNotification.query()\
3778 3778 .filter(UserNotification.notification == self)\
3779 3779 .order_by(UserNotification.user_id.asc()).all()]
3780 3780
3781 3781 @classmethod
3782 3782 def create(cls, created_by, subject, body, recipients, type_=None):
3783 3783 if type_ is None:
3784 3784 type_ = Notification.TYPE_MESSAGE
3785 3785
3786 3786 notification = cls()
3787 3787 notification.created_by_user = created_by
3788 3788 notification.subject = subject
3789 3789 notification.body = body
3790 3790 notification.type_ = type_
3791 3791 notification.created_on = datetime.datetime.now()
3792 3792
3793 3793 for u in recipients:
3794 3794 assoc = UserNotification()
3795 3795 assoc.notification = notification
3796 3796
3797 3797 # if created_by is among the recipients, mark their notification
3798 3798 # as read
3799 3799 if u.user_id == created_by.user_id:
3800 3800 assoc.read = True
3801 3801
3802 3802 u.notifications.append(assoc)
3803 3803 Session().add(notification)
3804 3804
3805 3805 return notification
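# Minimal usage sketch, assuming ``admin`` and ``user`` are User instances:
#
#   Notification.create(
#       created_by=admin, subject=u'hello', body=u'message body',
#       recipients=[user], type_=Notification.TYPE_MESSAGE)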
3806 3806
3807 3807
3808 3808 class UserNotification(Base, BaseModel):
3809 3809 __tablename__ = 'user_to_notification'
3810 3810 __table_args__ = (
3811 3811 UniqueConstraint('user_id', 'notification_id'),
3812 3812 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3813 3813 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3814 3814 )
3815 3815 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
3816 3816 notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
3817 3817 read = Column('read', Boolean, default=False)
3818 3818 sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
3819 3819
3820 3820 user = relationship('User', lazy="joined")
3821 3821 notification = relationship('Notification', lazy="joined",
3822 3822 order_by=lambda: Notification.created_on.desc(),)
3823 3823
3824 3824 def mark_as_read(self):
3825 3825 self.read = True
3826 3826 Session().add(self)
3827 3827
3828 3828
3829 3829 class Gist(Base, BaseModel):
3830 3830 __tablename__ = 'gists'
3831 3831 __table_args__ = (
3832 3832 Index('g_gist_access_id_idx', 'gist_access_id'),
3833 3833 Index('g_created_on_idx', 'created_on'),
3834 3834 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3835 3835 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3836 3836 )
3837 3837 GIST_PUBLIC = u'public'
3838 3838 GIST_PRIVATE = u'private'
3839 3839 DEFAULT_FILENAME = u'gistfile1.txt'
3840 3840
3841 3841 ACL_LEVEL_PUBLIC = u'acl_public'
3842 3842 ACL_LEVEL_PRIVATE = u'acl_private'
3843 3843
3844 3844 gist_id = Column('gist_id', Integer(), primary_key=True)
3845 3845 gist_access_id = Column('gist_access_id', Unicode(250))
3846 3846 gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
3847 3847 gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
3848 3848 gist_expires = Column('gist_expires', Float(53), nullable=False)
3849 3849 gist_type = Column('gist_type', Unicode(128), nullable=False)
3850 3850 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3851 3851 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3852 3852 acl_level = Column('acl_level', Unicode(128), nullable=True)
3853 3853
3854 3854 owner = relationship('User')
3855 3855
3856 3856 def __repr__(self):
3857 3857 return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id)
3858 3858
3859 3859 @hybrid_property
3860 3860 def description_safe(self):
3861 3861 from rhodecode.lib import helpers as h
3862 3862 return h.escape(self.gist_description)
3863 3863
3864 3864 @classmethod
3865 3865 def get_or_404(cls, id_):
3866 3866 from pyramid.httpexceptions import HTTPNotFound
3867 3867
3868 3868 res = cls.query().filter(cls.gist_access_id == id_).scalar()
3869 3869 if not res:
3870 3870 raise HTTPNotFound()
3871 3871 return res
3872 3872
3873 3873 @classmethod
3874 3874 def get_by_access_id(cls, gist_access_id):
3875 3875 return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
3876 3876
3877 3877 def gist_url(self):
3878 3878 from rhodecode.model.gist import GistModel
3879 3879 return GistModel().get_url(self)
3880 3880
3881 3881 @classmethod
3882 3882 def base_path(cls):
3883 3883 """
3884 3884 Returns the base path where all gists are stored
3885 3885
3886 3886 :param cls:
3887 3887 """
3888 3888 from rhodecode.model.gist import GIST_STORE_LOC
3889 3889 q = Session().query(RhodeCodeUi)\
3890 3890 .filter(RhodeCodeUi.ui_key == URL_SEP)
3891 3891 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
3892 3892 return os.path.join(q.one().ui_value, GIST_STORE_LOC)
3893 3893
3894 3894 def get_api_data(self):
3895 3895 """
3896 3896 Common function for generating gist-related data for the API
3897 3897 """
3898 3898 gist = self
3899 3899 data = {
3900 3900 'gist_id': gist.gist_id,
3901 3901 'type': gist.gist_type,
3902 3902 'access_id': gist.gist_access_id,
3903 3903 'description': gist.gist_description,
3904 3904 'url': gist.gist_url(),
3905 3905 'expires': gist.gist_expires,
3906 3906 'created_on': gist.created_on,
3907 3907 'modified_at': gist.modified_at,
3908 3908 'content': None,
3909 3909 'acl_level': gist.acl_level,
3910 3910 }
3911 3911 return data
3912 3912
3913 3913 def __json__(self):
3914 3914 data = dict(
3915 3915 )
3916 3916 data.update(self.get_api_data())
3917 3917 return data
3918 3918 # SCM functions
3919 3919
3920 3920 def scm_instance(self, **kwargs):
3921 3921 full_repo_path = os.path.join(self.base_path(), self.gist_access_id)
3922 3922 return get_vcs_instance(
3923 3923 repo_path=safe_str(full_repo_path), create=False)
3924 3924
3925 3925
3926 3926 class ExternalIdentity(Base, BaseModel):
3927 3927 __tablename__ = 'external_identities'
3928 3928 __table_args__ = (
3929 3929 Index('local_user_id_idx', 'local_user_id'),
3930 3930 Index('external_id_idx', 'external_id'),
3931 3931 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3932 3932 'mysql_charset': 'utf8'})
3933 3933
3934 3934 external_id = Column('external_id', Unicode(255), default=u'',
3935 3935 primary_key=True)
3936 3936 external_username = Column('external_username', Unicode(1024), default=u'')
3937 3937 local_user_id = Column('local_user_id', Integer(),
3938 3938 ForeignKey('users.user_id'), primary_key=True)
3939 3939 provider_name = Column('provider_name', Unicode(255), default=u'',
3940 3940 primary_key=True)
3941 3941 access_token = Column('access_token', String(1024), default=u'')
3942 3942 alt_token = Column('alt_token', String(1024), default=u'')
3943 3943 token_secret = Column('token_secret', String(1024), default=u'')
3944 3944
3945 3945 @classmethod
3946 3946 def by_external_id_and_provider(cls, external_id, provider_name,
3947 3947 local_user_id=None):
3948 3948 """
3949 3949 Returns ExternalIdentity instance based on search params
3950 3950
3951 3951 :param external_id:
3952 3952 :param provider_name:
3953 3953 :return: ExternalIdentity
3954 3954 """
3955 3955 query = cls.query()
3956 3956 query = query.filter(cls.external_id == external_id)
3957 3957 query = query.filter(cls.provider_name == provider_name)
3958 3958 if local_user_id:
3959 3959 query = query.filter(cls.local_user_id == local_user_id)
3960 3960 return query.first()
3961 3961
3962 3962 @classmethod
3963 3963 def user_by_external_id_and_provider(cls, external_id, provider_name):
3964 3964 """
3965 3965 Returns User instance based on search params
3966 3966
3967 3967 :param external_id:
3968 3968 :param provider_name:
3969 3969 :return: User
3970 3970 """
3971 3971 query = User.query()
3972 3972 query = query.filter(cls.external_id == external_id)
3973 3973 query = query.filter(cls.provider_name == provider_name)
3974 3974 query = query.filter(User.user_id == cls.local_user_id)
3975 3975 return query.first()
3976 3976
3977 3977 @classmethod
3978 3978 def by_local_user_id(cls, local_user_id):
3979 3979 """
3980 3980 Returns all tokens for user
3981 3981
3982 3982 :param local_user_id:
3983 3983 :return: ExternalIdentity
3984 3984 """
3985 3985 query = cls.query()
3986 3986 query = query.filter(cls.local_user_id == local_user_id)
3987 3987 return query
3988 3988
3989 3989
3990 3990 class Integration(Base, BaseModel):
3991 3991 __tablename__ = 'integrations'
3992 3992 __table_args__ = (
3993 3993 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3994 3994 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3995 3995 )
3996 3996
3997 3997 integration_id = Column('integration_id', Integer(), primary_key=True)
3998 3998 integration_type = Column('integration_type', String(255))
3999 3999 enabled = Column('enabled', Boolean(), nullable=False)
4000 4000 name = Column('name', String(255), nullable=False)
4001 4001 child_repos_only = Column('child_repos_only', Boolean(), nullable=False,
4002 4002 default=False)
4003 4003
4004 4004 settings = Column(
4005 4005 'settings_json', MutationObj.as_mutable(
4006 4006 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
4007 4007 repo_id = Column(
4008 4008 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
4009 4009 nullable=True, unique=None, default=None)
4010 4010 repo = relationship('Repository', lazy='joined')
4011 4011
4012 4012 repo_group_id = Column(
4013 4013 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
4014 4014 nullable=True, unique=None, default=None)
4015 4015 repo_group = relationship('RepoGroup', lazy='joined')
4016 4016
4017 4017 @property
4018 4018 def scope(self):
4019 4019 if self.repo:
4020 4020 return repr(self.repo)
4021 4021 if self.repo_group:
4022 4022 if self.child_repos_only:
4023 4023 return repr(self.repo_group) + ' (child repos only)'
4024 4024 else:
4025 4025 return repr(self.repo_group) + ' (recursive)'
4026 4026 if self.child_repos_only:
4027 4027 return 'root_repos'
4028 4028 return 'global'
4029 4029
4030 4030 def __repr__(self):
4031 4031 return '<Integration(%r, %r)>' % (self.integration_type, self.scope)
4032 4032
4033 4033
4034 4034 class RepoReviewRuleUser(Base, BaseModel):
4035 4035 __tablename__ = 'repo_review_rules_users'
4036 4036 __table_args__ = (
4037 4037 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4038 4038 'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
4039 4039 )
4040 4040 repo_review_rule_user_id = Column('repo_review_rule_user_id', Integer(), primary_key=True)
4041 4041 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
4042 4042 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False)
4043 4043 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
4044 4044 user = relationship('User')
4045 4045
4046 4046 def rule_data(self):
4047 4047 return {
4048 4048 'mandatory': self.mandatory
4049 4049 }
4050 4050
4051 4051
4052 4052 class RepoReviewRuleUserGroup(Base, BaseModel):
4053 4053 __tablename__ = 'repo_review_rules_users_groups'
4054 4054 __table_args__ = (
4055 4055 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4056 4056 'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
4057 4057 )
4058 4058 repo_review_rule_users_group_id = Column('repo_review_rule_users_group_id', Integer(), primary_key=True)
4059 4059 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
4060 4060 users_group_id = Column("users_group_id", Integer(),ForeignKey('users_groups.users_group_id'), nullable=False)
4061 4061 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
4062 4062 users_group = relationship('UserGroup')
4063 4063
4064 4064 def rule_data(self):
4065 4065 return {
4066 4066 'mandatory': self.mandatory
4067 4067 }
4068 4068
4069 4069
4070 4070 class RepoReviewRule(Base, BaseModel):
4071 4071 __tablename__ = 'repo_review_rules'
4072 4072 __table_args__ = (
4073 4073 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4074 4074 'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
4075 4075 )
4076 4076
4077 4077 repo_review_rule_id = Column(
4078 4078 'repo_review_rule_id', Integer(), primary_key=True)
4079 4079 repo_id = Column(
4080 4080 "repo_id", Integer(), ForeignKey('repositories.repo_id'))
4081 4081 repo = relationship('Repository', backref='review_rules')
4082 4082
4083 4083 _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4084 4084 _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4085 4085
4086 4086 use_authors_for_review = Column("use_authors_for_review", Boolean(), nullable=False, default=False)
4087 4087 forbid_author_to_review = Column("forbid_author_to_review", Boolean(), nullable=False, default=False)
4088 4088 forbid_commit_author_to_review = Column("forbid_commit_author_to_review", Boolean(), nullable=False, default=False)
4089 4089 forbid_adding_reviewers = Column("forbid_adding_reviewers", Boolean(), nullable=False, default=False)
4090 4090
4091 4091 rule_users = relationship('RepoReviewRuleUser')
4092 4092 rule_user_groups = relationship('RepoReviewRuleUserGroup')
4093 4093
4094 4094 @hybrid_property
4095 4095 def branch_pattern(self):
4096 4096 return self._branch_pattern or '*'
4097 4097
4098 4098 def _validate_glob(self, value):
4099 4099 re.compile('^' + glob2re(value) + '$')
4100 4100
4101 4101 @branch_pattern.setter
4102 4102 def branch_pattern(self, value):
4103 4103 self._validate_glob(value)
4104 4104 self._branch_pattern = value or '*'
4105 4105
4106 4106 @hybrid_property
4107 4107 def file_pattern(self):
4108 4108 return self._file_pattern or '*'
4109 4109
4110 4110 @file_pattern.setter
4111 4111 def file_pattern(self, value):
4112 4112 self._validate_glob(value)
4113 4113 self._file_pattern = value or '*'
4114 4114
4115 4115 def matches(self, branch, files_changed):
4116 4116 """
4117 4117 Check if this review rule matches a branch/files in a pull request
4118 4118
4119 4119 :param branch: branch name for the commit
4120 4120 :param files_changed: list of file paths changed in the pull request
4121 4121 """
4122 4122
4123 4123 branch = branch or ''
4124 4124 files_changed = files_changed or []
4125 4125
4126 4126 branch_matches = True
4127 4127 if branch:
4128 4128 branch_regex = re.compile('^' + glob2re(self.branch_pattern) + '$')
4129 4129 branch_matches = bool(branch_regex.search(branch))
4130 4130
4131 4131 files_matches = True
4132 4132 if self.file_pattern != '*':
4133 4133 files_matches = False
4134 4134 file_regex = re.compile(glob2re(self.file_pattern))
4135 4135 for filename in files_changed:
4136 4136 if file_regex.search(filename):
4137 4137 files_matches = True
4138 4138 break
4139 4139
4140 4140 return branch_matches and files_matches
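# Minimal sketch, assuming glob2re gives the usual fnmatch-style semantics,
# for a rule with branch_pattern='release/*' and file_pattern='*.rst':
#
#   rule.matches('release/1.0', ['README.rst'])  # -> True
#   rule.matches('default', ['README.rst'])      # -> False (branch mismatch)
#   rule.matches('release/1.0', ['setup.py'])    # -> False (no file matches)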
4141 4141
4142 4142 @property
4143 4143 def review_users(self):
4144 4144 """ Returns the users which this rule applies to """
4145 4145
4146 4146 users = collections.OrderedDict()
4147 4147
4148 4148 for rule_user in self.rule_users:
4149 4149 if rule_user.user.active:
4150 4150 if rule_user.user.username not in users:
4151 4151 users[rule_user.user.username] = {
4152 4152 'user': rule_user.user,
4153 4153 'source': 'user',
4154 4154 'source_data': {},
4155 4155 'data': rule_user.rule_data()
4156 4156 }
4157 4157
4158 4158 for rule_user_group in self.rule_user_groups:
4159 4159 source_data = {
4160 4160 'name': rule_user_group.users_group.users_group_name,
4161 4161 'members': len(rule_user_group.users_group.members)
4162 4162 }
4163 4163 for member in rule_user_group.users_group.members:
4164 4164 if member.user.active:
4165 4165 users[member.user.username] = {
4166 4166 'user': member.user,
4167 4167 'source': 'user_group',
4168 4168 'source_data': source_data,
4169 4169 'data': rule_user_group.rule_data()
4170 4170 }
4171 4171
4172 4172 return users
4173 4173
4174 4174 def __repr__(self):
4175 4175 return '<RepoReviewRule(id=%r, repo=%r)>' % (
4176 4176 self.repo_review_rule_id, self.repo)
4177 4177
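A hedged usage sketch, not part of the diff above: assuming a repository object `repo` with review rules attached (via the `review_rules` backref defined on `RepoReviewRule.repo`) and hypothetical `pr_branch`/`pr_files` values taken from a pull request, `matches()` and `review_users` could be combined roughly like this to collect mandatory reviewers. The variable names are illustrative only.

    # Sketch only: resolve mandatory reviewers for a hypothetical pull request.
    pr_branch = 'feature/login'              # hypothetical branch name
    pr_files = ['rhodecode/model/db.py']     # hypothetical changed files

    mandatory_reviewers = {}
    for rule in repo.review_rules:           # backref on RepoReviewRule.repo
        if rule.matches(pr_branch, pr_files):
            for username, entry in rule.review_users.items():
                # entry['data'] comes from rule_data() and carries 'mandatory'
                if entry['data']['mandatory']:
                    mandatory_reviewers[username] = entry['user']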
4178 4178
4179 4179 class ScheduleEntry(Base, BaseModel):
4180 4180 __tablename__ = 'schedule_entries'
4181 4181 __table_args__ = (
4182 4182 UniqueConstraint('schedule_name', name='s_schedule_name_idx'),
4183 4183 UniqueConstraint('task_uid', name='s_task_uid_idx'),
4184 4184 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4185 4185 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
4186 4186 )
4187 4187 schedule_types = ['crontab', 'timedelta', 'integer']
4188 4188 schedule_entry_id = Column('schedule_entry_id', Integer(), primary_key=True)
4189 4189
4190 4190 schedule_name = Column("schedule_name", String(255), nullable=False, unique=None, default=None)
4191 4191 schedule_description = Column("schedule_description", String(10000), nullable=True, unique=None, default=None)
4192 4192 schedule_enabled = Column("schedule_enabled", Boolean(), nullable=False, unique=None, default=True)
4193 4193
4194 4194 _schedule_type = Column("schedule_type", String(255), nullable=False, unique=None, default=None)
4195 4195 schedule_definition = Column('schedule_definition_json', MutationObj.as_mutable(JsonType(default=lambda: "", dialect_map=dict(mysql=LONGTEXT()))))
4196 4196
4197 4197 schedule_last_run = Column('schedule_last_run', DateTime(timezone=False), nullable=True, unique=None, default=None)
4198 4198 schedule_total_run_count = Column('schedule_total_run_count', Integer(), nullable=True, unique=None, default=0)
4199 4199
4200 4200 # task
4201 4201 task_uid = Column("task_uid", String(255), nullable=False, unique=None, default=None)
4202 4202 task_dot_notation = Column("task_dot_notation", String(4096), nullable=False, unique=None, default=None)
4203 4203 task_args = Column('task_args_json', MutationObj.as_mutable(JsonType(default=list, dialect_map=dict(mysql=LONGTEXT()))))
4204 4204 task_kwargs = Column('task_kwargs_json', MutationObj.as_mutable(JsonType(default=dict, dialect_map=dict(mysql=LONGTEXT()))))
4205 4205
4206 4206 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4207 4207 updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=None)
4208 4208
4209 4209 @hybrid_property
4210 4210 def schedule_type(self):
4211 4211 return self._schedule_type
4212 4212
4213 4213 @schedule_type.setter
4214 4214 def schedule_type(self, val):
4215 4215 if val not in self.schedule_types:
4216 4216 raise ValueError('Value must be one of `{}` and got `{}`'.format(
4217 4217 self.schedule_types, val))
4218 4218
4219 4219 self._schedule_type = val
4220 4220
4221 4221 @classmethod
4222 4222 def get_uid(cls, obj):
4223 4223 args = obj.task_args
4224 4224 kwargs = obj.task_kwargs
4225 4225 if isinstance(args, JsonRaw):
4226 4226 try:
4227 4227 args = json.loads(args)
4228 4228 except ValueError:
4229 4229 args = tuple()
4230 4230
4231 4231 if isinstance(kwargs, JsonRaw):
4232 4232 try:
4233 4233 kwargs = json.loads(kwargs)
4234 4234 except ValueError:
4235 4235 kwargs = dict()
4236 4236
4237 4237 dot_notation = obj.task_dot_notation
4238 4238 val = '.'.join(map(safe_str, [
4239 4239 sorted(dot_notation), args, sorted(kwargs.items())]))
4240 4240 return hashlib.sha1(val).hexdigest()
4241 4241
4242 4242 @classmethod
4243 4243 def get_by_schedule_name(cls, schedule_name):
4244 4244 return cls.query().filter(cls.schedule_name == schedule_name).scalar()
4245 4245
4246 4246 @classmethod
4247 4247 def get_by_schedule_id(cls, schedule_id):
4248 4248 return cls.query().filter(cls.schedule_entry_id == schedule_id).scalar()
4249 4249
4250 4250 @property
4251 4251 def task(self):
4252 4252 return self.task_dot_notation
4253 4253
4254 4254 @property
4255 4255 def schedule(self):
4256 4256 from rhodecode.lib.celerylib.utils import raw_2_schedule
4257 4257 schedule = raw_2_schedule(self.schedule_definition, self.schedule_type)
4258 4258 return schedule
4259 4259
4260 4260 @property
4261 4261 def args(self):
4262 4262 try:
4263 4263 return list(self.task_args or [])
4264 4264 except ValueError:
4265 4265 return list()
4266 4266
4267 4267 @property
4268 4268 def kwargs(self):
4269 4269 try:
4270 4270 return dict(self.task_kwargs or {})
4271 4271 except ValueError:
4272 4272 return dict()
4273 4273
4274 4274 def _as_raw(self, val):
4275 4275 if hasattr(val, 'de_coerce'):
4276 4276 val = val.de_coerce()
4277 4277 if val:
4278 4278 val = json.dumps(val)
4279 4279
4280 4280 return val
4281 4281
4282 4282 @property
4283 4283 def schedule_definition_raw(self):
4284 4284 return self._as_raw(self.schedule_definition)
4285 4285
4286 4286 @property
4287 4287 def args_raw(self):
4288 4288 return self._as_raw(self.task_args)
4289 4289
4290 4290 @property
4291 4291 def kwargs_raw(self):
4292 4292 return self._as_raw(self.task_kwargs)
4293 4293
4294 4294 def __repr__(self):
4295 4295 return '<DB:ScheduleEntry({}:{})>'.format(
4296 4296 self.schedule_entry_id, self.schedule_name)
4297 4297
4298 4298
4299 4299 @event.listens_for(ScheduleEntry, 'before_update')
4300 4300 def update_task_uid(mapper, connection, target):
4301 4301 target.task_uid = ScheduleEntry.get_uid(target)
4302 4302
4303 4303
4304 4304 @event.listens_for(ScheduleEntry, 'before_insert')
4305 4305 def set_task_uid(mapper, connection, target):
4306 4306 target.task_uid = ScheduleEntry.get_uid(target)
4307 4307
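A hedged sketch of how these listeners are exercised (names below are illustrative, not part of this diff): a new ScheduleEntry only needs its schedule and task fields set, since `task_uid` is derived by the `before_insert`/`before_update` hooks via `ScheduleEntry.get_uid()` when the session flushes.

    # Sketch only: the task dotted path and the crontab definition shape are assumptions.
    entry = ScheduleEntry()
    entry.schedule_name = 'nightly-maintenance'      # hypothetical schedule name
    entry.schedule_type = 'crontab'                  # validated by the setter above
    entry.schedule_definition = {'minute': '0', 'hour': '3'}
    entry.task_dot_notation = 'rhodecode.lib.celerylib.tasks.sample_task'  # hypothetical
    entry.task_args = []
    entry.task_kwargs = {}

    Session().add(entry)
    Session().commit()   # before_insert fires on flush and fills in entry.task_uid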
4308 4308
4309 4309 class DbMigrateVersion(Base, BaseModel):
4310 4310 __tablename__ = 'db_migrate_version'
4311 4311 __table_args__ = (
4312 4312 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4313 4313 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
4314 4314 )
4315 4315 repository_id = Column('repository_id', String(250), primary_key=True)
4316 4316 repository_path = Column('repository_path', Text)
4317 4317 version = Column('version', Integer)
4318 4318
4319 4319
4320 4320 class DbSession(Base, BaseModel):
4321 4321 __tablename__ = 'db_session'
4322 4322 __table_args__ = (
4323 4323 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4324 4324 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
4325 4325 )
4326 4326
4327 4327 def __repr__(self):
4328 4328 return '<DB:DbSession({})>'.format(self.id)
4329 4329
4330 4330 id = Column('id', Integer())
4331 4331 namespace = Column('namespace', String(255), primary_key=True)
4332 4332 accessed = Column('accessed', DateTime, nullable=False)
4333 4333 created = Column('created', DateTime, nullable=False)
4334 4334 data = Column('data', PickleType, nullable=False)
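`DbSession` simply maps the database-backed session table. A hedged housekeeping sketch, not something this diff introduces: stale rows could be pruned by the `accessed` timestamp; the 30-day cutoff and the bulk delete are illustrative choices only.

    # Sketch only: drop sessions idle for more than 30 days.
    import datetime

    cutoff = datetime.datetime.now() - datetime.timedelta(days=30)
    DbSession.query().filter(DbSession.accessed < cutoff).delete(synchronize_session=False)
    Session().commit()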