py3: remove use of pyramid.compat
super-admin
r4908:04e2d7da default

The requested changes are too big and content was truncated.
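Before the diff itself, a note on what the change amounts to: pyramid.compat.string_types was a Python 2/3 shim, roughly (basestring,) on Python 2 and (str,) on Python 3, so in a Python 3 only codebase every isinstance check against it can simply use the built-in str. A minimal sketch of the pattern this commit applies (the helper name below is illustrative, not part of the codebase):

    def is_textual(value):
        # old: isinstance(value, compat.string_types)  -- needed the pyramid shim
        # new: plain built-in check, Python 3 only
        return isinstance(value, str)

    assert is_textual("branch-name")
    assert not is_textual(42)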

@@ -1,421 +1,419 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22 import itertools
23 23 import base64
24 24
25 from pyramid import compat
26
27 25 from rhodecode.api import (
28 26 jsonrpc_method, JSONRPCError, JSONRPCForbidden, find_methods)
29 27
30 28 from rhodecode.api.utils import (
31 29 Optional, OAttr, has_superadmin_permission, get_user_or_error)
32 30 from rhodecode.lib.utils import repo2db_mapper
33 31 from rhodecode.lib import system_info
34 32 from rhodecode.lib import user_sessions
35 33 from rhodecode.lib import exc_tracking
36 34 from rhodecode.lib.ext_json import json
37 35 from rhodecode.lib.utils2 import safe_int
38 36 from rhodecode.model.db import UserIpMap
39 37 from rhodecode.model.scm import ScmModel
40 38 from rhodecode.model.settings import VcsSettingsModel
41 39 from rhodecode.apps.file_store import utils
42 40 from rhodecode.apps.file_store.exceptions import FileNotAllowedException, \
43 41 FileOverSizeException
44 42
45 43 log = logging.getLogger(__name__)
46 44
47 45
48 46 @jsonrpc_method()
49 47 def get_server_info(request, apiuser):
50 48 """
51 49 Returns the |RCE| server information.
52 50
53 51 This includes the running version of |RCE| and all installed
54 52 packages. This command takes the following options:
55 53
56 54 :param apiuser: This is filled automatically from the |authtoken|.
57 55 :type apiuser: AuthUser
58 56
59 57 Example output:
60 58
61 59 .. code-block:: bash
62 60
63 61 id : <id_given_in_input>
64 62 result : {
65 63 'modules': [<module name>,...]
66 64 'py_version': <python version>,
67 65 'platform': <platform type>,
68 66 'rhodecode_version': <rhodecode version>
69 67 }
70 68 error : null
71 69 """
72 70
73 71 if not has_superadmin_permission(apiuser):
74 72 raise JSONRPCForbidden()
75 73
76 74 server_info = ScmModel().get_server_info(request.environ)
77 75 # rhodecode-index requires those
78 76
79 77 server_info['index_storage'] = server_info['search']['value']['location']
80 78 server_info['storage'] = server_info['storage']['value']['path']
81 79
82 80 return server_info
83 81
84 82
85 83 @jsonrpc_method()
86 84 def get_repo_store(request, apiuser):
87 85 """
88 86 Returns the |RCE| repository storage information.
89 87
90 88 :param apiuser: This is filled automatically from the |authtoken|.
91 89 :type apiuser: AuthUser
92 90
93 91 Example output:
94 92
95 93 .. code-block:: bash
96 94
97 95 id : <id_given_in_input>
98 96 result : {
99 97                 'path': '<path_to_repository_storage>'
103 101 }
104 102 error : null
105 103 """
106 104
107 105 if not has_superadmin_permission(apiuser):
108 106 raise JSONRPCForbidden()
109 107
110 108 path = VcsSettingsModel().get_repos_location()
111 109 return {"path": path}
112 110
113 111
114 112 @jsonrpc_method()
115 113 def get_ip(request, apiuser, userid=Optional(OAttr('apiuser'))):
116 114 """
117 115 Displays the IP Address as seen from the |RCE| server.
118 116
119 117 * This command displays the IP Address, as well as all the defined IP
120 118 addresses for the specified user. If the ``userid`` is not set, the
121 119 data returned is for the user calling the method.
122 120
123 121 This command can only be run using an |authtoken| with admin rights to
124 122 the specified repository.
125 123
126 124 This command takes the following options:
127 125
128 126 :param apiuser: This is filled automatically from |authtoken|.
129 127 :type apiuser: AuthUser
130 128 :param userid: Sets the userid for which associated IP Address data
131 129 is returned.
132 130 :type userid: Optional(str or int)
133 131
134 132 Example output:
135 133
136 134 .. code-block:: bash
137 135
138 136 id : <id_given_in_input>
139 137 result : {
140 138             "server_ip_addr": "<ip_from_client>",
141 139 "user_ips": [
142 140 {
143 141 "ip_addr": "<ip_with_mask>",
144 142 "ip_range": ["<start_ip>", "<end_ip>"],
145 143 },
146 144 ...
147 145 ]
148 146 }
149 147
150 148 """
151 149 if not has_superadmin_permission(apiuser):
152 150 raise JSONRPCForbidden()
153 151
154 152 userid = Optional.extract(userid, evaluate_locals=locals())
155 153 userid = getattr(userid, 'user_id', userid)
156 154
157 155 user = get_user_or_error(userid)
158 156 ips = UserIpMap.query().filter(UserIpMap.user == user).all()
159 157 return {
160 158 'server_ip_addr': request.rpc_ip_addr,
161 159 'user_ips': ips
162 160 }
163 161
164 162
165 163 @jsonrpc_method()
166 164 def rescan_repos(request, apiuser, remove_obsolete=Optional(False)):
167 165 """
168 166 Triggers a rescan of the specified repositories.
169 167
170 168 * If the ``remove_obsolete`` option is set, it also deletes repositories
171 169       that are found in the database but not on the file system, so-called
172 170 "clean zombies".
173 171
174 172 This command can only be run using an |authtoken| with admin rights to
175 173 the specified repository.
176 174
177 175 This command takes the following options:
178 176
179 177 :param apiuser: This is filled automatically from the |authtoken|.
180 178 :type apiuser: AuthUser
181 179 :param remove_obsolete: Deletes repositories from the database that
182 180 are not found on the filesystem.
183 181 :type remove_obsolete: Optional(``True`` | ``False``)
184 182
185 183 Example output:
186 184
187 185 .. code-block:: bash
188 186
189 187 id : <id_given_in_input>
190 188 result : {
191 189 'added': [<added repository name>,...]
192 190 'removed': [<removed repository name>,...]
193 191 }
194 192 error : null
195 193
196 194 Example error output:
197 195
198 196 .. code-block:: bash
199 197
200 198 id : <id_given_in_input>
201 199 result : null
202 200 error : {
203 201 'Error occurred during rescan repositories action'
204 202 }
205 203
206 204 """
207 205 if not has_superadmin_permission(apiuser):
208 206 raise JSONRPCForbidden()
209 207
210 208 try:
211 209 rm_obsolete = Optional.extract(remove_obsolete)
212 210 added, removed = repo2db_mapper(ScmModel().repo_scan(),
213 211 remove_obsolete=rm_obsolete)
214 212 return {'added': added, 'removed': removed}
215 213 except Exception:
216 214         log.exception('Failed to run repo rescan')
217 215 raise JSONRPCError(
218 216 'Error occurred during rescan repositories action'
219 217 )
220 218
221 219
222 220 @jsonrpc_method()
223 221 def cleanup_sessions(request, apiuser, older_then=Optional(60)):
224 222 """
225 223 Triggers a session cleanup action.
226 224
227 225     If the ``older_then`` option is set, only sessions that haven't been
228 226     accessed in the given number of days will be removed.
229 227
230 228 This command can only be run using an |authtoken| with admin rights to
231 229 the specified repository.
232 230
233 231 This command takes the following options:
234 232
235 233 :param apiuser: This is filled automatically from the |authtoken|.
236 234 :type apiuser: AuthUser
237 235     :param older_then: Deletes sessions that haven't been accessed
238 236         in the given number of days.
239 237 :type older_then: Optional(int)
240 238
241 239 Example output:
242 240
243 241 .. code-block:: bash
244 242
245 243 id : <id_given_in_input>
246 244 result: {
247 245 "backend": "<type of backend>",
248 246 "sessions_removed": <number_of_removed_sessions>
249 247 }
250 248 error : null
251 249
252 250 Example error output:
253 251
254 252 .. code-block:: bash
255 253
256 254 id : <id_given_in_input>
257 255 result : null
258 256 error : {
259 257 'Error occurred during session cleanup'
260 258 }
261 259
262 260 """
263 261 if not has_superadmin_permission(apiuser):
264 262 raise JSONRPCForbidden()
265 263
266 264 older_then = safe_int(Optional.extract(older_then)) or 60
267 265 older_than_seconds = 60 * 60 * 24 * older_then
268 266
269 267 config = system_info.rhodecode_config().get_value()['value']['config']
270 268 session_model = user_sessions.get_session_handler(
271 269 config.get('beaker.session.type', 'memory'))(config)
272 270
273 271 backend = session_model.SESSION_TYPE
274 272 try:
275 273 cleaned = session_model.clean_sessions(
276 274 older_than_seconds=older_than_seconds)
277 275 return {'sessions_removed': cleaned, 'backend': backend}
278 276 except user_sessions.CleanupCommand as msg:
279 277 return {'cleanup_command': msg.message, 'backend': backend}
280 278 except Exception as e:
281 279 log.exception('Failed session cleanup')
282 280 raise JSONRPCError(
283 281 'Error occurred during session cleanup'
284 282 )
285 283
286 284
287 285 @jsonrpc_method()
288 286 def get_method(request, apiuser, pattern=Optional('*')):
289 287 """
290 288     Returns a list of all available API methods. By default the match pattern
291 289     is "*", but any other pattern can be specified, e.g. *comment* will return
292 290     all methods with "comment" in their name. If a single method is matched,
293 291     the returned data will also include the method specification.
294 292
295 293 This command can only be run using an |authtoken| with admin rights to
296 294 the specified repository.
297 295
298 296 This command takes the following options:
299 297
300 298 :param apiuser: This is filled automatically from the |authtoken|.
301 299 :type apiuser: AuthUser
302 300 :param pattern: pattern to match method names against
303 301 :type pattern: Optional("*")
304 302
305 303 Example output:
306 304
307 305 .. code-block:: bash
308 306
309 307 id : <id_given_in_input>
310 308 "result": [
311 309 "changeset_comment",
312 310 "comment_pull_request",
313 311 "comment_commit"
314 312 ]
315 313 error : null
316 314
317 315 .. code-block:: bash
318 316
319 317 id : <id_given_in_input>
320 318 "result": [
321 319 "comment_commit",
322 320 {
323 321 "apiuser": "<RequiredType>",
324 322 "comment_type": "<Optional:u'note'>",
325 323 "commit_id": "<RequiredType>",
326 324 "message": "<RequiredType>",
327 325 "repoid": "<RequiredType>",
328 326 "request": "<RequiredType>",
329 327 "resolves_comment_id": "<Optional:None>",
330 328 "status": "<Optional:None>",
331 329 "userid": "<Optional:<OptionalAttr:apiuser>>"
332 330 }
333 331 ]
334 332 error : null
335 333 """
336 334 from rhodecode.config.patches import inspect_getargspec
337 335 inspect = inspect_getargspec()
338 336
339 337 if not has_superadmin_permission(apiuser):
340 338 raise JSONRPCForbidden()
341 339
342 340 pattern = Optional.extract(pattern)
343 341
344 342 matches = find_methods(request.registry.jsonrpc_methods, pattern)
345 343
346 344 args_desc = []
347 345 if len(matches) == 1:
348 346 func = matches[matches.keys()[0]]
349 347
350 348 argspec = inspect.getargspec(func)
351 349 arglist = argspec[0]
352 350 defaults = map(repr, argspec[3] or [])
353 351
354 352 default_empty = '<RequiredType>'
355 353
356 354 # kw arguments required by this method
357 355 func_kwargs = dict(itertools.izip_longest(
358 356 reversed(arglist), reversed(defaults), fillvalue=default_empty))
359 357 args_desc.append(func_kwargs)
360 358
361 359 return matches.keys() + args_desc
362 360
363 361
364 362 @jsonrpc_method()
365 363 def store_exception(request, apiuser, exc_data_json, prefix=Optional('rhodecode')):
366 364 """
367 365     Stores the sent exception inside the built-in exception tracker of the |RCE| server.
368 366
369 367 This command can only be run using an |authtoken| with admin rights to
370 368 the specified repository.
371 369
372 370 This command takes the following options:
373 371
374 372 :param apiuser: This is filled automatically from the |authtoken|.
375 373 :type apiuser: AuthUser
376 374
377 375     :param exc_data_json: JSON data with the exception, e.g.
378 376 {"exc_traceback": "Value `1` is not allowed", "exc_type_name": "ValueError"}
379 377 :type exc_data_json: JSON data
380 378
381 379     :param prefix: prefix for error type, e.g. 'rhodecode', 'vcsserver', 'rhodecode-tools'
382 380 :type prefix: Optional("rhodecode")
383 381
384 382 Example output:
385 383
386 384 .. code-block:: bash
387 385
388 386 id : <id_given_in_input>
389 387 "result": {
390 388 "exc_id": 139718459226384,
391 389 "exc_url": "http://localhost:8080/_admin/settings/exceptions/139718459226384"
392 390 }
393 391 error : null
394 392 """
395 393 if not has_superadmin_permission(apiuser):
396 394 raise JSONRPCForbidden()
397 395
398 396 prefix = Optional.extract(prefix)
399 397 exc_id = exc_tracking.generate_id()
400 398
401 399 try:
402 400 exc_data = json.loads(exc_data_json)
403 401 except Exception:
404 402 log.error('Failed to parse JSON: %r', exc_data_json)
405 403 raise JSONRPCError('Failed to parse JSON data from exc_data_json field. '
406 404                            'Please make sure it contains valid JSON.')
407 405
408 406 try:
409 407 exc_traceback = exc_data['exc_traceback']
410 408 exc_type_name = exc_data['exc_type_name']
411 409 except KeyError as err:
412 410 raise JSONRPCError('Missing exc_traceback, or exc_type_name '
413 411 'in exc_data_json field. Missing: {}'.format(err))
414 412
415 413 exc_tracking._store_exception(
416 414 exc_id=exc_id, exc_traceback=exc_traceback,
417 415 exc_type_name=exc_type_name, prefix=prefix)
418 416
419 417 exc_url = request.route_url(
420 418 'admin_settings_exception_tracker_show', exception_id=exc_id)
421 419 return {'exc_id': exc_id, 'exc_url': exc_url}
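The methods in this hunk are exposed over the |RCE| JSON-RPC API. A minimal sketch of invoking one of them, assuming the standard /_admin/api endpoint and a super-admin auth token (the host, token, and use of the requests library are illustrative assumptions, not part of this change):

    import requests

    HOST = "https://rhodecode.example.com"    # hypothetical server
    TOKEN = "<superadmin-auth-token>"         # placeholder token

    payload = {
        "id": 1,
        "auth_token": TOKEN,
        "method": "get_server_info",
        "args": {},
    }

    # POST the JSON-RPC envelope; the "result" key mirrors the docstring examples above
    response = requests.post(HOST + "/_admin/api", json=payload)
    print(response.json()["result"])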
@@ -1,573 +1,572 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 from pyramid import compat
23 22
24 23 from rhodecode.api import (
25 24 jsonrpc_method, JSONRPCError, JSONRPCForbidden, JSONRPCValidationError)
26 25 from rhodecode.api.utils import (
27 26 Optional, OAttr, has_superadmin_permission, get_user_or_error, store_update)
28 27 from rhodecode.lib import audit_logger
29 28 from rhodecode.lib.auth import AuthUser, PasswordGenerator
30 29 from rhodecode.lib.exceptions import DefaultUserException
31 30 from rhodecode.lib.utils2 import safe_int, str2bool
32 31 from rhodecode.model.db import Session, User, Repository
33 32 from rhodecode.model.user import UserModel
34 33 from rhodecode.model import validation_schema
35 34 from rhodecode.model.validation_schema.schemas import user_schema
36 35
37 36 log = logging.getLogger(__name__)
38 37
39 38
40 39 @jsonrpc_method()
41 40 def get_user(request, apiuser, userid=Optional(OAttr('apiuser'))):
42 41 """
43 42 Returns the information associated with a username or userid.
44 43
45 44 * If the ``userid`` is not set, this command returns the information
46 45 for the ``userid`` calling the method.
47 46
48 47 .. note::
49 48
50 49 Normal users may only run this command against their ``userid``. For
51 50 full privileges you must run this command using an |authtoken| with
52 51 admin rights.
53 52
54 53 :param apiuser: This is filled automatically from the |authtoken|.
55 54 :type apiuser: AuthUser
56 55 :param userid: Sets the userid for which data will be returned.
57 56 :type userid: Optional(str or int)
58 57
59 58 Example output:
60 59
61 60 .. code-block:: bash
62 61
63 62 {
64 63 "error": null,
65 64 "id": <id>,
66 65 "result": {
67 66 "active": true,
68 67 "admin": false,
69 68 "api_keys": [ list of keys ],
70 69 "auth_tokens": [ list of tokens with details ],
71 70 "email": "user@example.com",
72 71 "emails": [
73 72 "user@example.com"
74 73 ],
75 74 "extern_name": "rhodecode",
76 75 "extern_type": "rhodecode",
77 76 "firstname": "username",
78 77 "description": "user description",
79 78 "ip_addresses": [],
80 79 "language": null,
81 80 "last_login": "Timestamp",
82 81 "last_activity": "Timestamp",
83 82             "lastname": "surname",
84 83 "permissions": <deprecated>,
85 84 "permissions_summary": {
86 85 "global": [
87 86 "hg.inherit_default_perms.true",
88 87 "usergroup.read",
89 88 "hg.repogroup.create.false",
90 89 "hg.create.none",
91 90 "hg.password_reset.enabled",
92 91 "hg.extern_activate.manual",
93 92 "hg.create.write_on_repogroup.false",
94 93 "hg.usergroup.create.false",
95 94 "group.none",
96 95 "repository.none",
97 96 "hg.register.none",
98 97 "hg.fork.repository"
99 98 ],
100 99 "repositories": { "username/example": "repository.write"},
101 100 "repositories_groups": { "user-group/repo": "group.none" },
102 101 "user_groups": { "user_group_name": "usergroup.read" }
103 102 }
104 103 "user_id": 32,
105 104 "username": "username"
106 105 }
107 106 }
108 107 """
109 108
110 109 if not has_superadmin_permission(apiuser):
111 110         # make sure a normal user does not pass someone else's userid,
112 111         # they are not allowed to do that
113 112 if not isinstance(userid, Optional) and userid != apiuser.user_id:
114 113 raise JSONRPCError('userid is not the same as your user')
115 114
116 115 userid = Optional.extract(userid, evaluate_locals=locals())
117 116 userid = getattr(userid, 'user_id', userid)
118 117
119 118 user = get_user_or_error(userid)
120 119 data = user.get_api_data(include_secrets=True)
121 120 permissions = AuthUser(user_id=user.user_id).permissions
122 121 data['permissions'] = permissions # TODO(marcink): should be deprecated
123 122 data['permissions_summary'] = permissions
124 123 return data
125 124
126 125
127 126 @jsonrpc_method()
128 127 def get_users(request, apiuser):
129 128 """
130 129 Lists all users in the |RCE| user database.
131 130
132 131 This command can only be run using an |authtoken| with admin rights to
133 132 the specified repository.
134 133
135 134 This command takes the following options:
136 135
137 136 :param apiuser: This is filled automatically from the |authtoken|.
138 137 :type apiuser: AuthUser
139 138
140 139 Example output:
141 140
142 141 .. code-block:: bash
143 142
144 143 id : <id_given_in_input>
145 144 result: [<user_object>, ...]
146 145 error: null
147 146 """
148 147
149 148 if not has_superadmin_permission(apiuser):
150 149 raise JSONRPCForbidden()
151 150
152 151 result = []
153 152 users_list = User.query().order_by(User.username) \
154 153 .filter(User.username != User.DEFAULT_USER) \
155 154 .all()
156 155 for user in users_list:
157 156 result.append(user.get_api_data(include_secrets=True))
158 157 return result
159 158
160 159
161 160 @jsonrpc_method()
162 161 def create_user(request, apiuser, username, email, password=Optional(''),
163 162 firstname=Optional(''), lastname=Optional(''), description=Optional(''),
164 163 active=Optional(True), admin=Optional(False),
165 164 extern_name=Optional('rhodecode'),
166 165 extern_type=Optional('rhodecode'),
167 166 force_password_change=Optional(False),
168 167 create_personal_repo_group=Optional(None)):
169 168 """
170 169 Creates a new user and returns the new user object.
171 170
172 171 This command can only be run using an |authtoken| with admin rights to
173 172 the specified repository.
174 173
175 174 This command takes the following options:
176 175
177 176 :param apiuser: This is filled automatically from the |authtoken|.
178 177 :type apiuser: AuthUser
179 178 :param username: Set the new username.
180 179 :type username: str or int
181 180 :param email: Set the user email address.
182 181 :type email: str
183 182 :param password: Set the new user password.
184 183 :type password: Optional(str)
185 184 :param firstname: Set the new user firstname.
186 185 :type firstname: Optional(str)
187 186 :param lastname: Set the new user surname.
188 187 :type lastname: Optional(str)
189 188 :param description: Set user description, or short bio. Metatags are allowed.
190 189 :type description: Optional(str)
191 190 :param active: Set the user as active.
192 191 :type active: Optional(``True`` | ``False``)
193 192 :param admin: Give the new user admin rights.
194 193 :type admin: Optional(``True`` | ``False``)
195 194 :param extern_name: Set the authentication plugin name.
196 195 Using LDAP this is filled with LDAP UID.
197 196 :type extern_name: Optional(str)
198 197 :param extern_type: Set the new user authentication plugin.
199 198 :type extern_type: Optional(str)
200 199 :param force_password_change: Force the new user to change password
201 200 on next login.
202 201 :type force_password_change: Optional(``True`` | ``False``)
203 202 :param create_personal_repo_group: Create personal repo group for this user
204 203 :type create_personal_repo_group: Optional(``True`` | ``False``)
205 204
206 205 Example output:
207 206
208 207 .. code-block:: bash
209 208
210 209 id : <id_given_in_input>
211 210 result: {
212 211 "msg" : "created new user `<username>`",
213 212 "user": <user_obj>
214 213 }
215 214 error: null
216 215
217 216 Example error output:
218 217
219 218 .. code-block:: bash
220 219
221 220 id : <id_given_in_input>
222 221 result : null
223 222 error : {
224 223           "user `<username>` already exists"
225 224           or
226 225           "email `<email>` already exists"
227 226 or
228 227 "failed to create user `<username>`"
229 228 }
230 229
231 230 """
232 231 if not has_superadmin_permission(apiuser):
233 232 raise JSONRPCForbidden()
234 233
235 234 if UserModel().get_by_username(username):
236 235         raise JSONRPCError("user `%s` already exists" % (username,))
237 236
238 237 if UserModel().get_by_email(email, case_insensitive=True):
239 238         raise JSONRPCError("email `%s` already exists" % (email,))
240 239
241 240     # generate a random password if we are actually given the
242 241     # extern_name and it's not rhodecode
243 242 if (not isinstance(extern_name, Optional) and
244 243 Optional.extract(extern_name) != 'rhodecode'):
245 244 # generate temporary password if user is external
246 245 password = PasswordGenerator().gen_password(length=16)
247 246 create_repo_group = Optional.extract(create_personal_repo_group)
248 if isinstance(create_repo_group, compat.string_types):
247 if isinstance(create_repo_group, str):
249 248 create_repo_group = str2bool(create_repo_group)
250 249
251 250 username = Optional.extract(username)
252 251 password = Optional.extract(password)
253 252 email = Optional.extract(email)
254 253 first_name = Optional.extract(firstname)
255 254 last_name = Optional.extract(lastname)
256 255 description = Optional.extract(description)
257 256 active = Optional.extract(active)
258 257 admin = Optional.extract(admin)
259 258 extern_type = Optional.extract(extern_type)
260 259 extern_name = Optional.extract(extern_name)
261 260
262 261 schema = user_schema.UserSchema().bind(
263 262 # user caller
264 263 user=apiuser)
265 264 try:
266 265 schema_data = schema.deserialize(dict(
267 266 username=username,
268 267 email=email,
269 268 password=password,
270 269 first_name=first_name,
271 270 last_name=last_name,
272 271 active=active,
273 272 admin=admin,
274 273 description=description,
275 274 extern_type=extern_type,
276 275 extern_name=extern_name,
277 276 ))
278 277 except validation_schema.Invalid as err:
279 278 raise JSONRPCValidationError(colander_exc=err)
280 279
281 280 try:
282 281 user = UserModel().create_or_update(
283 282 username=schema_data['username'],
284 283 password=schema_data['password'],
285 284 email=schema_data['email'],
286 285 firstname=schema_data['first_name'],
287 286 lastname=schema_data['last_name'],
288 287 description=schema_data['description'],
289 288 active=schema_data['active'],
290 289 admin=schema_data['admin'],
291 290 extern_type=schema_data['extern_type'],
292 291 extern_name=schema_data['extern_name'],
293 292 force_password_change=Optional.extract(force_password_change),
294 293 create_repo_group=create_repo_group
295 294 )
296 295 Session().flush()
297 296 creation_data = user.get_api_data()
298 297 audit_logger.store_api(
299 298 'user.create', action_data={'data': creation_data},
300 299 user=apiuser)
301 300
302 301 Session().commit()
303 302 return {
304 303 'msg': 'created new user `%s`' % username,
305 304 'user': user.get_api_data(include_secrets=True)
306 305 }
307 306 except Exception:
308 307 log.exception('Error occurred during creation of user')
309 308 raise JSONRPCError('failed to create user `%s`' % (username,))
310 309
311 310
312 311 @jsonrpc_method()
313 312 def update_user(request, apiuser, userid, username=Optional(None),
314 313 email=Optional(None), password=Optional(None),
315 314 firstname=Optional(None), lastname=Optional(None),
316 315 description=Optional(None), active=Optional(None), admin=Optional(None),
317 316 extern_type=Optional(None), extern_name=Optional(None), ):
318 317 """
319 318 Updates the details for the specified user, if that user exists.
320 319
321 320 This command can only be run using an |authtoken| with admin rights to
322 321 the specified repository.
323 322
324 323 This command takes the following options:
325 324
326 325 :param apiuser: This is filled automatically from |authtoken|.
327 326 :type apiuser: AuthUser
328 327 :param userid: Set the ``userid`` to update.
329 328 :type userid: str or int
330 329 :param username: Set the new username.
331 330 :type username: str or int
332 331 :param email: Set the new email.
333 332 :type email: str
334 333 :param password: Set the new password.
335 334 :type password: Optional(str)
336 335 :param firstname: Set the new first name.
337 336 :type firstname: Optional(str)
338 337 :param lastname: Set the new surname.
339 338 :type lastname: Optional(str)
340 339 :param description: Set user description, or short bio. Metatags are allowed.
341 340 :type description: Optional(str)
342 341 :param active: Set the new user as active.
343 342 :type active: Optional(``True`` | ``False``)
344 343 :param admin: Give the user admin rights.
345 344 :type admin: Optional(``True`` | ``False``)
346 345 :param extern_name: Set the authentication plugin user name.
347 346 Using LDAP this is filled with LDAP UID.
348 347 :type extern_name: Optional(str)
349 348 :param extern_type: Set the authentication plugin type.
350 349 :type extern_type: Optional(str)
351 350
352 351
353 352 Example output:
354 353
355 354 .. code-block:: bash
356 355
357 356 id : <id_given_in_input>
358 357 result: {
359 358 "msg" : "updated user ID:<userid> <username>",
360 359 "user": <user_object>,
361 360 }
362 361 error: null
363 362
364 363 Example error output:
365 364
366 365 .. code-block:: bash
367 366
368 367 id : <id_given_in_input>
369 368 result : null
370 369 error : {
371 370 "failed to update user `<username>`"
372 371 }
373 372
374 373 """
375 374 if not has_superadmin_permission(apiuser):
376 375 raise JSONRPCForbidden()
377 376
378 377 user = get_user_or_error(userid)
379 378 old_data = user.get_api_data()
380 379     # only non-optional arguments will be stored in updates
381 380 updates = {}
382 381
383 382 try:
384 383
385 384 store_update(updates, username, 'username')
386 385 store_update(updates, password, 'password')
387 386 store_update(updates, email, 'email')
388 387 store_update(updates, firstname, 'name')
389 388 store_update(updates, lastname, 'lastname')
390 389 store_update(updates, description, 'description')
391 390 store_update(updates, active, 'active')
392 391 store_update(updates, admin, 'admin')
393 392 store_update(updates, extern_name, 'extern_name')
394 393 store_update(updates, extern_type, 'extern_type')
395 394
396 395 user = UserModel().update_user(user, **updates)
397 396 audit_logger.store_api(
398 397 'user.edit', action_data={'old_data': old_data},
399 398 user=apiuser)
400 399 Session().commit()
401 400 return {
402 401 'msg': 'updated user ID:%s %s' % (user.user_id, user.username),
403 402 'user': user.get_api_data(include_secrets=True)
404 403 }
405 404 except DefaultUserException:
406 405 log.exception("Default user edit exception")
407 406 raise JSONRPCError('editing default user is forbidden')
408 407 except Exception:
409 408 log.exception("Error occurred during update of user")
410 409 raise JSONRPCError('failed to update user `%s`' % (userid,))
411 410
412 411
413 412 @jsonrpc_method()
414 413 def delete_user(request, apiuser, userid):
415 414 """
416 415 Deletes the specified user from the |RCE| user database.
417 416
418 417 This command can only be run using an |authtoken| with admin rights to
419 418 the specified repository.
420 419
421 420 .. important::
422 421
423 422 Ensure all open pull requests and open code review
424 423         requests to this user are closed.
425 424
426 425 Also ensure all repositories, or repository groups owned by this
427 426 user are reassigned before deletion.
428 427
429 428 This command takes the following options:
430 429
431 430 :param apiuser: This is filled automatically from the |authtoken|.
432 431 :type apiuser: AuthUser
433 432 :param userid: Set the user to delete.
434 433 :type userid: str or int
435 434
436 435 Example output:
437 436
438 437 .. code-block:: bash
439 438
440 439 id : <id_given_in_input>
441 440 result: {
442 441 "msg" : "deleted user ID:<userid> <username>",
443 442 "user": null
444 443 }
445 444 error: null
446 445
447 446 Example error output:
448 447
449 448 .. code-block:: bash
450 449
451 450 id : <id_given_in_input>
452 451 result : null
453 452 error : {
454 453 "failed to delete user ID:<userid> <username>"
455 454 }
456 455
457 456 """
458 457 if not has_superadmin_permission(apiuser):
459 458 raise JSONRPCForbidden()
460 459
461 460 user = get_user_or_error(userid)
462 461 old_data = user.get_api_data()
463 462 try:
464 463 UserModel().delete(userid)
465 464 audit_logger.store_api(
466 465 'user.delete', action_data={'old_data': old_data},
467 466 user=apiuser)
468 467
469 468 Session().commit()
470 469 return {
471 470 'msg': 'deleted user ID:%s %s' % (user.user_id, user.username),
472 471 'user': None
473 472 }
474 473 except Exception:
475 474 log.exception("Error occurred during deleting of user")
476 475 raise JSONRPCError(
477 476 'failed to delete user ID:%s %s' % (user.user_id, user.username))
478 477
479 478
480 479 @jsonrpc_method()
481 480 def get_user_locks(request, apiuser, userid=Optional(OAttr('apiuser'))):
482 481 """
483 482 Displays all repositories locked by the specified user.
484 483
485 484 * If this command is run by a non-admin user, it returns
486 485 a list of |repos| locked by that user.
487 486
488 487 This command takes the following options:
489 488
490 489 :param apiuser: This is filled automatically from the |authtoken|.
491 490 :type apiuser: AuthUser
492 491 :param userid: Sets the userid whose list of locked |repos| will be
493 492 displayed.
494 493 :type userid: Optional(str or int)
495 494
496 495 Example output:
497 496
498 497 .. code-block:: bash
499 498
500 499 id : <id_given_in_input>
501 500 result : {
502 501 [repo_object, repo_object,...]
503 502 }
504 503 error : null
505 504 """
506 505
507 506 include_secrets = False
508 507 if not has_superadmin_permission(apiuser):
509 508         # make sure a normal user does not pass someone else's userid,
510 509         # they are not allowed to do that
511 510 if not isinstance(userid, Optional) and userid != apiuser.user_id:
512 511 raise JSONRPCError('userid is not the same as your user')
513 512 else:
514 513 include_secrets = True
515 514
516 515 userid = Optional.extract(userid, evaluate_locals=locals())
517 516 userid = getattr(userid, 'user_id', userid)
518 517 user = get_user_or_error(userid)
519 518
520 519 ret = []
521 520
522 521 # show all locks
523 522 for r in Repository.getAll():
524 523 _user_id, _time, _reason = r.locked
525 524 if _user_id and _time:
526 525 _api_data = r.get_api_data(include_secrets=include_secrets)
527 526 # if we use user filter just show the locks for this user
528 527 if safe_int(_user_id) == user.user_id:
529 528 ret.append(_api_data)
530 529
531 530 return ret
532 531
533 532
534 533 @jsonrpc_method()
535 534 def get_user_audit_logs(request, apiuser, userid=Optional(OAttr('apiuser'))):
536 535 """
537 536 Fetches all action logs made by the specified user.
538 537
539 538 This command takes the following options:
540 539
541 540 :param apiuser: This is filled automatically from the |authtoken|.
542 541 :type apiuser: AuthUser
543 542     :param userid: Sets the userid whose list of audit logs will be
544 543         displayed.
545 544 :type userid: Optional(str or int)
546 545
547 546 Example output:
548 547
549 548 .. code-block:: bash
550 549
551 550 id : <id_given_in_input>
552 551 result : {
553 552 [action, action,...]
554 553 }
555 554 error : null
556 555 """
557 556
558 557 if not has_superadmin_permission(apiuser):
559 558         # make sure a normal user does not pass someone else's userid,
560 559         # they are not allowed to do that
561 560 if not isinstance(userid, Optional) and userid != apiuser.user_id:
562 561 raise JSONRPCError('userid is not the same as your user')
563 562
564 563 userid = Optional.extract(userid, evaluate_locals=locals())
565 564 userid = getattr(userid, 'user_id', userid)
566 565 user = get_user_or_error(userid)
567 566
568 567 ret = []
569 568
570 569 # show all user actions
571 570 for entry in UserModel().get_user_log(user, filter_term=None):
572 571 ret.append(entry)
573 572 return ret
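One detail from the create_user hunk above worth spelling out: API callers may send create_personal_repo_group as a string ("true"/"false"), so after Optional.extract() the value is coerced with str2bool, and the only change in this commit is that the isinstance check now uses the built-in str instead of compat.string_types. A minimal sketch of that coercion, with a simplified stand-in for rhodecode.lib.utils2.str2bool (the stand-in is illustrative, not the real helper):

    def str2bool(value):
        # simplified stand-in for rhodecode.lib.utils2.str2bool
        return str(value).strip().lower() in ('true', 'yes', 'on', 'y', 't', '1')

    def normalize_repo_group_flag(value):
        # mirrors the Python 3 only check now used in create_user()
        if isinstance(value, str):
            value = str2bool(value)
        return value

    assert normalize_repo_group_flag("true") is True
    assert normalize_repo_group_flag(False) is False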
@@ -1,840 +1,839 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import time
22 22 import logging
23 23 import operator
24 24
25 from pyramid import compat
26 25 from pyramid.httpexceptions import HTTPFound, HTTPForbidden, HTTPBadRequest
27 26
28 27 from rhodecode.lib import helpers as h, diffs, rc_cache
29 28 from rhodecode.lib.utils import repo_name_slug
30 29 from rhodecode.lib.utils2 import (
31 30 StrictAttributeDict, str2bool, safe_int, datetime_to_time, safe_unicode)
32 31 from rhodecode.lib.markup_renderer import MarkupRenderer, relative_links
33 32 from rhodecode.lib.vcs.backends.base import EmptyCommit
34 33 from rhodecode.lib.vcs.exceptions import RepositoryRequirementError
35 34 from rhodecode.model import repo
36 35 from rhodecode.model import repo_group
37 36 from rhodecode.model import user_group
38 37 from rhodecode.model import user
39 38 from rhodecode.model.db import User
40 39 from rhodecode.model.scm import ScmModel
41 40 from rhodecode.model.settings import VcsSettingsModel, IssueTrackerSettingsModel
42 41 from rhodecode.model.repo import ReadmeFinder
43 42
44 43 log = logging.getLogger(__name__)
45 44
46 45
47 46 ADMIN_PREFIX = '/_admin'
48 47 STATIC_FILE_PREFIX = '/_static'
49 48
50 49 URL_NAME_REQUIREMENTS = {
51 50     # group names can have a slash in them, but they must not end with a slash
52 51 'group_name': r'.*?[^/]',
53 52 'repo_group_name': r'.*?[^/]',
54 53 # repo names can have a slash in them, but they must not end with a slash
55 54 'repo_name': r'.*?[^/]',
56 55 # file path eats up everything at the end
57 56 'f_path': r'.*',
58 57 # reference types
59 58 'source_ref_type': '(branch|book|tag|rev|\%\(source_ref_type\)s)',
60 59 'target_ref_type': '(branch|book|tag|rev|\%\(target_ref_type\)s)',
61 60 }
62 61
63 62
64 63 def add_route_with_slash(config, name, pattern, **kw):
65 64 config.add_route(name, pattern, **kw)
66 65 if not pattern.endswith('/'):
67 66 config.add_route(name + '_slash', pattern + '/', **kw)
68 67
69 68
70 69 def add_route_requirements(route_path, requirements=None):
71 70 """
72 71 Adds regex requirements to pyramid routes using a mapping dict
73 72 e.g::
74 73 add_route_requirements('{repo_name}/settings')
75 74 """
76 75 requirements = requirements or URL_NAME_REQUIREMENTS
77 76 for key, regex in requirements.items():
78 77 route_path = route_path.replace('{%s}' % key, '{%s:%s}' % (key, regex))
79 78 return route_path
80 79
81 80
82 81 def get_format_ref_id(repo):
83 82 """Returns a `repo` specific reference formatter function"""
84 83 if h.is_svn(repo):
85 84 return _format_ref_id_svn
86 85 else:
87 86 return _format_ref_id
88 87
89 88
90 89 def _format_ref_id(name, raw_id):
91 90 """Default formatting of a given reference `name`"""
92 91 return name
93 92
94 93
95 94 def _format_ref_id_svn(name, raw_id):
96 95 """Special way of formatting a reference for Subversion including path"""
97 96 return '%s@%s' % (name, raw_id)
98 97
99 98
100 99 class TemplateArgs(StrictAttributeDict):
101 100 pass
102 101
103 102
104 103 class BaseAppView(object):
105 104
106 105 def __init__(self, context, request):
107 106 self.request = request
108 107 self.context = context
109 108 self.session = request.session
110 109 if not hasattr(request, 'user'):
111 110             # NOTE(marcink): edge case, we ended up in a matched route
112 111             # but probably outside of web-app context, e.g. API CALL/VCS CALL
113 112 if hasattr(request, 'vcs_call') or hasattr(request, 'rpc_method'):
114 113 log.warning('Unable to process request `%s` in this scope', request)
115 114 raise HTTPBadRequest()
116 115
117 116 self._rhodecode_user = request.user # auth user
118 117 self._rhodecode_db_user = self._rhodecode_user.get_instance()
119 118 self._maybe_needs_password_change(
120 119 request.matched_route.name, self._rhodecode_db_user)
121 120
122 121 def _maybe_needs_password_change(self, view_name, user_obj):
123 122
124 123 dont_check_views = [
125 124 'channelstream_connect'
126 125 ]
127 126 if view_name in dont_check_views:
128 127 return
129 128
130 129 log.debug('Checking if user %s needs password change on view %s',
131 130 user_obj, view_name)
132 131
133 132 skip_user_views = [
134 133 'logout', 'login',
135 134 'my_account_password', 'my_account_password_update'
136 135 ]
137 136
138 137 if not user_obj:
139 138 return
140 139
141 140 if user_obj.username == User.DEFAULT_USER:
142 141 return
143 142
144 143 now = time.time()
145 144 should_change = user_obj.user_data.get('force_password_change')
146 145 change_after = safe_int(should_change) or 0
147 146 if should_change and now > change_after:
148 147 log.debug('User %s requires password change', user_obj)
149 148 h.flash('You are required to change your password', 'warning',
150 149 ignore_duplicate=True)
151 150
152 151 if view_name not in skip_user_views:
153 152 raise HTTPFound(
154 153 self.request.route_path('my_account_password'))
155 154
156 155 def _log_creation_exception(self, e, repo_name):
157 156 _ = self.request.translate
158 157 reason = None
159 158 if len(e.args) == 2:
160 159 reason = e.args[1]
161 160
162 161 if reason == 'INVALID_CERTIFICATE':
163 162 log.exception(
164 163 'Exception creating a repository: invalid certificate')
165 164 msg = (_('Error creating repository %s: invalid certificate')
166 165 % repo_name)
167 166 else:
168 167 log.exception("Exception creating a repository")
169 168 msg = (_('Error creating repository %s')
170 169 % repo_name)
171 170 return msg
172 171
173 172 def _get_local_tmpl_context(self, include_app_defaults=True):
174 173 c = TemplateArgs()
175 174 c.auth_user = self.request.user
176 175 # TODO(marcink): migrate the usage of c.rhodecode_user to c.auth_user
177 176 c.rhodecode_user = self.request.user
178 177
179 178 if include_app_defaults:
180 179 from rhodecode.lib.base import attach_context_attributes
181 180 attach_context_attributes(c, self.request, self.request.user.user_id)
182 181
183 182 c.is_super_admin = c.auth_user.is_admin
184 183
185 184 c.can_create_repo = c.is_super_admin
186 185 c.can_create_repo_group = c.is_super_admin
187 186 c.can_create_user_group = c.is_super_admin
188 187
189 188 c.is_delegated_admin = False
190 189
191 190 if not c.auth_user.is_default and not c.is_super_admin:
192 191 c.can_create_repo = h.HasPermissionAny('hg.create.repository')(
193 192 user=self.request.user)
194 193 repositories = c.auth_user.repositories_admin or c.can_create_repo
195 194
196 195 c.can_create_repo_group = h.HasPermissionAny('hg.repogroup.create.true')(
197 196 user=self.request.user)
198 197 repository_groups = c.auth_user.repository_groups_admin or c.can_create_repo_group
199 198
200 199 c.can_create_user_group = h.HasPermissionAny('hg.usergroup.create.true')(
201 200 user=self.request.user)
202 201 user_groups = c.auth_user.user_groups_admin or c.can_create_user_group
203 202 # delegated admin can create, or manage some objects
204 203 c.is_delegated_admin = repositories or repository_groups or user_groups
205 204 return c
206 205
207 206 def _get_template_context(self, tmpl_args, **kwargs):
208 207
209 208 local_tmpl_args = {
210 209 'defaults': {},
211 210 'errors': {},
212 211 'c': tmpl_args
213 212 }
214 213 local_tmpl_args.update(kwargs)
215 214 return local_tmpl_args
216 215
217 216 def load_default_context(self):
218 217 """
219 218 example:
220 219
221 220 def load_default_context(self):
222 221 c = self._get_local_tmpl_context()
223 222 c.custom_var = 'foobar'
224 223
225 224 return c
226 225 """
227 226 raise NotImplementedError('Needs implementation in view class')
228 227
229 228
230 229 class RepoAppView(BaseAppView):
231 230
232 231 def __init__(self, context, request):
233 232 super(RepoAppView, self).__init__(context, request)
234 233 self.db_repo = request.db_repo
235 234 self.db_repo_name = self.db_repo.repo_name
236 235 self.db_repo_pull_requests = ScmModel().get_pull_requests(self.db_repo)
237 236 self.db_repo_artifacts = ScmModel().get_artifacts(self.db_repo)
238 237 self.db_repo_patterns = IssueTrackerSettingsModel(repo=self.db_repo)
239 238
240 239 def _handle_missing_requirements(self, error):
241 240 log.error(
242 241 'Requirements are missing for repository %s: %s',
243 242 self.db_repo_name, safe_unicode(error))
244 243
245 244 def _get_local_tmpl_context(self, include_app_defaults=True):
246 245 _ = self.request.translate
247 246 c = super(RepoAppView, self)._get_local_tmpl_context(
248 247 include_app_defaults=include_app_defaults)
249 248
250 249 # register common vars for this type of view
251 250 c.rhodecode_db_repo = self.db_repo
252 251 c.repo_name = self.db_repo_name
253 252 c.repository_pull_requests = self.db_repo_pull_requests
254 253 c.repository_artifacts = self.db_repo_artifacts
255 254 c.repository_is_user_following = ScmModel().is_following_repo(
256 255 self.db_repo_name, self._rhodecode_user.user_id)
257 256 self.path_filter = PathFilter(None)
258 257
259 258 c.repository_requirements_missing = {}
260 259 try:
261 260 self.rhodecode_vcs_repo = self.db_repo.scm_instance()
262 261 # NOTE(marcink):
263 262 # comparison to None since if it's an object __bool__ is expensive to
264 263 # calculate
265 264 if self.rhodecode_vcs_repo is not None:
266 265 path_perms = self.rhodecode_vcs_repo.get_path_permissions(
267 266 c.auth_user.username)
268 267 self.path_filter = PathFilter(path_perms)
269 268 except RepositoryRequirementError as e:
270 269 c.repository_requirements_missing = {'error': str(e)}
271 270 self._handle_missing_requirements(e)
272 271 self.rhodecode_vcs_repo = None
273 272
274 273 c.path_filter = self.path_filter # used by atom_feed_entry.mako
275 274
276 275 if self.rhodecode_vcs_repo is None:
277 276 # unable to fetch this repo as vcs instance, report back to user
278 277 log.debug('Repository was not found on filesystem, check if it exists or is not damaged')
279 278 h.flash(_(
280 279 "The repository `%(repo_name)s` cannot be loaded in filesystem. "
281 280                 "Please check if it exists, or is not damaged.") %
282 281 {'repo_name': c.repo_name},
283 282 category='error', ignore_duplicate=True)
284 283 if c.repository_requirements_missing:
285 284 route = self.request.matched_route.name
286 285 if route.startswith(('edit_repo', 'repo_summary')):
287 286 # allow summary and edit repo on missing requirements
288 287 return c
289 288
290 289 raise HTTPFound(
291 290 h.route_path('repo_summary', repo_name=self.db_repo_name))
292 291
293 292 else: # redirect if we don't show missing requirements
294 293 raise HTTPFound(h.route_path('home'))
295 294
296 295 c.has_origin_repo_read_perm = False
297 296 if self.db_repo.fork:
298 297 c.has_origin_repo_read_perm = h.HasRepoPermissionAny(
299 298 'repository.write', 'repository.read', 'repository.admin')(
300 299 self.db_repo.fork.repo_name, 'summary fork link')
301 300
302 301 return c
303 302
304 303 def _get_f_path_unchecked(self, matchdict, default=None):
305 304 """
306 305 Should only be used by redirects, everything else should call _get_f_path
307 306 """
308 307 f_path = matchdict.get('f_path')
309 308 if f_path:
310 309 # fix for multiple initial slashes that causes errors for GIT
311 310 return f_path.lstrip('/')
312 311
313 312 return default
314 313
315 314 def _get_f_path(self, matchdict, default=None):
316 315 f_path_match = self._get_f_path_unchecked(matchdict, default)
317 316 return self.path_filter.assert_path_permissions(f_path_match)
318 317
319 318 def _get_general_setting(self, target_repo, settings_key, default=False):
320 319 settings_model = VcsSettingsModel(repo=target_repo)
321 320 settings = settings_model.get_general_settings()
322 321 return settings.get(settings_key, default)
323 322
324 323 def _get_repo_setting(self, target_repo, settings_key, default=False):
325 324 settings_model = VcsSettingsModel(repo=target_repo)
326 325 settings = settings_model.get_repo_settings_inherited()
327 326 return settings.get(settings_key, default)
328 327
329 328 def _get_readme_data(self, db_repo, renderer_type, commit_id=None, path='/'):
330 329 log.debug('Looking for README file at path %s', path)
331 330 if commit_id:
332 331 landing_commit_id = commit_id
333 332 else:
334 333 landing_commit = db_repo.get_landing_commit()
335 334 if isinstance(landing_commit, EmptyCommit):
336 335 return None, None
337 336 landing_commit_id = landing_commit.raw_id
338 337
339 338 cache_namespace_uid = 'cache_repo.{}'.format(db_repo.repo_id)
340 339 region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)
341 340 start = time.time()
342 341
343 342 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
344 343 def generate_repo_readme(repo_id, _commit_id, _repo_name, _readme_search_path, _renderer_type):
345 344 readme_data = None
346 345 readme_filename = None
347 346
348 347 commit = db_repo.get_commit(_commit_id)
349 348 log.debug("Searching for a README file at commit %s.", _commit_id)
350 349 readme_node = ReadmeFinder(_renderer_type).search(commit, path=_readme_search_path)
351 350
352 351 if readme_node:
353 352 log.debug('Found README node: %s', readme_node)
354 353 relative_urls = {
355 354 'raw': h.route_path(
356 355 'repo_file_raw', repo_name=_repo_name,
357 356 commit_id=commit.raw_id, f_path=readme_node.path),
358 357 'standard': h.route_path(
359 358 'repo_files', repo_name=_repo_name,
360 359 commit_id=commit.raw_id, f_path=readme_node.path),
361 360 }
362 361 readme_data = self._render_readme_or_none(commit, readme_node, relative_urls)
363 362 readme_filename = readme_node.unicode_path
364 363
365 364 return readme_data, readme_filename
366 365
367 366 readme_data, readme_filename = generate_repo_readme(
368 367 db_repo.repo_id, landing_commit_id, db_repo.repo_name, path, renderer_type,)
369 368 compute_time = time.time() - start
370 369 log.debug('Repo README for path %s generated and computed in %.4fs',
371 370 path, compute_time)
372 371 return readme_data, readme_filename
373 372
374 373 def _render_readme_or_none(self, commit, readme_node, relative_urls):
375 374 log.debug('Found README file `%s` rendering...', readme_node.path)
376 375 renderer = MarkupRenderer()
377 376 try:
378 377 html_source = renderer.render(
379 378 readme_node.content, filename=readme_node.path)
380 379 if relative_urls:
381 380 return relative_links(html_source, relative_urls)
382 381 return html_source
383 382 except Exception:
384 383 log.exception(
385 384 "Exception while trying to render the README")
386 385
387 386 def get_recache_flag(self):
388 387 for flag_name in ['force_recache', 'force-recache', 'no-cache']:
389 388 flag_val = self.request.GET.get(flag_name)
390 389 if str2bool(flag_val):
391 390 return True
392 391 return False
393 392
394 393 def get_commit_preload_attrs(cls):
395 394 pre_load = ['author', 'branch', 'date', 'message', 'parents',
396 395 'obsolete', 'phase', 'hidden']
397 396 return pre_load
398 397
399 398
400 399 class PathFilter(object):
401 400
402 401     # Expects an instance of BasePathPermissionChecker or None
403 402 def __init__(self, permission_checker):
404 403 self.permission_checker = permission_checker
405 404
406 405 def assert_path_permissions(self, path):
407 406 if self.path_access_allowed(path):
408 407 return path
409 408 raise HTTPForbidden()
410 409
411 410 def path_access_allowed(self, path):
412 411 log.debug('Checking ACL permissions for PathFilter for `%s`', path)
413 412 if self.permission_checker:
414 413 has_access = path and self.permission_checker.has_access(path)
415 414 log.debug('ACL Permissions checker enabled, ACL Check has_access: %s', has_access)
416 415 return has_access
417 416
418 417 log.debug('ACL permissions checker not enabled, skipping...')
419 418 return True
420 419
421 420 def filter_patchset(self, patchset):
422 421 if not self.permission_checker or not patchset:
423 422 return patchset, False
424 423 had_filtered = False
425 424 filtered_patchset = []
426 425 for patch in patchset:
427 426 filename = patch.get('filename', None)
428 427 if not filename or self.permission_checker.has_access(filename):
429 428 filtered_patchset.append(patch)
430 429 else:
431 430 had_filtered = True
432 431 if had_filtered:
433 432 if isinstance(patchset, diffs.LimitedDiffContainer):
434 433 filtered_patchset = diffs.LimitedDiffContainer(patchset.diff_limit, patchset.cur_diff_size, filtered_patchset)
435 434 return filtered_patchset, True
436 435 else:
437 436 return patchset, False
438 437
439 438 def render_patchset_filtered(self, diffset, patchset, source_ref=None, target_ref=None):
440 439 filtered_patchset, has_hidden_changes = self.filter_patchset(patchset)
441 440 result = diffset.render_patchset(
442 441 filtered_patchset, source_ref=source_ref, target_ref=target_ref)
443 442 result.has_hidden_changes = has_hidden_changes
444 443 return result
445 444
446 445 def get_raw_patch(self, diff_processor):
447 446 if self.permission_checker is None:
448 447 return diff_processor.as_raw()
449 448 elif self.permission_checker.has_full_access:
450 449 return diff_processor.as_raw()
451 450 else:
452 451 return '# Repository has user-specific filters, raw patch generation is disabled.'
453 452
454 453 @property
455 454 def is_enabled(self):
456 455 return self.permission_checker is not None
457 456
458 457
459 458 class RepoGroupAppView(BaseAppView):
460 459 def __init__(self, context, request):
461 460 super(RepoGroupAppView, self).__init__(context, request)
462 461 self.db_repo_group = request.db_repo_group
463 462 self.db_repo_group_name = self.db_repo_group.group_name
464 463
465 464 def _get_local_tmpl_context(self, include_app_defaults=True):
466 465 _ = self.request.translate
467 466 c = super(RepoGroupAppView, self)._get_local_tmpl_context(
468 467 include_app_defaults=include_app_defaults)
469 468 c.repo_group = self.db_repo_group
470 469 return c
471 470
472 471 def _revoke_perms_on_yourself(self, form_result):
473 472 _updates = filter(lambda u: self._rhodecode_user.user_id == int(u[0]),
474 473 form_result['perm_updates'])
475 474 _additions = filter(lambda u: self._rhodecode_user.user_id == int(u[0]),
476 475 form_result['perm_additions'])
477 476 _deletions = filter(lambda u: self._rhodecode_user.user_id == int(u[0]),
478 477 form_result['perm_deletions'])
479 478 admin_perm = 'group.admin'
480 479 if _updates and _updates[0][1] != admin_perm or \
481 480 _additions and _additions[0][1] != admin_perm or \
482 481 _deletions and _deletions[0][1] != admin_perm:
483 482 return True
484 483 return False
485 484
486 485
487 486 class UserGroupAppView(BaseAppView):
488 487 def __init__(self, context, request):
489 488 super(UserGroupAppView, self).__init__(context, request)
490 489 self.db_user_group = request.db_user_group
491 490 self.db_user_group_name = self.db_user_group.users_group_name
492 491
493 492
494 493 class UserAppView(BaseAppView):
495 494 def __init__(self, context, request):
496 495 super(UserAppView, self).__init__(context, request)
497 496 self.db_user = request.db_user
498 497 self.db_user_id = self.db_user.user_id
499 498
500 499 _ = self.request.translate
501 500 if not request.db_user_supports_default:
502 501 if self.db_user.username == User.DEFAULT_USER:
503 502 h.flash(_("Editing user `{}` is disabled.".format(
504 503 User.DEFAULT_USER)), category='warning')
505 504 raise HTTPFound(h.route_path('users'))
506 505
507 506
508 507 class DataGridAppView(object):
509 508 """
510 509 Common class to have re-usable grid rendering components
511 510 """
512 511
513 512 def _extract_ordering(self, request, column_map=None):
514 513 column_map = column_map or {}
515 514 column_index = safe_int(request.GET.get('order[0][column]'))
516 515 order_dir = request.GET.get(
517 516 'order[0][dir]', 'desc')
518 517 order_by = request.GET.get(
519 518 'columns[%s][data][sort]' % column_index, 'name_raw')
520 519
521 520 # translate datatable to DB columns
522 521 order_by = column_map.get(order_by) or order_by
523 522
524 523 search_q = request.GET.get('search[value]')
525 524 return search_q, order_by, order_dir
526 525
527 526 def _extract_chunk(self, request):
528 527 start = safe_int(request.GET.get('start'), 0)
529 528 length = safe_int(request.GET.get('length'), 25)
530 529 draw = safe_int(request.GET.get('draw'))
531 530 return draw, start, length
532 531
533 532 def _get_order_col(self, order_by, model):
534 if isinstance(order_by, compat.string_types):
533 if isinstance(order_by, str):
535 534 try:
536 535 return operator.attrgetter(order_by)(model)
537 536 except AttributeError:
538 537 return None
539 538 else:
540 539 return order_by
541 540
542 541
543 542 class BaseReferencesView(RepoAppView):
544 543 """
545 544 Base for reference view for branches, tags and bookmarks.
546 545 """
547 546 def load_default_context(self):
548 547 c = self._get_local_tmpl_context()
549 548 return c
550 549
551 550 def load_refs_context(self, ref_items, partials_template):
552 551 _render = self.request.get_partial_renderer(partials_template)
553 552 pre_load = ["author", "date", "message", "parents"]
554 553
555 554 is_svn = h.is_svn(self.rhodecode_vcs_repo)
556 555 is_hg = h.is_hg(self.rhodecode_vcs_repo)
557 556
558 557 format_ref_id = get_format_ref_id(self.rhodecode_vcs_repo)
559 558
560 559 closed_refs = {}
561 560 if is_hg:
562 561 closed_refs = self.rhodecode_vcs_repo.branches_closed
563 562
564 563 data = []
565 564 for ref_name, commit_id in ref_items:
566 565 commit = self.rhodecode_vcs_repo.get_commit(
567 566 commit_id=commit_id, pre_load=pre_load)
568 567 closed = ref_name in closed_refs
569 568
570 569 # TODO: johbo: Unify generation of reference links
571 570 use_commit_id = '/' in ref_name or is_svn
572 571
573 572 if use_commit_id:
574 573 files_url = h.route_path(
575 574 'repo_files',
576 575 repo_name=self.db_repo_name,
577 576 f_path=ref_name if is_svn else '',
578 577 commit_id=commit_id,
579 578 _query=dict(at=ref_name)
580 579 )
581 580
582 581 else:
583 582 files_url = h.route_path(
584 583 'repo_files',
585 584 repo_name=self.db_repo_name,
586 585 f_path=ref_name if is_svn else '',
587 586 commit_id=ref_name,
588 587 _query=dict(at=ref_name)
589 588 )
590 589
591 590 data.append({
592 591 "name": _render('name', ref_name, files_url, closed),
593 592 "name_raw": ref_name,
594 593 "date": _render('date', commit.date),
595 594 "date_raw": datetime_to_time(commit.date),
596 595 "author": _render('author', commit.author),
597 596 "commit": _render(
598 597 'commit', commit.message, commit.raw_id, commit.idx),
599 598 "commit_raw": commit.idx,
600 599 "compare": _render(
601 600 'compare', format_ref_id(ref_name, commit.raw_id)),
602 601 })
603 602
604 603 return data
605 604
606 605
607 606 class RepoRoutePredicate(object):
608 607 def __init__(self, val, config):
609 608 self.val = val
610 609
611 610 def text(self):
612 611 return 'repo_route = %s' % self.val
613 612
614 613 phash = text
615 614
616 615 def __call__(self, info, request):
617 616 if hasattr(request, 'vcs_call'):
618 617 # skip vcs calls
619 618 return
620 619
621 620 repo_name = info['match']['repo_name']
622 621
623 622 repo_name_parts = repo_name.split('/')
624 623 repo_slugs = [x for x in map(lambda x: repo_name_slug(x), repo_name_parts)]
625 624
626 625 if repo_name_parts != repo_slugs:
627 626 # short-skip if the repo-name doesn't follow slug rule
628 627 log.warning('repo_name: %s is different than slug %s', repo_name_parts, repo_slugs)
629 628 return False
630 629
631 630 repo_model = repo.RepoModel()
632 631
633 632 by_name_match = repo_model.get_by_repo_name(repo_name, cache=False)
634 633
635 634 def redirect_if_creating(route_info, db_repo):
636 635 skip_views = ['edit_repo_advanced_delete']
637 636 route = route_info['route']
638 637 # we should skip delete view so we can actually "remove" repositories
639 638 # if they get stuck in creating state.
640 639 if route.name in skip_views:
641 640 return
642 641
643 642 if db_repo.repo_state in [repo.Repository.STATE_PENDING]:
644 643 repo_creating_url = request.route_path(
645 644 'repo_creating', repo_name=db_repo.repo_name)
646 645 raise HTTPFound(repo_creating_url)
647 646
648 647 if by_name_match:
649 648 # register this as request object we can re-use later
650 649 request.db_repo = by_name_match
651 650 redirect_if_creating(info, by_name_match)
652 651 return True
653 652
654 653 by_id_match = repo_model.get_repo_by_id(repo_name)
655 654 if by_id_match:
656 655 request.db_repo = by_id_match
657 656 redirect_if_creating(info, by_id_match)
658 657 return True
659 658
660 659 return False
661 660
662 661
663 662 class RepoForbidArchivedRoutePredicate(object):
664 663 def __init__(self, val, config):
665 664 self.val = val
666 665
667 666 def text(self):
668 667 return 'repo_forbid_archived = %s' % self.val
669 668
670 669 phash = text
671 670
672 671 def __call__(self, info, request):
673 672 _ = request.translate
674 673 rhodecode_db_repo = request.db_repo
675 674
676 675 log.debug(
677 676 '%s checking if archived flag for repo for %s',
678 677 self.__class__.__name__, rhodecode_db_repo.repo_name)
679 678
680 679 if rhodecode_db_repo.archived:
681 680 log.warning('Current view is not supported for archived repo:%s',
682 681 rhodecode_db_repo.repo_name)
683 682
684 683 h.flash(
685 684 h.literal(_('Action not supported for archived repository.')),
686 685 category='warning')
687 686 summary_url = request.route_path(
688 687 'repo_summary', repo_name=rhodecode_db_repo.repo_name)
689 688 raise HTTPFound(summary_url)
690 689 return True
691 690
692 691
693 692 class RepoTypeRoutePredicate(object):
694 693 def __init__(self, val, config):
695 694 self.val = val or ['hg', 'git', 'svn']
696 695
697 696 def text(self):
698 697 return 'repo_accepted_type = %s' % self.val
699 698
700 699 phash = text
701 700
702 701 def __call__(self, info, request):
703 702 if hasattr(request, 'vcs_call'):
704 703 # skip vcs calls
705 704 return
706 705
707 706 rhodecode_db_repo = request.db_repo
708 707
709 708 log.debug(
710 709 '%s checking repo type for %s in %s',
711 710 self.__class__.__name__, rhodecode_db_repo.repo_type, self.val)
712 711
713 712 if rhodecode_db_repo.repo_type in self.val:
714 713 return True
715 714 else:
716 715 log.warning('Current view is not supported for repo type:%s',
717 716 rhodecode_db_repo.repo_type)
718 717 return False
719 718
720 719
721 720 class RepoGroupRoutePredicate(object):
722 721 def __init__(self, val, config):
723 722 self.val = val
724 723
725 724 def text(self):
726 725 return 'repo_group_route = %s' % self.val
727 726
728 727 phash = text
729 728
730 729 def __call__(self, info, request):
731 730 if hasattr(request, 'vcs_call'):
732 731 # skip vcs calls
733 732 return
734 733
735 734 repo_group_name = info['match']['repo_group_name']
736 735
737 736 repo_group_name_parts = repo_group_name.split('/')
738 737 repo_group_slugs = [x for x in map(lambda x: repo_name_slug(x), repo_group_name_parts)]
739 738 if repo_group_name_parts != repo_group_slugs:
740 739 # short-skip if the repo-name doesn't follow slug rule
741 740 log.warning('repo_group_name: %s is different than slug %s', repo_group_name_parts, repo_group_slugs)
742 741 return False
743 742
744 743 repo_group_model = repo_group.RepoGroupModel()
745 744 by_name_match = repo_group_model.get_by_group_name(repo_group_name, cache=False)
746 745
747 746 if by_name_match:
748 747 # register this as request object we can re-use later
749 748 request.db_repo_group = by_name_match
750 749 return True
751 750
752 751 return False
753 752
754 753
755 754 class UserGroupRoutePredicate(object):
756 755 def __init__(self, val, config):
757 756 self.val = val
758 757
759 758 def text(self):
760 759 return 'user_group_route = %s' % self.val
761 760
762 761 phash = text
763 762
764 763 def __call__(self, info, request):
765 764 if hasattr(request, 'vcs_call'):
766 765 # skip vcs calls
767 766 return
768 767
769 768 user_group_id = info['match']['user_group_id']
770 769 user_group_model = user_group.UserGroup()
771 770 by_id_match = user_group_model.get(user_group_id, cache=False)
772 771
773 772 if by_id_match:
774 773 # register this as request object we can re-use later
775 774 request.db_user_group = by_id_match
776 775 return True
777 776
778 777 return False
779 778
780 779
781 780 class UserRoutePredicateBase(object):
782 781 supports_default = None
783 782
784 783 def __init__(self, val, config):
785 784 self.val = val
786 785
787 786 def text(self):
788 787 raise NotImplementedError()
789 788
790 789 def __call__(self, info, request):
791 790 if hasattr(request, 'vcs_call'):
792 791 # skip vcs calls
793 792 return
794 793
795 794 user_id = info['match']['user_id']
796 795 user_model = user.User()
797 796 by_id_match = user_model.get(user_id, cache=False)
798 797
799 798 if by_id_match:
800 799 # register this as request object we can re-use later
801 800 request.db_user = by_id_match
802 801 request.db_user_supports_default = self.supports_default
803 802 return True
804 803
805 804 return False
806 805
807 806
808 807 class UserRoutePredicate(UserRoutePredicateBase):
809 808 supports_default = False
810 809
811 810 def text(self):
812 811 return 'user_route = %s' % self.val
813 812
814 813 phash = text
815 814
816 815
817 816 class UserRouteWithDefaultPredicate(UserRoutePredicateBase):
818 817 supports_default = True
819 818
820 819 def text(self):
821 820 return 'user_with_default_route = %s' % self.val
822 821
823 822 phash = text
824 823
825 824
826 825 def includeme(config):
827 826 config.add_route_predicate(
828 827 'repo_route', RepoRoutePredicate)
829 828 config.add_route_predicate(
830 829 'repo_accepted_types', RepoTypeRoutePredicate)
831 830 config.add_route_predicate(
832 831 'repo_forbid_when_archived', RepoForbidArchivedRoutePredicate)
833 832 config.add_route_predicate(
834 833 'repo_group_route', RepoGroupRoutePredicate)
835 834 config.add_route_predicate(
836 835 'user_group_route', UserGroupRoutePredicate)
837 836 config.add_route_predicate(
838 837 'user_route_with_default', UserRouteWithDefaultPredicate)
839 838 config.add_route_predicate(
840 839 'user_route', UserRoutePredicate)
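
The route predicate classes above all follow Pyramid's custom route predicate protocol: a factory called with (val, config), a text()/phash pair used for introspection and caching, and a __call__(info, request) that returns True when the route should match. A minimal, self-contained sketch of that protocol; the predicate name, route and view below are made up for illustration and are not RhodeCode code:

.. code-block:: python

    from wsgiref.simple_server import make_server

    from pyramid.config import Configurator
    from pyramid.response import Response


    class LangRoutePredicate(object):
        """Match the route only when the ``lang`` placeholder is an allowed value."""

        def __init__(self, val, config):
            self.val = set(val)

        def text(self):
            return 'lang_route = %s' % sorted(self.val)

        phash = text

        def __call__(self, info, request):
            # info['match'] holds the matchdict of the candidate route
            return info['match'].get('lang') in self.val


    def hello(request):
        return Response('hello in %s' % request.matchdict['lang'])


    if __name__ == '__main__':
        config = Configurator()
        config.add_route_predicate('lang_route', LangRoutePredicate)
        # /en/hello and /de/hello match, anything else falls through to 404
        config.add_route('hello', '/{lang}/hello', lang_route=['en', 'de'])
        config.add_view(hello, route_name='hello')
        make_server('127.0.0.1', 6543, config.make_wsgi_app()).serve_forever()
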
@@ -1,91 +1,90 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20 import os
21 21 import logging
22 from pyramid import compat
23 22
24 23 # Do not use `from rhodecode import events` here, it will be overridden by the
25 24 # events module in this package due to pythons import mechanism.
26 25 from rhodecode.events import RepoGroupEvent
27 26 from rhodecode.subscribers import AsyncSubprocessSubscriber
28 27 from rhodecode.config.settings_maker import SettingsMaker
29 28
30 29 from .events import ModDavSvnConfigChange
31 30 from .subscribers import generate_config_subscriber
32 31 from . import config_keys
33 32
34 33
35 34 log = logging.getLogger(__name__)
36 35
37 36
38 37 def _sanitize_settings_and_apply_defaults(settings):
39 38 """
40 39 Set defaults, convert to python types and validate settings.
41 40 """
42 41 settings_maker = SettingsMaker(settings)
43 42 settings_maker.make_setting(config_keys.generate_config, False, parser='bool')
44 43 settings_maker.make_setting(config_keys.list_parent_path, True, parser='bool')
45 44 settings_maker.make_setting(config_keys.reload_timeout, 10, parser='int')
46 45 settings_maker.make_setting(config_keys.config_file_path, '')
47 46 settings_maker.make_setting(config_keys.location_root, '/')
48 47 settings_maker.make_setting(config_keys.reload_command, '')
49 48 settings_maker.make_setting(config_keys.template, '')
50 49
51 50 settings_maker.env_expand()
52 51
53 52 # Convert negative timeout values to zero.
54 53 if settings[config_keys.reload_timeout] < 0:
55 54 settings[config_keys.reload_timeout] = 0
56 55
57 56 # Append path separator to location root.
58 57 settings[config_keys.location_root] = _append_path_sep(
59 58 settings[config_keys.location_root])
60 59
61 60 # Validate settings.
62 61 if settings[config_keys.generate_config]:
63 62 assert len(settings[config_keys.config_file_path]) > 0
64 63
65 64
66 65 def _append_path_sep(path):
67 66 """
68 67 Append the path separator if missing.
69 68 """
70 if isinstance(path, compat.string_types) and not path.endswith(os.path.sep):
69 if isinstance(path, str) and not path.endswith(os.path.sep):
71 70 path += os.path.sep
72 71 return path
73 72
74 73
75 74 def includeme(config):
76 75 settings = config.registry.settings
77 76 _sanitize_settings_and_apply_defaults(settings)
78 77
79 78 if settings[config_keys.generate_config]:
80 79 # Add subscriber to generate the Apache mod dav svn configuration on
81 80 # repository group events.
82 81 config.add_subscriber(generate_config_subscriber, RepoGroupEvent)
83 82
84 83 # If a reload command is set add a subscriber to execute it on
85 84 # configuration changes.
86 85 reload_cmd = settings[config_keys.reload_command]
87 86 if reload_cmd:
88 87 reload_timeout = settings[config_keys.reload_timeout] or None
89 88 reload_subscriber = AsyncSubprocessSubscriber(
90 89 cmd=reload_cmd, timeout=reload_timeout)
91 90 config.add_subscriber(reload_subscriber, ModDavSvnConfigChange)
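
The sanitisation above boils down to: apply defaults, coerce raw ini strings to real types, clamp a negative reload timeout to zero, and make sure the location root ends in a path separator. A rough stdlib-only sketch of that flow, without the SettingsMaker helper; the setting key names and the str2bool helper are illustrative assumptions, not RhodeCode API:

.. code-block:: python

    import os


    def str2bool(value):
        # permissive ini-style boolean parsing (illustrative helper)
        if isinstance(value, str):
            return value.strip().lower() in ('true', 'yes', 'on', '1')
        return bool(value)


    DEFAULTS = {
        'svn.proxy.generate_config': (False, str2bool),
        'svn.proxy.list_parent_path': (True, str2bool),
        'svn.proxy.reload_timeout': (10, int),
        'svn.proxy.config_file_path': ('', str),
        'svn.proxy.location_root': ('/', str),
    }


    def sanitize(settings):
        for key, (default, parser) in DEFAULTS.items():
            raw = settings.get(key, default)
            settings[key] = parser(raw) if isinstance(raw, str) else raw
        # negative timeouts make no sense, clamp to zero
        settings['svn.proxy.reload_timeout'] = max(0, settings['svn.proxy.reload_timeout'])
        # the location root must end with a path separator
        if not settings['svn.proxy.location_root'].endswith(os.path.sep):
            settings['svn.proxy.location_root'] += os.path.sep
        return settings


    print(sanitize({'svn.proxy.reload_timeout': '-5', 'svn.proxy.generate_config': 'true'}))
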
@@ -1,415 +1,414 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 RhodeCode task modules, containing all tasks that are supposed to be run
23 23 by the celery daemon
24 24 """
25 25
26 26 import os
27 27 import time
28 28
29 from pyramid import compat
30 29 from pyramid_mailer.mailer import Mailer
31 30 from pyramid_mailer.message import Message
32 31 from email.utils import formatdate
33 32
34 33 import rhodecode
35 34 from rhodecode.lib import audit_logger
36 35 from rhodecode.lib.celerylib import get_logger, async_task, RequestContextTask, run_task
37 36 from rhodecode.lib import hooks_base
38 37 from rhodecode.lib.utils2 import safe_int, str2bool, aslist
39 38 from rhodecode.lib.statsd_client import StatsdClient
40 39 from rhodecode.model.db import (
41 40 Session, IntegrityError, true, Repository, RepoGroup, User)
42 41 from rhodecode.model.permission import PermissionModel
43 42
44 43
45 44 @async_task(ignore_result=True, base=RequestContextTask)
46 45 def send_email(recipients, subject, body='', html_body='', email_config=None,
47 46 extra_headers=None):
48 47 """
49 48 Sends an email with defined parameters from the .ini files.
50 49
51 50 :param recipients: list of recipients; if this is empty, the defined email
52 51 address from field 'email_to' is used instead
53 52 :param subject: subject of the mail
54 53 :param body: body of the mail
55 54 :param html_body: html version of body
56 55 :param email_config: specify custom configuration for mailer
57 56 :param extra_headers: specify custom headers
58 57 """
59 58 log = get_logger(send_email)
60 59
61 60 email_config = email_config or rhodecode.CONFIG
62 61
63 62 mail_server = email_config.get('smtp_server') or None
64 63 if mail_server is None:
65 64 log.error("SMTP server information missing. Sending email failed. "
66 65 "Make sure that `smtp_server` variable is configured "
67 66 "inside the .ini file")
68 67 return False
69 68
70 69 subject = "%s %s" % (email_config.get('email_prefix', ''), subject)
71 70
72 71 if recipients:
73 if isinstance(recipients, compat.string_types):
72 if isinstance(recipients, str):
74 73 recipients = recipients.split(',')
75 74 else:
76 75 # if recipients are not defined we send to email_config + all admins
77 76 admins = []
78 77 for u in User.query().filter(User.admin == true()).all():
79 78 if u.email:
80 79 admins.append(u.email)
81 80 recipients = []
82 81 config_email = email_config.get('email_to')
83 82 if config_email:
84 83 recipients += [config_email]
85 84 recipients += admins
86 85
87 86 # translate our LEGACY config into the one that pyramid_mailer supports
88 87 email_conf = dict(
89 88 host=mail_server,
90 89 port=email_config.get('smtp_port', 25),
91 90 username=email_config.get('smtp_username'),
92 91 password=email_config.get('smtp_password'),
93 92
94 93 tls=str2bool(email_config.get('smtp_use_tls')),
95 94 ssl=str2bool(email_config.get('smtp_use_ssl')),
96 95
97 96 # SSL key file
98 97 # keyfile='',
99 98
100 99 # SSL certificate file
101 100 # certfile='',
102 101
103 102 # Location of maildir
104 103 # queue_path='',
105 104
106 105 default_sender=email_config.get('app_email_from', 'RhodeCode-noreply@rhodecode.com'),
107 106
108 107 debug=str2bool(email_config.get('smtp_debug')),
109 108 # /usr/sbin/sendmail Sendmail executable
110 109 # sendmail_app='',
111 110
112 111 # {sendmail_app} -t -i -f {sender} Template for sendmail execution
113 112 # sendmail_template='',
114 113 )
115 114
116 115 if extra_headers is None:
117 116 extra_headers = {}
118 117
119 118 extra_headers.setdefault('Date', formatdate(time.time()))
120 119
121 120 if 'thread_ids' in extra_headers:
122 121 thread_ids = extra_headers.pop('thread_ids')
123 122 extra_headers['References'] = ' '.join('<{}>'.format(t) for t in thread_ids)
124 123
125 124 try:
126 125 mailer = Mailer(**email_conf)
127 126
128 127 message = Message(subject=subject,
129 128 sender=email_conf['default_sender'],
130 129 recipients=recipients,
131 130 body=body, html=html_body,
132 131 extra_headers=extra_headers)
133 132 mailer.send_immediately(message)
134 133 statsd = StatsdClient.statsd
135 134 if statsd:
136 135 statsd.incr('rhodecode_email_sent_total')
137 136
138 137 except Exception:
139 138 log.exception('Mail sending failed')
140 139 return False
141 140 return True
142 141
143 142
144 143 @async_task(ignore_result=True, base=RequestContextTask)
145 144 def create_repo(form_data, cur_user):
146 145 from rhodecode.model.repo import RepoModel
147 146 from rhodecode.model.user import UserModel
148 147 from rhodecode.model.scm import ScmModel
149 148 from rhodecode.model.settings import SettingsModel
150 149
151 150 log = get_logger(create_repo)
152 151
153 152 cur_user = UserModel()._get_user(cur_user)
154 153 owner = cur_user
155 154
156 155 repo_name = form_data['repo_name']
157 156 repo_name_full = form_data['repo_name_full']
158 157 repo_type = form_data['repo_type']
159 158 description = form_data['repo_description']
160 159 private = form_data['repo_private']
161 160 clone_uri = form_data.get('clone_uri')
162 161 repo_group = safe_int(form_data['repo_group'])
163 162 copy_fork_permissions = form_data.get('copy_permissions')
164 163 copy_group_permissions = form_data.get('repo_copy_permissions')
165 164 fork_of = form_data.get('fork_parent_id')
166 165 state = form_data.get('repo_state', Repository.STATE_PENDING)
167 166
168 167 # repo creation defaults, private and repo_type are filled in form
169 168 defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
170 169 enable_statistics = form_data.get(
171 170 'enable_statistics', defs.get('repo_enable_statistics'))
172 171 enable_locking = form_data.get(
173 172 'enable_locking', defs.get('repo_enable_locking'))
174 173 enable_downloads = form_data.get(
175 174 'enable_downloads', defs.get('repo_enable_downloads'))
176 175
177 176 # set landing rev based on default branches for SCM
178 177 landing_ref, _label = ScmModel.backend_landing_ref(repo_type)
179 178
180 179 try:
181 180 RepoModel()._create_repo(
182 181 repo_name=repo_name_full,
183 182 repo_type=repo_type,
184 183 description=description,
185 184 owner=owner,
186 185 private=private,
187 186 clone_uri=clone_uri,
188 187 repo_group=repo_group,
189 188 landing_rev=landing_ref,
190 189 fork_of=fork_of,
191 190 copy_fork_permissions=copy_fork_permissions,
192 191 copy_group_permissions=copy_group_permissions,
193 192 enable_statistics=enable_statistics,
194 193 enable_locking=enable_locking,
195 194 enable_downloads=enable_downloads,
196 195 state=state
197 196 )
198 197 Session().commit()
199 198
200 199 # now create this repo on Filesystem
201 200 RepoModel()._create_filesystem_repo(
202 201 repo_name=repo_name,
203 202 repo_type=repo_type,
204 203 repo_group=RepoModel()._get_repo_group(repo_group),
205 204 clone_uri=clone_uri,
206 205 )
207 206 repo = Repository.get_by_repo_name(repo_name_full)
208 207 hooks_base.create_repository(created_by=owner.username, **repo.get_dict())
209 208
210 209 # update repo commit caches initially
211 210 repo.update_commit_cache()
212 211
213 212 # set new created state
214 213 repo.set_state(Repository.STATE_CREATED)
215 214 repo_id = repo.repo_id
216 215 repo_data = repo.get_api_data()
217 216
218 217 audit_logger.store(
219 218 'repo.create', action_data={'data': repo_data},
220 219 user=cur_user,
221 220 repo=audit_logger.RepoWrap(repo_name=repo_name, repo_id=repo_id))
222 221
223 222 Session().commit()
224 223
225 224 PermissionModel().trigger_permission_flush()
226 225
227 226 except Exception as e:
228 227 log.warning('Exception occurred when creating repository, '
229 228 'doing cleanup...', exc_info=True)
230 229 if isinstance(e, IntegrityError):
231 230 Session().rollback()
232 231
233 232 # rollback things manually !
234 233 repo = Repository.get_by_repo_name(repo_name_full)
235 234 if repo:
236 235 Repository.delete(repo.repo_id)
237 236 Session().commit()
238 237 RepoModel()._delete_filesystem_repo(repo)
239 238 log.info('Cleanup of repo %s finished', repo_name_full)
240 239 raise
241 240
242 241 return True
243 242
244 243
245 244 @async_task(ignore_result=True, base=RequestContextTask)
246 245 def create_repo_fork(form_data, cur_user):
247 246 """
248 247 Creates a fork of a repository using internal VCS methods
249 248 """
250 249 from rhodecode.model.repo import RepoModel
251 250 from rhodecode.model.user import UserModel
252 251
253 252 log = get_logger(create_repo_fork)
254 253
255 254 cur_user = UserModel()._get_user(cur_user)
256 255 owner = cur_user
257 256
258 257 repo_name = form_data['repo_name'] # fork in this case
259 258 repo_name_full = form_data['repo_name_full']
260 259 repo_type = form_data['repo_type']
261 260 description = form_data['description']
262 261 private = form_data['private']
263 262 clone_uri = form_data.get('clone_uri')
264 263 repo_group = safe_int(form_data['repo_group'])
265 264 landing_ref = form_data['landing_rev']
266 265 copy_fork_permissions = form_data.get('copy_permissions')
267 266 fork_id = safe_int(form_data.get('fork_parent_id'))
268 267
269 268 try:
270 269 fork_of = RepoModel()._get_repo(fork_id)
271 270 RepoModel()._create_repo(
272 271 repo_name=repo_name_full,
273 272 repo_type=repo_type,
274 273 description=description,
275 274 owner=owner,
276 275 private=private,
277 276 clone_uri=clone_uri,
278 277 repo_group=repo_group,
279 278 landing_rev=landing_ref,
280 279 fork_of=fork_of,
281 280 copy_fork_permissions=copy_fork_permissions
282 281 )
283 282
284 283 Session().commit()
285 284
286 285 base_path = Repository.base_path()
287 286 source_repo_path = os.path.join(base_path, fork_of.repo_name)
288 287
289 288 # now create this repo on Filesystem
290 289 RepoModel()._create_filesystem_repo(
291 290 repo_name=repo_name,
292 291 repo_type=repo_type,
293 292 repo_group=RepoModel()._get_repo_group(repo_group),
294 293 clone_uri=source_repo_path,
295 294 )
296 295 repo = Repository.get_by_repo_name(repo_name_full)
297 296 hooks_base.create_repository(created_by=owner.username, **repo.get_dict())
298 297
299 298 # update repo commit caches initially
300 299 config = repo._config
301 300 config.set('extensions', 'largefiles', '')
302 301 repo.update_commit_cache(config=config)
303 302
304 303 # set new created state
305 304 repo.set_state(Repository.STATE_CREATED)
306 305
307 306 repo_id = repo.repo_id
308 307 repo_data = repo.get_api_data()
309 308 audit_logger.store(
310 309 'repo.fork', action_data={'data': repo_data},
311 310 user=cur_user,
312 311 repo=audit_logger.RepoWrap(repo_name=repo_name, repo_id=repo_id))
313 312
314 313 Session().commit()
315 314 except Exception as e:
316 315 log.warning('Exception occurred when forking repository, '
317 316 'doing cleanup...', exc_info=True)
318 317 if isinstance(e, IntegrityError):
319 318 Session().rollback()
320 319
321 320 # rollback things manually !
322 321 repo = Repository.get_by_repo_name(repo_name_full)
323 322 if repo:
324 323 Repository.delete(repo.repo_id)
325 324 Session().commit()
326 325 RepoModel()._delete_filesystem_repo(repo)
327 326 log.info('Cleanup of repo %s finished', repo_name_full)
328 327 raise
329 328
330 329 return True
331 330
332 331
333 332 @async_task(ignore_result=True, base=RequestContextTask)
334 333 def repo_maintenance(repoid):
335 334 from rhodecode.lib import repo_maintenance as repo_maintenance_lib
336 335 log = get_logger(repo_maintenance)
337 336 repo = Repository.get_by_id_or_repo_name(repoid)
338 337 if repo:
339 338 maintenance = repo_maintenance_lib.RepoMaintenance()
340 339 tasks = maintenance.get_tasks_for_repo(repo)
341 340 log.debug('Executing %s tasks on repo `%s`', tasks, repoid)
342 341 executed_types = maintenance.execute(repo)
343 342 log.debug('Got execution results %s', executed_types)
344 343 else:
345 344 log.debug('Repo `%s` not found or without a clone_url', repoid)
346 345
347 346
348 347 @async_task(ignore_result=True, base=RequestContextTask)
349 348 def check_for_update(send_email_notification=True, email_recipients=None):
350 349 from rhodecode.model.update import UpdateModel
351 350 from rhodecode.model.notification import EmailNotificationModel
352 351
353 352 log = get_logger(check_for_update)
354 353 update_url = UpdateModel().get_update_url()
355 354 cur_ver = rhodecode.__version__
356 355
357 356 try:
358 357 data = UpdateModel().get_update_data(update_url)
359 358
360 359 current_ver = UpdateModel().get_stored_version(fallback=cur_ver)
361 360 latest_ver = data['versions'][0]['version']
362 361 UpdateModel().store_version(latest_ver)
363 362
364 363 if send_email_notification:
365 364 log.debug('Send email notification is enabled. '
366 365 'Current RhodeCode version: %s, latest known: %s', current_ver, latest_ver)
367 366 if UpdateModel().is_outdated(current_ver, latest_ver):
368 367
369 368 email_kwargs = {
370 369 'current_ver': current_ver,
371 370 'latest_ver': latest_ver,
372 371 }
373 372
374 373 (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email(
375 374 EmailNotificationModel.TYPE_UPDATE_AVAILABLE, **email_kwargs)
376 375
377 376 email_recipients = aslist(email_recipients, sep=',') or \
378 377 [user.email for user in User.get_all_super_admins()]
379 378 run_task(send_email, email_recipients, subject,
380 379 email_body_plaintext, email_body)
381 380
382 381 except Exception:
383 382 log.exception('Failed to check for update')
384 383 raise
385 384
386 385
387 386 def sync_last_update_for_objects(*args, **kwargs):
388 387 skip_repos = kwargs.get('skip_repos')
389 388 if not skip_repos:
390 389 repos = Repository.query() \
391 390 .order_by(Repository.group_id.asc())
392 391
393 392 for repo in repos:
394 393 repo.update_commit_cache()
395 394
396 395 skip_groups = kwargs.get('skip_groups')
397 396 if not skip_groups:
398 397 repo_groups = RepoGroup.query() \
399 398 .filter(RepoGroup.group_parent_id == None)
400 399
401 400 for root_gr in repo_groups:
402 401 for repo_gr in reversed(root_gr.recursive_groups()):
403 402 repo_gr.update_commit_cache()
404 403
405 404
406 405 @async_task(ignore_result=True, base=RequestContextTask)
407 406 def sync_last_update(*args, **kwargs):
408 407 sync_last_update_for_objects(*args, **kwargs)
409 408
410 409
411 410 @async_task(ignore_result=False)
412 411 def beat_check(*args, **kwargs):
413 412 log = get_logger(beat_check)
414 413 log.info('%r: Got args: %r and kwargs %r', beat_check, args, kwargs)
415 414 return time.time()
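
Stripped of the celery plumbing, send_email() above is mostly a translation layer from the legacy smtp_* ini keys to pyramid_mailer. Its core looks roughly like the sketch below; the server values, sender and recipients are placeholders, and the literal 'true' check stands in for the str2bool() helper used above:

.. code-block:: python

    import time
    from email.utils import formatdate

    from pyramid_mailer.mailer import Mailer
    from pyramid_mailer.message import Message

    legacy_config = {
        'smtp_server': 'smtp.example.com',
        'smtp_port': '25',
        'smtp_username': None,
        'smtp_password': None,
        'smtp_use_tls': 'true',
        'app_email_from': 'noreply@example.com',
    }

    mailer = Mailer(
        host=legacy_config['smtp_server'],
        port=int(legacy_config['smtp_port']),
        username=legacy_config['smtp_username'],
        password=legacy_config['smtp_password'],
        tls=legacy_config['smtp_use_tls'] == 'true',
        default_sender=legacy_config['app_email_from'],
    )

    message = Message(
        subject='[RhodeCode] test',
        sender=legacy_config['app_email_from'],
        recipients=['admin@example.com'],
        body='plain text body',
        html='<b>html body</b>',
        extra_headers={'Date': formatdate(time.time())},
    )
    mailer.send_immediately(message)
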
@@ -1,798 +1,797 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22 import difflib
23 23 from itertools import groupby
24 24
25 25 from pygments import lex
26 26 from pygments.formatters.html import _get_ttype_class as pygment_token_class
27 27 from pygments.lexers.special import TextLexer, Token
28 28 from pygments.lexers import get_lexer_by_name
29 from pyramid import compat
30 29
31 30 from rhodecode.lib.helpers import (
32 31 get_lexer_for_filenode, html_escape, get_custom_lexer)
33 32 from rhodecode.lib.utils2 import AttributeDict, StrictAttributeDict, safe_unicode
34 33 from rhodecode.lib.vcs.nodes import FileNode
35 34 from rhodecode.lib.vcs.exceptions import VCSError, NodeDoesNotExistError
36 35 from rhodecode.lib.diff_match_patch import diff_match_patch
37 36 from rhodecode.lib.diffs import LimitedDiffContainer, DEL_FILENODE, BIN_FILENODE
38 37
39 38
40 39 plain_text_lexer = get_lexer_by_name(
41 40 'text', stripall=False, stripnl=False, ensurenl=False)
42 41
43 42
44 43 log = logging.getLogger(__name__)
45 44
46 45
47 46 def filenode_as_lines_tokens(filenode, lexer=None):
48 47 org_lexer = lexer
49 48 lexer = lexer or get_lexer_for_filenode(filenode)
50 49 log.debug('Generating file node pygment tokens for %s, %s, org_lexer:%s',
51 50 lexer, filenode, org_lexer)
52 51 content = filenode.content
53 52 tokens = tokenize_string(content, lexer)
54 53 lines = split_token_stream(tokens, content)
55 54 rv = list(lines)
56 55 return rv
57 56
58 57
59 58 def tokenize_string(content, lexer):
60 59 """
61 60 Use pygments to tokenize some content based on a lexer
62 61 ensuring all original new lines and whitespace is preserved
63 62 """
64 63
65 64 lexer.stripall = False
66 65 lexer.stripnl = False
67 66 lexer.ensurenl = False
68 67
69 68 if isinstance(lexer, TextLexer):
70 69 lexed = [(Token.Text, content)]
71 70 else:
72 71 lexed = lex(content, lexer)
73 72
74 73 for token_type, token_text in lexed:
75 74 yield pygment_token_class(token_type), token_text
76 75
77 76
78 77 def split_token_stream(tokens, content):
79 78 """
80 79 Take a list of (TokenType, text) tuples and split them by a string
81 80
82 81 split_token_stream([(TEXT, 'some\ntext'), (TEXT, 'more\n')])
83 82 [(TEXT, 'some'), (TEXT, 'text'),
84 83 (TEXT, 'more'), (TEXT, 'text')]
85 84 """
86 85
87 86 token_buffer = []
88 87 for token_class, token_text in tokens:
89 88 parts = token_text.split('\n')
90 89 for part in parts[:-1]:
91 90 token_buffer.append((token_class, part))
92 91 yield token_buffer
93 92 token_buffer = []
94 93
95 94 token_buffer.append((token_class, parts[-1]))
96 95
97 96 if token_buffer:
98 97 yield token_buffer
99 98 elif content:
100 99 # this is a special case, we have the content, but tokenization didn't produce
101 100 # any results. This can happen if known file extensions like .css have some bogus
102 101 # unicode content without any newline characters
103 102 yield [(pygment_token_class(Token.Text), content)]
104 103
105 104
106 105 def filenode_as_annotated_lines_tokens(filenode):
107 106 """
108 107 Take a file node and return a list of annotations => lines, if no annotation
109 108 is found, it will be None.
110 109
111 110 eg:
112 111
113 112 [
114 113 (annotation1, [
115 114 (1, line1_tokens_list),
116 115 (2, line2_tokens_list),
117 116 ]),
118 117 (annotation2, [
119 118 (3, line1_tokens_list),
120 119 ]),
121 120 (None, [
122 121 (4, line1_tokens_list),
123 122 ]),
124 123 (annotation1, [
125 124 (5, line1_tokens_list),
126 125 (6, line2_tokens_list),
127 126 ])
128 127 ]
129 128 """
130 129
131 130 commit_cache = {} # cache commit_getter lookups
132 131
133 132 def _get_annotation(commit_id, commit_getter):
134 133 if commit_id not in commit_cache:
135 134 commit_cache[commit_id] = commit_getter()
136 135 return commit_cache[commit_id]
137 136
138 137 annotation_lookup = {
139 138 line_no: _get_annotation(commit_id, commit_getter)
140 139 for line_no, commit_id, commit_getter, line_content
141 140 in filenode.annotate
142 141 }
143 142
144 143 annotations_lines = ((annotation_lookup.get(line_no), line_no, tokens)
145 144 for line_no, tokens
146 145 in enumerate(filenode_as_lines_tokens(filenode), 1))
147 146
148 147 grouped_annotations_lines = groupby(annotations_lines, lambda x: x[0])
149 148
150 149 for annotation, group in grouped_annotations_lines:
151 150 yield (
152 151 annotation, [(line_no, tokens)
153 152 for (_, line_no, tokens) in group]
154 153 )
155 154
156 155
157 156 def render_tokenstream(tokenstream):
158 157 result = []
159 158 for token_class, token_ops_texts in rollup_tokenstream(tokenstream):
160 159
161 160 if token_class:
162 161 result.append(u'<span class="%s">' % token_class)
163 162 else:
164 163 result.append(u'<span>')
165 164
166 165 for op_tag, token_text in token_ops_texts:
167 166
168 167 if op_tag:
169 168 result.append(u'<%s>' % op_tag)
170 169
171 170 # NOTE(marcink): with mixed encodings html_escape can fail; in that case
172 171 # we force token_text to unicode, which keeps the data "correct" even at
173 172 # the cost of how it renders
174 173 try:
175 174 escaped_text = html_escape(token_text)
176 175 except TypeError:
177 176 escaped_text = html_escape(safe_unicode(token_text))
178 177
179 178 # TODO: dan: investigate showing hidden characters like space/nl/tab
180 179 # escaped_text = escaped_text.replace(' ', '<sp> </sp>')
181 180 # escaped_text = escaped_text.replace('\n', '<nl>\n</nl>')
182 181 # escaped_text = escaped_text.replace('\t', '<tab>\t</tab>')
183 182
184 183 result.append(escaped_text)
185 184
186 185 if op_tag:
187 186 result.append(u'</%s>' % op_tag)
188 187
189 188 result.append(u'</span>')
190 189
191 190 html = ''.join(result)
192 191 return html
193 192
194 193
195 194 def rollup_tokenstream(tokenstream):
196 195 """
197 196 Group a token stream of the format:
198 197
199 198 ('class', 'op', 'text')
200 199 or
201 200 ('class', 'text')
202 201
203 202 into
204 203
205 204 [('class1',
206 205 [('op1', 'text'),
207 206 ('op2', 'text')]),
208 207 ('class2',
209 208 [('op3', 'text')])]
210 209
211 210 This is used to get the minimal tags necessary when
212 211 rendering to html, e.g. for a token stream:
213 212
214 213 <span class="A"><ins>he</ins>llo</span>
215 214 vs
216 215 <span class="A"><ins>he</ins></span><span class="A">llo</span>
217 216
218 217 If a 2 tuple is passed in, the output op will be an empty string.
219 218
220 219 eg:
221 220
222 221 >>> rollup_tokenstream([('classA', '', 'h'),
223 222 ('classA', 'del', 'ell'),
224 223 ('classA', '', 'o'),
225 224 ('classB', '', ' '),
226 225 ('classA', '', 'the'),
227 226 ('classA', '', 're'),
228 227 ])
229 228
230 229 [('classA', [('', 'h'), ('del', 'ell'), ('', 'o')],
231 230 ('classB', [('', ' ')],
232 231 ('classA', [('', 'there')]]
233 232
234 233 """
235 234 if tokenstream and len(tokenstream[0]) == 2:
236 235 tokenstream = ((t[0], '', t[1]) for t in tokenstream)
237 236
238 237 result = []
239 238 for token_class, op_list in groupby(tokenstream, lambda t: t[0]):
240 239 ops = []
241 240 for token_op, token_text_list in groupby(op_list, lambda o: o[1]):
242 241 text_buffer = []
243 242 for t_class, t_op, t_text in token_text_list:
244 243 text_buffer.append(t_text)
245 244 ops.append((token_op, ''.join(text_buffer)))
246 245 result.append((token_class, ops))
247 246 return result
248 247
249 248
250 249 def tokens_diff(old_tokens, new_tokens, use_diff_match_patch=True):
251 250 """
252 251 Converts a list of (token_class, token_text) tuples to a list of
253 252 (token_class, token_op, token_text) tuples where token_op is one of
254 253 ('ins', 'del', '')
255 254
256 255 :param old_tokens: list of (token_class, token_text) tuples of old line
257 256 :param new_tokens: list of (token_class, token_text) tuples of new line
258 257 :param use_diff_match_patch: boolean, will use google's diff match patch
259 258 library which has options to 'smooth' out the character by character
260 259 differences making nicer ins/del blocks
261 260 """
262 261
263 262 old_tokens_result = []
264 263 new_tokens_result = []
265 264
266 265 similarity = difflib.SequenceMatcher(None,
267 266 ''.join(token_text for token_class, token_text in old_tokens),
268 267 ''.join(token_text for token_class, token_text in new_tokens)
269 268 ).ratio()
270 269
271 270 if similarity < 0.6: # return, the blocks are too different
272 271 for token_class, token_text in old_tokens:
273 272 old_tokens_result.append((token_class, '', token_text))
274 273 for token_class, token_text in new_tokens:
275 274 new_tokens_result.append((token_class, '', token_text))
276 275 return old_tokens_result, new_tokens_result, similarity
277 276
278 277 token_sequence_matcher = difflib.SequenceMatcher(None,
279 278 [x[1] for x in old_tokens],
280 279 [x[1] for x in new_tokens])
281 280
282 281 for tag, o1, o2, n1, n2 in token_sequence_matcher.get_opcodes():
283 282 # check the differences by token block types first to give a
284 283 # nicer "block"-level replacement vs character diffs
285 284
286 285 if tag == 'equal':
287 286 for token_class, token_text in old_tokens[o1:o2]:
288 287 old_tokens_result.append((token_class, '', token_text))
289 288 for token_class, token_text in new_tokens[n1:n2]:
290 289 new_tokens_result.append((token_class, '', token_text))
291 290 elif tag == 'delete':
292 291 for token_class, token_text in old_tokens[o1:o2]:
293 292 old_tokens_result.append((token_class, 'del', token_text))
294 293 elif tag == 'insert':
295 294 for token_class, token_text in new_tokens[n1:n2]:
296 295 new_tokens_result.append((token_class, 'ins', token_text))
297 296 elif tag == 'replace':
298 297 # if same type token blocks must be replaced, do a diff on the
299 298 # characters in the token blocks to show individual changes
300 299
301 300 old_char_tokens = []
302 301 new_char_tokens = []
303 302 for token_class, token_text in old_tokens[o1:o2]:
304 303 for char in token_text:
305 304 old_char_tokens.append((token_class, char))
306 305
307 306 for token_class, token_text in new_tokens[n1:n2]:
308 307 for char in token_text:
309 308 new_char_tokens.append((token_class, char))
310 309
311 310 old_string = ''.join([token_text for
312 311 token_class, token_text in old_char_tokens])
313 312 new_string = ''.join([token_text for
314 313 token_class, token_text in new_char_tokens])
315 314
316 315 char_sequence = difflib.SequenceMatcher(
317 316 None, old_string, new_string)
318 317 copcodes = char_sequence.get_opcodes()
319 318 obuffer, nbuffer = [], []
320 319
321 320 if use_diff_match_patch:
322 321 dmp = diff_match_patch()
323 322 dmp.Diff_EditCost = 11 # TODO: dan: extract this to a setting
324 323 reps = dmp.diff_main(old_string, new_string)
325 324 dmp.diff_cleanupEfficiency(reps)
326 325
327 326 a, b = 0, 0
328 327 for op, rep in reps:
329 328 l = len(rep)
330 329 if op == 0:
331 330 for i, c in enumerate(rep):
332 331 obuffer.append((old_char_tokens[a+i][0], '', c))
333 332 nbuffer.append((new_char_tokens[b+i][0], '', c))
334 333 a += l
335 334 b += l
336 335 elif op == -1:
337 336 for i, c in enumerate(rep):
338 337 obuffer.append((old_char_tokens[a+i][0], 'del', c))
339 338 a += l
340 339 elif op == 1:
341 340 for i, c in enumerate(rep):
342 341 nbuffer.append((new_char_tokens[b+i][0], 'ins', c))
343 342 b += l
344 343 else:
345 344 for ctag, co1, co2, cn1, cn2 in copcodes:
346 345 if ctag == 'equal':
347 346 for token_class, token_text in old_char_tokens[co1:co2]:
348 347 obuffer.append((token_class, '', token_text))
349 348 for token_class, token_text in new_char_tokens[cn1:cn2]:
350 349 nbuffer.append((token_class, '', token_text))
351 350 elif ctag == 'delete':
352 351 for token_class, token_text in old_char_tokens[co1:co2]:
353 352 obuffer.append((token_class, 'del', token_text))
354 353 elif ctag == 'insert':
355 354 for token_class, token_text in new_char_tokens[cn1:cn2]:
356 355 nbuffer.append((token_class, 'ins', token_text))
357 356 elif ctag == 'replace':
358 357 for token_class, token_text in old_char_tokens[co1:co2]:
359 358 obuffer.append((token_class, 'del', token_text))
360 359 for token_class, token_text in new_char_tokens[cn1:cn2]:
361 360 nbuffer.append((token_class, 'ins', token_text))
362 361
363 362 old_tokens_result.extend(obuffer)
364 363 new_tokens_result.extend(nbuffer)
365 364
366 365 return old_tokens_result, new_tokens_result, similarity
367 366
368 367
369 368 def diffset_node_getter(commit):
370 369 def get_node(fname):
371 370 try:
372 371 return commit.get_node(fname)
373 372 except NodeDoesNotExistError:
374 373 return None
375 374
376 375 return get_node
377 376
378 377
379 378 class DiffSet(object):
380 379 """
381 380 An object for parsing the diff result from diffs.DiffProcessor and
382 381 adding highlighting, side by side/unified renderings and line diffs
383 382 """
384 383
385 384 HL_REAL = 'REAL' # highlights using original file, slow
386 385 HL_FAST = 'FAST' # highlights using just the line, fast but not correct
387 386 # in the case of multiline code
388 387 HL_NONE = 'NONE' # no highlighting, fastest
389 388
390 389 def __init__(self, highlight_mode=HL_REAL, repo_name=None,
391 390 source_repo_name=None,
392 391 source_node_getter=lambda filename: None,
393 392 target_repo_name=None,
394 393 target_node_getter=lambda filename: None,
395 394 source_nodes=None, target_nodes=None,
396 395 # files over this size will use fast highlighting
397 396 max_file_size_limit=150 * 1024,
398 397 ):
399 398
400 399 self.highlight_mode = highlight_mode
401 400 self.highlighted_filenodes = {
402 401 'before': {},
403 402 'after': {}
404 403 }
405 404 self.source_node_getter = source_node_getter
406 405 self.target_node_getter = target_node_getter
407 406 self.source_nodes = source_nodes or {}
408 407 self.target_nodes = target_nodes or {}
409 408 self.repo_name = repo_name
410 409 self.target_repo_name = target_repo_name or repo_name
411 410 self.source_repo_name = source_repo_name or repo_name
412 411 self.max_file_size_limit = max_file_size_limit
413 412
414 413 def render_patchset(self, patchset, source_ref=None, target_ref=None):
415 414 diffset = AttributeDict(dict(
416 415 lines_added=0,
417 416 lines_deleted=0,
418 417 changed_files=0,
419 418 files=[],
420 419 file_stats={},
421 420 limited_diff=isinstance(patchset, LimitedDiffContainer),
422 421 repo_name=self.repo_name,
423 422 target_repo_name=self.target_repo_name,
424 423 source_repo_name=self.source_repo_name,
425 424 source_ref=source_ref,
426 425 target_ref=target_ref,
427 426 ))
428 427 for patch in patchset:
429 428 diffset.file_stats[patch['filename']] = patch['stats']
430 429 filediff = self.render_patch(patch)
431 430 filediff.diffset = StrictAttributeDict(dict(
432 431 source_ref=diffset.source_ref,
433 432 target_ref=diffset.target_ref,
434 433 repo_name=diffset.repo_name,
435 434 source_repo_name=diffset.source_repo_name,
436 435 target_repo_name=diffset.target_repo_name,
437 436 ))
438 437 diffset.files.append(filediff)
439 438 diffset.changed_files += 1
440 439 if not patch['stats']['binary']:
441 440 diffset.lines_added += patch['stats']['added']
442 441 diffset.lines_deleted += patch['stats']['deleted']
443 442
444 443 return diffset
445 444
446 445 _lexer_cache = {}
447 446
448 447 def _get_lexer_for_filename(self, filename, filenode=None):
449 448 # cached because we might need to call it twice for source/target
450 449 if filename not in self._lexer_cache:
451 450 if filenode:
452 451 lexer = filenode.lexer
453 452 extension = filenode.extension
454 453 else:
455 454 lexer = FileNode.get_lexer(filename=filename)
456 455 extension = filename.split('.')[-1]
457 456
458 457 lexer = get_custom_lexer(extension) or lexer
459 458 self._lexer_cache[filename] = lexer
460 459 return self._lexer_cache[filename]
461 460
462 461 def render_patch(self, patch):
463 462 log.debug('rendering diff for %r', patch['filename'])
464 463
465 464 source_filename = patch['original_filename']
466 465 target_filename = patch['filename']
467 466
468 467 source_lexer = plain_text_lexer
469 468 target_lexer = plain_text_lexer
470 469
471 470 if not patch['stats']['binary']:
472 471 node_hl_mode = self.HL_NONE if patch['chunks'] == [] else None
473 472 hl_mode = node_hl_mode or self.highlight_mode
474 473
475 474 if hl_mode == self.HL_REAL:
476 475 if (source_filename and patch['operation'] in ('D', 'M')
477 476 and source_filename not in self.source_nodes):
478 477 self.source_nodes[source_filename] = (
479 478 self.source_node_getter(source_filename))
480 479
481 480 if (target_filename and patch['operation'] in ('A', 'M')
482 481 and target_filename not in self.target_nodes):
483 482 self.target_nodes[target_filename] = (
484 483 self.target_node_getter(target_filename))
485 484
486 485 elif hl_mode == self.HL_FAST:
487 486 source_lexer = self._get_lexer_for_filename(source_filename)
488 487 target_lexer = self._get_lexer_for_filename(target_filename)
489 488
490 489 source_file = self.source_nodes.get(source_filename, source_filename)
491 490 target_file = self.target_nodes.get(target_filename, target_filename)
492 491 raw_id_uid = ''
493 492 if self.source_nodes.get(source_filename):
494 493 raw_id_uid = self.source_nodes[source_filename].commit.raw_id
495 494
496 495 if not raw_id_uid and self.target_nodes.get(target_filename):
497 496 # in case this is a new file we only have it in target
498 497 raw_id_uid = self.target_nodes[target_filename].commit.raw_id
499 498
500 499 source_filenode, target_filenode = None, None
501 500
502 501 # TODO: dan: FileNode.lexer works on the content of the file - which
503 502 # can be slow - issue #4289 explains a lexer clean up - which once
504 503 # done can allow caching a lexer for a filenode to avoid the file lookup
505 504 if isinstance(source_file, FileNode):
506 505 source_filenode = source_file
507 506 #source_lexer = source_file.lexer
508 507 source_lexer = self._get_lexer_for_filename(source_filename)
509 508 source_file.lexer = source_lexer
510 509
511 510 if isinstance(target_file, FileNode):
512 511 target_filenode = target_file
513 512 #target_lexer = target_file.lexer
514 513 target_lexer = self._get_lexer_for_filename(target_filename)
515 514 target_file.lexer = target_lexer
516 515
517 516 source_file_path, target_file_path = None, None
518 517
519 518 if source_filename != '/dev/null':
520 519 source_file_path = source_filename
521 520 if target_filename != '/dev/null':
522 521 target_file_path = target_filename
523 522
524 523 source_file_type = source_lexer.name
525 524 target_file_type = target_lexer.name
526 525
527 526 filediff = AttributeDict({
528 527 'source_file_path': source_file_path,
529 528 'target_file_path': target_file_path,
530 529 'source_filenode': source_filenode,
531 530 'target_filenode': target_filenode,
532 531 'source_file_type': target_file_type,
533 532 'target_file_type': source_file_type,
534 533 'patch': {'filename': patch['filename'], 'stats': patch['stats']},
535 534 'operation': patch['operation'],
536 535 'source_mode': patch['stats']['old_mode'],
537 536 'target_mode': patch['stats']['new_mode'],
538 537 'limited_diff': patch['is_limited_diff'],
539 538 'hunks': [],
540 539 'hunk_ops': None,
541 540 'diffset': self,
542 541 'raw_id': raw_id_uid,
543 542 })
544 543
545 544 file_chunks = patch['chunks'][1:]
546 545 for i, hunk in enumerate(file_chunks, 1):
547 546 hunkbit = self.parse_hunk(hunk, source_file, target_file)
548 547 hunkbit.source_file_path = source_file_path
549 548 hunkbit.target_file_path = target_file_path
550 549 hunkbit.index = i
551 550 filediff.hunks.append(hunkbit)
552 551
553 552 # Simulate hunk on OPS type line which doesn't really contain any diff
554 553 # this allows commenting on those
555 554 if not file_chunks:
556 555 actions = []
557 556 for op_id, op_text in filediff.patch['stats']['ops'].items():
558 557 if op_id == DEL_FILENODE:
559 558 actions.append(u'file was removed')
560 559 elif op_id == BIN_FILENODE:
561 560 actions.append(u'binary diff hidden')
562 561 else:
563 562 actions.append(safe_unicode(op_text))
564 563 action_line = u'NO CONTENT: ' + \
565 564 (u', '.join(actions) or u'UNDEFINED_ACTION')
566 565
567 566 hunk_ops = {'source_length': 0, 'source_start': 0,
568 567 'lines': [
569 568 {'new_lineno': 0, 'old_lineno': 1,
570 569 'action': 'unmod-no-hl', 'line': action_line}
571 570 ],
572 571 'section_header': u'', 'target_start': 1, 'target_length': 1}
573 572
574 573 hunkbit = self.parse_hunk(hunk_ops, source_file, target_file)
575 574 hunkbit.source_file_path = source_file_path
576 575 hunkbit.target_file_path = target_file_path
577 576 filediff.hunk_ops = hunkbit
578 577 return filediff
579 578
580 579 def parse_hunk(self, hunk, source_file, target_file):
581 580 result = AttributeDict(dict(
582 581 source_start=hunk['source_start'],
583 582 source_length=hunk['source_length'],
584 583 target_start=hunk['target_start'],
585 584 target_length=hunk['target_length'],
586 585 section_header=hunk['section_header'],
587 586 lines=[],
588 587 ))
589 588 before, after = [], []
590 589
591 590 for line in hunk['lines']:
592 591 if line['action'] in ['unmod', 'unmod-no-hl']:
593 592 no_hl = line['action'] == 'unmod-no-hl'
594 593 result.lines.extend(
595 594 self.parse_lines(before, after, source_file, target_file, no_hl=no_hl))
596 595 after.append(line)
597 596 before.append(line)
598 597 elif line['action'] == 'add':
599 598 after.append(line)
600 599 elif line['action'] == 'del':
601 600 before.append(line)
602 601 elif line['action'] == 'old-no-nl':
603 602 before.append(line)
604 603 elif line['action'] == 'new-no-nl':
605 604 after.append(line)
606 605
607 606 all_actions = [x['action'] for x in after] + [x['action'] for x in before]
608 607 no_hl = {x for x in all_actions} == {'unmod-no-hl'}
609 608 result.lines.extend(
610 609 self.parse_lines(before, after, source_file, target_file, no_hl=no_hl))
611 610 # NOTE(marcink): we must keep list() call here so we can cache the result...
612 611 result.unified = list(self.as_unified(result.lines))
613 612 result.sideside = result.lines
614 613
615 614 return result
616 615
617 616 def parse_lines(self, before_lines, after_lines, source_file, target_file,
618 617 no_hl=False):
619 618 # TODO: dan: investigate doing the diff comparison and fast highlighting
620 619 # on the entire before and after buffered block lines rather than by
621 620 # line, this means we can get better 'fast' highlighting if the context
622 621 # allows it - eg.
623 622 # line 4: """
624 623 # line 5: this gets highlighted as a string
625 624 # line 6: """
626 625
627 626 lines = []
628 627
629 628 before_newline = AttributeDict()
630 629 after_newline = AttributeDict()
631 630 if before_lines and before_lines[-1]['action'] == 'old-no-nl':
632 631 before_newline_line = before_lines.pop(-1)
633 632 before_newline.content = '\n {}'.format(
634 633 render_tokenstream(
635 634 [(x[0], '', x[1])
636 635 for x in [('nonl', before_newline_line['line'])]]))
637 636
638 637 if after_lines and after_lines[-1]['action'] == 'new-no-nl':
639 638 after_newline_line = after_lines.pop(-1)
640 639 after_newline.content = '\n {}'.format(
641 640 render_tokenstream(
642 641 [(x[0], '', x[1])
643 642 for x in [('nonl', after_newline_line['line'])]]))
644 643
645 644 while before_lines or after_lines:
646 645 before, after = None, None
647 646 before_tokens, after_tokens = None, None
648 647
649 648 if before_lines:
650 649 before = before_lines.pop(0)
651 650 if after_lines:
652 651 after = after_lines.pop(0)
653 652
654 653 original = AttributeDict()
655 654 modified = AttributeDict()
656 655
657 656 if before:
658 657 if before['action'] == 'old-no-nl':
659 658 before_tokens = [('nonl', before['line'])]
660 659 else:
661 660 before_tokens = self.get_line_tokens(
662 661 line_text=before['line'], line_number=before['old_lineno'],
663 662 input_file=source_file, no_hl=no_hl, source='before')
664 663 original.lineno = before['old_lineno']
665 664 original.content = before['line']
666 665 original.action = self.action_to_op(before['action'])
667 666
668 667 original.get_comment_args = (
669 668 source_file, 'o', before['old_lineno'])
670 669
671 670 if after:
672 671 if after['action'] == 'new-no-nl':
673 672 after_tokens = [('nonl', after['line'])]
674 673 else:
675 674 after_tokens = self.get_line_tokens(
676 675 line_text=after['line'], line_number=after['new_lineno'],
677 676 input_file=target_file, no_hl=no_hl, source='after')
678 677 modified.lineno = after['new_lineno']
679 678 modified.content = after['line']
680 679 modified.action = self.action_to_op(after['action'])
681 680
682 681 modified.get_comment_args = (target_file, 'n', after['new_lineno'])
683 682
684 683 # diff the lines
685 684 if before_tokens and after_tokens:
686 685 o_tokens, m_tokens, similarity = tokens_diff(
687 686 before_tokens, after_tokens)
688 687 original.content = render_tokenstream(o_tokens)
689 688 modified.content = render_tokenstream(m_tokens)
690 689 elif before_tokens:
691 690 original.content = render_tokenstream(
692 691 [(x[0], '', x[1]) for x in before_tokens])
693 692 elif after_tokens:
694 693 modified.content = render_tokenstream(
695 694 [(x[0], '', x[1]) for x in after_tokens])
696 695
697 696 if not before_lines and before_newline:
698 697 original.content += before_newline.content
699 698 before_newline = None
700 699 if not after_lines and after_newline:
701 700 modified.content += after_newline.content
702 701 after_newline = None
703 702
704 703 lines.append(AttributeDict({
705 704 'original': original,
706 705 'modified': modified,
707 706 }))
708 707
709 708 return lines
710 709
711 710 def get_line_tokens(self, line_text, line_number, input_file=None, no_hl=False, source=''):
712 711 filenode = None
713 712 filename = None
714 713
715 if isinstance(input_file, compat.string_types):
714 if isinstance(input_file, str):
716 715 filename = input_file
717 716 elif isinstance(input_file, FileNode):
718 717 filenode = input_file
719 718 filename = input_file.unicode_path
720 719
721 720 hl_mode = self.HL_NONE if no_hl else self.highlight_mode
722 721 if hl_mode == self.HL_REAL and filenode:
723 722 lexer = self._get_lexer_for_filename(filename)
724 723 file_size_allowed = input_file.size < self.max_file_size_limit
725 724 if line_number and file_size_allowed:
726 725 return self.get_tokenized_filenode_line(input_file, line_number, lexer, source)
727 726
728 727 if hl_mode in (self.HL_REAL, self.HL_FAST) and filename:
729 728 lexer = self._get_lexer_for_filename(filename)
730 729 return list(tokenize_string(line_text, lexer))
731 730
732 731 return list(tokenize_string(line_text, plain_text_lexer))
733 732
734 733 def get_tokenized_filenode_line(self, filenode, line_number, lexer=None, source=''):
735 734
736 735 def tokenize(_filenode):
737 736 self.highlighted_filenodes[source][filenode] = filenode_as_lines_tokens(filenode, lexer)
738 737
739 738 if filenode not in self.highlighted_filenodes[source]:
740 739 tokenize(filenode)
741 740
742 741 try:
743 742 return self.highlighted_filenodes[source][filenode][line_number - 1]
744 743 except Exception:
745 744 log.exception('diff rendering error')
746 745 return [('', u'L{}: rhodecode diff rendering error'.format(line_number))]
747 746
748 747 def action_to_op(self, action):
749 748 return {
750 749 'add': '+',
751 750 'del': '-',
752 751 'unmod': ' ',
753 752 'unmod-no-hl': ' ',
754 753 'old-no-nl': ' ',
755 754 'new-no-nl': ' ',
756 755 }.get(action, action)
757 756
758 757 def as_unified(self, lines):
759 758 """
760 759 Return a generator that yields the lines of a diff in unified order
761 760 """
762 761 def generator():
763 762 buf = []
764 763 for line in lines:
765 764
766 765 if buf and not line.original or line.original.action == ' ':
767 766 for b in buf:
768 767 yield b
769 768 buf = []
770 769
771 770 if line.original:
772 771 if line.original.action == ' ':
773 772 yield (line.original.lineno, line.modified.lineno,
774 773 line.original.action, line.original.content,
775 774 line.original.get_comment_args)
776 775 continue
777 776
778 777 if line.original.action == '-':
779 778 yield (line.original.lineno, None,
780 779 line.original.action, line.original.content,
781 780 line.original.get_comment_args)
782 781
783 782 if line.modified.action == '+':
784 783 buf.append((
785 784 None, line.modified.lineno,
786 785 line.modified.action, line.modified.content,
787 786 line.modified.get_comment_args))
788 787 continue
789 788
790 789 if line.modified:
791 790 yield (None, line.modified.lineno,
792 791 line.modified.action, line.modified.content,
793 792 line.modified.get_comment_args)
794 793
795 794 for b in buf:
796 795 yield b
797 796
798 797 return generator()
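Every hunk in this commit makes the same mechanical substitution: `isinstance(x, compat.string_types)` becomes `isinstance(x, str)`. On Python 3, `pyramid.compat.string_types` was simply the tuple `(str,)`, so both checks accept exactly the same values; only byte strings fall outside them. A minimal, standalone sketch (not part of the diff) illustrating the equivalence:

.. code-block:: python

    # Standalone illustration of the substitution applied throughout this
    # commit; is_text mirrors what isinstance(x, compat.string_types) meant
    # under Python 3, where string_types was just (str,).
    def is_text(value):
        return isinstance(value, str)

    assert is_text(u'setup.py')
    assert is_text('relative/path.txt')
    assert not is_text(b'raw bytes are not text on Python 3')
    assert not is_text(None)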
@@ -1,32 +1,30 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 from pyramid import compat
22
23 21
24 22 def strip_whitespace(value):
25 23 """
26 24 Removes leading/trailing whitespace, newlines, and tabs from the value.
27 25 Implements the `colander.interface.Preparer` interface.
28 26 """
29 if isinstance(value, compat.string_types):
27 if isinstance(value, str):
30 28 return value.strip(' \t\n\r')
31 29 else:
32 30 return value
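`strip_whitespace` follows the `colander.interface.Preparer` contract, so it is normally passed as the ``preparer`` of a ``SchemaNode`` and runs before validation. A hedged usage sketch; the schema and field names below are illustrative, and an equivalent inline preparer is used so the snippet stays self-contained:

.. code-block:: python

    import colander

    def strip_whitespace(value):
        # same behaviour as the preparer defined in the hunk above
        return value.strip(' \t\n\r') if isinstance(value, str) else value

    class RepoNameSchema(colander.MappingSchema):
        repo_name = colander.SchemaNode(colander.String(),
                                        preparer=strip_whitespace)

    data = RepoNameSchema().deserialize({'repo_name': '  my-repo \n'})
    assert data['repo_name'] == 'my-repo'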
@@ -1,669 +1,668 b''
1 1 """
2 2 Schema module providing common schema operations.
3 3 """
4 4 import abc
5 5 try: # Python 3
6 6 from collections.abc import MutableMapping as DictMixin
7 7 except ImportError: # Python 2
8 8 from UserDict import DictMixin
9 9 import warnings
10 10
11 11 import sqlalchemy
12 12
13 13 from sqlalchemy.schema import ForeignKeyConstraint
14 14 from sqlalchemy.schema import UniqueConstraint
15 from pyramid import compat
16 15
17 16 from rhodecode.lib.dbmigrate.migrate.exceptions import *
18 17 from rhodecode.lib.dbmigrate.migrate.changeset import SQLA_07, SQLA_08
19 18 from rhodecode.lib.dbmigrate.migrate.changeset import util
20 19 from rhodecode.lib.dbmigrate.migrate.changeset.databases.visitor import (
21 20 get_engine_visitor, run_single_visitor)
22 21
23 22
24 23 __all__ = [
25 24 'create_column',
26 25 'drop_column',
27 26 'alter_column',
28 27 'rename_table',
29 28 'rename_index',
30 29 'ChangesetTable',
31 30 'ChangesetColumn',
32 31 'ChangesetIndex',
33 32 'ChangesetDefaultClause',
34 33 'ColumnDelta',
35 34 ]
36 35
37 36 def create_column(column, table=None, *p, **kw):
38 37 """Create a column, given the table.
39 38
40 39 API to :meth:`ChangesetColumn.create`.
41 40 """
42 41 if table is not None:
43 42 return table.create_column(column, *p, **kw)
44 43 return column.create(*p, **kw)
45 44
46 45
47 46 def drop_column(column, table=None, *p, **kw):
48 47 """Drop a column, given the table.
49 48
50 49 API to :meth:`ChangesetColumn.drop`.
51 50 """
52 51 if table is not None:
53 52 return table.drop_column(column, *p, **kw)
54 53 return column.drop(*p, **kw)
55 54
56 55
57 56 def rename_table(table, name, engine=None, **kw):
58 57 """Rename a table.
59 58
60 59 If Table instance is given, engine is not used.
61 60
62 61 API to :meth:`ChangesetTable.rename`.
63 62
64 63 :param table: Table to be renamed.
65 64 :param name: New name for Table.
66 65 :param engine: Engine instance.
67 66 :type table: string or Table instance
68 67 :type name: string
69 68 :type engine: obj
70 69 """
71 70 table = _to_table(table, engine)
72 71 table.rename(name, **kw)
73 72
74 73
75 74 def rename_index(index, name, table=None, engine=None, **kw):
76 75 """Rename an index.
77 76
78 77 If Index instance is given,
79 78 table and engine are not used.
80 79
81 80 API to :meth:`ChangesetIndex.rename`.
82 81
83 82 :param index: Index to be renamed.
84 83 :param name: New name for index.
85 84 :param table: Table to which Index is referred.
86 85 :param engine: Engine instance.
87 86 :type index: string or Index instance
88 87 :type name: string
89 88 :type table: string or Table instance
90 89 :type engine: obj
91 90 """
92 91 index = _to_index(index, table, engine)
93 92 index.rename(name, **kw)
94 93
95 94
96 95 def alter_column(*p, **k):
97 96 """Alter a column.
98 97
99 98 This is a helper function that creates a :class:`ColumnDelta` and
100 99 runs it.
101 100
102 101 :argument column:
103 102 The name of the column to be altered or a
104 103 :class:`ChangesetColumn` column representing it.
105 104
106 105 :param table:
107 106 A :class:`~sqlalchemy.schema.Table` or table name to
108 107 for the table where the column will be changed.
109 108
110 109 :param engine:
111 110 The :class:`~sqlalchemy.engine.base.Engine` to use for table
112 111 reflection and schema alterations.
113 112
114 113 :returns: A :class:`ColumnDelta` instance representing the change.
115 114
116 115
117 116 """
118 117
119 118 if 'table' not in k and isinstance(p[0], sqlalchemy.Column):
120 119 k['table'] = p[0].table
121 120 if 'engine' not in k:
122 121 k['engine'] = k['table'].bind
123 122
124 123 # deprecation
125 124 if len(p) >= 2 and isinstance(p[1], sqlalchemy.Column):
126 125 warnings.warn(
127 126 "Passing a Column object to alter_column is deprecated."
128 127 " Just pass in keyword parameters instead.",
129 128 MigrateDeprecationWarning
130 129 )
131 130 engine = k['engine']
132 131
133 132 # enough tests seem to break when metadata is always altered
134 133 # that this crutch has to be left in until they can be sorted
135 134 # out
136 135 k['alter_metadata']=True
137 136
138 137 delta = ColumnDelta(*p, **k)
139 138
140 139 visitorcallable = get_engine_visitor(engine, 'schemachanger')
141 140 engine._run_visitor(visitorcallable, delta)
142 141
143 142 return delta
144 143
145 144
146 145 def _to_table(table, engine=None):
147 146 """Return if instance of Table, else construct new with metadata"""
148 147 if isinstance(table, sqlalchemy.Table):
149 148 return table
150 149
151 150 # Given: table name, maybe an engine
152 151 meta = sqlalchemy.MetaData()
153 152 if engine is not None:
154 153 meta.bind = engine
155 154 return sqlalchemy.Table(table, meta)
156 155
157 156
158 157 def _to_index(index, table=None, engine=None):
159 158 """Return if instance of Index, else construct new with metadata"""
160 159 if isinstance(index, sqlalchemy.Index):
161 160 return index
162 161
163 162 # Given: index name; table name required
164 163 table = _to_table(table, engine)
165 164 ret = sqlalchemy.Index(index)
166 165 ret.table = table
167 166 return ret
168 167
169 168
170 169 class ColumnDelta(DictMixin, sqlalchemy.schema.SchemaItem):
171 170 """Extracts the differences between two columns/column-parameters
172 171
173 172 May receive parameters arranged in several different ways:
174 173
175 174 * **current_column, new_column, \*p, \*\*kw**
176 175 Additional parameters can be specified to override column
177 176 differences.
178 177
179 178 * **current_column, \*p, \*\*kw**
180 179 Additional parameters alter current_column. Table name is extracted
181 180 from current_column object.
182 181 Name is changed to current_column.name from current_name,
183 182 if current_name is specified.
184 183
185 184 * **current_col_name, \*p, \*\*kw**
186 185 Table kw must be specified.
187 186
188 187 :param table: Table to which the current Column should be bound.\
189 188 If table name is given, reflection will be used.
190 189 :type table: string or Table instance
191 190
192 191 :param metadata: A :class:`MetaData` instance to store
193 192 reflected table names
194 193
195 194 :param engine: When reflecting tables, either engine or metadata must \
196 195 be specified to acquire engine object.
197 196 :type engine: :class:`Engine` instance
198 197 :returns: :class:`ColumnDelta` instance that exposes the altered attributes of \
199 198 `result_column` through a :func:`dict`-like interface.
200 199
201 200 * :class:`ColumnDelta`.result_column is altered column with new attributes
202 201
203 202 * :class:`ColumnDelta`.current_name is current name of column in db
204 203
205 204
206 205 """
207 206
208 207 # Column attributes that can be altered
209 208 diff_keys = ('name', 'type', 'primary_key', 'nullable',
210 209 'server_onupdate', 'server_default', 'autoincrement')
211 210 diffs = dict()
212 211 __visit_name__ = 'column'
213 212
214 213 def __init__(self, *p, **kw):
215 214 # 'alter_metadata' is not a public api. It exists purely
216 215 # as a crutch until the tests that fail when 'alter_metadata'
217 216 # behaviour always happens can be sorted out
218 217 self.alter_metadata = kw.pop("alter_metadata", False)
219 218
220 219 self.meta = kw.pop("metadata", None)
221 220 self.engine = kw.pop("engine", None)
222 221
223 222 # Things are initialized differently depending on how many column
224 223 # parameters are given. Figure out how many and call the appropriate
225 224 # method.
226 225 if len(p) >= 1 and isinstance(p[0], sqlalchemy.Column):
227 226 # At least one column specified
228 227 if len(p) >= 2 and isinstance(p[1], sqlalchemy.Column):
229 228 # Two columns specified
230 229 diffs = self.compare_2_columns(*p, **kw)
231 230 else:
232 231 # Exactly one column specified
233 232 diffs = self.compare_1_column(*p, **kw)
234 233 else:
235 234 # Zero columns specified
236 if not len(p) or not isinstance(p[0], compat.string_types):
235 if not len(p) or not isinstance(p[0], str):
237 236 raise ValueError("First argument must be column name")
238 237 diffs = self.compare_parameters(*p, **kw)
239 238
240 239 self.apply_diffs(diffs)
241 240
242 241 def __repr__(self):
243 242 return '<ColumnDelta altermetadata=%r, %s>' % (
244 243 self.alter_metadata,
245 244 super(ColumnDelta, self).__repr__()
246 245 )
247 246
248 247 def __getitem__(self, key):
249 248 if key not in self.keys():
250 249 raise KeyError("No such diff key, available: %s" % self.diffs )
251 250 return getattr(self.result_column, key)
252 251
253 252 def __setitem__(self, key, value):
254 253 if key not in self.keys():
255 254 raise KeyError("No such diff key, available: %s" % self.diffs )
256 255 setattr(self.result_column, key, value)
257 256
258 257 def __delitem__(self, key):
259 258 raise NotImplementedError
260 259
261 260 def __len__(self):
262 261 raise NotImplementedError
263 262
264 263 def __iter__(self):
265 264 raise NotImplementedError
266 265
267 266 def keys(self):
268 267 return self.diffs.keys()
269 268
270 269 def compare_parameters(self, current_name, *p, **k):
271 270 """Compares Column objects with reflection"""
272 271 self.table = k.pop('table')
273 272 self.result_column = self._table.c.get(current_name)
274 273 if len(p):
275 274 k = self._extract_parameters(p, k, self.result_column)
276 275 return k
277 276
278 277 def compare_1_column(self, col, *p, **k):
279 278 """Compares one Column object"""
280 279 self.table = k.pop('table', None)
281 280 if self.table is None:
282 281 self.table = col.table
283 282 self.result_column = col
284 283 if len(p):
285 284 k = self._extract_parameters(p, k, self.result_column)
286 285 return k
287 286
288 287 def compare_2_columns(self, old_col, new_col, *p, **k):
289 288 """Compares two Column objects"""
290 289 self.process_column(new_col)
291 290 self.table = k.pop('table', None)
292 291 # we cannot use bool() on table in SA06
293 292 if self.table is None:
294 293 self.table = old_col.table
295 294 if self.table is None:
296 295 self.table = new_col.table
297 296 self.result_column = old_col
298 297
299 298 # set differences
300 299 # leave out some stuff for later comp
301 300 for key in (set(self.diff_keys) - set(('type',))):
302 301 val = getattr(new_col, key, None)
303 302 if getattr(self.result_column, key, None) != val:
304 303 k.setdefault(key, val)
305 304
306 305 # inspect types
307 306 if not self.are_column_types_eq(self.result_column.type, new_col.type):
308 307 k.setdefault('type', new_col.type)
309 308
310 309 if len(p):
311 310 k = self._extract_parameters(p, k, self.result_column)
312 311 return k
313 312
314 313 def apply_diffs(self, diffs):
315 314 """Populate dict and column object with new values"""
316 315 self.diffs = diffs
317 316 for key in self.diff_keys:
318 317 if key in diffs:
319 318 setattr(self.result_column, key, diffs[key])
320 319
321 320 self.process_column(self.result_column)
322 321
323 322 # create an instance of class type if not yet
324 323 if 'type' in diffs and callable(self.result_column.type):
325 324 self.result_column.type = self.result_column.type()
326 325
327 326 # add column to the table
328 327 if self.table is not None and self.alter_metadata:
329 328 self.result_column.add_to_table(self.table)
330 329
331 330 def are_column_types_eq(self, old_type, new_type):
332 331 """Compares two types to be equal"""
333 332 ret = old_type.__class__ == new_type.__class__
334 333
335 334 # String length is a special case
336 335 if ret and isinstance(new_type, sqlalchemy.types.String):
337 336 ret = (getattr(old_type, 'length', None) == \
338 337 getattr(new_type, 'length', None))
339 338 return ret
340 339
341 340 def _extract_parameters(self, p, k, column):
342 341 """Extracts data from p and modifies diffs"""
343 342 p = list(p)
344 343 while len(p):
345 if isinstance(p[0], compat.string_types):
344 if isinstance(p[0], str):
346 345 k.setdefault('name', p.pop(0))
347 346 elif isinstance(p[0], sqlalchemy.types.TypeEngine):
348 347 k.setdefault('type', p.pop(0))
349 348 elif callable(p[0]):
350 349 p[0] = p[0]()
351 350 else:
352 351 break
353 352
354 353 if len(p):
355 354 new_col = column.copy_fixed()
356 355 new_col._init_items(*p)
357 356 k = self.compare_2_columns(column, new_col, **k)
358 357 return k
359 358
360 359 def process_column(self, column):
361 360 """Processes default values for column"""
362 361 # XXX: this is a snippet from SA processing of positional parameters
363 362 toinit = list()
364 363
365 364 if column.server_default is not None:
366 365 if isinstance(column.server_default, sqlalchemy.FetchedValue):
367 366 toinit.append(column.server_default)
368 367 else:
369 368 toinit.append(sqlalchemy.DefaultClause(column.server_default))
370 369 if column.server_onupdate is not None:
371 370 if isinstance(column.server_onupdate, FetchedValue):
372 371 toinit.append(column.server_default)
373 372 else:
374 373 toinit.append(sqlalchemy.DefaultClause(column.server_onupdate,
375 374 for_update=True))
376 375 if toinit:
377 376 column._init_items(*toinit)
378 377
379 378 def _get_table(self):
380 379 return getattr(self, '_table', None)
381 380
382 381 def _set_table(self, table):
383 if isinstance(table, compat.string_types):
382 if isinstance(table, str):
384 383 if self.alter_metadata:
385 384 if not self.meta:
386 385 raise ValueError("metadata must be specified for table"
387 386 " reflection when using alter_metadata")
388 387 meta = self.meta
389 388 if self.engine:
390 389 meta.bind = self.engine
391 390 else:
392 391 if not self.engine and not self.meta:
393 392 raise ValueError("engine or metadata must be specified"
394 393 " to reflect tables")
395 394 if not self.engine:
396 395 self.engine = self.meta.bind
397 396 meta = sqlalchemy.MetaData(bind=self.engine)
398 397 self._table = sqlalchemy.Table(table, meta, autoload=True)
399 398 elif isinstance(table, sqlalchemy.Table):
400 399 self._table = table
401 400 if not self.alter_metadata:
402 401 self._table.meta = sqlalchemy.MetaData(bind=self._table.bind)
403 402 def _get_result_column(self):
404 403 return getattr(self, '_result_column', None)
405 404
406 405 def _set_result_column(self, column):
407 406 """Set Column to Table based on alter_metadata evaluation."""
408 407 self.process_column(column)
409 408 if not hasattr(self, 'current_name'):
410 409 self.current_name = column.name
411 410 if self.alter_metadata:
412 411 self._result_column = column
413 412 else:
414 413 self._result_column = column.copy_fixed()
415 414
416 415 table = property(_get_table, _set_table)
417 416 result_column = property(_get_result_column, _set_result_column)
418 417
419 418
420 419 class ChangesetTable(object):
421 420 """Changeset extensions to SQLAlchemy tables."""
422 421
423 422 def create_column(self, column, *p, **kw):
424 423 """Creates a column.
425 424
426 425 The column parameter may be a column definition or the name of
427 426 a column in this table.
428 427
429 428 API to :meth:`ChangesetColumn.create`
430 429
431 430 :param column: Column to be created
432 431 :type column: Column instance or string
433 432 """
434 433 if not isinstance(column, sqlalchemy.Column):
435 434 # It's a column name
436 435 column = getattr(self.c, str(column))
437 436 column.create(table=self, *p, **kw)
438 437
439 438 def drop_column(self, column, *p, **kw):
440 439 """Drop a column, given its name or definition.
441 440
442 441 API to :meth:`ChangesetColumn.drop`
443 442
444 443 :param column: Column to be dropped
445 444 :type column: Column instance or string
446 445 """
447 446 if not isinstance(column, sqlalchemy.Column):
448 447 # It's a column name
449 448 try:
450 449 column = getattr(self.c, str(column))
451 450 except AttributeError:
452 451 # That column isn't part of the table. We don't need
453 452 # its entire definition to drop the column, just its
454 453 # name, so create a dummy column with the same name.
455 454 column = sqlalchemy.Column(str(column), sqlalchemy.Integer())
456 455 column.drop(table=self, *p, **kw)
457 456
458 457 def rename(self, name, connection=None, **kwargs):
459 458 """Rename this table.
460 459
461 460 :param name: New name of the table.
462 461 :type name: string
463 462 :param connection: reuse connection instead of creating a new one.
464 463 :type connection: :class:`sqlalchemy.engine.base.Connection` instance
465 464 """
466 465 engine = self.bind
467 466 self.new_name = name
468 467 visitorcallable = get_engine_visitor(engine, 'schemachanger')
469 468 run_single_visitor(engine, visitorcallable, self, connection, **kwargs)
470 469
471 470 # Fix metadata registration
472 471 self.name = name
473 472 self.deregister()
474 473 self._set_parent(self.metadata)
475 474
476 475 def _meta_key(self):
477 476 """Get the meta key for this table."""
478 477 return sqlalchemy.schema._get_table_key(self.name, self.schema)
479 478
480 479 def deregister(self):
481 480 """Remove this table from its metadata"""
482 481 if SQLA_07:
483 482 self.metadata._remove_table(self.name, self.schema)
484 483 else:
485 484 key = self._meta_key()
486 485 meta = self.metadata
487 486 if key in meta.tables:
488 487 del meta.tables[key]
489 488
490 489
491 490 class ChangesetColumn(object):
492 491 """Changeset extensions to SQLAlchemy columns."""
493 492
494 493 def alter(self, *p, **k):
495 494 """Makes a call to :func:`alter_column` for the column this
496 495 method is called on.
497 496 """
498 497 if 'table' not in k:
499 498 k['table'] = self.table
500 499 if 'engine' not in k:
501 500 k['engine'] = k['table'].bind
502 501 return alter_column(self, *p, **k)
503 502
504 503 def create(self, table=None, index_name=None, unique_name=None,
505 504 primary_key_name=None, populate_default=True, connection=None, **kwargs):
506 505 """Create this column in the database.
507 506
508 507 Assumes the given table exists. ``ALTER TABLE ADD COLUMN``,
509 508 for most databases.
510 509
511 510 :param table: Table instance to create on.
512 511 :param index_name: Creates :class:`ChangesetIndex` on this column.
513 512 :param unique_name: Creates :class:\
514 513 `~migrate.changeset.constraint.UniqueConstraint` on this column.
515 514 :param primary_key_name: Creates :class:\
516 515 `~migrate.changeset.constraint.PrimaryKeyConstraint` on this column.
517 516 :param populate_default: If True, created column will be \
518 517 populated with defaults
519 518 :param connection: reuse connection instead of creating a new one.
520 519 :type table: Table instance
521 520 :type index_name: string
522 521 :type unique_name: string
523 522 :type primary_key_name: string
524 523 :type populate_default: bool
525 524 :type connection: :class:`sqlalchemy.engine.base.Connection` instance
526 525
527 526 :returns: self
528 527 """
529 528 self.populate_default = populate_default
530 529 self.index_name = index_name
531 530 self.unique_name = unique_name
532 531 self.primary_key_name = primary_key_name
533 532 for cons in ('index_name', 'unique_name', 'primary_key_name'):
534 533 self._check_sanity_constraints(cons)
535 534
536 535 self.add_to_table(table)
537 536 engine = self.table.bind
538 537 visitorcallable = get_engine_visitor(engine, 'columngenerator')
539 538 engine._run_visitor(visitorcallable, self, connection, **kwargs)
540 539
541 540 # TODO: reuse existing connection
542 541 if self.populate_default and self.default is not None:
543 542 stmt = table.update().values({self: engine._execute_default(self.default)})
544 543 engine.execute(stmt)
545 544
546 545 return self
547 546
548 547 def drop(self, table=None, connection=None, **kwargs):
549 548 """Drop this column from the database, leaving its table intact.
550 549
551 550 ``ALTER TABLE DROP COLUMN``, for most databases.
552 551
553 552 :param connection: reuse connection instead of creating a new one.
554 553 :type connection: :class:`sqlalchemy.engine.base.Connection` instance
555 554 """
556 555 if table is not None:
557 556 self.table = table
558 557 engine = self.table.bind
559 558 visitorcallable = get_engine_visitor(engine, 'columndropper')
560 559 engine._run_visitor(visitorcallable, self, connection, **kwargs)
561 560 self.remove_from_table(self.table, unset_table=False)
562 561 self.table = None
563 562 return self
564 563
565 564 def add_to_table(self, table):
566 565 if table is not None and self.table is None:
567 566 if SQLA_07:
568 567 table.append_column(self)
569 568 else:
570 569 self._set_parent(table)
571 570
572 571 def _col_name_in_constraint(self,cons,name):
573 572 return False
574 573
575 574 def remove_from_table(self, table, unset_table=True):
576 575 # TODO: remove primary keys, constraints, etc
577 576 if unset_table:
578 577 self.table = None
579 578
580 579 to_drop = set()
581 580 for index in table.indexes:
582 581 columns = []
583 582 for col in index.columns:
584 583 if col.name!=self.name:
585 584 columns.append(col)
586 585 if columns:
587 586 index.columns = columns
588 587 if SQLA_08:
589 588 index.expressions = columns
590 589 else:
591 590 to_drop.add(index)
592 591 table.indexes = table.indexes - to_drop
593 592
594 593 to_drop = set()
595 594 for cons in table.constraints:
596 595 # TODO: deal with other types of constraint
597 596 if isinstance(cons,(ForeignKeyConstraint,
598 597 UniqueConstraint)):
599 598 for col_name in cons.columns:
600 if not isinstance(col_name, compat.string_types):
599 if not isinstance(col_name, str):
601 600 col_name = col_name.name
602 601 if self.name==col_name:
603 602 to_drop.add(cons)
604 603 table.constraints = table.constraints - to_drop
605 604
606 605 if table.c.contains_column(self):
607 606 if SQLA_07:
608 607 table._columns.remove(self)
609 608 else:
610 609 table.c.remove(self)
611 610
612 611 # TODO: this is fixed in 0.6
613 612 def copy_fixed(self, **kw):
614 613 """Create a copy of this ``Column``, with all attributes."""
615 614 q = util.safe_quote(self)
616 615 return sqlalchemy.Column(self.name, self.type, self.default,
617 616 key=self.key,
618 617 primary_key=self.primary_key,
619 618 nullable=self.nullable,
620 619 quote=q,
621 620 index=self.index,
622 621 unique=self.unique,
623 622 onupdate=self.onupdate,
624 623 autoincrement=self.autoincrement,
625 624 server_default=self.server_default,
626 625 server_onupdate=self.server_onupdate,
627 626 *[c.copy(**kw) for c in self.constraints])
628 627
629 628 def _check_sanity_constraints(self, name):
630 629 """Check if constraints names are correct"""
631 630 obj = getattr(self, name)
632 631 if (getattr(self, name[:-5]) and not obj):
633 632 raise InvalidConstraintError("Column.create() accepts index_name,"
634 633 " primary_key_name and unique_name to generate constraints")
635 if not isinstance(obj, compat.string_types) and obj is not None:
634 if not isinstance(obj, str) and obj is not None:
636 635 raise InvalidConstraintError(
637 636 "%s argument for column must be constraint name" % name)
638 637
639 638
640 639 class ChangesetIndex(object):
641 640 """Changeset extensions to SQLAlchemy Indexes."""
642 641
643 642 __visit_name__ = 'index'
644 643
645 644 def rename(self, name, connection=None, **kwargs):
646 645 """Change the name of an index.
647 646
648 647 :param name: New name of the Index.
649 648 :type name: string
650 649 :param connection: reuse connection instead of creating a new one.
651 650 :type connection: :class:`sqlalchemy.engine.base.Connection` instance
652 651 """
653 652 engine = self.table.bind
654 653 self.new_name = name
655 654 visitorcallable = get_engine_visitor(engine, 'schemachanger')
656 655 engine._run_visitor(visitorcallable, self, connection, **kwargs)
657 656 self.name = name
658 657
659 658
660 659 class ChangesetDefaultClause(object):
661 660 """Implements comparison between :class:`DefaultClause` instances"""
662 661
663 662 def __eq__(self, other):
664 663 if isinstance(other, self.__class__):
665 664 if self.arg == other.arg:
666 665 return True
667 666
668 667 def __ne__(self, other):
669 668 return not self.__eq__(other)
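The changeset helpers above (`create_column`, `drop_column`, `alter_column`, `ChangesetTable`, `ChangesetColumn`) are mixed into SQLAlchemy's Table and Column classes when the changeset package is imported, following the upstream sqlalchemy-migrate layout. A hedged sketch of how a migration step typically drives them; the table and column names are illustrative and the snippet assumes the changeset extensions shown above are already loaded:

.. code-block:: python

    import sqlalchemy as sa

    def upgrade(migrate_engine):
        meta = sa.MetaData(bind=migrate_engine)
        users = sa.Table('users', meta, autoload=True)

        # ChangesetColumn.create() -> ALTER TABLE users ADD COLUMN skype
        skype = sa.Column('skype', sa.String(255), nullable=True)
        skype.create(table=users)

        # ColumnDelta via ChangesetColumn.alter() -> widen an existing column
        users.c.username.alter(type=sa.String(512))

    def downgrade(migrate_engine):
        meta = sa.MetaData(bind=migrate_engine)
        users = sa.Table('users', meta, autoload=True)
        users.c.skype.drop()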
@@ -1,222 +1,221 b''
1 1 """
2 2 Database schema version management.
3 3 """
4 4 import sys
5 5 import logging
6 6
7 7 from sqlalchemy import (Table, Column, MetaData, String, Text, Integer,
8 8 create_engine)
9 9 from sqlalchemy.sql import and_
10 10 from sqlalchemy import exc as sa_exceptions
11 11 from sqlalchemy.sql import bindparam
12 from pyramid import compat
13 12
14 13 from rhodecode.lib.dbmigrate.migrate import exceptions
15 14 from rhodecode.lib.dbmigrate.migrate.changeset import SQLA_07
16 15 from rhodecode.lib.dbmigrate.migrate.versioning import genmodel, schemadiff
17 16 from rhodecode.lib.dbmigrate.migrate.versioning.repository import Repository
18 17 from rhodecode.lib.dbmigrate.migrate.versioning.util import load_model
19 18 from rhodecode.lib.dbmigrate.migrate.versioning.version import VerNum
20 19
21 20
22 21 log = logging.getLogger(__name__)
23 22
24 23
25 24 class ControlledSchema(object):
26 25 """A database under version control"""
27 26
28 27 def __init__(self, engine, repository):
29 if isinstance(repository, compat.string_types):
28 if isinstance(repository, str):
30 29 repository = Repository(repository)
31 30 self.engine = engine
32 31 self.repository = repository
33 32 self.meta = MetaData(engine)
34 33 self.load()
35 34
36 35 def __eq__(self, other):
37 36 """Compare two schemas by repositories and versions"""
38 37 return (self.repository is other.repository \
39 38 and self.version == other.version)
40 39
41 40 def load(self):
42 41 """Load controlled schema version info from DB"""
43 42 tname = self.repository.version_table
44 43 try:
45 44 if not hasattr(self, 'table') or self.table is None:
46 45 self.table = Table(tname, self.meta, autoload=True)
47 46
48 47 result = self.engine.execute(self.table.select(
49 48 self.table.c.repository_id == str(self.repository.id)))
50 49
51 50 data = list(result)[0]
52 51 except:
53 52 cls, exc, tb = sys.exc_info()
54 53 raise exceptions.DatabaseNotControlledError, exc.__str__(), tb
55 54
56 55 self.version = data['version']
57 56 return data
58 57
59 58 def drop(self):
60 59 """
61 60 Remove version control from a database.
62 61 """
63 62 if SQLA_07:
64 63 try:
65 64 self.table.drop()
66 65 except sa_exceptions.DatabaseError:
67 66 raise exceptions.DatabaseNotControlledError(str(self.table))
68 67 else:
69 68 try:
70 69 self.table.drop()
71 70 except (sa_exceptions.SQLError):
72 71 raise exceptions.DatabaseNotControlledError(str(self.table))
73 72
74 73 def changeset(self, version=None):
75 74 """API to Changeset creation.
76 75
77 76 Uses self.version for start version and engine.name
78 77 to get database name.
79 78 """
80 79 database = self.engine.name
81 80 start_ver = self.version
82 81 changeset = self.repository.changeset(database, start_ver, version)
83 82 return changeset
84 83
85 84 def runchange(self, ver, change, step):
86 85 startver = ver
87 86 endver = ver + step
88 87 # Current database version must be correct! Don't run if corrupt!
89 88 if self.version != startver:
90 89 raise exceptions.InvalidVersionError("%s is not %s" % \
91 90 (self.version, startver))
92 91 # Run the change
93 92 change.run(self.engine, step)
94 93
95 94 # Update/refresh database version
96 95 self.update_repository_table(startver, endver)
97 96 self.load()
98 97
99 98 def update_repository_table(self, startver, endver):
100 99 """Update version_table with new information"""
101 100 update = self.table.update(and_(self.table.c.version == int(startver),
102 101 self.table.c.repository_id == str(self.repository.id)))
103 102 self.engine.execute(update, version=int(endver))
104 103
105 104 def upgrade(self, version=None):
106 105 """
107 106 Upgrade (or downgrade) to a specified version, or latest version.
108 107 """
109 108 changeset = self.changeset(version)
110 109 for ver, change in changeset:
111 110 self.runchange(ver, change, changeset.step)
112 111
113 112 def update_db_from_model(self, model):
114 113 """
115 114 Modify the database to match the structure of the current Python model.
116 115 """
117 116 model = load_model(model)
118 117
119 118 diff = schemadiff.getDiffOfModelAgainstDatabase(
120 119 model, self.engine, excludeTables=[self.repository.version_table]
121 120 )
122 121 genmodel.ModelGenerator(diff,self.engine).runB2A()
123 122
124 123 self.update_repository_table(self.version, int(self.repository.latest))
125 124
126 125 self.load()
127 126
128 127 @classmethod
129 128 def create(cls, engine, repository, version=None):
130 129 """
131 130 Declare a database to be under a repository's version control.
132 131
133 132 :raises: :exc:`DatabaseAlreadyControlledError`
134 133 :returns: :class:`ControlledSchema`
135 134 """
136 135 # Confirm that the version # is valid: positive, integer,
137 136 # exists in repos
138 if isinstance(repository, compat.string_types):
137 if isinstance(repository, str):
139 138 repository = Repository(repository)
140 139 version = cls._validate_version(repository, version)
141 140 table = cls._create_table_version(engine, repository, version)
142 141 # TODO: history table
143 142 # Load repository information and return
144 143 return cls(engine, repository)
145 144
146 145 @classmethod
147 146 def _validate_version(cls, repository, version):
148 147 """
149 148 Ensures this is a valid version number for this repository.
150 149
151 150 :raises: :exc:`InvalidVersionError` if invalid
152 151 :return: valid version number
153 152 """
154 153 if version is None:
155 154 version = 0
156 155 try:
157 156 version = VerNum(version) # raises valueerror
158 157 if version < 0 or version > repository.latest:
159 158 raise ValueError()
160 159 except ValueError:
161 160 raise exceptions.InvalidVersionError(version)
162 161 return version
163 162
164 163 @classmethod
165 164 def _create_table_version(cls, engine, repository, version):
166 165 """
167 166 Creates the versioning table in a database.
168 167
169 168 :raises: :exc:`DatabaseAlreadyControlledError`
170 169 """
171 170 # Create tables
172 171 tname = repository.version_table
173 172 meta = MetaData(engine)
174 173
175 174 table = Table(
176 175 tname, meta,
177 176 Column('repository_id', String(250), primary_key=True),
178 177 Column('repository_path', Text),
179 178 Column('version', Integer), )
180 179
181 180 # there can be multiple repositories/schemas in the same db
182 181 if not table.exists():
183 182 table.create()
184 183
185 184 # test for existing repository_id
186 185 s = table.select(table.c.repository_id == bindparam("repository_id"))
187 186 result = engine.execute(s, repository_id=repository.id)
188 187 if result.fetchone():
189 188 raise exceptions.DatabaseAlreadyControlledError
190 189
191 190 # Insert data
192 191 engine.execute(table.insert().values(
193 192 repository_id=repository.id,
194 193 repository_path=repository.path,
195 194 version=int(version)))
196 195 return table
197 196
198 197 @classmethod
199 198 def compare_model_to_db(cls, engine, model, repository):
200 199 """
201 200 Compare the current model against the current database.
202 201 """
203 if isinstance(repository, compat.string_types):
202 if isinstance(repository, str):
204 203 repository = Repository(repository)
205 204 model = load_model(model)
206 205
207 206 diff = schemadiff.getDiffOfModelAgainstDatabase(
208 207 model, engine, excludeTables=[repository.version_table])
209 208 return diff
210 209
211 210 @classmethod
212 211 def create_model(cls, engine, repository, declarative=False):
213 212 """
214 213 Dump the current database as a Python model.
215 214 """
216 if isinstance(repository, compat.string_types):
215 if isinstance(repository, str):
217 216 repository = Repository(repository)
218 217
219 218 diff = schemadiff.getDiffOfModelAgainstDatabase(
220 219 MetaData(), engine, excludeTables=[repository.version_table]
221 220 )
222 221 return genmodel.ModelGenerator(diff, engine, declarative).genBDefinition()
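`ControlledSchema` is the entry point for putting a database under version control and walking it through change scripts. A hedged sketch of the typical calls, assuming the module path of the vendored copy above and an existing migrate repository on disk (the paths are illustrative):

.. code-block:: python

    from sqlalchemy import create_engine
    from rhodecode.lib.dbmigrate.migrate.versioning.schema import ControlledSchema

    engine = create_engine('sqlite:///example.db')

    # Stamp the database with version 0 and create the version table.
    schema = ControlledSchema.create(engine, '/path/to/migrate_repository', version=0)
    print(schema.version)

    # Apply every pending change script, or stop at an explicit version.
    schema.upgrade()
    schema.upgrade(version=3)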
@@ -1,160 +1,159 b''
1 1 #!/usr/bin/env python
2 2 # -*- coding: utf-8 -*-
3 3
4 4 import shutil
5 5 import warnings
6 6 import logging
7 7 import inspect
8 8 from StringIO import StringIO
9 9
10 from pyramid import compat
11 10 from rhodecode.lib.dbmigrate import migrate
12 11 from rhodecode.lib.dbmigrate.migrate.versioning import genmodel, schemadiff
13 12 from rhodecode.lib.dbmigrate.migrate.versioning.config import operations
14 13 from rhodecode.lib.dbmigrate.migrate.versioning.template import Template
15 14 from rhodecode.lib.dbmigrate.migrate.versioning.script import base
16 15 from rhodecode.lib.dbmigrate.migrate.versioning.util import import_path, load_model, with_engine
17 16 from rhodecode.lib.dbmigrate.migrate.exceptions import MigrateDeprecationWarning, InvalidScriptError, ScriptError
18 17
19 18 log = logging.getLogger(__name__)
20 19 __all__ = ['PythonScript']
21 20
22 21
23 22 class PythonScript(base.BaseScript):
24 23 """Base for Python scripts"""
25 24
26 25 @classmethod
27 26 def create(cls, path, **opts):
28 27 """Create an empty migration script at specified path
29 28
30 29 :returns: :class:`PythonScript instance <migrate.versioning.script.py.PythonScript>`"""
31 30 cls.require_notfound(path)
32 31
33 32 src = Template(opts.pop('templates_path', None)).get_script(theme=opts.pop('templates_theme', None))
34 33 shutil.copy(src, path)
35 34
36 35 return cls(path)
37 36
38 37 @classmethod
39 38 def make_update_script_for_model(cls, engine, oldmodel,
40 39 model, repository, **opts):
41 40 """Create a migration script based on difference between two SA models.
42 41
43 42 :param repository: path to migrate repository
44 43 :param oldmodel: dotted.module.name:SAClass or SAClass object
45 44 :param model: dotted.module.name:SAClass or SAClass object
46 45 :param engine: SQLAlchemy engine
47 46 :type repository: string or :class:`Repository instance <migrate.versioning.repository.Repository>`
48 47 :type oldmodel: string or Class
49 48 :type model: string or Class
50 49 :type engine: Engine instance
51 50 :returns: Upgrade / Downgrade script
52 51 :rtype: string
53 52 """
54 53
55 if isinstance(repository, compat.string_types):
54 if isinstance(repository, str):
56 55 # oh dear, an import cycle!
57 56 from rhodecode.lib.dbmigrate.migrate.versioning.repository import Repository
58 57 repository = Repository(repository)
59 58
60 59 oldmodel = load_model(oldmodel)
61 60 model = load_model(model)
62 61
63 62 # Compute differences.
64 63 diff = schemadiff.getDiffOfModelAgainstModel(
65 64 model,
66 65 oldmodel,
67 66 excludeTables=[repository.version_table])
68 67 # TODO: diff can be False (there is no difference?)
69 68 decls, upgradeCommands, downgradeCommands = \
70 69 genmodel.ModelGenerator(diff,engine).genB2AMigration()
71 70
72 71 # Store differences into file.
73 72 src = Template(opts.pop('templates_path', None)).get_script(opts.pop('templates_theme', None))
74 73 with open(src) as f:
75 74 contents = f.read()
76 75
77 76 # generate source
78 77 search = 'def upgrade(migrate_engine):'
79 78 contents = contents.replace(search, '\n\n'.join((decls, search)), 1)
80 79 if upgradeCommands:
81 80 contents = contents.replace(' pass', upgradeCommands, 1)
82 81 if downgradeCommands:
83 82 contents = contents.replace(' pass', downgradeCommands, 1)
84 83 return contents
85 84
86 85 @classmethod
87 86 def verify_module(cls, path):
88 87 """Ensure path is a valid script
89 88
90 89 :param path: Script location
91 90 :type path: string
92 91 :raises: :exc:`InvalidScriptError <migrate.exceptions.InvalidScriptError>`
93 92 :returns: Python module
94 93 """
95 94 # Try to import and get the upgrade() func
96 95 module = import_path(path)
97 96 try:
98 97 assert callable(module.upgrade)
99 98 except Exception as e:
100 99 raise InvalidScriptError(path + ': %s' % str(e))
101 100 return module
102 101
103 102 def preview_sql(self, url, step, **args):
104 103 """Mocks SQLAlchemy Engine to store all executed calls in a string
105 104 and runs :meth:`PythonScript.run <migrate.versioning.script.py.PythonScript.run>`
106 105
107 106 :returns: SQL file
108 107 """
109 108 buf = StringIO()
110 109 args['engine_arg_strategy'] = 'mock'
111 110 args['engine_arg_executor'] = lambda s, p = '': buf.write(str(s) + p)
112 111
113 112 @with_engine
114 113 def go(url, step, **kw):
115 114 engine = kw.pop('engine')
116 115 self.run(engine, step)
117 116 return buf.getvalue()
118 117
119 118 return go(url, step, **args)
120 119
121 120 def run(self, engine, step):
122 121 """Core method of Script file.
123 122 Executes the :func:`upgrade` or :func:`downgrade` function
124 123
125 124 :param engine: SQLAlchemy Engine
126 125 :param step: Operation to run
127 126 :type engine: string
128 127 :type step: int
129 128 """
130 129 if step > 0:
131 130 op = 'upgrade'
132 131 elif step < 0:
133 132 op = 'downgrade'
134 133 else:
135 134 raise ScriptError("%d is not a valid step" % step)
136 135
137 136 funcname = base.operations[op]
138 137 script_func = self._func(funcname)
139 138
140 139 # check for old way of using engine
141 140 if not inspect.getargspec(script_func)[0]:
142 141 raise TypeError("upgrade/downgrade functions must accept engine"
143 142 " parameter (since version 0.5.4)")
144 143
145 144 script_func(engine)
146 145
147 146 @property
148 147 def module(self):
149 148 """Calls :meth:`migrate.versioning.script.py.verify_module`
150 149 and returns it.
151 150 """
152 151 if not hasattr(self, '_module'):
153 152 self._module = self.verify_module(self.path)
154 153 return self._module
155 154
156 155 def _func(self, funcname):
157 156 if not hasattr(self.module, funcname):
158 157 msg = "Function '%s' is not defined in this script"
159 158 raise ScriptError(msg % funcname)
160 159 return getattr(self.module, funcname)
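`PythonScript.run()` resolves the operation name through `base.operations` and then calls the script's `upgrade()` or `downgrade()` function, rejecting scripts whose functions do not accept the engine (the `inspect.getargspec` check above). A minimal change-script sketch that satisfies that contract; the file name and table are illustrative, and the column helpers assume the changeset extensions are active:

.. code-block:: python

    # 001_add_skype_column.py  (illustrative script name)
    from sqlalchemy import Table, Column, MetaData, String

    def upgrade(migrate_engine):
        meta = MetaData(bind=migrate_engine)
        users = Table('users', meta, autoload=True)
        Column('skype', String(255), nullable=True).create(users)

    def downgrade(migrate_engine):
        meta = MetaData(bind=migrate_engine)
        users = Table('users', meta, autoload=True)
        users.c.skype.drop()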
@@ -1,181 +1,180 b''
1 1 #!/usr/bin/env python
2 2 # -*- coding: utf-8 -*-
3 3 """.. currentmodule:: migrate.versioning.util"""
4 4
5 5 import warnings
6 6 import logging
7 7 from decorator import decorator
8 8 from pkg_resources import EntryPoint
9 9
10 10 from sqlalchemy import create_engine
11 11 from sqlalchemy.engine import Engine
12 12 from sqlalchemy.pool import StaticPool
13 13
14 from pyramid import compat
15 14 from rhodecode.lib.dbmigrate.migrate import exceptions
16 15 from rhodecode.lib.dbmigrate.migrate.versioning.util.keyedinstance import KeyedInstance
17 16 from rhodecode.lib.dbmigrate.migrate.versioning.util.importpath import import_path
18 17
19 18
20 19 log = logging.getLogger(__name__)
21 20
22 21
23 22 def load_model(dotted_name):
24 23 """Import module and use module-level variable".
25 24
26 25 :param dotted_name: path to model in form of string: ``some.python.module:Class``
27 26
28 27 .. versionchanged:: 0.5.4
29 28
30 29 """
31 if isinstance(dotted_name, compat.string_types):
30 if isinstance(dotted_name, str):
32 31 if ':' not in dotted_name:
33 32 # backwards compatibility
34 33 warnings.warn('model should be in form of module.model:User '
35 34 'and not module.model.User', exceptions.MigrateDeprecationWarning)
36 35 dotted_name = ':'.join(dotted_name.rsplit('.', 1))
37 36 return EntryPoint.parse('x=%s' % dotted_name).load(False)
38 37 else:
39 38 # Assume it's already loaded.
40 39 return dotted_name
41 40
42 41 def asbool(obj):
43 42 """Do everything to use object as bool"""
44 if isinstance(obj, compat.string_types):
43 if isinstance(obj, str):
45 44 obj = obj.strip().lower()
46 45 if obj in ['true', 'yes', 'on', 'y', 't', '1']:
47 46 return True
48 47 elif obj in ['false', 'no', 'off', 'n', 'f', '0']:
49 48 return False
50 49 else:
51 50 raise ValueError("String is not true/false: %r" % obj)
52 51 if obj in (True, False):
53 52 return bool(obj)
54 53 else:
55 54 raise ValueError("String is not true/false: %r" % obj)
56 55
57 56 def guess_obj_type(obj):
58 57 """Do everything to guess object type from string
59 58
60 59 Tries to convert to `int`, then `bool`, and returns the object unchanged if neither succeeds.
61 60
62 61 .. versionadded: 0.5.4
63 62 """
64 63
65 64 result = None
66 65
67 66 try:
68 67 result = int(obj)
69 68 except:
70 69 pass
71 70
72 71 if result is None:
73 72 try:
74 73 result = asbool(obj)
75 74 except:
76 75 pass
77 76
78 77 if result is not None:
79 78 return result
80 79 else:
81 80 return obj
82 81
83 82 @decorator
84 83 def catch_known_errors(f, *a, **kw):
85 84 """Decorator that catches known api errors
86 85
87 86 .. versionadded: 0.5.4
88 87 """
89 88
90 89 try:
91 90 return f(*a, **kw)
92 91 except exceptions.PathFoundError as e:
93 92 raise exceptions.KnownError("The path %s already exists" % e.args[0])
94 93
95 94 def construct_engine(engine, **opts):
96 95 """.. versionadded:: 0.5.4
97 96
98 97 Constructs and returns SQLAlchemy engine.
99 98
100 99 Currently, there are 2 ways to pass create_engine options to :mod:`migrate.versioning.api` functions:
101 100
102 101 :param engine: connection string or an existing engine
103 102 :param engine_dict: python dictionary of options to pass to `create_engine`
104 103 :param engine_arg_*: keyword parameters to pass to `create_engine` (evaluated with :func:`migrate.versioning.util.guess_obj_type`)
105 104 :type engine_dict: dict
106 105 :type engine: string or Engine instance
107 106 :type engine_arg_*: string
108 107 :returns: SQLAlchemy Engine
109 108
110 109 .. note::
111 110
112 111 keyword parameters override ``engine_dict`` values.
113 112
114 113 """
115 114 if isinstance(engine, Engine):
116 115 return engine
117 elif not isinstance(engine, compat.string_types):
116 elif not isinstance(engine, str):
118 117 raise ValueError("you need to pass either an existing engine or a database uri")
119 118
120 119 # get options for create_engine
121 120 if opts.get('engine_dict') and isinstance(opts['engine_dict'], dict):
122 121 kwargs = opts['engine_dict']
123 122 else:
124 123 kwargs = {}
125 124
126 125 # DEPRECATED: handle echo the old way
127 126 echo = asbool(opts.get('echo', False))
128 127 if echo:
129 128 warnings.warn('echo=True parameter is deprecated, pass '
130 129 'engine_arg_echo=True or engine_dict={"echo": True}',
131 130 exceptions.MigrateDeprecationWarning)
132 131 kwargs['echo'] = echo
133 132
134 133 # parse keyword arguments
135 134 for key, value in opts.iteritems():
136 135 if key.startswith('engine_arg_'):
137 136 kwargs[key[11:]] = guess_obj_type(value)
138 137
139 138 log.debug('Constructing engine')
140 139 # TODO: return create_engine(engine, poolclass=StaticPool, **kwargs)
141 140 # seems like 0.5.x branch does not work with engine.dispose and staticpool
142 141 return create_engine(engine, **kwargs)
143 142
144 143 @decorator
145 144 def with_engine(f, *a, **kw):
146 145 """Decorator for :mod:`migrate.versioning.api` functions
147 146 to safely close resources after function usage.
148 147
149 148 Passes engine parameters to :func:`construct_engine` and
150 149 resulting parameter is available as kw['engine'].
151 150
152 151 Engine is disposed after wrapped function is executed.
153 152
154 153 .. versionadded: 0.6.0
155 154 """
156 155 url = a[0]
157 156 engine = construct_engine(url, **kw)
158 157
159 158 try:
160 159 kw['engine'] = engine
161 160 return f(*a, **kw)
162 161 finally:
163 162 if isinstance(engine, Engine) and engine is not url:
164 163 log.debug('Disposing SQLAlchemy engine %s', engine)
165 164 engine.dispose()
166 165
167 166
168 167 class Memoize:
169 168 """Memoize(fn) - an instance which acts like fn but memoizes its arguments
170 169 Will only work on functions with non-mutable arguments
171 170
172 171 ActiveState Code 52201
173 172 """
174 173 def __init__(self, fn):
175 174 self.fn = fn
176 175 self.memo = {}
177 176
178 177 def __call__(self, *args):
179 178 if args not in self.memo:
180 179 self.memo[args] = self.fn(*args)
181 180 return self.memo[args]
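The helpers in this module are small but widely used: `asbool` normalises truthy/falsy strings, `guess_obj_type` coerces `engine_arg_*` values before they reach `create_engine`, and `with_engine` builds and disposes the engine around an API call. A few directly checkable examples, assuming the vendored import path used elsewhere in this diff:

.. code-block:: python

    from rhodecode.lib.dbmigrate.migrate.versioning.util import (
        asbool, guess_obj_type)

    assert asbool('Yes') is True
    assert asbool('off') is False

    assert guess_obj_type('42') == 42             # coerced to int
    assert guess_obj_type('on') is True           # coerced via asbool
    assert guess_obj_type('not-a-number') == 'not-a-number'  # left as-is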
@@ -1,1044 +1,1043 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22 import logging
23 23 import datetime
24 24 import traceback
25 25 from datetime import date
26 26
27 27 from sqlalchemy import *
28 28 from sqlalchemy.ext.hybrid import hybrid_property
29 29 from sqlalchemy.orm import relationship, joinedload, class_mapper, validates
30 30 from beaker.cache import cache_region, region_invalidate
31 from pyramid import compat
32 31
33 32 from rhodecode.lib.vcs import get_backend
34 33 from rhodecode.lib.vcs.utils.helpers import get_scm
35 34 from rhodecode.lib.vcs.exceptions import VCSError
36 35 from zope.cachedescriptors.property import Lazy as LazyProperty
37 36 from rhodecode.lib.auth import generate_auth_token
38 37 from rhodecode.lib.utils2 import str2bool, safe_str, get_commit_safe, safe_unicode
39 38 from rhodecode.lib.exceptions import UserGroupAssignedException
40 39 from rhodecode.lib.ext_json import json
41 40
42 41 from rhodecode.model.meta import Base, Session
43 42 from rhodecode.lib.caching_query import FromCache
44 43
45 44
46 45 log = logging.getLogger(__name__)
47 46
48 47 #==============================================================================
49 48 # BASE CLASSES
50 49 #==============================================================================
51 50
52 51 class ModelSerializer(json.JSONEncoder):
53 52 """
54 53 Simple Serializer for JSON,
55 54
56 55 usage::
57 56
58 57 to customize an object for serialization, implement a __json__
59 58 method that returns a dict to be serialized into json
60 59
61 60 example::
62 61
63 62 class Task(object):
64 63
65 64 def __init__(self, name, value):
66 65 self.name = name
67 66 self.value = value
68 67
69 68 def __json__(self):
70 69 return dict(name=self.name,
71 70 value=self.value)
72 71
73 72 """
74 73
75 74 def default(self, obj):
76 75
77 76 if hasattr(obj, '__json__'):
78 77 return obj.__json__()
79 78 else:
80 79 return json.JSONEncoder.default(self, obj)
81 80
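`ModelSerializer` documents the `__json__` convention in its docstring; for completeness, a hedged sketch of how an encoder like this is driven. It assumes the snippet runs in the context of this module, so ``json`` (the ext_json import above) and ``ModelSerializer`` are in scope; the `Task` class is illustrative:

.. code-block:: python

    class Task(object):
        def __init__(self, name, value):
            self.name = name
            self.value = value

        def __json__(self):
            return dict(name=self.name, value=self.value)

    # ModelSerializer falls back to __json__ when the object provides one.
    print(json.dumps(Task('sync', 1), cls=ModelSerializer))
    # -> {"name": "sync", "value": 1}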
82 81 class BaseModel(object):
83 82 """Base Model for all classess
84 83
85 84 """
86 85
87 86 @classmethod
88 87 def _get_keys(cls):
89 88 """return column names for this model """
90 89 return class_mapper(cls).c.keys()
91 90
92 91 def get_dict(self):
93 92 """return dict with keys and values corresponding
94 93 to this model data """
95 94
96 95 d = {}
97 96 for k in self._get_keys():
98 97 d[k] = getattr(self, k)
99 98 return d
100 99
101 100 def get_appstruct(self):
102 101 """return list with keys and values tupples corresponding
103 102 to this model data """
104 103
105 104 l = []
106 105 for k in self._get_keys():
107 106 l.append((k, getattr(self, k),))
108 107 return l
109 108
110 109 def populate_obj(self, populate_dict):
111 110 """populate model with data from given populate_dict"""
112 111
113 112 for k in self._get_keys():
114 113 if k in populate_dict:
115 114 setattr(self, k, populate_dict[k])
116 115
117 116 @classmethod
118 117 def query(cls):
119 118 return Session.query(cls)
120 119
121 120 @classmethod
122 121 def get(cls, id_):
123 122 if id_:
124 123 return cls.query().get(id_)
125 124
126 125 @classmethod
127 126 def getAll(cls):
128 127 return cls.query().all()
129 128
130 129 @classmethod
131 130 def delete(cls, id_):
132 131 obj = cls.query().get(id_)
133 132 Session.delete(obj)
134 133 Session.commit()
135 134
136 135
137 136 class RhodeCodeSetting(Base, BaseModel):
138 137 __tablename__ = 'rhodecode_settings'
139 138 __table_args__ = (UniqueConstraint('app_settings_name'), {'extend_existing':True})
140 139 app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
141 140 app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
142 141 _app_settings_value = Column("app_settings_value", String(255), nullable=True, unique=None, default=None)
143 142
144 143 def __init__(self, k='', v=''):
145 144 self.app_settings_name = k
146 145 self.app_settings_value = v
147 146
148 147
149 148 @validates('_app_settings_value')
150 149 def validate_settings_value(self, key, val):
151 150 assert type(val) == unicode
152 151 return val
153 152
154 153 @hybrid_property
155 154 def app_settings_value(self):
156 155 v = self._app_settings_value
157 156 if v == 'ldap_active':
158 157 v = str2bool(v)
159 158 return v
160 159
161 160 @app_settings_value.setter
162 161 def app_settings_value(self, val):
163 162 """
164 163 Setter that will always make sure we use unicode in app_settings_value
165 164
166 165 :param val:
167 166 """
168 167 self._app_settings_value = safe_unicode(val)
169 168
170 169 def __repr__(self):
171 170 return "<%s('%s:%s')>" % (self.__class__.__name__,
172 171 self.app_settings_name, self.app_settings_value)
173 172
174 173
175 174 @classmethod
176 175 def get_by_name(cls, ldap_key):
177 176 return cls.query()\
178 177 .filter(cls.app_settings_name == ldap_key).scalar()
179 178
180 179 @classmethod
181 180 def get_app_settings(cls, cache=False):
182 181
183 182 ret = cls.query()
184 183
185 184 if cache:
186 185 ret = ret.options(FromCache("sql_cache_short", "get_hg_settings"))
187 186
188 187 if not ret:
189 188 raise Exception('Could not get application settings !')
190 189 settings = {}
191 190 for each in ret:
192 191 settings['rhodecode_' + each.app_settings_name] = \
193 192 each.app_settings_value
194 193
195 194 return settings
196 195
197 196 @classmethod
198 197 def get_ldap_settings(cls, cache=False):
199 198 ret = cls.query()\
200 199 .filter(cls.app_settings_name.startswith('ldap_')).all()
201 200 fd = {}
202 201 for row in ret:
203 202 fd.update({row.app_settings_name:row.app_settings_value})
204 203
205 204 return fd
206 205
207 206
208 207 class RhodeCodeUi(Base, BaseModel):
209 208 __tablename__ = 'rhodecode_ui'
210 209 __table_args__ = (UniqueConstraint('ui_key'), {'extend_existing':True})
211 210
212 211 HOOK_REPO_SIZE = 'changegroup.repo_size'
213 212 HOOK_PUSH = 'pretxnchangegroup.push_logger'
214 213 HOOK_PULL = 'preoutgoing.pull_logger'
215 214
216 215 ui_id = Column("ui_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
217 216 ui_section = Column("ui_section", String(255), nullable=True, unique=None, default=None)
218 217 ui_key = Column("ui_key", String(255), nullable=True, unique=None, default=None)
219 218 ui_value = Column("ui_value", String(255), nullable=True, unique=None, default=None)
220 219 ui_active = Column("ui_active", Boolean(), nullable=True, unique=None, default=True)
221 220
222 221
223 222 @classmethod
224 223 def get_by_key(cls, key):
225 224 return cls.query().filter(cls.ui_key == key)
226 225
227 226
228 227 @classmethod
229 228 def get_builtin_hooks(cls):
230 229 q = cls.query()
231 230 q = q.filter(cls.ui_key.in_([cls.HOOK_REPO_SIZE,
232 231 cls.HOOK_PUSH, cls.HOOK_PULL]))
233 232 return q.all()
234 233
235 234 @classmethod
236 235 def get_custom_hooks(cls):
237 236 q = cls.query()
238 237 q = q.filter(~cls.ui_key.in_([cls.HOOK_REPO_SIZE,
239 238 cls.HOOK_PUSH, cls.HOOK_PULL]))
240 239 q = q.filter(cls.ui_section == 'hooks')
241 240 return q.all()
242 241
243 242 @classmethod
244 243 def create_or_update_hook(cls, key, val):
245 244 new_ui = cls.get_by_key(key).scalar() or cls()
246 245 new_ui.ui_section = 'hooks'
247 246 new_ui.ui_active = True
248 247 new_ui.ui_key = key
249 248 new_ui.ui_value = val
250 249
251 250 Session.add(new_ui)
252 251 Session.commit()
253 252
254 253
255 254 class User(Base, BaseModel):
256 255 __tablename__ = 'users'
257 256 __table_args__ = (UniqueConstraint('username'), UniqueConstraint('email'), {'extend_existing':True})
258 257 user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
259 258 username = Column("username", String(255), nullable=True, unique=None, default=None)
260 259 password = Column("password", String(255), nullable=True, unique=None, default=None)
261 260 active = Column("active", Boolean(), nullable=True, unique=None, default=None)
262 261 admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
263 262 name = Column("name", String(255), nullable=True, unique=None, default=None)
264 263 lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
265 264 email = Column("email", String(255), nullable=True, unique=None, default=None)
266 265 last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
267 266 ldap_dn = Column("ldap_dn", String(255), nullable=True, unique=None, default=None)
268 267 api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
269 268
270 269 user_log = relationship('UserLog', cascade='all')
271 270 user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')
272 271
273 272 repositories = relationship('Repository')
274 273 user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
275 274 repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
276 275
277 276 group_member = relationship('UserGroupMember', cascade='all')
278 277
279 278 @property
280 279 def full_contact(self):
281 280 return '%s %s <%s>' % (self.name, self.lastname, self.email)
282 281
283 282 @property
284 283 def short_contact(self):
285 284 return '%s %s' % (self.name, self.lastname)
286 285
287 286 @property
288 287 def is_admin(self):
289 288 return self.admin
290 289
291 290 def __repr__(self):
292 291 try:
293 292 return "<%s('id:%s:%s')>" % (self.__class__.__name__,
294 293 self.user_id, self.username)
295 294 except:
296 295 return self.__class__.__name__
297 296
298 297 @classmethod
299 298 def get_by_username(cls, username, case_insensitive=False):
300 299 if case_insensitive:
301 300 return Session.query(cls).filter(cls.username.ilike(username)).scalar()
302 301 else:
303 302 return Session.query(cls).filter(cls.username == username).scalar()
304 303
305 304 @classmethod
306 305 def get_by_auth_token(cls, auth_token):
307 306 return cls.query().filter(cls.api_key == auth_token).one()
308 307
309 308 def update_lastlogin(self):
310 309 """Update user lastlogin"""
311 310
312 311 self.last_login = datetime.datetime.now()
313 312 Session.add(self)
314 313 Session.commit()
315 314 log.debug('updated user %s lastlogin', self.username)
316 315
317 316 @classmethod
318 317 def create(cls, form_data):
319 318 from rhodecode.lib.auth import get_crypt_password
320 319
321 320 try:
322 321 new_user = cls()
323 322 for k, v in form_data.items():
324 323 if k == 'password':
325 324 v = get_crypt_password(v)
326 325 setattr(new_user, k, v)
327 326
328 327 new_user.api_key = generate_auth_token(form_data['username'])
329 328 Session.add(new_user)
330 329 Session.commit()
331 330 return new_user
332 331 except:
333 332 log.error(traceback.format_exc())
334 333 Session.rollback()
335 334 raise
336 335
337 336 class UserLog(Base, BaseModel):
338 337 __tablename__ = 'user_logs'
339 338 __table_args__ = {'extend_existing':True}
340 339 user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
341 340 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
342 341 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
343 342 repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
344 343 user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
345 344 action = Column("action", String(1200000), nullable=True, unique=None, default=None)
346 345 action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
347 346
348 347 @property
349 348 def action_as_day(self):
350 349 return date(*self.action_date.timetuple()[:3])
351 350
352 351 user = relationship('User')
353 352 repository = relationship('Repository')
354 353
355 354
356 355 class UserGroup(Base, BaseModel):
357 356 __tablename__ = 'users_groups'
358 357 __table_args__ = {'extend_existing':True}
359 358
360 359 users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
361 360 users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
362 361 users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
363 362
364 363 members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
365 364
366 365 def __repr__(self):
367 366 return '<userGroup(%s)>' % (self.users_group_name)
368 367
369 368 @classmethod
370 369 def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
371 370 if case_insensitive:
372 371 gr = cls.query()\
373 372 .filter(cls.users_group_name.ilike(group_name))
374 373 else:
375 374 gr = cls.query()\
376 375 .filter(cls.users_group_name == group_name)
377 376 if cache:
378 377 gr = gr.options(FromCache("sql_cache_short",
379 378 "get_user_%s" % group_name))
380 379 return gr.scalar()
381 380
382 381 @classmethod
383 382 def get(cls, users_group_id, cache=False):
384 383 users_group = cls.query()
385 384 if cache:
386 385 users_group = users_group.options(FromCache("sql_cache_short",
387 386 "get_users_group_%s" % users_group_id))
388 387 return users_group.get(users_group_id)
389 388
390 389 @classmethod
391 390 def create(cls, form_data):
392 391 try:
393 392 new_user_group = cls()
394 393 for k, v in form_data.items():
395 394 setattr(new_user_group, k, v)
396 395
397 396 Session.add(new_user_group)
398 397 Session.commit()
399 398 return new_user_group
400 399 except:
401 400 log.error(traceback.format_exc())
402 401 Session.rollback()
403 402 raise
404 403
405 404 @classmethod
406 405 def update(cls, users_group_id, form_data):
407 406
408 407 try:
409 408 users_group = cls.get(users_group_id, cache=False)
410 409
411 410 for k, v in form_data.items():
412 411 if k == 'users_group_members':
413 412 users_group.members = []
414 413 Session.flush()
415 414 members_list = []
416 415 if v:
417 v = [v] if isinstance(v, compat.string_types) else v
416 v = [v] if isinstance(v, str) else v
418 417 for u_id in set(v):
419 418 member = UserGroupMember(users_group_id, u_id)
420 419 members_list.append(member)
421 420 setattr(users_group, 'members', members_list)
422 421 setattr(users_group, k, v)
423 422
424 423 Session.add(users_group)
425 424 Session.commit()
426 425 except:
427 426 log.error(traceback.format_exc())
428 427 Session.rollback()
429 428 raise
430 429
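The change above swaps `pyramid.compat.string_types` for the built-in `str`, which on Python 3 covers all text strings. A minimal sketch of the normalization pattern used in `UserGroup.update`, assuming a form value that may arrive either as a single id string or as a list of ids (the helper name is illustrative, not part of this changeset):

.. code-block:: python

    def normalize_member_ids(value):
        # a single id posted as a string becomes a one-element list;
        # on Python 2 this check needed compat.string_types instead of str
        if isinstance(value, str):
            value = [value]
        # de-duplicate and sort so the result is deterministic
        return sorted(set(value))

    assert normalize_member_ids('7') == ['7']
    assert normalize_member_ids(['2', '1', '2']) == ['1', '2']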
431 430 @classmethod
432 431 def delete(cls, user_group_id):
433 432 try:
434 433
435 434 # check if this group is not assigned to repo
436 435 assigned_groups = UserGroupRepoToPerm.query()\
437 436 .filter(UserGroupRepoToPerm.users_group_id ==
438 437 user_group_id).all()
439 438
440 439 if assigned_groups:
441 440 raise UserGroupAssignedException(
442 441 'UserGroup assigned to %s' % assigned_groups)
443 442
444 443 users_group = cls.get(user_group_id, cache=False)
445 444 Session.delete(users_group)
446 445 Session.commit()
447 446 except:
448 447 log.error(traceback.format_exc())
449 448 Session.rollback()
450 449 raise
451 450
452 451 class UserGroupMember(Base, BaseModel):
453 452 __tablename__ = 'users_groups_members'
454 453 __table_args__ = {'extend_existing':True}
455 454
456 455 users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
457 456 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
458 457 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
459 458
460 459 user = relationship('User', lazy='joined')
461 460 users_group = relationship('UserGroup')
462 461
463 462 def __init__(self, gr_id='', u_id=''):
464 463 self.users_group_id = gr_id
465 464 self.user_id = u_id
466 465
467 466 @staticmethod
468 467 def add_user_to_group(group, user):
469 468 ugm = UserGroupMember()
470 469 ugm.users_group = group
471 470 ugm.user = user
472 471 Session.add(ugm)
473 472 Session.commit()
474 473 return ugm
475 474
476 475 class Repository(Base, BaseModel):
477 476 __tablename__ = 'repositories'
478 477 __table_args__ = (UniqueConstraint('repo_name'), {'extend_existing':True},)
479 478
480 479 repo_id = Column("repo_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
481 480 repo_name = Column("repo_name", String(255), nullable=False, unique=True, default=None)
482 481 clone_uri = Column("clone_uri", String(255), nullable=True, unique=False, default=None)
483 482 repo_type = Column("repo_type", String(255), nullable=False, unique=False, default='hg')
484 483 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
485 484 private = Column("private", Boolean(), nullable=True, unique=None, default=None)
486 485 enable_statistics = Column("statistics", Boolean(), nullable=True, unique=None, default=True)
487 486 enable_downloads = Column("downloads", Boolean(), nullable=True, unique=None, default=True)
488 487 description = Column("description", String(10000), nullable=True, unique=None, default=None)
489 488 created_on = Column('created_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
490 489
491 490 fork_id = Column("fork_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=False, default=None)
492 491 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=False, default=None)
493 492
494 493
495 494 user = relationship('User')
496 495 fork = relationship('Repository', remote_side=repo_id)
497 496 group = relationship('RepoGroup')
498 497 repo_to_perm = relationship('UserRepoToPerm', cascade='all', order_by='UserRepoToPerm.repo_to_perm_id')
499 498 users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
500 499 stats = relationship('Statistics', cascade='all', uselist=False)
501 500
502 501 followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id', cascade='all')
503 502
504 503 logs = relationship('UserLog', cascade='all')
505 504
506 505 def __repr__(self):
507 506 return "<%s('%s:%s')>" % (self.__class__.__name__,
508 507 self.repo_id, self.repo_name)
509 508
510 509 @classmethod
511 510 def url_sep(cls):
512 511 return '/'
513 512
514 513 @classmethod
515 514 def get_by_repo_name(cls, repo_name):
516 515 q = Session.query(cls).filter(cls.repo_name == repo_name)
517 516 q = q.options(joinedload(Repository.fork))\
518 517 .options(joinedload(Repository.user))\
519 518 .options(joinedload(Repository.group))
520 519 return q.one()
521 520
522 521 @classmethod
523 522 def get_repo_forks(cls, repo_id):
524 523 return cls.query().filter(Repository.fork_id == repo_id)
525 524
526 525 @classmethod
527 526 def base_path(cls):
528 527 """
529 528 Returns base path where all repos are stored
530 529
531 530 :param cls:
532 531 """
533 532 q = Session.query(RhodeCodeUi).filter(RhodeCodeUi.ui_key ==
534 533 cls.url_sep())
535 534 q.options(FromCache("sql_cache_short", "repository_repo_path"))
536 535 return q.one().ui_value
537 536
538 537 @property
539 538 def just_name(self):
540 539 return self.repo_name.split(Repository.url_sep())[-1]
541 540
542 541 @property
543 542 def groups_with_parents(self):
544 543 groups = []
545 544 if self.group is None:
546 545 return groups
547 546
548 547 cur_gr = self.group
549 548 groups.insert(0, cur_gr)
550 549 while 1:
551 550 gr = getattr(cur_gr, 'parent_group', None)
552 551 cur_gr = cur_gr.parent_group
553 552 if gr is None:
554 553 break
555 554 groups.insert(0, gr)
556 555
557 556 return groups
558 557
559 558 @property
560 559 def groups_and_repo(self):
561 560 return self.groups_with_parents, self.just_name
562 561
563 562 @LazyProperty
564 563 def repo_path(self):
565 564 """
566 565 Returns the full base path for this repository, i.e. where it
567 566 actually exists on the filesystem
568 567 """
569 568 q = Session.query(RhodeCodeUi).filter(RhodeCodeUi.ui_key ==
570 569 Repository.url_sep())
571 570 q.options(FromCache("sql_cache_short", "repository_repo_path"))
572 571 return q.one().ui_value
573 572
574 573 @property
575 574 def repo_full_path(self):
576 575 p = [self.repo_path]
577 576 # we need to split the name by / since this is how we store the
578 577 # names in the database, but that eventually needs to be converted
579 578 # into a valid system path
580 579 p += self.repo_name.split(Repository.url_sep())
581 580 return os.path.join(*p)
582 581
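To make the comment above concrete, `repo_full_path` joins the storage root kept in `RhodeCodeUi` with the `/`-separated repository name; a small sketch with made-up paths:

.. code-block:: python

    import os

    repo_path = '/srv/repos'                  # hypothetical storage root (from RhodeCodeUi)
    repo_name = 'mygroup/subgroup/myrepo'     # names are stored with '/' separators

    parts = [repo_path] + repo_name.split('/')
    print(os.path.join(*parts))               # /srv/repos/mygroup/subgroup/myrepo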
583 582 def get_new_name(self, repo_name):
584 583 """
585 584 returns new full repository name based on assigned group and new name
586 585
587 586 :param repo_name:
588 587 """
589 588 path_prefix = self.group.full_path_splitted if self.group else []
590 589 return Repository.url_sep().join(path_prefix + [repo_name])
591 590
592 591 @property
593 592 def _config(self):
594 593 """
595 594 Returns db based config object.
596 595 """
597 596 from rhodecode.lib.utils import make_db_config
598 597 return make_db_config(clear_session=False)
599 598
600 599 @classmethod
601 600 def is_valid(cls, repo_name):
602 601 """
603 602 returns True if given repo name is a valid filesystem repository
604 603
605 604 :param cls:
606 605 :param repo_name:
607 606 """
608 607 from rhodecode.lib.utils import is_valid_repo
609 608
610 609 return is_valid_repo(repo_name, cls.base_path())
611 610
612 611
613 612 #==========================================================================
614 613 # SCM PROPERTIES
615 614 #==========================================================================
616 615
617 616 def get_commit(self, rev):
618 617 return get_commit_safe(self.scm_instance, rev)
619 618
620 619 @property
621 620 def tip(self):
622 621 return self.get_commit('tip')
623 622
624 623 @property
625 624 def author(self):
626 625 return self.tip.author
627 626
628 627 @property
629 628 def last_change(self):
630 629 return self.scm_instance.last_change
631 630
632 631 #==========================================================================
633 632 # SCM CACHE INSTANCE
634 633 #==========================================================================
635 634
636 635 @property
637 636 def invalidate(self):
638 637 return CacheInvalidation.invalidate(self.repo_name)
639 638
640 639 def set_invalidate(self):
641 640 """
642 641 set a cache for invalidation for this instance
643 642 """
644 643 CacheInvalidation.set_invalidate(self.repo_name)
645 644
646 645 @LazyProperty
647 646 def scm_instance(self):
648 647 return self.__get_instance()
649 648
650 649 @property
651 650 def scm_instance_cached(self):
652 651 return self.__get_instance()
653 652
654 653 def __get_instance(self):
655 654
656 655 repo_full_path = self.repo_full_path
657 656
658 657 try:
659 658 alias = get_scm(repo_full_path)[0]
660 659 log.debug('Creating instance of %s repository', alias)
661 660 backend = get_backend(alias)
662 661 except VCSError:
663 662 log.error(traceback.format_exc())
664 663 log.error('Perhaps this repository is in the db and not on the '
665 664 'filesystem. Run rescan repositories with the '
666 665 '"destroy old data" option from the admin panel')
667 666 return
668 667
669 668 if alias == 'hg':
670 669
671 670 repo = backend(safe_str(repo_full_path), create=False,
672 671 config=self._config)
673 672
674 673 else:
675 674 repo = backend(repo_full_path, create=False)
676 675
677 676 return repo
678 677
679 678
680 679 class Group(Base, BaseModel):
681 680 __tablename__ = 'groups'
682 681 __table_args__ = (UniqueConstraint('group_name', 'group_parent_id'),
683 682 {'extend_existing':True},)
684 683 __mapper_args__ = {'order_by':'group_name'}
685 684
686 685 group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
687 686 group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
688 687 group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
689 688 group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
690 689
691 690 parent_group = relationship('Group', remote_side=group_id)
692 691
693 692 def __init__(self, group_name='', parent_group=None):
694 693 self.group_name = group_name
695 694 self.parent_group = parent_group
696 695
697 696 def __repr__(self):
698 697 return "<%s('%s:%s')>" % (self.__class__.__name__, self.group_id,
699 698 self.group_name)
700 699
701 700 @classmethod
702 701 def url_sep(cls):
703 702 return '/'
704 703
705 704 @classmethod
706 705 def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
707 706 if case_insensitive:
708 707 gr = cls.query()\
709 708 .filter(cls.group_name.ilike(group_name))
710 709 else:
711 710 gr = cls.query()\
712 711 .filter(cls.group_name == group_name)
713 712 if cache:
714 713 gr = gr.options(FromCache("sql_cache_short",
715 714 "get_group_%s" % group_name))
716 715 return gr.scalar()
717 716
718 717 @property
719 718 def parents(self):
720 719 parents_recursion_limit = 5
721 720 groups = []
722 721 if self.parent_group is None:
723 722 return groups
724 723 cur_gr = self.parent_group
725 724 groups.insert(0, cur_gr)
726 725 cnt = 0
727 726 while 1:
728 727 cnt += 1
729 728 gr = getattr(cur_gr, 'parent_group', None)
730 729 cur_gr = cur_gr.parent_group
731 730 if gr is None:
732 731 break
733 732 if cnt == parents_recursion_limit:
734 733 # this will prevent accidental infinite loops
735 734 log.error('group nested more than %s',
736 735 parents_recursion_limit)
737 736 break
738 737
739 738 groups.insert(0, gr)
740 739 return groups
741 740
742 741 @property
743 742 def children(self):
744 743 return Group.query().filter(Group.parent_group == self)
745 744
746 745 @property
747 746 def name(self):
748 747 return self.group_name.split(Group.url_sep())[-1]
749 748
750 749 @property
751 750 def full_path(self):
752 751 return self.group_name
753 752
754 753 @property
755 754 def full_path_splitted(self):
756 755 return self.group_name.split(Group.url_sep())
757 756
758 757 @property
759 758 def repositories(self):
760 759 return Repository.query().filter(Repository.group == self)
761 760
762 761 @property
763 762 def repositories_recursive_count(self):
764 763 cnt = self.repositories.count()
765 764
766 765 def children_count(group):
767 766 cnt = 0
768 767 for child in group.children:
769 768 cnt += child.repositories.count()
770 769 cnt += children_count(child)
771 770 return cnt
772 771
773 772 return cnt + children_count(self)
774 773
775 774
776 775 def get_new_name(self, group_name):
777 776 """
778 777 returns new full group name based on parent and new name
779 778
780 779 :param group_name:
781 780 """
782 781 path_prefix = (self.parent_group.full_path_splitted if
783 782 self.parent_group else [])
784 783 return Group.url_sep().join(path_prefix + [group_name])
785 784
786 785
787 786 class Permission(Base, BaseModel):
788 787 __tablename__ = 'permissions'
789 788 __table_args__ = {'extend_existing':True}
790 789 permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
791 790 permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
792 791 permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)
793 792
794 793 def __repr__(self):
795 794 return "<%s('%s:%s')>" % (self.__class__.__name__,
796 795 self.permission_id, self.permission_name)
797 796
798 797 @classmethod
799 798 def get_by_key(cls, key):
800 799 return cls.query().filter(cls.permission_name == key).scalar()
801 800
802 801 class UserRepoToPerm(Base, BaseModel):
803 802 __tablename__ = 'repo_to_perm'
804 803 __table_args__ = (UniqueConstraint('user_id', 'repository_id'), {'extend_existing':True})
805 804 repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
806 805 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
807 806 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
808 807 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
809 808
810 809 user = relationship('User')
811 810 permission = relationship('Permission')
812 811 repository = relationship('Repository')
813 812
814 813 class UserToPerm(Base, BaseModel):
815 814 __tablename__ = 'user_to_perm'
816 815 __table_args__ = (UniqueConstraint('user_id', 'permission_id'), {'extend_existing':True})
817 816 user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
818 817 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
819 818 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
820 819
821 820 user = relationship('User')
822 821 permission = relationship('Permission')
823 822
824 823 @classmethod
825 824 def has_perm(cls, user_id, perm):
826 825 if not isinstance(perm, Permission):
827 826 raise Exception('perm needs to be an instance of Permission class')
828 827
829 828 return cls.query().filter(cls.user_id == user_id)\
830 829 .filter(cls.permission == perm).scalar() is not None
831 830
832 831 @classmethod
833 832 def grant_perm(cls, user_id, perm):
834 833 if not isinstance(perm, Permission):
835 834 raise Exception('perm needs to be an instance of Permission class')
836 835
837 836 new = cls()
838 837 new.user_id = user_id
839 838 new.permission = perm
840 839 try:
841 840 Session.add(new)
842 841 Session.commit()
843 842 except:
844 843 Session.rollback()
845 844
846 845
847 846 @classmethod
848 847 def revoke_perm(cls, user_id, perm):
849 848 if not isinstance(perm, Permission):
850 849 raise Exception('perm needs to be an instance of Permission class')
851 850
852 851 try:
853 852 cls.query().filter(cls.user_id == user_id) \
854 853 .filter(cls.permission == perm).delete()
855 854 Session.commit()
856 855 except:
857 856 Session.rollback()
858 857
859 858 class UserGroupRepoToPerm(Base, BaseModel):
860 859 __tablename__ = 'users_group_repo_to_perm'
861 860 __table_args__ = (UniqueConstraint('repository_id', 'users_group_id', 'permission_id'), {'extend_existing':True})
862 861 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
863 862 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
864 863 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
865 864 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
866 865
867 866 users_group = relationship('UserGroup')
868 867 permission = relationship('Permission')
869 868 repository = relationship('Repository')
870 869
871 870 def __repr__(self):
872 871 return '<userGroup:%s => %s >' % (self.users_group, self.repository)
873 872
874 873 class UserGroupToPerm(Base, BaseModel):
875 874 __tablename__ = 'users_group_to_perm'
876 875 __table_args__ = {'extend_existing':True}
877 876 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
878 877 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
879 878 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
880 879
881 880 users_group = relationship('UserGroup')
882 881 permission = relationship('Permission')
883 882
884 883
885 884 @classmethod
886 885 def has_perm(cls, users_group_id, perm):
887 886 if not isinstance(perm, Permission):
888 887 raise Exception('perm needs to be an instance of Permission class')
889 888
890 889 return cls.query().filter(cls.users_group_id ==
891 890 users_group_id)\
892 891 .filter(cls.permission == perm)\
893 892 .scalar() is not None
894 893
895 894 @classmethod
896 895 def grant_perm(cls, users_group_id, perm):
897 896 if not isinstance(perm, Permission):
898 897 raise Exception('perm needs to be an instance of Permission class')
899 898
900 899 new = cls()
901 900 new.users_group_id = users_group_id
902 901 new.permission = perm
903 902 try:
904 903 Session.add(new)
905 904 Session.commit()
906 905 except:
907 906 Session.rollback()
908 907
909 908
910 909 @classmethod
911 910 def revoke_perm(cls, users_group_id, perm):
912 911 if not isinstance(perm, Permission):
913 912 raise Exception('perm needs to be an instance of Permission class')
914 913
915 914 try:
916 915 cls.query().filter(cls.users_group_id == users_group_id) \
917 916 .filter(cls.permission == perm).delete()
918 917 Session.commit()
919 918 except:
920 919 Session.rollback()
921 920
922 921
923 922 class UserRepoGroupToPerm(Base, BaseModel):
924 923 __tablename__ = 'group_to_perm'
925 924 __table_args__ = (UniqueConstraint('group_id', 'permission_id'), {'extend_existing':True})
926 925
927 926 group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
928 927 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
929 928 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
930 929 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
931 930
932 931 user = relationship('User')
933 932 permission = relationship('Permission')
934 933 group = relationship('RepoGroup')
935 934
936 935 class Statistics(Base, BaseModel):
937 936 __tablename__ = 'statistics'
938 937 __table_args__ = (UniqueConstraint('repository_id'), {'extend_existing':True})
939 938 stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
940 939 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
941 940 stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
942 941 commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)#JSON data
943 942 commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)#JSON data
944 943 languages = Column("languages", LargeBinary(1000000), nullable=False)#JSON data
945 944
946 945 repository = relationship('Repository', single_parent=True)
947 946
948 947 class UserFollowing(Base, BaseModel):
949 948 __tablename__ = 'user_followings'
950 949 __table_args__ = (UniqueConstraint('user_id', 'follows_repository_id'),
951 950 UniqueConstraint('user_id', 'follows_user_id')
952 951 , {'extend_existing':True})
953 952
954 953 user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
955 954 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
956 955 follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
957 956 follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
958 957 follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
959 958
960 959 user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
961 960
962 961 follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
963 962 follows_repository = relationship('Repository', order_by='Repository.repo_name')
964 963
965 964
966 965 @classmethod
967 966 def get_repo_followers(cls, repo_id):
968 967 return cls.query().filter(cls.follows_repo_id == repo_id)
969 968
970 969 class CacheInvalidation(Base, BaseModel):
971 970 __tablename__ = 'cache_invalidation'
972 971 __table_args__ = (UniqueConstraint('cache_key'), {'extend_existing':True})
973 972 cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
974 973 cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
975 974 cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
976 975 cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
977 976
978 977
979 978 def __init__(self, cache_key, cache_args=''):
980 979 self.cache_key = cache_key
981 980 self.cache_args = cache_args
982 981 self.cache_active = False
983 982
984 983 def __repr__(self):
985 984 return "<%s('%s:%s')>" % (self.__class__.__name__,
986 985 self.cache_id, self.cache_key)
987 986
988 987 @classmethod
989 988 def invalidate(cls, key):
990 989 """
991 990 Returns Invalidation object if the given key should be invalidated,
992 991 None otherwise. `cache_active = False` means that this cache
993 992 state is not valid and needs to be invalidated
994 993
995 994 :param key:
996 995 """
997 996 return cls.query()\
998 997 .filter(CacheInvalidation.cache_key == key)\
999 998 .filter(CacheInvalidation.cache_active == False)\
1000 999 .scalar()
1001 1000
1002 1001 @classmethod
1003 1002 def set_invalidate(cls, key):
1004 1003 """
1005 1004 Mark this Cache key for invalidation
1006 1005
1007 1006 :param key:
1008 1007 """
1009 1008
1010 1009 log.debug('marking %s for invalidation', key)
1011 1010 inv_obj = Session.query(cls)\
1012 1011 .filter(cls.cache_key == key).scalar()
1013 1012 if inv_obj:
1014 1013 inv_obj.cache_active = False
1015 1014 else:
1016 1015 log.debug('cache key not found in invalidation db -> creating one')
1017 1016 inv_obj = CacheInvalidation(key)
1018 1017
1019 1018 try:
1020 1019 Session.add(inv_obj)
1021 1020 Session.commit()
1022 1021 except Exception:
1023 1022 log.error(traceback.format_exc())
1024 1023 Session.rollback()
1025 1024
1026 1025 @classmethod
1027 1026 def set_valid(cls, key):
1028 1027 """
1029 1028 Mark this cache key as active and currently cached
1030 1029
1031 1030 :param key:
1032 1031 """
1033 1032 inv_obj = Session.query(CacheInvalidation)\
1034 1033 .filter(CacheInvalidation.cache_key == key).scalar()
1035 1034 inv_obj.cache_active = True
1036 1035 Session.add(inv_obj)
1037 1036 Session.commit()
1038 1037
1039 1038 class DbMigrateVersion(Base, BaseModel):
1040 1039 __tablename__ = 'db_migrate_version'
1041 1040 __table_args__ = {'extend_existing':True}
1042 1041 repository_id = Column('repository_id', String(250), primary_key=True)
1043 1042 repository_path = Column('repository_path', Text)
1044 1043 version = Column('version', Integer)
@@ -1,4333 +1,4332 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Database Models for RhodeCode Enterprise
23 23 """
24 24
25 25 import re
26 26 import os
27 27 import time
28 28 import hashlib
29 29 import logging
30 30 import datetime
31 31 import warnings
32 32 import ipaddress
33 33 import functools
34 34 import traceback
35 35 import collections
36 36
37 37 from sqlalchemy import (
38 38 or_, and_, not_, func, TypeDecorator, event,
39 39 Index, Sequence, UniqueConstraint, ForeignKey, CheckConstraint, Column,
40 40 Boolean, String, Unicode, UnicodeText, DateTime, Integer, LargeBinary,
41 41 Text, Float, PickleType)
42 42 from sqlalchemy.sql.expression import true, false
43 43 from sqlalchemy.sql.functions import coalesce, count # pragma: no cover
44 44 from sqlalchemy.orm import (
45 45 relationship, joinedload, class_mapper, validates, aliased)
46 46 from sqlalchemy.ext.declarative import declared_attr
47 47 from sqlalchemy.ext.hybrid import hybrid_property
48 48 from sqlalchemy.exc import IntegrityError # pragma: no cover
49 49 from sqlalchemy.dialects.mysql import LONGTEXT
50 50 from beaker.cache import cache_region
51 51 from zope.cachedescriptors.property import Lazy as LazyProperty
52 from pyramid import compat
53 52 from pyramid.threadlocal import get_current_request
54 53
55 54 from rhodecode.translation import _
56 55 from rhodecode.lib.vcs import get_vcs_instance
57 56 from rhodecode.lib.vcs.backends.base import EmptyCommit, Reference
58 57 from rhodecode.lib.utils2 import (
59 58 str2bool, safe_str, get_commit_safe, safe_unicode, md5_safe,
60 59 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict,
61 60 glob2re, StrictAttributeDict, cleaned_uri)
62 61 from rhodecode.lib.jsonalchemy import MutationObj, MutationList, JsonType, \
63 62 JsonRaw
64 63 from rhodecode.lib.ext_json import json
65 64 from rhodecode.lib.caching_query import FromCache
66 65 from rhodecode.lib.encrypt import AESCipher
67 66
68 67 from rhodecode.model.meta import Base, Session
69 68
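With `from pyramid import compat` dropped from the import block above, string-type checks in this module have to rely on Python 3 built-ins instead (the corresponding hunks are not visible in this truncated diff). The general shape of the substitution, as a sketch:

.. code-block:: python

    value = 'some text'

    # Before (Python 2/3 compat shim):
    #     from pyramid import compat
    #     if isinstance(value, compat.string_types):
    #         value = [value]
    # After (Python 3 only) - the built-in str covers all text strings:
    if isinstance(value, str):
        value = [value]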
70 69 URL_SEP = '/'
71 70 log = logging.getLogger(__name__)
72 71
73 72 # =============================================================================
74 73 # BASE CLASSES
75 74 # =============================================================================
76 75
77 76 # this is propagated from .ini file rhodecode.encrypted_values.secret or
78 77 # beaker.session.secret if first is not set.
79 78 # and initialized at environment.py
80 79 ENCRYPTION_KEY = None
81 80
82 81 # used to sort permissions by types, '#' used here is not allowed to be in
83 82 # usernames, and it's very early in sorted string.printable table.
84 83 PERMISSION_TYPE_SORT = {
85 84 'admin': '####',
86 85 'write': '###',
87 86 'read': '##',
88 87 'none': '#',
89 88 }
90 89
91 90
92 91 def display_user_sort(obj):
93 92 """
94 93 Sort function used to sort permissions in .permissions() function of
95 94 Repository, RepoGroup, UserGroup. Also it put the default user in front
96 95 of all other resources
97 96 """
98 97
99 98 if obj.username == User.DEFAULT_USER:
100 99 return '#####'
101 100 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
102 101 return prefix + obj.username
103 102
104 103
105 104 def display_user_group_sort(obj):
106 105 """
107 106 Sort function used to sort permissions in .permissions() function of
108 107 Repository, RepoGroup, UserGroup. Also it put the default user in front
109 108 of all other resources
110 109 """
111 110
112 111 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
113 112 return prefix + obj.users_group_name
114 113
115 114
116 115 def _hash_key(k):
117 116 return md5_safe(k)
118 117
119 118
120 119 def in_filter_generator(qry, items, limit=500):
121 120 """
122 121 Splits IN() into multiple with OR
123 122 e.g.::
124 123 cnt = Repository.query().filter(
125 124 or_(
126 125 *in_filter_generator(Repository.repo_id, range(100000))
127 126 )).count()
128 127 """
129 128 if not items:
130 129 # empty list will cause empty query which might cause security issues
131 130 # this can lead to hidden unpleasant results
132 131 items = [-1]
133 132
134 133 parts = []
135 134 for chunk in range(0, len(items), limit):
136 135 parts.append(
137 136 qry.in_(items[chunk: chunk + limit])
138 137 )
139 138
140 139 return parts
141 140
142 141
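A self-contained illustration of the chunking behaviour documented above, using a throwaway SQLAlchemy table (the table and column names are made up for the example):

.. code-block:: python

    from sqlalchemy import Column, Integer, MetaData, Table, or_

    repos = Table('repositories', MetaData(), Column('repo_id', Integer))

    # 1200 ids with the default limit of 500 -> three IN() clauses OR-ed together
    ids = list(range(1200))
    parts = in_filter_generator(repos.c.repo_id, ids, limit=500)
    print(len(parts))        # 3
    clause = or_(*parts)     # usable inside .filter(...)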
143 142 class EncryptedTextValue(TypeDecorator):
144 143 """
145 144 Special column for encrypted long text data, use like::
146 145
147 146 value = Column("encrypted_value", EncryptedValue(), nullable=False)
148 147
149 148 This column is intelligent: if the value is in unencrypted form it returns
150 149 the unencrypted form, but on save it always encrypts
151 150 """
152 151 impl = Text
153 152
154 153 def process_bind_param(self, value, dialect):
155 154 if not value:
156 155 return value
157 156 if value.startswith('enc$aes$') or value.startswith('enc$aes_hmac$'):
158 157 # protect against double encrypting if someone manually starts
159 158 # doing
160 159 raise ValueError('value needs to be in unencrypted format, ie. '
161 160 'not starting with enc$aes')
162 161 return 'enc$aes_hmac$%s' % AESCipher(
163 162 ENCRYPTION_KEY, hmac=True).encrypt(value)
164 163
165 164 def process_result_value(self, value, dialect):
166 165 import rhodecode
167 166
168 167 if not value:
169 168 return value
170 169
171 170 parts = value.split('$', 3)
172 171 if not len(parts) == 3:
173 172 # probably not encrypted values
174 173 return value
175 174 else:
176 175 if parts[0] != 'enc':
177 176 # parts ok but without our header ?
178 177 return value
179 178 enc_strict_mode = str2bool(rhodecode.CONFIG.get(
180 179 'rhodecode.encrypted_values.strict') or True)
181 180 # at that stage we know it's our encryption
182 181 if parts[1] == 'aes':
183 182 decrypted_data = AESCipher(ENCRYPTION_KEY).decrypt(parts[2])
184 183 elif parts[1] == 'aes_hmac':
185 184 decrypted_data = AESCipher(
186 185 ENCRYPTION_KEY, hmac=True,
187 186 strict_verification=enc_strict_mode).decrypt(parts[2])
188 187 else:
189 188 raise ValueError(
190 189 'Encryption type part is wrong, must be `aes` '
191 190 'or `aes_hmac`, got `%s` instead' % (parts[1]))
192 191 return decrypted_data
193 192
194 193
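For orientation, values written by `EncryptedTextValue` carry a small `enc$<algo>$` header that `process_result_value` above splits apart; a sketch with a placeholder payload (not real ciphertext):

.. code-block:: python

    stored = 'enc$aes_hmac$PLACEHOLDER-BASE64-PAYLOAD'

    parts = stored.split('$', 3)          # ['enc', 'aes_hmac', 'PLACEHOLDER-BASE64-PAYLOAD']
    assert len(parts) == 3 and parts[0] == 'enc'
    algo, payload = parts[1], parts[2]
    # algo == 'aes'      -> AESCipher(ENCRYPTION_KEY).decrypt(payload)
    # algo == 'aes_hmac' -> AESCipher(ENCRYPTION_KEY, hmac=True, ...).decrypt(payload)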
195 194 class BaseModel(object):
196 195 """
197 196 Base Model for all classes
198 197 """
199 198
200 199 @classmethod
201 200 def _get_keys(cls):
202 201 """return column names for this model """
203 202 return class_mapper(cls).c.keys()
204 203
205 204 def get_dict(self):
206 205 """
207 206 return dict with keys and values corresponding
208 207 to this model data """
209 208
210 209 d = {}
211 210 for k in self._get_keys():
212 211 d[k] = getattr(self, k)
213 212
214 213 # also use __json__() if present to get additional fields
215 214 _json_attr = getattr(self, '__json__', None)
216 215 if _json_attr:
217 216 # update with attributes from __json__
218 217 if callable(_json_attr):
219 218 _json_attr = _json_attr()
220 219 for k, val in _json_attr.iteritems():
221 220 d[k] = val
222 221 return d
223 222
224 223 def get_appstruct(self):
225 224 """return list with keys and values tuples corresponding
226 225 to this model data """
227 226
228 227 lst = []
229 228 for k in self._get_keys():
230 229 lst.append((k, getattr(self, k),))
231 230 return lst
232 231
233 232 def populate_obj(self, populate_dict):
234 233 """populate model with data from given populate_dict"""
235 234
236 235 for k in self._get_keys():
237 236 if k in populate_dict:
238 237 setattr(self, k, populate_dict[k])
239 238
240 239 @classmethod
241 240 def query(cls):
242 241 return Session().query(cls)
243 242
244 243 @classmethod
245 244 def get(cls, id_):
246 245 if id_:
247 246 return cls.query().get(id_)
248 247
249 248 @classmethod
250 249 def get_or_404(cls, id_):
251 250 from pyramid.httpexceptions import HTTPNotFound
252 251
253 252 try:
254 253 id_ = int(id_)
255 254 except (TypeError, ValueError):
256 255 raise HTTPNotFound()
257 256
258 257 res = cls.query().get(id_)
259 258 if not res:
260 259 raise HTTPNotFound()
261 260 return res
262 261
263 262 @classmethod
264 263 def getAll(cls):
265 264 # deprecated and left for backward compatibility
266 265 return cls.get_all()
267 266
268 267 @classmethod
269 268 def get_all(cls):
270 269 return cls.query().all()
271 270
272 271 @classmethod
273 272 def delete(cls, id_):
274 273 obj = cls.query().get(id_)
275 274 Session().delete(obj)
276 275
277 276 @classmethod
278 277 def identity_cache(cls, session, attr_name, value):
279 278 exist_in_session = []
280 279 for (item_cls, pkey), instance in session.identity_map.items():
281 280 if cls == item_cls and getattr(instance, attr_name) == value:
282 281 exist_in_session.append(instance)
283 282 if exist_in_session:
284 283 if len(exist_in_session) == 1:
285 284 return exist_in_session[0]
286 285 log.exception(
287 286 'multiple objects with attr %s and '
288 287 'value %s found with same name: %r',
289 288 attr_name, value, exist_in_session)
290 289
291 290 def __repr__(self):
292 291 if hasattr(self, '__unicode__'):
293 292 # python repr needs to return str
294 293 try:
295 294 return safe_str(self.__unicode__())
296 295 except UnicodeDecodeError:
297 296 pass
298 297 return '<DB:%s>' % (self.__class__.__name__)
299 298
300 299
301 300 class RhodeCodeSetting(Base, BaseModel):
302 301 __tablename__ = 'rhodecode_settings'
303 302 __table_args__ = (
304 303 UniqueConstraint('app_settings_name'),
305 304 {'extend_existing': True, 'mysql_engine': 'InnoDB',
306 305 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
307 306 )
308 307
309 308 SETTINGS_TYPES = {
310 309 'str': safe_str,
311 310 'int': safe_int,
312 311 'unicode': safe_unicode,
313 312 'bool': str2bool,
314 313 'list': functools.partial(aslist, sep=',')
315 314 }
316 315 DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
317 316 GLOBAL_CONF_KEY = 'app_settings'
318 317
319 318 app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
320 319 app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
321 320 _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
322 321 _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)
323 322
324 323 def __init__(self, key='', val='', type='unicode'):
325 324 self.app_settings_name = key
326 325 self.app_settings_type = type
327 326 self.app_settings_value = val
328 327
329 328 @validates('_app_settings_value')
330 329 def validate_settings_value(self, key, val):
331 330 assert type(val) == unicode
332 331 return val
333 332
334 333 @hybrid_property
335 334 def app_settings_value(self):
336 335 v = self._app_settings_value
337 336 _type = self.app_settings_type
338 337 if _type:
339 338 _type = self.app_settings_type.split('.')[0]
340 339 # decode the encrypted value
341 340 if 'encrypted' in self.app_settings_type:
342 341 cipher = EncryptedTextValue()
343 342 v = safe_unicode(cipher.process_result_value(v, None))
344 343
345 344 converter = self.SETTINGS_TYPES.get(_type) or \
346 345 self.SETTINGS_TYPES['unicode']
347 346 return converter(v)
348 347
349 348 @app_settings_value.setter
350 349 def app_settings_value(self, val):
351 350 """
352 351 Setter that will always make sure we use unicode in app_settings_value
353 352
354 353 :param val:
355 354 """
356 355 val = safe_unicode(val)
357 356 # encode the encrypted value
358 357 if 'encrypted' in self.app_settings_type:
359 358 cipher = EncryptedTextValue()
360 359 val = safe_unicode(cipher.process_bind_param(val, None))
361 360 self._app_settings_value = val
362 361
363 362 @hybrid_property
364 363 def app_settings_type(self):
365 364 return self._app_settings_type
366 365
367 366 @app_settings_type.setter
368 367 def app_settings_type(self, val):
369 368 if val.split('.')[0] not in self.SETTINGS_TYPES:
370 369 raise Exception('type must be one of %s got %s'
371 370 % (self.SETTINGS_TYPES.keys(), val))
372 371 self._app_settings_type = val
373 372
374 373 def __unicode__(self):
375 374 return u"<%s('%s:%s[%s]')>" % (
376 375 self.__class__.__name__,
377 376 self.app_settings_name, self.app_settings_value,
378 377 self.app_settings_type
379 378 )
380 379
381 380
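To make the typed-settings round trip above concrete: the stored value is always a string, and `SETTINGS_TYPES` picks the converter on read. A standalone sketch with simplified converters and made-up setting names:

.. code-block:: python

    def str2bool(v):
        # simplified stand-in for rhodecode.lib.utils2.str2bool
        return str(v).lower() in ('true', 'yes', 'on', '1')

    SETTINGS_TYPES = {
        'unicode': str,
        'int': int,
        'bool': str2bool,
        'list': lambda v: [x.strip() for x in v.split(',') if x.strip()],
    }

    stored = [('title', 'My RhodeCode', 'unicode'),
              ('session_timeout', '3600', 'int'),
              ('captcha_active', 'False', 'bool'),
              ('allowed_exts', 'py, js, css', 'list')]

    for name, raw, type_ in stored:
        print(name, SETTINGS_TYPES[type_](raw))
    # -> 'My RhodeCode', 3600, False, ['py', 'js', 'css']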
382 381 class RhodeCodeUi(Base, BaseModel):
383 382 __tablename__ = 'rhodecode_ui'
384 383 __table_args__ = (
385 384 UniqueConstraint('ui_key'),
386 385 {'extend_existing': True, 'mysql_engine': 'InnoDB',
387 386 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
388 387 )
389 388
390 389 HOOK_REPO_SIZE = 'changegroup.repo_size'
391 390 # HG
392 391 HOOK_PRE_PULL = 'preoutgoing.pre_pull'
393 392 HOOK_PULL = 'outgoing.pull_logger'
394 393 HOOK_PRE_PUSH = 'prechangegroup.pre_push'
395 394 HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push'
396 395 HOOK_PUSH = 'changegroup.push_logger'
397 396 HOOK_PUSH_KEY = 'pushkey.key_push'
398 397
399 398 # TODO: johbo: Unify way how hooks are configured for git and hg,
400 399 # git part is currently hardcoded.
401 400
402 401 # SVN PATTERNS
403 402 SVN_BRANCH_ID = 'vcs_svn_branch'
404 403 SVN_TAG_ID = 'vcs_svn_tag'
405 404
406 405 ui_id = Column(
407 406 "ui_id", Integer(), nullable=False, unique=True, default=None,
408 407 primary_key=True)
409 408 ui_section = Column(
410 409 "ui_section", String(255), nullable=True, unique=None, default=None)
411 410 ui_key = Column(
412 411 "ui_key", String(255), nullable=True, unique=None, default=None)
413 412 ui_value = Column(
414 413 "ui_value", String(255), nullable=True, unique=None, default=None)
415 414 ui_active = Column(
416 415 "ui_active", Boolean(), nullable=True, unique=None, default=True)
417 416
418 417 def __repr__(self):
419 418 return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.ui_section,
420 419 self.ui_key, self.ui_value)
421 420
422 421
423 422 class RepoRhodeCodeSetting(Base, BaseModel):
424 423 __tablename__ = 'repo_rhodecode_settings'
425 424 __table_args__ = (
426 425 UniqueConstraint(
427 426 'app_settings_name', 'repository_id',
428 427 name='uq_repo_rhodecode_setting_name_repo_id'),
429 428 {'extend_existing': True, 'mysql_engine': 'InnoDB',
430 429 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
431 430 )
432 431
433 432 repository_id = Column(
434 433 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
435 434 nullable=False)
436 435 app_settings_id = Column(
437 436 "app_settings_id", Integer(), nullable=False, unique=True,
438 437 default=None, primary_key=True)
439 438 app_settings_name = Column(
440 439 "app_settings_name", String(255), nullable=True, unique=None,
441 440 default=None)
442 441 _app_settings_value = Column(
443 442 "app_settings_value", String(4096), nullable=True, unique=None,
444 443 default=None)
445 444 _app_settings_type = Column(
446 445 "app_settings_type", String(255), nullable=True, unique=None,
447 446 default=None)
448 447
449 448 repository = relationship('Repository')
450 449
451 450 def __init__(self, repository_id, key='', val='', type='unicode'):
452 451 self.repository_id = repository_id
453 452 self.app_settings_name = key
454 453 self.app_settings_type = type
455 454 self.app_settings_value = val
456 455
457 456 @validates('_app_settings_value')
458 457 def validate_settings_value(self, key, val):
459 458 assert type(val) == unicode
460 459 return val
461 460
462 461 @hybrid_property
463 462 def app_settings_value(self):
464 463 v = self._app_settings_value
465 464 type_ = self.app_settings_type
466 465 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
467 466 converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
468 467 return converter(v)
469 468
470 469 @app_settings_value.setter
471 470 def app_settings_value(self, val):
472 471 """
473 472 Setter that will always make sure we use unicode in app_settings_value
474 473
475 474 :param val:
476 475 """
477 476 self._app_settings_value = safe_unicode(val)
478 477
479 478 @hybrid_property
480 479 def app_settings_type(self):
481 480 return self._app_settings_type
482 481
483 482 @app_settings_type.setter
484 483 def app_settings_type(self, val):
485 484 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
486 485 if val not in SETTINGS_TYPES:
487 486 raise Exception('type must be one of %s got %s'
488 487 % (SETTINGS_TYPES.keys(), val))
489 488 self._app_settings_type = val
490 489
491 490 def __unicode__(self):
492 491 return u"<%s('%s:%s:%s[%s]')>" % (
493 492 self.__class__.__name__, self.repository.repo_name,
494 493 self.app_settings_name, self.app_settings_value,
495 494 self.app_settings_type
496 495 )
497 496
498 497
499 498 class RepoRhodeCodeUi(Base, BaseModel):
500 499 __tablename__ = 'repo_rhodecode_ui'
501 500 __table_args__ = (
502 501 UniqueConstraint(
503 502 'repository_id', 'ui_section', 'ui_key',
504 503 name='uq_repo_rhodecode_ui_repository_id_section_key'),
505 504 {'extend_existing': True, 'mysql_engine': 'InnoDB',
506 505 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
507 506 )
508 507
509 508 repository_id = Column(
510 509 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
511 510 nullable=False)
512 511 ui_id = Column(
513 512 "ui_id", Integer(), nullable=False, unique=True, default=None,
514 513 primary_key=True)
515 514 ui_section = Column(
516 515 "ui_section", String(255), nullable=True, unique=None, default=None)
517 516 ui_key = Column(
518 517 "ui_key", String(255), nullable=True, unique=None, default=None)
519 518 ui_value = Column(
520 519 "ui_value", String(255), nullable=True, unique=None, default=None)
521 520 ui_active = Column(
522 521 "ui_active", Boolean(), nullable=True, unique=None, default=True)
523 522
524 523 repository = relationship('Repository')
525 524
526 525 def __repr__(self):
527 526 return '<%s[%s:%s]%s=>%s]>' % (
528 527 self.__class__.__name__, self.repository.repo_name,
529 528 self.ui_section, self.ui_key, self.ui_value)
530 529
531 530
532 531 class User(Base, BaseModel):
533 532 __tablename__ = 'users'
534 533 __table_args__ = (
535 534 UniqueConstraint('username'), UniqueConstraint('email'),
536 535 Index('u_username_idx', 'username'),
537 536 Index('u_email_idx', 'email'),
538 537 {'extend_existing': True, 'mysql_engine': 'InnoDB',
539 538 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
540 539 )
541 540 DEFAULT_USER = 'default'
542 541 DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org'
543 542 DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
544 543
545 544 user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
546 545 username = Column("username", String(255), nullable=True, unique=None, default=None)
547 546 password = Column("password", String(255), nullable=True, unique=None, default=None)
548 547 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
549 548 admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
550 549 name = Column("firstname", String(255), nullable=True, unique=None, default=None)
551 550 lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
552 551 _email = Column("email", String(255), nullable=True, unique=None, default=None)
553 552 last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
554 553 last_activity = Column('last_activity', DateTime(timezone=False), nullable=True, unique=None, default=None)
555 554
556 555 extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None)
557 556 extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None)
558 557 _api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
559 558 inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
560 559 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
561 560 _user_data = Column("user_data", LargeBinary(), nullable=True) # JSON data
562 561
563 562 user_log = relationship('UserLog')
564 563 user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')
565 564
566 565 repositories = relationship('Repository')
567 566 repository_groups = relationship('RepoGroup')
568 567 user_groups = relationship('UserGroup')
569 568
570 569 user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
571 570 followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')
572 571
573 572 repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
574 573 repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all')
575 574 user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all')
576 575
577 576 group_member = relationship('UserGroupMember', cascade='all')
578 577
579 578 notifications = relationship('UserNotification', cascade='all')
580 579 # notifications assigned to this user
581 580 user_created_notifications = relationship('Notification', cascade='all')
582 581 # comments created by this user
583 582 user_comments = relationship('ChangesetComment', cascade='all')
584 583 # user profile extra info
585 584 user_emails = relationship('UserEmailMap', cascade='all')
586 585 user_ip_map = relationship('UserIpMap', cascade='all')
587 586 user_auth_tokens = relationship('UserApiKeys', cascade='all')
588 587 user_ssh_keys = relationship('UserSshKeys', cascade='all')
589 588
590 589 # gists
591 590 user_gists = relationship('Gist', cascade='all')
592 591 # user pull requests
593 592 user_pull_requests = relationship('PullRequest', cascade='all')
594 593 # external identities
595 594 extenal_identities = relationship(
596 595 'ExternalIdentity',
597 596 primaryjoin="User.user_id==ExternalIdentity.local_user_id",
598 597 cascade='all')
599 598 # review rules
600 599 user_review_rules = relationship('RepoReviewRuleUser', cascade='all')
601 600
602 601 def __unicode__(self):
603 602 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
604 603 self.user_id, self.username)
605 604
606 605 @hybrid_property
607 606 def email(self):
608 607 return self._email
609 608
610 609 @email.setter
611 610 def email(self, val):
612 611 self._email = val.lower() if val else None
613 612
614 613 @hybrid_property
615 614 def first_name(self):
616 615 from rhodecode.lib import helpers as h
617 616 if self.name:
618 617 return h.escape(self.name)
619 618 return self.name
620 619
621 620 @hybrid_property
622 621 def last_name(self):
623 622 from rhodecode.lib import helpers as h
624 623 if self.lastname:
625 624 return h.escape(self.lastname)
626 625 return self.lastname
627 626
628 627 @hybrid_property
629 628 def api_key(self):
630 629 """
631 630 Fetch an auth-token with role ALL connected to this user, if one exists
632 631 """
633 632 user_auth_token = UserApiKeys.query()\
634 633 .filter(UserApiKeys.user_id == self.user_id)\
635 634 .filter(or_(UserApiKeys.expires == -1,
636 635 UserApiKeys.expires >= time.time()))\
637 636 .filter(UserApiKeys.role == UserApiKeys.ROLE_ALL).first()
638 637 if user_auth_token:
639 638 user_auth_token = user_auth_token.api_key
640 639
641 640 return user_auth_token
642 641
643 642 @api_key.setter
644 643 def api_key(self, val):
645 644 # don't allow setting the API key; this is deprecated for now
646 645 self._api_key = None
647 646
648 647 @property
649 648 def reviewer_pull_requests(self):
650 649 return PullRequestReviewers.query() \
651 650 .options(joinedload(PullRequestReviewers.pull_request)) \
652 651 .filter(PullRequestReviewers.user_id == self.user_id) \
653 652 .all()
654 653
655 654 @property
656 655 def firstname(self):
657 656 # alias for future
658 657 return self.name
659 658
660 659 @property
661 660 def emails(self):
662 661 other = UserEmailMap.query()\
663 662 .filter(UserEmailMap.user == self) \
664 663 .order_by(UserEmailMap.email_id.asc()) \
665 664 .all()
666 665 return [self.email] + [x.email for x in other]
667 666
668 667 @property
669 668 def auth_tokens(self):
670 669 auth_tokens = self.get_auth_tokens()
671 670 return [x.api_key for x in auth_tokens]
672 671
673 672 def get_auth_tokens(self):
674 673 return UserApiKeys.query()\
675 674 .filter(UserApiKeys.user == self)\
676 675 .order_by(UserApiKeys.user_api_key_id.asc())\
677 676 .all()
678 677
679 678 @property
680 679 def feed_token(self):
681 680 return self.get_feed_token()
682 681
683 682 def get_feed_token(self):
684 683 feed_tokens = UserApiKeys.query()\
685 684 .filter(UserApiKeys.user == self)\
686 685 .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)\
687 686 .all()
688 687 if feed_tokens:
689 688 return feed_tokens[0].api_key
690 689 return 'NO_FEED_TOKEN_AVAILABLE'
691 690
692 691 @classmethod
693 692 def get(cls, user_id, cache=False):
694 693 if not user_id:
695 694 return
696 695
697 696 user = cls.query()
698 697 if cache:
699 698 user = user.options(
700 699 FromCache("sql_cache_short", "get_users_%s" % user_id))
701 700 return user.get(user_id)
702 701
703 702 @classmethod
704 703 def extra_valid_auth_tokens(cls, user, role=None):
705 704 tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\
706 705 .filter(or_(UserApiKeys.expires == -1,
707 706 UserApiKeys.expires >= time.time()))
708 707 if role:
709 708 tokens = tokens.filter(or_(UserApiKeys.role == role,
710 709 UserApiKeys.role == UserApiKeys.ROLE_ALL))
711 710 return tokens.all()
712 711
713 712 def authenticate_by_token(self, auth_token, roles=None, scope_repo_id=None):
714 713 from rhodecode.lib import auth
715 714
716 715 log.debug('Trying to authenticate user: %s via auth-token, '
717 716 'and roles: %s', self, roles)
718 717
719 718 if not auth_token:
720 719 return False
721 720
722 721 crypto_backend = auth.crypto_backend()
723 722
724 723 roles = (roles or []) + [UserApiKeys.ROLE_ALL]
725 724 tokens_q = UserApiKeys.query()\
726 725 .filter(UserApiKeys.user_id == self.user_id)\
727 726 .filter(or_(UserApiKeys.expires == -1,
728 727 UserApiKeys.expires >= time.time()))
729 728
730 729 tokens_q = tokens_q.filter(UserApiKeys.role.in_(roles))
731 730
732 731 plain_tokens = []
733 732 hash_tokens = []
734 733
735 734 for token in tokens_q.all():
736 735 # verify scope first
737 736 if token.repo_id:
738 737 # token has a scope, we need to verify it
739 738 if scope_repo_id != token.repo_id:
740 739 log.debug(
741 740 'Scope mismatch: token has a set repo scope: %s, '
742 741 'and calling scope is: %s, skipping further checks',
743 742 token.repo, scope_repo_id)
744 743 # token has a scope, and it doesn't match, skip token
745 744 continue
746 745
747 746 if token.api_key.startswith(crypto_backend.ENC_PREF):
748 747 hash_tokens.append(token.api_key)
749 748 else:
750 749 plain_tokens.append(token.api_key)
751 750
752 751 is_plain_match = auth_token in plain_tokens
753 752 if is_plain_match:
754 753 return True
755 754
756 755 for hashed in hash_tokens:
757 756 # TODO(marcink): this is expensive to calculate, but most secure
758 757 match = crypto_backend.hash_check(auth_token, hashed)
759 758 if match:
760 759 return True
761 760
762 761 return False
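# Illustrative sketch (not part of the original model): how the token check
# above might be exercised for an existing user; the username and token value
# are hypothetical placeholders.
#
#   >>> user = User.get_by_username('some-user')
#   >>> user.authenticate_by_token(
#   ...     'plain-or-hashed-token', roles=[UserApiKeys.ROLE_VCS])
#   False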
763 762
764 763 @property
765 764 def ip_addresses(self):
766 765 ret = UserIpMap.query().filter(UserIpMap.user == self).all()
767 766 return [x.ip_addr for x in ret]
768 767
769 768 @property
770 769 def username_and_name(self):
771 770 return '%s (%s %s)' % (self.username, self.first_name, self.last_name)
772 771
773 772 @property
774 773 def username_or_name_or_email(self):
775 774 full_name = self.full_name if self.full_name != ' ' else None
776 775 return self.username or full_name or self.email
777 776
778 777 @property
779 778 def full_name(self):
780 779 return '%s %s' % (self.first_name, self.last_name)
781 780
782 781 @property
783 782 def full_name_or_username(self):
784 783 return ('%s %s' % (self.first_name, self.last_name)
785 784 if (self.first_name and self.last_name) else self.username)
786 785
787 786 @property
788 787 def full_contact(self):
789 788 return '%s %s <%s>' % (self.first_name, self.last_name, self.email)
790 789
791 790 @property
792 791 def short_contact(self):
793 792 return '%s %s' % (self.first_name, self.last_name)
794 793
795 794 @property
796 795 def is_admin(self):
797 796 return self.admin
798 797
799 798 def AuthUser(self, **kwargs):
800 799 """
801 800 Returns instance of AuthUser for this user
802 801 """
803 802 from rhodecode.lib.auth import AuthUser
804 803 return AuthUser(user_id=self.user_id, username=self.username, **kwargs)
805 804
806 805 @hybrid_property
807 806 def user_data(self):
808 807 if not self._user_data:
809 808 return {}
810 809
811 810 try:
812 811 return json.loads(self._user_data)
813 812 except TypeError:
814 813 return {}
815 814
816 815 @user_data.setter
817 816 def user_data(self, val):
818 817 if not isinstance(val, dict):
819 818 raise Exception('user_data must be dict, got %s' % type(val))
820 819 try:
821 820 self._user_data = json.dumps(val)
822 821 except Exception:
823 822 log.error(traceback.format_exc())
824 823
825 824 @classmethod
826 825 def get_by_username(cls, username, case_insensitive=False,
827 826 cache=False, identity_cache=False):
828 827 session = Session()
829 828
830 829 if case_insensitive:
831 830 q = cls.query().filter(
832 831 func.lower(cls.username) == func.lower(username))
833 832 else:
834 833 q = cls.query().filter(cls.username == username)
835 834
836 835 if cache:
837 836 if identity_cache:
838 837 val = cls.identity_cache(session, 'username', username)
839 838 if val:
840 839 return val
841 840 else:
842 841 cache_key = "get_user_by_name_%s" % _hash_key(username)
843 842 q = q.options(
844 843 FromCache("sql_cache_short", cache_key))
845 844
846 845 return q.scalar()
847 846
848 847 @classmethod
849 848 def get_by_auth_token(cls, auth_token, cache=False):
850 849 q = UserApiKeys.query()\
851 850 .filter(UserApiKeys.api_key == auth_token)\
852 851 .filter(or_(UserApiKeys.expires == -1,
853 852 UserApiKeys.expires >= time.time()))
854 853 if cache:
855 854 q = q.options(
856 855 FromCache("sql_cache_short", "get_auth_token_%s" % auth_token))
857 856
858 857 match = q.first()
859 858 if match:
860 859 return match.user
861 860
862 861 @classmethod
863 862 def get_by_email(cls, email, case_insensitive=False, cache=False):
864 863
865 864 if case_insensitive:
866 865 q = cls.query().filter(func.lower(cls.email) == func.lower(email))
867 866
868 867 else:
869 868 q = cls.query().filter(cls.email == email)
870 869
871 870 email_key = _hash_key(email)
872 871 if cache:
873 872 q = q.options(
874 873 FromCache("sql_cache_short", "get_email_key_%s" % email_key))
875 874
876 875 ret = q.scalar()
877 876 if ret is None:
878 877 q = UserEmailMap.query()
879 878 # try fetching in alternate email map
880 879 if case_insensitive:
881 880 q = q.filter(func.lower(UserEmailMap.email) == func.lower(email))
882 881 else:
883 882 q = q.filter(UserEmailMap.email == email)
884 883 q = q.options(joinedload(UserEmailMap.user))
885 884 if cache:
886 885 q = q.options(
887 886 FromCache("sql_cache_short", "get_email_map_key_%s" % email_key))
888 887 ret = getattr(q.scalar(), 'user', None)
889 888
890 889 return ret
891 890
892 891 @classmethod
893 892 def get_from_cs_author(cls, author):
894 893 """
895 894 Tries to get User objects out of commit author string
896 895
897 896 :param author:
898 897 """
899 898 from rhodecode.lib.helpers import email, author_name
900 899 # Valid email in the passed attribute, see if it exists in the system
901 900 _email = email(author)
902 901 if _email:
903 902 user = cls.get_by_email(_email, case_insensitive=True)
904 903 if user:
905 904 return user
906 905 # Maybe we can match by username?
907 906 _author = author_name(author)
908 907 user = cls.get_by_username(_author, case_insensitive=True)
909 908 if user:
910 909 return user
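# Illustrative sketch (not part of the original model): typical commit author
# strings this resolver handles, first by email and then by username; the
# author values and any resulting match are hypothetical.
#
#   >>> User.get_from_cs_author('Jane Doe <jane@example.com>')  # match by email
#   >>> User.get_from_cs_author('jane')                         # match by username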
911 910
912 911 def update_userdata(self, **kwargs):
913 912 usr = self
914 913 old = usr.user_data
915 914 old.update(**kwargs)
916 915 usr.user_data = old
917 916 Session().add(usr)
918 917 log.debug('updated userdata with %s', kwargs)
919 918
920 919 def update_lastlogin(self):
921 920 """Update user lastlogin"""
922 921 self.last_login = datetime.datetime.now()
923 922 Session().add(self)
924 923 log.debug('updated user %s lastlogin', self.username)
925 924
926 925 def update_lastactivity(self):
927 926 """Update user lastactivity"""
928 927 self.last_activity = datetime.datetime.now()
929 928 Session().add(self)
930 929 log.debug('updated user `%s` last activity', self.username)
931 930
932 931 def update_password(self, new_password):
933 932 from rhodecode.lib.auth import get_crypt_password
934 933
935 934 self.password = get_crypt_password(new_password)
936 935 Session().add(self)
937 936
938 937 @classmethod
939 938 def get_first_super_admin(cls):
940 939 user = User.query().filter(User.admin == true()).first()
941 940 if user is None:
942 941 raise Exception('FATAL: Missing administrative account!')
943 942 return user
944 943
945 944 @classmethod
946 945 def get_all_super_admins(cls):
947 946 """
948 947 Returns all admin accounts sorted by username
949 948 """
950 949 return User.query().filter(User.admin == true())\
951 950 .order_by(User.username.asc()).all()
952 951
953 952 @classmethod
954 953 def get_default_user(cls, cache=False, refresh=False):
955 954 user = User.get_by_username(User.DEFAULT_USER, cache=cache)
956 955 if user is None:
957 956 raise Exception('FATAL: Missing default account!')
958 957 if refresh:
959 958 # The default user might be based on outdated state which
960 959 # has been loaded from the cache.
961 960 # A call to refresh() ensures that the
962 961 # latest state from the database is used.
963 962 Session().refresh(user)
964 963 return user
965 964
966 965 def _get_default_perms(self, user, suffix=''):
967 966 from rhodecode.model.permission import PermissionModel
968 967 return PermissionModel().get_default_perms(user.user_perms, suffix)
969 968
970 969 def get_default_perms(self, suffix=''):
971 970 return self._get_default_perms(self, suffix)
972 971
973 972 def get_api_data(self, include_secrets=False, details='full'):
974 973 """
975 974 Common function for generating user related data for API
976 975
977 976 :param include_secrets: By default secrets in the API data will be replaced
978 977 by a placeholder value to prevent exposing this data by accident. In case
979 978 this data shall be exposed, set this flag to ``True``.
980 979
981 980 :param details: details can be 'basic|full' basic gives only a subset of
982 981 the available user information that includes user_id, name and emails.
983 982 """
984 983 user = self
985 984 user_data = self.user_data
986 985 data = {
987 986 'user_id': user.user_id,
988 987 'username': user.username,
989 988 'firstname': user.name,
990 989 'lastname': user.lastname,
991 990 'email': user.email,
992 991 'emails': user.emails,
993 992 }
994 993 if details == 'basic':
995 994 return data
996 995
997 996 auth_token_length = 40
998 997 auth_token_replacement = '*' * auth_token_length
999 998
1000 999 extras = {
1001 1000 'auth_tokens': [auth_token_replacement],
1002 1001 'active': user.active,
1003 1002 'admin': user.admin,
1004 1003 'extern_type': user.extern_type,
1005 1004 'extern_name': user.extern_name,
1006 1005 'last_login': user.last_login,
1007 1006 'last_activity': user.last_activity,
1008 1007 'ip_addresses': user.ip_addresses,
1009 1008 'language': user_data.get('language')
1010 1009 }
1011 1010 data.update(extras)
1012 1011
1013 1012 if include_secrets:
1014 1013 data['auth_tokens'] = user.auth_tokens
1015 1014 return data
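# Illustrative sketch (not part of the original model): the shape of the dict
# returned above for the two `details` modes; the key sets mirror the code
# above, the user object itself is hypothetical.
#
#   >>> sorted(user.get_api_data(details='basic'))
#   ['email', 'emails', 'firstname', 'lastname', 'user_id', 'username']
#   >>> 'auth_tokens' in user.get_api_data(details='full')
#   True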
1016 1015
1017 1016 def __json__(self):
1018 1017 data = {
1019 1018 'full_name': self.full_name,
1020 1019 'full_name_or_username': self.full_name_or_username,
1021 1020 'short_contact': self.short_contact,
1022 1021 'full_contact': self.full_contact,
1023 1022 }
1024 1023 data.update(self.get_api_data())
1025 1024 return data
1026 1025
1027 1026
1028 1027 class UserApiKeys(Base, BaseModel):
1029 1028 __tablename__ = 'user_api_keys'
1030 1029 __table_args__ = (
1031 1030 Index('uak_api_key_idx', 'api_key', unique=True),
1032 1031 Index('uak_api_key_expires_idx', 'api_key', 'expires'),
1033 1032 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1034 1033 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
1035 1034 )
1036 1035 __mapper_args__ = {}
1037 1036
1038 1037 # ApiKey role
1039 1038 ROLE_ALL = 'token_role_all'
1040 1039 ROLE_HTTP = 'token_role_http'
1041 1040 ROLE_VCS = 'token_role_vcs'
1042 1041 ROLE_API = 'token_role_api'
1043 1042 ROLE_FEED = 'token_role_feed'
1044 1043 ROLE_PASSWORD_RESET = 'token_password_reset'
1045 1044
1046 1045 ROLES = [ROLE_ALL, ROLE_HTTP, ROLE_VCS, ROLE_API, ROLE_FEED]
1047 1046
1048 1047 user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1049 1048 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1050 1049 api_key = Column("api_key", String(255), nullable=False, unique=True)
1051 1050 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1052 1051 expires = Column('expires', Float(53), nullable=False)
1053 1052 role = Column('role', String(255), nullable=True)
1054 1053 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1055 1054
1056 1055 # scope columns
1057 1056 repo_id = Column(
1058 1057 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
1059 1058 nullable=True, unique=None, default=None)
1060 1059 repo = relationship('Repository', lazy='joined')
1061 1060
1062 1061 repo_group_id = Column(
1063 1062 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
1064 1063 nullable=True, unique=None, default=None)
1065 1064 repo_group = relationship('RepoGroup', lazy='joined')
1066 1065
1067 1066 user = relationship('User', lazy='joined')
1068 1067
1069 1068 def __unicode__(self):
1070 1069 return u"<%s('%s')>" % (self.__class__.__name__, self.role)
1071 1070
1072 1071 def __json__(self):
1073 1072 data = {
1074 1073 'auth_token': self.api_key,
1075 1074 'role': self.role,
1076 1075 'scope': self.scope_humanized,
1077 1076 'expired': self.expired
1078 1077 }
1079 1078 return data
1080 1079
1081 1080 def get_api_data(self, include_secrets=False):
1082 1081 data = self.__json__()
1083 1082 if include_secrets:
1084 1083 return data
1085 1084 else:
1086 1085 data['auth_token'] = self.token_obfuscated
1087 1086 return data
1088 1087
1089 1088 @hybrid_property
1090 1089 def description_safe(self):
1091 1090 from rhodecode.lib import helpers as h
1092 1091 return h.escape(self.description)
1093 1092
1094 1093 @property
1095 1094 def expired(self):
1096 1095 if self.expires == -1:
1097 1096 return False
1098 1097 return time.time() > self.expires
1099 1098
1100 1099 @classmethod
1101 1100 def _get_role_name(cls, role):
1102 1101 return {
1103 1102 cls.ROLE_ALL: _('all'),
1104 1103 cls.ROLE_HTTP: _('http/web interface'),
1105 1104 cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'),
1106 1105 cls.ROLE_API: _('api calls'),
1107 1106 cls.ROLE_FEED: _('feed access'),
1108 1107 }.get(role, role)
1109 1108
1110 1109 @property
1111 1110 def role_humanized(self):
1112 1111 return self._get_role_name(self.role)
1113 1112
1114 1113 def _get_scope(self):
1115 1114 if self.repo:
1116 1115 return repr(self.repo)
1117 1116 if self.repo_group:
1118 1117 return repr(self.repo_group) + ' (recursive)'
1119 1118 return 'global'
1120 1119
1121 1120 @property
1122 1121 def scope_humanized(self):
1123 1122 return self._get_scope()
1124 1123
1125 1124 @property
1126 1125 def token_obfuscated(self):
1127 1126 if self.api_key:
1128 1127 return self.api_key[:4] + "****"
1129 1128
1130 1129
1131 1130 class UserEmailMap(Base, BaseModel):
1132 1131 __tablename__ = 'user_email_map'
1133 1132 __table_args__ = (
1134 1133 Index('uem_email_idx', 'email'),
1135 1134 UniqueConstraint('email'),
1136 1135 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1137 1136 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
1138 1137 )
1139 1138 __mapper_args__ = {}
1140 1139
1141 1140 email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1142 1141 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1143 1142 _email = Column("email", String(255), nullable=True, unique=False, default=None)
1144 1143 user = relationship('User', lazy='joined')
1145 1144
1146 1145 @validates('_email')
1147 1146 def validate_email(self, key, email):
1148 1147 # check if this email is not main one
1149 1148 main_email = Session().query(User).filter(User.email == email).scalar()
1150 1149 if main_email is not None:
1151 1150 raise AttributeError('email %s is already present in the user table' % email)
1152 1151 return email
1153 1152
1154 1153 @hybrid_property
1155 1154 def email(self):
1156 1155 return self._email
1157 1156
1158 1157 @email.setter
1159 1158 def email(self, val):
1160 1159 self._email = val.lower() if val else None
1161 1160
1162 1161
1163 1162 class UserIpMap(Base, BaseModel):
1164 1163 __tablename__ = 'user_ip_map'
1165 1164 __table_args__ = (
1166 1165 UniqueConstraint('user_id', 'ip_addr'),
1167 1166 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1168 1167 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
1169 1168 )
1170 1169 __mapper_args__ = {}
1171 1170
1172 1171 ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1173 1172 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1174 1173 ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
1175 1174 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
1176 1175 description = Column("description", String(10000), nullable=True, unique=None, default=None)
1177 1176 user = relationship('User', lazy='joined')
1178 1177
1179 1178 @hybrid_property
1180 1179 def description_safe(self):
1181 1180 from rhodecode.lib import helpers as h
1182 1181 return h.escape(self.description)
1183 1182
1184 1183 @classmethod
1185 1184 def _get_ip_range(cls, ip_addr):
1186 1185 net = ipaddress.ip_network(safe_unicode(ip_addr), strict=False)
1187 1186 return [str(net.network_address), str(net.broadcast_address)]
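# Illustrative sketch (not part of the original model): what the range
# computation above yields for a CIDR-style entry, following stdlib
# `ipaddress` semantics; the network shown is a sample value.
#
#   >>> UserIpMap._get_ip_range('192.168.1.0/24')
#   ['192.168.1.0', '192.168.1.255']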
1188 1187
1189 1188 def __json__(self):
1190 1189 return {
1191 1190 'ip_addr': self.ip_addr,
1192 1191 'ip_range': self._get_ip_range(self.ip_addr),
1193 1192 }
1194 1193
1195 1194 def __unicode__(self):
1196 1195 return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__,
1197 1196 self.user_id, self.ip_addr)
1198 1197
1199 1198
1200 1199 class UserSshKeys(Base, BaseModel):
1201 1200 __tablename__ = 'user_ssh_keys'
1202 1201 __table_args__ = (
1203 1202 Index('usk_ssh_key_fingerprint_idx', 'ssh_key_fingerprint'),
1204 1203
1205 1204 UniqueConstraint('ssh_key_fingerprint'),
1206 1205
1207 1206 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1208 1207 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
1209 1208 )
1210 1209 __mapper_args__ = {}
1211 1210
1212 1211 ssh_key_id = Column('ssh_key_id', Integer(), nullable=False, unique=True, default=None, primary_key=True)
1213 1212 ssh_key_data = Column('ssh_key_data', String(10240), nullable=False, unique=None, default=None)
1214 1213 ssh_key_fingerprint = Column('ssh_key_fingerprint', String(255), nullable=False, unique=None, default=None)
1215 1214
1216 1215 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1217 1216
1218 1217 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1219 1218 accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True, default=None)
1220 1219 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1221 1220
1222 1221 user = relationship('User', lazy='joined')
1223 1222
1224 1223 def __json__(self):
1225 1224 data = {
1226 1225 'ssh_fingerprint': self.ssh_key_fingerprint,
1227 1226 'description': self.description,
1228 1227 'created_on': self.created_on
1229 1228 }
1230 1229 return data
1231 1230
1232 1231 def get_api_data(self):
1233 1232 data = self.__json__()
1234 1233 return data
1235 1234
1236 1235
1237 1236 class UserLog(Base, BaseModel):
1238 1237 __tablename__ = 'user_logs'
1239 1238 __table_args__ = (
1240 1239 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1241 1240 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1242 1241 )
1243 1242 VERSION_1 = 'v1'
1244 1243 VERSION_2 = 'v2'
1245 1244 VERSIONS = [VERSION_1, VERSION_2]
1246 1245
1247 1246 user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1248 1247 user_id = Column("user_id", Integer(), ForeignKey('users.user_id',ondelete='SET NULL'), nullable=True, unique=None, default=None)
1249 1248 username = Column("username", String(255), nullable=True, unique=None, default=None)
1250 1249 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id', ondelete='SET NULL'), nullable=True, unique=None, default=None)
1251 1250 repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
1252 1251 user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
1253 1252 action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
1254 1253 action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
1255 1254
1256 1255 version = Column("version", String(255), nullable=True, default=VERSION_1)
1257 1256 user_data = Column('user_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1258 1257 action_data = Column('action_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1259 1258
1260 1259 def __unicode__(self):
1261 1260 return u"<%s('id:%s:%s')>" % (
1262 1261 self.__class__.__name__, self.repository_name, self.action)
1263 1262
1264 1263 def __json__(self):
1265 1264 return {
1266 1265 'user_id': self.user_id,
1267 1266 'username': self.username,
1268 1267 'repository_id': self.repository_id,
1269 1268 'repository_name': self.repository_name,
1270 1269 'user_ip': self.user_ip,
1271 1270 'action_date': self.action_date,
1272 1271 'action': self.action,
1273 1272 }
1274 1273
1275 1274 @hybrid_property
1276 1275 def entry_id(self):
1277 1276 return self.user_log_id
1278 1277
1279 1278 @property
1280 1279 def action_as_day(self):
1281 1280 return datetime.date(*self.action_date.timetuple()[:3])
1282 1281
1283 1282 user = relationship('User')
1284 1283 repository = relationship('Repository', cascade='')
1285 1284
1286 1285
1287 1286 class UserGroup(Base, BaseModel):
1288 1287 __tablename__ = 'users_groups'
1289 1288 __table_args__ = (
1290 1289 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1291 1290 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1292 1291 )
1293 1292
1294 1293 users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1295 1294 users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
1296 1295 user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
1297 1296 users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
1298 1297 inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
1299 1298 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
1300 1299 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1301 1300 _group_data = Column("group_data", LargeBinary(), nullable=True) # JSON data
1302 1301
1303 1302 members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
1304 1303 users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
1305 1304 users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1306 1305 users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
1307 1306 user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all')
1308 1307 user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')
1309 1308
1310 1309 user_group_review_rules = relationship('RepoReviewRuleUserGroup', cascade='all')
1311 1310 user = relationship('User', primaryjoin="User.user_id==UserGroup.user_id")
1312 1311
1313 1312 @classmethod
1314 1313 def _load_group_data(cls, column):
1315 1314 if not column:
1316 1315 return {}
1317 1316
1318 1317 try:
1319 1318 return json.loads(column) or {}
1320 1319 except TypeError:
1321 1320 return {}
1322 1321
1323 1322 @hybrid_property
1324 1323 def description_safe(self):
1325 1324 from rhodecode.lib import helpers as h
1326 1325 return h.escape(self.description)
1327 1326
1328 1327 @hybrid_property
1329 1328 def group_data(self):
1330 1329 return self._load_group_data(self._group_data)
1331 1330
1332 1331 @group_data.expression
1333 1332 def group_data(self, **kwargs):
1334 1333 return self._group_data
1335 1334
1336 1335 @group_data.setter
1337 1336 def group_data(self, val):
1338 1337 try:
1339 1338 self._group_data = json.dumps(val)
1340 1339 except Exception:
1341 1340 log.error(traceback.format_exc())
1342 1341
1343 1342 def __unicode__(self):
1344 1343 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
1345 1344 self.users_group_id,
1346 1345 self.users_group_name)
1347 1346
1348 1347 @classmethod
1349 1348 def get_by_group_name(cls, group_name, cache=False,
1350 1349 case_insensitive=False):
1351 1350 if case_insensitive:
1352 1351 q = cls.query().filter(func.lower(cls.users_group_name) ==
1353 1352 func.lower(group_name))
1354 1353
1355 1354 else:
1356 1355 q = cls.query().filter(cls.users_group_name == group_name)
1357 1356 if cache:
1358 1357 q = q.options(
1359 1358 FromCache("sql_cache_short", "get_group_%s" % _hash_key(group_name)))
1360 1359 return q.scalar()
1361 1360
1362 1361 @classmethod
1363 1362 def get(cls, user_group_id, cache=False):
1364 1363 if not user_group_id:
1365 1364 return
1366 1365
1367 1366 user_group = cls.query()
1368 1367 if cache:
1369 1368 user_group = user_group.options(
1370 1369 FromCache("sql_cache_short", "get_users_group_%s" % user_group_id))
1371 1370 return user_group.get(user_group_id)
1372 1371
1373 1372 def permissions(self, with_admins=True, with_owner=True):
1374 1373 q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
1375 1374 q = q.options(joinedload(UserUserGroupToPerm.user_group),
1376 1375 joinedload(UserUserGroupToPerm.user),
1377 1376 joinedload(UserUserGroupToPerm.permission),)
1378 1377
1379 1378 # get owners, admins and permissions. We do a trick of re-writing
1380 1379 # objects from sqlalchemy to named-tuples because the sqlalchemy session
1381 1380 # holds a global reference and changing one object propagates to all
1382 1381 # others. This means that if an admin is also an owner, a change to the
1383 1382 # admin row would propagate to both objects
1384 1383 perm_rows = []
1385 1384 for _usr in q.all():
1386 1385 usr = AttributeDict(_usr.user.get_dict())
1387 1386 usr.permission = _usr.permission.permission_name
1388 1387 perm_rows.append(usr)
1389 1388
1390 1389 # filter the perm rows by 'default' first and then sort them by
1391 1390 # admin,write,read,none permissions sorted again alphabetically in
1392 1391 # each group
1393 1392 perm_rows = sorted(perm_rows, key=display_user_sort)
1394 1393
1395 1394 _admin_perm = 'usergroup.admin'
1396 1395 owner_row = []
1397 1396 if with_owner:
1398 1397 usr = AttributeDict(self.user.get_dict())
1399 1398 usr.owner_row = True
1400 1399 usr.permission = _admin_perm
1401 1400 owner_row.append(usr)
1402 1401
1403 1402 super_admin_rows = []
1404 1403 if with_admins:
1405 1404 for usr in User.get_all_super_admins():
1406 1405 # if this admin is also owner, don't double the record
1407 1406 if usr.user_id == owner_row[0].user_id:
1408 1407 owner_row[0].admin_row = True
1409 1408 else:
1410 1409 usr = AttributeDict(usr.get_dict())
1411 1410 usr.admin_row = True
1412 1411 usr.permission = _admin_perm
1413 1412 super_admin_rows.append(usr)
1414 1413
1415 1414 return super_admin_rows + owner_row + perm_rows
1416 1415
1417 1416 def permission_user_groups(self):
1418 1417 q = UserGroupUserGroupToPerm.query().filter(UserGroupUserGroupToPerm.target_user_group == self)
1419 1418 q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
1420 1419 joinedload(UserGroupUserGroupToPerm.target_user_group),
1421 1420 joinedload(UserGroupUserGroupToPerm.permission),)
1422 1421
1423 1422 perm_rows = []
1424 1423 for _user_group in q.all():
1425 1424 usr = AttributeDict(_user_group.user_group.get_dict())
1426 1425 usr.permission = _user_group.permission.permission_name
1427 1426 perm_rows.append(usr)
1428 1427
1429 1428 perm_rows = sorted(perm_rows, key=display_user_group_sort)
1430 1429 return perm_rows
1431 1430
1432 1431 def _get_default_perms(self, user_group, suffix=''):
1433 1432 from rhodecode.model.permission import PermissionModel
1434 1433 return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)
1435 1434
1436 1435 def get_default_perms(self, suffix=''):
1437 1436 return self._get_default_perms(self, suffix)
1438 1437
1439 1438 def get_api_data(self, with_group_members=True, include_secrets=False):
1440 1439 """
1441 1440 :param include_secrets: See :meth:`User.get_api_data`, this parameter is
1442 1441 basically forwarded.
1443 1442
1444 1443 """
1445 1444 user_group = self
1446 1445 data = {
1447 1446 'users_group_id': user_group.users_group_id,
1448 1447 'group_name': user_group.users_group_name,
1449 1448 'group_description': user_group.user_group_description,
1450 1449 'active': user_group.users_group_active,
1451 1450 'owner': user_group.user.username,
1452 1451 'owner_email': user_group.user.email,
1453 1452 }
1454 1453
1455 1454 if with_group_members:
1456 1455 users = []
1457 1456 for user in user_group.members:
1458 1457 user = user.user
1459 1458 users.append(user.get_api_data(include_secrets=include_secrets))
1460 1459 data['users'] = users
1461 1460
1462 1461 return data
1463 1462
1464 1463
1465 1464 class UserGroupMember(Base, BaseModel):
1466 1465 __tablename__ = 'users_groups_members'
1467 1466 __table_args__ = (
1468 1467 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1469 1468 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1470 1469 )
1471 1470
1472 1471 users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1473 1472 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
1474 1473 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
1475 1474
1476 1475 user = relationship('User', lazy='joined')
1477 1476 users_group = relationship('UserGroup')
1478 1477
1479 1478 def __init__(self, gr_id='', u_id=''):
1480 1479 self.users_group_id = gr_id
1481 1480 self.user_id = u_id
1482 1481
1483 1482
1484 1483 class RepositoryField(Base, BaseModel):
1485 1484 __tablename__ = 'repositories_fields'
1486 1485 __table_args__ = (
1487 1486 UniqueConstraint('repository_id', 'field_key'), # no-multi field
1488 1487 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1489 1488 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1490 1489 )
1491 1490 PREFIX = 'ex_' # prefix used in form to not conflict with already existing fields
1492 1491
1493 1492 repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1494 1493 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
1495 1494 field_key = Column("field_key", String(250))
1496 1495 field_label = Column("field_label", String(1024), nullable=False)
1497 1496 field_value = Column("field_value", String(10000), nullable=False)
1498 1497 field_desc = Column("field_desc", String(1024), nullable=False)
1499 1498 field_type = Column("field_type", String(255), nullable=False, unique=None)
1500 1499 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1501 1500
1502 1501 repository = relationship('Repository')
1503 1502
1504 1503 @property
1505 1504 def field_key_prefixed(self):
1506 1505 return 'ex_%s' % self.field_key
1507 1506
1508 1507 @classmethod
1509 1508 def un_prefix_key(cls, key):
1510 1509 if key.startswith(cls.PREFIX):
1511 1510 return key[len(cls.PREFIX):]
1512 1511 return key
1513 1512
1514 1513 @classmethod
1515 1514 def get_by_key_name(cls, key, repo):
1516 1515 row = cls.query()\
1517 1516 .filter(cls.repository == repo)\
1518 1517 .filter(cls.field_key == key).scalar()
1519 1518 return row
1520 1519
1521 1520
1522 1521 class Repository(Base, BaseModel):
1523 1522 __tablename__ = 'repositories'
1524 1523 __table_args__ = (
1525 1524 Index('r_repo_name_idx', 'repo_name', mysql_length=255),
1526 1525 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1527 1526 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1528 1527 )
1529 1528 DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
1530 1529 DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
1531 1530
1532 1531 STATE_CREATED = 'repo_state_created'
1533 1532 STATE_PENDING = 'repo_state_pending'
1534 1533 STATE_ERROR = 'repo_state_error'
1535 1534
1536 1535 LOCK_AUTOMATIC = 'lock_auto'
1537 1536 LOCK_API = 'lock_api'
1538 1537 LOCK_WEB = 'lock_web'
1539 1538 LOCK_PULL = 'lock_pull'
1540 1539
1541 1540 NAME_SEP = URL_SEP
1542 1541
1543 1542 repo_id = Column(
1544 1543 "repo_id", Integer(), nullable=False, unique=True, default=None,
1545 1544 primary_key=True)
1546 1545 _repo_name = Column(
1547 1546 "repo_name", Text(), nullable=False, default=None)
1548 1547 _repo_name_hash = Column(
1549 1548 "repo_name_hash", String(255), nullable=False, unique=True)
1550 1549 repo_state = Column("repo_state", String(255), nullable=True)
1551 1550
1552 1551 clone_uri = Column(
1553 1552 "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
1554 1553 default=None)
1555 1554 repo_type = Column(
1556 1555 "repo_type", String(255), nullable=False, unique=False, default=None)
1557 1556 user_id = Column(
1558 1557 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
1559 1558 unique=False, default=None)
1560 1559 private = Column(
1561 1560 "private", Boolean(), nullable=True, unique=None, default=None)
1562 1561 enable_statistics = Column(
1563 1562 "statistics", Boolean(), nullable=True, unique=None, default=True)
1564 1563 enable_downloads = Column(
1565 1564 "downloads", Boolean(), nullable=True, unique=None, default=True)
1566 1565 description = Column(
1567 1566 "description", String(10000), nullable=True, unique=None, default=None)
1568 1567 created_on = Column(
1569 1568 'created_on', DateTime(timezone=False), nullable=True, unique=None,
1570 1569 default=datetime.datetime.now)
1571 1570 updated_on = Column(
1572 1571 'updated_on', DateTime(timezone=False), nullable=True, unique=None,
1573 1572 default=datetime.datetime.now)
1574 1573 _landing_revision = Column(
1575 1574 "landing_revision", String(255), nullable=False, unique=False,
1576 1575 default=None)
1577 1576 enable_locking = Column(
1578 1577 "enable_locking", Boolean(), nullable=False, unique=None,
1579 1578 default=False)
1580 1579 _locked = Column(
1581 1580 "locked", String(255), nullable=True, unique=False, default=None)
1582 1581 _changeset_cache = Column(
1583 1582 "changeset_cache", LargeBinary(), nullable=True) # JSON data
1584 1583
1585 1584 fork_id = Column(
1586 1585 "fork_id", Integer(), ForeignKey('repositories.repo_id'),
1587 1586 nullable=True, unique=False, default=None)
1588 1587 group_id = Column(
1589 1588 "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
1590 1589 unique=False, default=None)
1591 1590
1592 1591 user = relationship('User', lazy='joined')
1593 1592 fork = relationship('Repository', remote_side=repo_id, lazy='joined')
1594 1593 group = relationship('RepoGroup', lazy='joined')
1595 1594 repo_to_perm = relationship(
1596 1595 'UserRepoToPerm', cascade='all',
1597 1596 order_by='UserRepoToPerm.repo_to_perm_id')
1598 1597 users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1599 1598 stats = relationship('Statistics', cascade='all', uselist=False)
1600 1599
1601 1600 followers = relationship(
1602 1601 'UserFollowing',
1603 1602 primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
1604 1603 cascade='all')
1605 1604 extra_fields = relationship(
1606 1605 'RepositoryField', cascade="all, delete, delete-orphan")
1607 1606 logs = relationship('UserLog')
1608 1607 comments = relationship(
1609 1608 'ChangesetComment', cascade="all, delete, delete-orphan")
1610 1609 pull_requests_source = relationship(
1611 1610 'PullRequest',
1612 1611 primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
1613 1612 cascade="all, delete, delete-orphan")
1614 1613 pull_requests_target = relationship(
1615 1614 'PullRequest',
1616 1615 primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
1617 1616 cascade="all, delete, delete-orphan")
1618 1617 ui = relationship('RepoRhodeCodeUi', cascade="all")
1619 1618 settings = relationship('RepoRhodeCodeSetting', cascade="all")
1620 1619 integrations = relationship('Integration',
1621 1620 cascade="all, delete, delete-orphan")
1622 1621
1623 1622 def __unicode__(self):
1624 1623 return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
1625 1624 safe_unicode(self.repo_name))
1626 1625
1627 1626 @hybrid_property
1628 1627 def description_safe(self):
1629 1628 from rhodecode.lib import helpers as h
1630 1629 return h.escape(self.description)
1631 1630
1632 1631 @hybrid_property
1633 1632 def landing_rev(self):
1634 1633 # should always return [rev_type, rev]
1635 1634 if self._landing_revision:
1636 1635 _rev_info = self._landing_revision.split(':')
1637 1636 if len(_rev_info) < 2:
1638 1637 _rev_info.insert(0, 'rev')
1639 1638 return [_rev_info[0], _rev_info[1]]
1640 1639 return [None, None]
1641 1640
1642 1641 @landing_rev.setter
1643 1642 def landing_rev(self, val):
1644 1643 if ':' not in val:
1645 1644 raise ValueError('value must be delimited with `:` and consist '
1646 1645 'of <rev_type>:<rev>, got %s instead' % val)
1647 1646 self._landing_revision = val
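# Illustrative sketch (not part of the original model): accepted values for
# the setter above and how they round-trip through the getter; the branch
# name is hypothetical.
#
#   >>> repo.landing_rev = 'branch:default'
#   >>> repo.landing_rev
#   ['branch', 'default']
#   >>> repo.landing_rev = 'tip'    # missing `:` -> raises ValueError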
1648 1647
1649 1648 @hybrid_property
1650 1649 def locked(self):
1651 1650 if self._locked:
1652 1651 user_id, timelocked, reason = self._locked.split(':')
1653 1652 lock_values = int(user_id), timelocked, reason
1654 1653 else:
1655 1654 lock_values = [None, None, None]
1656 1655 return lock_values
1657 1656
1658 1657 @locked.setter
1659 1658 def locked(self, val):
1660 1659 if val and isinstance(val, (list, tuple)):
1661 1660 self._locked = ':'.join(map(str, val))
1662 1661 else:
1663 1662 self._locked = None
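# Illustrative sketch (not part of the original model): the lock value is
# stored as `user_id:timestamp:reason` and surfaced as a 3-tuple; the user id
# and timestamp below are hypothetical.
#
#   >>> repo.locked = [2, 1577836800.0, Repository.LOCK_API]
#   >>> repo.locked
#   (2, '1577836800.0', 'lock_api')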
1664 1663
1665 1664 @hybrid_property
1666 1665 def changeset_cache(self):
1667 1666 from rhodecode.lib.vcs.backends.base import EmptyCommit
1668 1667 dummy = EmptyCommit().__json__()
1669 1668 if not self._changeset_cache:
1670 1669 return dummy
1671 1670 try:
1672 1671 return json.loads(self._changeset_cache)
1673 1672 except TypeError:
1674 1673 return dummy
1675 1674 except Exception:
1676 1675 log.error(traceback.format_exc())
1677 1676 return dummy
1678 1677
1679 1678 @changeset_cache.setter
1680 1679 def changeset_cache(self, val):
1681 1680 try:
1682 1681 self._changeset_cache = json.dumps(val)
1683 1682 except Exception:
1684 1683 log.error(traceback.format_exc())
1685 1684
1686 1685 @hybrid_property
1687 1686 def repo_name(self):
1688 1687 return self._repo_name
1689 1688
1690 1689 @repo_name.setter
1691 1690 def repo_name(self, value):
1692 1691 self._repo_name = value
1693 1692 self._repo_name_hash = hashlib.sha1(safe_str(value)).hexdigest()
1694 1693
1695 1694 @classmethod
1696 1695 def normalize_repo_name(cls, repo_name):
1697 1696 """
1698 1697 Normalizes an OS-specific repo_name to the format stored internally in
1699 1698 the database, using URL_SEP
1700 1699
1701 1700 :param cls:
1702 1701 :param repo_name:
1703 1702 """
1704 1703 return cls.NAME_SEP.join(repo_name.split(os.sep))
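# Illustrative sketch (not part of the original model): on a platform where
# `os.sep` is a backslash, the normalization above maps OS paths onto the
# URL_SEP form stored in the database; the path is hypothetical.
#
#   >>> Repository.normalize_repo_name(r'group\subgroup\repo')  # os.sep == '\\'
#   'group/subgroup/repo'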
1705 1704
1706 1705 @classmethod
1707 1706 def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
1708 1707 session = Session()
1709 1708 q = session.query(cls).filter(cls.repo_name == repo_name)
1710 1709
1711 1710 if cache:
1712 1711 if identity_cache:
1713 1712 val = cls.identity_cache(session, 'repo_name', repo_name)
1714 1713 if val:
1715 1714 return val
1716 1715 else:
1717 1716 cache_key = "get_repo_by_name_%s" % _hash_key(repo_name)
1718 1717 q = q.options(
1719 1718 FromCache("sql_cache_short", cache_key))
1720 1719
1721 1720 return q.scalar()
1722 1721
1723 1722 @classmethod
1724 1723 def get_by_full_path(cls, repo_full_path):
1725 1724 repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
1726 1725 repo_name = cls.normalize_repo_name(repo_name)
1727 1726 return cls.get_by_repo_name(repo_name.strip(URL_SEP))
1728 1727
1729 1728 @classmethod
1730 1729 def get_repo_forks(cls, repo_id):
1731 1730 return cls.query().filter(Repository.fork_id == repo_id)
1732 1731
1733 1732 @classmethod
1734 1733 def base_path(cls):
1735 1734 """
1736 1735 Returns the base path where all repos are stored
1737 1736
1738 1737 :param cls:
1739 1738 """
1740 1739 q = Session().query(RhodeCodeUi)\
1741 1740 .filter(RhodeCodeUi.ui_key == cls.NAME_SEP)
1742 1741 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1743 1742 return q.one().ui_value
1744 1743
1745 1744 @classmethod
1746 1745 def is_valid(cls, repo_name):
1747 1746 """
1748 1747 returns True if given repo name is a valid filesystem repository
1749 1748
1750 1749 :param cls:
1751 1750 :param repo_name:
1752 1751 """
1753 1752 from rhodecode.lib.utils import is_valid_repo
1754 1753
1755 1754 return is_valid_repo(repo_name, cls.base_path())
1756 1755
1757 1756 @classmethod
1758 1757 def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
1759 1758 case_insensitive=True):
1760 1759 q = Repository.query()
1761 1760
1762 1761 if not isinstance(user_id, Optional):
1763 1762 q = q.filter(Repository.user_id == user_id)
1764 1763
1765 1764 if not isinstance(group_id, Optional):
1766 1765 q = q.filter(Repository.group_id == group_id)
1767 1766
1768 1767 if case_insensitive:
1769 1768 q = q.order_by(func.lower(Repository.repo_name))
1770 1769 else:
1771 1770 q = q.order_by(Repository.repo_name)
1772 1771 return q.all()
1773 1772
1774 1773 @property
1775 1774 def forks(self):
1776 1775 """
1777 1776 Return forks of this repo
1778 1777 """
1779 1778 return Repository.get_repo_forks(self.repo_id)
1780 1779
1781 1780 @property
1782 1781 def parent(self):
1783 1782 """
1784 1783 Returns fork parent
1785 1784 """
1786 1785 return self.fork
1787 1786
1788 1787 @property
1789 1788 def just_name(self):
1790 1789 return self.repo_name.split(self.NAME_SEP)[-1]
1791 1790
1792 1791 @property
1793 1792 def groups_with_parents(self):
1794 1793 groups = []
1795 1794 if self.group is None:
1796 1795 return groups
1797 1796
1798 1797 cur_gr = self.group
1799 1798 groups.insert(0, cur_gr)
1800 1799 while 1:
1801 1800 gr = getattr(cur_gr, 'parent_group', None)
1802 1801 cur_gr = cur_gr.parent_group
1803 1802 if gr is None:
1804 1803 break
1805 1804 groups.insert(0, gr)
1806 1805
1807 1806 return groups
1808 1807
1809 1808 @property
1810 1809 def groups_and_repo(self):
1811 1810 return self.groups_with_parents, self
1812 1811
1813 1812 @LazyProperty
1814 1813 def repo_path(self):
1815 1814 """
1816 1815 Returns the full base path for this repository, i.e. where it actually
1817 1816 exists on the filesystem
1818 1817 """
1819 1818 q = Session().query(RhodeCodeUi).filter(
1820 1819 RhodeCodeUi.ui_key == self.NAME_SEP)
1821 1820 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1822 1821 return q.one().ui_value
1823 1822
1824 1823 @property
1825 1824 def repo_full_path(self):
1826 1825 p = [self.repo_path]
1827 1826 # we need to split the name by / since this is how we store the
1828 1827 # names in the database, but that eventually needs to be converted
1829 1828 # into a valid system path
1830 1829 p += self.repo_name.split(self.NAME_SEP)
1831 1830 return os.path.join(*map(safe_unicode, p))
1832 1831
1833 1832 @property
1834 1833 def cache_keys(self):
1835 1834 """
1836 1835 Returns associated cache keys for that repo
1837 1836 """
1838 1837 return CacheKey.query()\
1839 1838 .filter(CacheKey.cache_args == self.repo_name)\
1840 1839 .order_by(CacheKey.cache_key)\
1841 1840 .all()
1842 1841
1843 1842 def get_new_name(self, repo_name):
1844 1843 """
1845 1844 returns new full repository name based on assigned group and new repo_name
1846 1845 
1847 1846 :param repo_name:
1848 1847 """
1849 1848 path_prefix = self.group.full_path_splitted if self.group else []
1850 1849 return self.NAME_SEP.join(path_prefix + [repo_name])
1851 1850
1852 1851 @property
1853 1852 def _config(self):
1854 1853 """
1855 1854 Returns db based config object.
1856 1855 """
1857 1856 from rhodecode.lib.utils import make_db_config
1858 1857 return make_db_config(clear_session=False, repo=self)
1859 1858
1860 1859 def permissions(self, with_admins=True, with_owner=True):
1861 1860 q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
1862 1861 q = q.options(joinedload(UserRepoToPerm.repository),
1863 1862 joinedload(UserRepoToPerm.user),
1864 1863 joinedload(UserRepoToPerm.permission),)
1865 1864
1866 1865 # get owners, admins and permissions. We do a trick of re-writing
1867 1866 # objects from sqlalchemy to named-tuples because the sqlalchemy session
1868 1867 # holds a global reference and changing one object propagates to all
1869 1868 # others. This means that if an admin is also an owner, a change to the
1870 1869 # admin row would propagate to both objects
1871 1870 perm_rows = []
1872 1871 for _usr in q.all():
1873 1872 usr = AttributeDict(_usr.user.get_dict())
1874 1873 usr.permission = _usr.permission.permission_name
1875 1874 perm_rows.append(usr)
1876 1875
1877 1876 # filter the perm rows by 'default' first and then sort them by
1878 1877 # admin,write,read,none permissions sorted again alphabetically in
1879 1878 # each group
1880 1879 perm_rows = sorted(perm_rows, key=display_user_sort)
1881 1880
1882 1881 _admin_perm = 'repository.admin'
1883 1882 owner_row = []
1884 1883 if with_owner:
1885 1884 usr = AttributeDict(self.user.get_dict())
1886 1885 usr.owner_row = True
1887 1886 usr.permission = _admin_perm
1888 1887 owner_row.append(usr)
1889 1888
1890 1889 super_admin_rows = []
1891 1890 if with_admins:
1892 1891 for usr in User.get_all_super_admins():
1893 1892 # if this admin is also owner, don't double the record
1894 1893 if usr.user_id == owner_row[0].user_id:
1895 1894 owner_row[0].admin_row = True
1896 1895 else:
1897 1896 usr = AttributeDict(usr.get_dict())
1898 1897 usr.admin_row = True
1899 1898 usr.permission = _admin_perm
1900 1899 super_admin_rows.append(usr)
1901 1900
1902 1901 return super_admin_rows + owner_row + perm_rows
1903 1902
1904 1903 def permission_user_groups(self):
1905 1904 q = UserGroupRepoToPerm.query().filter(
1906 1905 UserGroupRepoToPerm.repository == self)
1907 1906 q = q.options(joinedload(UserGroupRepoToPerm.repository),
1908 1907 joinedload(UserGroupRepoToPerm.users_group),
1909 1908 joinedload(UserGroupRepoToPerm.permission),)
1910 1909
1911 1910 perm_rows = []
1912 1911 for _user_group in q.all():
1913 1912 usr = AttributeDict(_user_group.users_group.get_dict())
1914 1913 usr.permission = _user_group.permission.permission_name
1915 1914 perm_rows.append(usr)
1916 1915
1917 1916 perm_rows = sorted(perm_rows, key=display_user_group_sort)
1918 1917 return perm_rows
1919 1918
1920 1919 def get_api_data(self, include_secrets=False):
1921 1920 """
1922 1921 Common function for generating repo api data
1923 1922
1924 1923 :param include_secrets: See :meth:`User.get_api_data`.
1925 1924
1926 1925 """
1927 1926 # TODO: mikhail: Here there is an anti-pattern, we probably need to
1928 1927 # move these methods to the model level.
1929 1928 from rhodecode.model.settings import SettingsModel
1930 1929 from rhodecode.model.repo import RepoModel
1931 1930
1932 1931 repo = self
1933 1932 _user_id, _time, _reason = self.locked
1934 1933
1935 1934 data = {
1936 1935 'repo_id': repo.repo_id,
1937 1936 'repo_name': repo.repo_name,
1938 1937 'repo_type': repo.repo_type,
1939 1938 'clone_uri': repo.clone_uri or '',
1940 1939 'url': RepoModel().get_url(self),
1941 1940 'private': repo.private,
1942 1941 'created_on': repo.created_on,
1943 1942 'description': repo.description_safe,
1944 1943 'landing_rev': repo.landing_rev,
1945 1944 'owner': repo.user.username,
1946 1945 'fork_of': repo.fork.repo_name if repo.fork else None,
1947 1946 'fork_of_id': repo.fork.repo_id if repo.fork else None,
1948 1947 'enable_statistics': repo.enable_statistics,
1949 1948 'enable_locking': repo.enable_locking,
1950 1949 'enable_downloads': repo.enable_downloads,
1951 1950 'last_changeset': repo.changeset_cache,
1952 1951 'locked_by': User.get(_user_id).get_api_data(
1953 1952 include_secrets=include_secrets) if _user_id else None,
1954 1953 'locked_date': time_to_datetime(_time) if _time else None,
1955 1954 'lock_reason': _reason if _reason else None,
1956 1955 }
1957 1956
1958 1957 # TODO: mikhail: should be per-repo settings here
1959 1958 rc_config = SettingsModel().get_all_settings()
1960 1959 repository_fields = str2bool(
1961 1960 rc_config.get('rhodecode_repository_fields'))
1962 1961 if repository_fields:
1963 1962 for f in self.extra_fields:
1964 1963 data[f.field_key_prefixed] = f.field_value
1965 1964
1966 1965 return data
1967 1966
1968 1967 @classmethod
1969 1968 def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
1970 1969 if not lock_time:
1971 1970 lock_time = time.time()
1972 1971 if not lock_reason:
1973 1972 lock_reason = cls.LOCK_AUTOMATIC
1974 1973 repo.locked = [user_id, lock_time, lock_reason]
1975 1974 Session().add(repo)
1976 1975 Session().commit()
1977 1976
1978 1977 @classmethod
1979 1978 def unlock(cls, repo):
1980 1979 repo.locked = None
1981 1980 Session().add(repo)
1982 1981 Session().commit()
1983 1982
1984 1983 @classmethod
1985 1984 def getlock(cls, repo):
1986 1985 return repo.locked
1987 1986
1988 1987 def is_user_lock(self, user_id):
1989 1988 if self.locked[0]:
1990 1989 lock_user_id = safe_int(self.locked[0])
1991 1990 user_id = safe_int(user_id)
1992 1991 # both are ints, and they are equal
1993 1992 return all([lock_user_id, user_id]) and lock_user_id == user_id
1994 1993
1995 1994 return False
1996 1995
1997 1996 def get_locking_state(self, action, user_id, only_when_enabled=True):
1998 1997 """
1999 1998 Checks locking on this repository. If locking is enabled and a lock is
2000 1999 present, returns a tuple of (make_lock, locked, locked_by).
2001 2000 make_lock can have 3 states: None (do nothing), True (make a lock) and
2002 2001 False (release the lock). This value is later propagated to hooks, which
2003 2002 do the locking. Think of it as a signal telling the hooks what to do.
2004 2003
2005 2004 """
2006 2005 # TODO: johbo: This is part of the business logic and should be moved
2007 2006 # into the RepositoryModel.
2008 2007
2009 2008 if action not in ('push', 'pull'):
2010 2009 raise ValueError("Invalid action value: %s" % repr(action))
2011 2010
2012 2011 # defines if locked error should be thrown to user
2013 2012 currently_locked = False
2014 2013 # defines if new lock should be made, tri-state
2015 2014 make_lock = None
2016 2015 repo = self
2017 2016 user = User.get(user_id)
2018 2017
2019 2018 lock_info = repo.locked
2020 2019
2021 2020 if repo and (repo.enable_locking or not only_when_enabled):
2022 2021 if action == 'push':
2023 2022 # check if it's already locked; if it is, compare users
2024 2023 locked_by_user_id = lock_info[0]
2025 2024 if user.user_id == locked_by_user_id:
2026 2025 log.debug(
2027 2026 'Got `push` action from user %s, now unlocking', user)
2028 2027 # unlock if we have push from user who locked
2029 2028 make_lock = False
2030 2029 else:
2031 2030 # we're not the same user who locked the repo, deny with the
2032 2031 # code defined in settings (default is 423 HTTP Locked)
2033 2032 log.debug('Repo %s is currently locked by %s', repo, user)
2034 2033 currently_locked = True
2035 2034 elif action == 'pull':
2036 2035 # [0] user [1] date
2037 2036 if lock_info[0] and lock_info[1]:
2038 2037 log.debug('Repo %s is currently locked by %s', repo, user)
2039 2038 currently_locked = True
2040 2039 else:
2041 2040 log.debug('Setting lock on repo %s by %s', repo, user)
2042 2041 make_lock = True
2043 2042
2044 2043 else:
2045 2044 log.debug('Repository %s does not have locking enabled', repo)
2046 2045
2047 2046 log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
2048 2047 make_lock, currently_locked, lock_info)
2049 2048
2050 2049 from rhodecode.lib.auth import HasRepoPermissionAny
2051 2050 perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
2052 2051 if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
2053 2052 # if we don't have at least write permission we cannot make a lock
2054 2053 log.debug('lock state reset back to FALSE due to lack '
2055 2054 'of at least write permission')
2056 2055 make_lock = False
2057 2056
2058 2057 return make_lock, currently_locked, lock_info
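# Illustrative sketch (not part of the original model): how a push hook might
# consume the tri-state result computed above; the user id is hypothetical.
#
#   >>> make_lock, locked, locked_by = repo.get_locking_state('push', user_id=2)
#   >>> if make_lock:
#   ...     Repository.lock(repo, user_id=2, lock_reason=Repository.LOCK_AUTOMATIC)
#   ... elif make_lock is False:
#   ...     Repository.unlock(repo)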
2059 2058
2060 2059 @property
2061 2060 def last_db_change(self):
2062 2061 return self.updated_on
2063 2062
2064 2063 @property
2065 2064 def clone_uri_hidden(self):
2066 2065 clone_uri = self.clone_uri
2067 2066 if clone_uri:
2068 2067 import urlobject
2069 2068 url_obj = urlobject.URLObject(cleaned_uri(clone_uri))
2070 2069 if url_obj.password:
2071 2070 clone_uri = url_obj.with_password('*****')
2072 2071 return clone_uri
2073 2072
2074 2073 def clone_url(self, **override):
2075 2074 from rhodecode.model.settings import SettingsModel
2076 2075
2077 2076 uri_tmpl = None
2078 2077 if 'with_id' in override:
2079 2078 uri_tmpl = self.DEFAULT_CLONE_URI_ID
2080 2079 del override['with_id']
2081 2080
2082 2081 if 'uri_tmpl' in override:
2083 2082 uri_tmpl = override['uri_tmpl']
2084 2083 del override['uri_tmpl']
2085 2084
2086 2085 # we didn't override our tmpl from **overrides
2087 2086 if not uri_tmpl:
2088 2087 rc_config = SettingsModel().get_all_settings(cache=True)
2089 2088 uri_tmpl = rc_config.get(
2090 2089 'rhodecode_clone_uri_tmpl') or self.DEFAULT_CLONE_URI
2091 2090
2092 2091 request = get_current_request()
2093 2092 return get_clone_url(request=request,
2094 2093 uri_tmpl=uri_tmpl,
2095 2094 repo_name=self.repo_name,
2096 2095 repo_id=self.repo_id, **override)
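# Illustrative sketch (not part of the original model): the supported
# overrides for the template resolution above; the resulting URLs depend on
# the configured `rhodecode_clone_uri_tmpl` and the current request.
#
#   >>> repo.clone_url()                  # settings (or DEFAULT_CLONE_URI) template
#   >>> repo.clone_url(with_id=True)      # id-based template, e.g. .../_<repoid>
#   >>> repo.clone_url(uri_tmpl='{scheme}://{netloc}/{repo}')  # explicit template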
2097 2096
2098 2097 def set_state(self, state):
2099 2098 self.repo_state = state
2100 2099 Session().add(self)
2101 2100 #==========================================================================
2102 2101 # SCM PROPERTIES
2103 2102 #==========================================================================
2104 2103
2105 2104 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
2106 2105 return get_commit_safe(
2107 2106 self.scm_instance(), commit_id, commit_idx, pre_load=pre_load)
2108 2107
2109 2108 def get_changeset(self, rev=None, pre_load=None):
2110 2109 warnings.warn("Use get_commit", DeprecationWarning)
2111 2110 commit_id = None
2112 2111 commit_idx = None
2113 if isinstance(rev, compat.string_types):
2112 if isinstance(rev, str):
2114 2113 commit_id = rev
2115 2114 else:
2116 2115 commit_idx = rev
2117 2116 return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
2118 2117 pre_load=pre_load)
2119 2118
2120 2119 def get_landing_commit(self):
2121 2120 """
2122 2121 Returns landing commit, or if that doesn't exist returns the tip
2123 2122 """
2124 2123 _rev_type, _rev = self.landing_rev
2125 2124 commit = self.get_commit(_rev)
2126 2125 if isinstance(commit, EmptyCommit):
2127 2126 return self.get_commit()
2128 2127 return commit
2129 2128
2130 2129 def update_commit_cache(self, cs_cache=None, config=None):
2131 2130 """
2132 2131 Update cache of last changeset for repository, keys should be::
2133 2132
2134 2133 short_id
2135 2134 raw_id
2136 2135 revision
2137 2136 parents
2138 2137 message
2139 2138 date
2140 2139 author
2141 2140
2142 2141 :param cs_cache:
2143 2142 """
2144 2143 from rhodecode.lib.vcs.backends.base import BaseChangeset
2145 2144 if cs_cache is None:
2146 2145 # use no-cache version here
2147 2146 scm_repo = self.scm_instance(cache=False, config=config)
2148 2147 if scm_repo:
2149 2148 cs_cache = scm_repo.get_commit(
2150 2149 pre_load=["author", "date", "message", "parents"])
2151 2150 else:
2152 2151 cs_cache = EmptyCommit()
2153 2152
2154 2153 if isinstance(cs_cache, BaseChangeset):
2155 2154 cs_cache = cs_cache.__json__()
2156 2155
2157 2156 def is_outdated(new_cs_cache):
2158 2157 if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
2159 2158 new_cs_cache['revision'] != self.changeset_cache['revision']):
2160 2159 return True
2161 2160 return False
2162 2161
2163 2162 # check if we have maybe already latest cached revision
2164 2163 if is_outdated(cs_cache) or not self.changeset_cache:
2165 2164 _default = datetime.datetime.fromtimestamp(0)
2166 2165 last_change = cs_cache.get('date') or _default
2167 2166 log.debug('updated repo %s with new commit cache %s',
2168 2167 self.repo_name, cs_cache)
2169 2168 self.updated_on = last_change
2170 2169 self.changeset_cache = cs_cache
2171 2170 Session().add(self)
2172 2171 Session().commit()
2173 2172 else:
2174 2173 log.debug('Skipping update_commit_cache for repo:`%s` '
2175 2174 'commit already with latest changes', self.repo_name)
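# Illustrative sketch (not part of the original model): a minimal cache
# payload matching the keys documented above; all values are hypothetical.
#
#   >>> repo.update_commit_cache(cs_cache={
#   ...     'short_id': 'abcdef123456', 'raw_id': 'abcdef123456' + '0' * 28,
#   ...     'revision': 42, 'parents': [], 'message': 'fix',
#   ...     'date': datetime.datetime(2020, 1, 1),
#   ...     'author': 'Jane <jane@example.com>'})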
2176 2175
2177 2176 @property
2178 2177 def tip(self):
2179 2178 return self.get_commit('tip')
2180 2179
2181 2180 @property
2182 2181 def author(self):
2183 2182 return self.tip.author
2184 2183
2185 2184 @property
2186 2185 def last_change(self):
2187 2186 return self.scm_instance().last_change
2188 2187
2189 2188 def get_comments(self, revisions=None):
2190 2189 """
2191 2190 Returns comments for this repository grouped by revisions
2192 2191
2193 2192 :param revisions: filter query by revisions only
2194 2193 """
2195 2194 cmts = ChangesetComment.query()\
2196 2195 .filter(ChangesetComment.repo == self)
2197 2196 if revisions:
2198 2197 cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
2199 2198 grouped = collections.defaultdict(list)
2200 2199 for cmt in cmts.all():
2201 2200 grouped[cmt.revision].append(cmt)
2202 2201 return grouped
2203 2202
2204 2203 def statuses(self, revisions=None):
2205 2204 """
2206 2205 Returns statuses for this repository
2207 2206
2208 2207 :param revisions: list of revisions to get statuses for
2209 2208 """
2210 2209 statuses = ChangesetStatus.query()\
2211 2210 .filter(ChangesetStatus.repo == self)\
2212 2211 .filter(ChangesetStatus.version == 0)
2213 2212
2214 2213 if revisions:
2215 2214 # Try doing the filtering in chunks to avoid hitting limits
2216 2215 size = 500
2217 2216 status_results = []
2218 2217 for chunk in range(0, len(revisions), size):
2219 2218 status_results += statuses.filter(
2220 2219 ChangesetStatus.revision.in_(
2221 2220 revisions[chunk: chunk+size])
2222 2221 ).all()
2223 2222 else:
2224 2223 status_results = statuses.all()
2225 2224
2226 2225 grouped = {}
2227 2226
2228 2227 # maybe we have a newly opened pull request without a status yet?
2229 2228 stat = ChangesetStatus.STATUS_UNDER_REVIEW
2230 2229 status_lbl = ChangesetStatus.get_status_lbl(stat)
2231 2230 for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
2232 2231 for rev in pr.revisions:
2233 2232 pr_id = pr.pull_request_id
2234 2233 pr_repo = pr.target_repo.repo_name
2235 2234 grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
2236 2235
2237 2236 for stat in status_results:
2238 2237 pr_id = pr_repo = None
2239 2238 if stat.pull_request:
2240 2239 pr_id = stat.pull_request.pull_request_id
2241 2240 pr_repo = stat.pull_request.target_repo.repo_name
2242 2241 grouped[stat.revision] = [str(stat.status), stat.status_lbl,
2243 2242 pr_id, pr_repo]
2244 2243 return grouped
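# Illustrative sketch, not part of the original module: with the chunk size of
# 500 used above, a list of 1200 revisions is queried in three IN() batches:
#
#   for chunk in range(0, 1200, 500):          # -> 0, 500, 1000
#       batch = revisions[chunk:chunk + 500]   # 500, 500, then 200 items
#
# which keeps each SQL statement below typical bound-parameter limits.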
2245 2244
2246 2245 # ==========================================================================
2247 2246 # SCM CACHE INSTANCE
2248 2247 # ==========================================================================
2249 2248
2250 2249 def scm_instance(self, **kwargs):
2251 2250 import rhodecode
2252 2251
2253 2252 # Passing a config will not hit the cache; currently this is only used
2254 2253 # for repo2db_mapper
2255 2254 config = kwargs.pop('config', None)
2256 2255 cache = kwargs.pop('cache', None)
2257 2256 full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache'))
2258 2257 # if cache is NOT defined, use the global default; otherwise we have full
2259 2258 # control over the cache behaviour
2260 2259 if cache is None and full_cache and not config:
2261 2260 return self._get_instance_cached()
2262 2261 return self._get_instance(cache=bool(cache), config=config)
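# Illustrative sketch, not part of the original module: the cache decision
# above boils down to (where `cfg` is any vcs config object, hypothetical name):
#
#   repo.scm_instance()              # cached, if vcs_full_cache is enabled
#   repo.scm_instance(cache=False)   # explicit bypass of the cache
#   repo.scm_instance(config=cfg)    # a custom config always skips the cache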
2263 2262
2264 2263 def _get_instance_cached(self):
2265 2264 return self._get_instance()
2266 2265
2267 2266 def _get_instance(self, cache=True, config=None):
2268 2267 config = config or self._config
2269 2268 custom_wire = {
2270 2269 'cache': cache # controls the vcs.remote cache
2271 2270 }
2272 2271 repo = get_vcs_instance(
2273 2272 repo_path=safe_str(self.repo_full_path),
2274 2273 config=config,
2275 2274 with_wire=custom_wire,
2276 2275 create=False,
2277 2276 _vcs_alias=self.repo_type)
2278 2277
2279 2278 return repo
2280 2279
2281 2280 def __json__(self):
2282 2281 return {'landing_rev': self.landing_rev}
2283 2282
2284 2283 def get_dict(self):
2285 2284
2286 2285 # Since we transformed `repo_name` to a hybrid property, we need to
2287 2286 # keep compatibility with the code which uses `repo_name` field.
2288 2287
2289 2288 result = super(Repository, self).get_dict()
2290 2289 result['repo_name'] = result.pop('_repo_name', None)
2291 2290 return result
2292 2291
2293 2292
2294 2293 class RepoGroup(Base, BaseModel):
2295 2294 __tablename__ = 'groups'
2296 2295 __table_args__ = (
2297 2296 UniqueConstraint('group_name', 'group_parent_id'),
2298 2297 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2299 2298 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
2300 2299 )
2301 2300 __mapper_args__ = {'order_by': 'group_name'}
2302 2301
2303 2302 CHOICES_SEPARATOR = '/' # used to generate select2 choices for nested groups
2304 2303
2305 2304 group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2306 2305 group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
2307 2306 group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
2308 2307 group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
2309 2308 enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
2310 2309 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
2311 2310 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2312 2311 updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
2313 2312 personal = Column('personal', Boolean(), nullable=True, unique=None, default=None)
2314 2313
2315 2314 repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
2316 2315 users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
2317 2316 parent_group = relationship('RepoGroup', remote_side=group_id)
2318 2317 user = relationship('User')
2319 2318 integrations = relationship('Integration',
2320 2319 cascade="all, delete, delete-orphan")
2321 2320
2322 2321 def __init__(self, group_name='', parent_group=None):
2323 2322 self.group_name = group_name
2324 2323 self.parent_group = parent_group
2325 2324
2326 2325 def __unicode__(self):
2327 2326 return u"<%s('id:%s:%s')>" % (
2328 2327 self.__class__.__name__, self.group_id, self.group_name)
2329 2328
2330 2329 @hybrid_property
2331 2330 def description_safe(self):
2332 2331 from rhodecode.lib import helpers as h
2333 2332 return h.escape(self.group_description)
2334 2333
2335 2334 @classmethod
2336 2335 def _generate_choice(cls, repo_group):
2337 2336 from webhelpers2.html import literal as _literal
2338 2337 _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k))
2339 2338 return repo_group.group_id, _name(repo_group.full_path_splitted)
2340 2339
2341 2340 @classmethod
2342 2341 def groups_choices(cls, groups=None, show_empty_group=True):
2343 2342 if not groups:
2344 2343 groups = cls.query().all()
2345 2344
2346 2345 repo_groups = []
2347 2346 if show_empty_group:
2348 2347 repo_groups = [(-1, u'-- %s --' % _('No parent'))]
2349 2348
2350 2349 repo_groups.extend([cls._generate_choice(x) for x in groups])
2351 2350
2352 2351 repo_groups = sorted(
2353 2352 repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0])
2354 2353 return repo_groups
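# Illustrative sketch, not part of the original module: for nested groups named
# 'libs' and 'libs/web' with hypothetical ids 3 and 7, groups_choices() would
# produce select2-style tuples roughly like:
#
#   [(-1, u'-- No parent --'), (3, u'libs'), (7, u'libs/web')]
#
# where the first element is the group_id and the second the '/'-joined path.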
2355 2354
2356 2355 @classmethod
2357 2356 def url_sep(cls):
2358 2357 return URL_SEP
2359 2358
2360 2359 @classmethod
2361 2360 def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
2362 2361 if case_insensitive:
2363 2362 gr = cls.query().filter(func.lower(cls.group_name)
2364 2363 == func.lower(group_name))
2365 2364 else:
2366 2365 gr = cls.query().filter(cls.group_name == group_name)
2367 2366 if cache:
2368 2367 name_key = _hash_key(group_name)
2369 2368 gr = gr.options(
2370 2369 FromCache("sql_cache_short", "get_group_%s" % name_key))
2371 2370 return gr.scalar()
2372 2371
2373 2372 @classmethod
2374 2373 def get_user_personal_repo_group(cls, user_id):
2375 2374 user = User.get(user_id)
2376 2375 if user.username == User.DEFAULT_USER:
2377 2376 return None
2378 2377
2379 2378 return cls.query()\
2380 2379 .filter(cls.personal == true()) \
2381 2380 .filter(cls.user == user).scalar()
2382 2381
2383 2382 @classmethod
2384 2383 def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None),
2385 2384 case_insensitive=True):
2386 2385 q = RepoGroup.query()
2387 2386
2388 2387 if not isinstance(user_id, Optional):
2389 2388 q = q.filter(RepoGroup.user_id == user_id)
2390 2389
2391 2390 if not isinstance(group_id, Optional):
2392 2391 q = q.filter(RepoGroup.group_parent_id == group_id)
2393 2392
2394 2393 if case_insensitive:
2395 2394 q = q.order_by(func.lower(RepoGroup.group_name))
2396 2395 else:
2397 2396 q = q.order_by(RepoGroup.group_name)
2398 2397 return q.all()
2399 2398
2400 2399 @property
2401 2400 def parents(self):
2402 2401 parents_recursion_limit = 10
2403 2402 groups = []
2404 2403 if self.parent_group is None:
2405 2404 return groups
2406 2405 cur_gr = self.parent_group
2407 2406 groups.insert(0, cur_gr)
2408 2407 cnt = 0
2409 2408 while 1:
2410 2409 cnt += 1
2411 2410 gr = getattr(cur_gr, 'parent_group', None)
2412 2411 cur_gr = cur_gr.parent_group
2413 2412 if gr is None:
2414 2413 break
2415 2414 if cnt == parents_recursion_limit:
2416 2415 # this will prevent accidental infinite loops
2417 2416 log.error('more than %s parents found for group %s, stopping '
2418 2417 'recursive parent fetching', parents_recursion_limit, self)
2419 2418 break
2420 2419
2421 2420 groups.insert(0, gr)
2422 2421 return groups
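# Illustrative sketch, not part of the original module: for a group stored as
# 'a/b/c' (hypothetical path), `parents` walks up through parent_group and
# returns the RepoGroup objects in top-down order, stopping after 10 levels to
# guard against accidental cycles:
#
#   grp = RepoGroup.get_by_group_name('a/b/c')
#   [g.group_name for g in grp.parents]   # -> [u'a', u'a/b']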
2423 2422
2424 2423 @property
2425 2424 def last_db_change(self):
2426 2425 return self.updated_on
2427 2426
2428 2427 @property
2429 2428 def children(self):
2430 2429 return RepoGroup.query().filter(RepoGroup.parent_group == self)
2431 2430
2432 2431 @property
2433 2432 def name(self):
2434 2433 return self.group_name.split(RepoGroup.url_sep())[-1]
2435 2434
2436 2435 @property
2437 2436 def full_path(self):
2438 2437 return self.group_name
2439 2438
2440 2439 @property
2441 2440 def full_path_splitted(self):
2442 2441 return self.group_name.split(RepoGroup.url_sep())
2443 2442
2444 2443 @property
2445 2444 def repositories(self):
2446 2445 return Repository.query()\
2447 2446 .filter(Repository.group == self)\
2448 2447 .order_by(Repository.repo_name)
2449 2448
2450 2449 @property
2451 2450 def repositories_recursive_count(self):
2452 2451 cnt = self.repositories.count()
2453 2452
2454 2453 def children_count(group):
2455 2454 cnt = 0
2456 2455 for child in group.children:
2457 2456 cnt += child.repositories.count()
2458 2457 cnt += children_count(child)
2459 2458 return cnt
2460 2459
2461 2460 return cnt + children_count(self)
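# Illustrative sketch, not part of the original module: for a group holding 2
# repositories plus one child group with 3 more (hypothetical numbers),
# repositories_recursive_count returns 2 + 3 = 5, because children_count()
# recurses over RepoGroup.children.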
2462 2461
2463 2462 def _recursive_objects(self, include_repos=True):
2464 2463 all_ = []
2465 2464
2466 2465 def _get_members(root_gr):
2467 2466 if include_repos:
2468 2467 for r in root_gr.repositories:
2469 2468 all_.append(r)
2470 2469 childs = root_gr.children.all()
2471 2470 if childs:
2472 2471 for gr in childs:
2473 2472 all_.append(gr)
2474 2473 _get_members(gr)
2475 2474
2476 2475 _get_members(self)
2477 2476 return [self] + all_
2478 2477
2479 2478 def recursive_groups_and_repos(self):
2480 2479 """
2481 2480 Recursively return all groups, with the repositories in those groups
2482 2481 """
2483 2482 return self._recursive_objects()
2484 2483
2485 2484 def recursive_groups(self):
2486 2485 """
2487 2486 Returns all child groups of this group, including children of children
2488 2487 """
2489 2488 return self._recursive_objects(include_repos=False)
2490 2489
2491 2490 def get_new_name(self, group_name):
2492 2491 """
2493 2492 Returns the new full group name based on the parent and the new name
2494 2493
2495 2494 :param group_name:
2496 2495 """
2497 2496 path_prefix = (self.parent_group.full_path_splitted if
2498 2497 self.parent_group else [])
2499 2498 return RepoGroup.url_sep().join(path_prefix + [group_name])
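# Illustrative sketch, not part of the original module: renaming builds the new
# path from the parent's path plus the new name (group names hypothetical,
# assuming 'a/b' has parent group 'a'):
#
#   grp = RepoGroup.get_by_group_name('a/b')
#   grp.get_new_name('c')   # -> u'a/c'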
2500 2499
2501 2500 def permissions(self, with_admins=True, with_owner=True):
2502 2501 q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self)
2503 2502 q = q.options(joinedload(UserRepoGroupToPerm.group),
2504 2503 joinedload(UserRepoGroupToPerm.user),
2505 2504 joinedload(UserRepoGroupToPerm.permission),)
2506 2505
2507 2506 # get owners, admins and their permissions. We re-write the sqlalchemy
2508 2507 # objects as plain AttributeDict copies because the sqlalchemy session
2509 2508 # keeps a global reference, so changing one object would propagate to
2510 2509 # all others. This means that if an admin is also the owner, a change to
2511 2510 # the admin row would propagate to both objects
2512 2511 perm_rows = []
2513 2512 for _usr in q.all():
2514 2513 usr = AttributeDict(_usr.user.get_dict())
2515 2514 usr.permission = _usr.permission.permission_name
2516 2515 perm_rows.append(usr)
2517 2516
2518 2517 # sort the perm rows with the 'default' user first, then by
2519 2518 # admin, write, read, none permissions, alphabetically again within
2520 2519 # each group
2521 2520 perm_rows = sorted(perm_rows, key=display_user_sort)
2522 2521
2523 2522 _admin_perm = 'group.admin'
2524 2523 owner_row = []
2525 2524 if with_owner:
2526 2525 usr = AttributeDict(self.user.get_dict())
2527 2526 usr.owner_row = True
2528 2527 usr.permission = _admin_perm
2529 2528 owner_row.append(usr)
2530 2529
2531 2530 super_admin_rows = []
2532 2531 if with_admins:
2533 2532 for usr in User.get_all_super_admins():
2534 2533 # if this admin is also the owner, don't duplicate the record
2535 2534 if usr.user_id == owner_row[0].user_id:
2536 2535 owner_row[0].admin_row = True
2537 2536 else:
2538 2537 usr = AttributeDict(usr.get_dict())
2539 2538 usr.admin_row = True
2540 2539 usr.permission = _admin_perm
2541 2540 super_admin_rows.append(usr)
2542 2541
2543 2542 return super_admin_rows + owner_row + perm_rows
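# Illustrative sketch, not part of the original module: the returned list is
# ordered as super-admin rows, then the owner row, then the per-user rows
# (usernames and permissions below are hypothetical):
#
#   rows = repo_group.permissions()
#   [(r.username, r.permission) for r in rows]
#   # -> [('admin', 'group.admin'), ('owner', 'group.admin'), ('dev', 'group.read')]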
2544 2543
2545 2544 def permission_user_groups(self):
2546 2545 q = UserGroupRepoGroupToPerm.query().filter(UserGroupRepoGroupToPerm.group == self)
2547 2546 q = q.options(joinedload(UserGroupRepoGroupToPerm.group),
2548 2547 joinedload(UserGroupRepoGroupToPerm.users_group),
2549 2548 joinedload(UserGroupRepoGroupToPerm.permission),)
2550 2549
2551 2550 perm_rows = []
2552 2551 for _user_group in q.all():
2553 2552 usr = AttributeDict(_user_group.users_group.get_dict())
2554 2553 usr.permission = _user_group.permission.permission_name
2555 2554 perm_rows.append(usr)
2556 2555
2557 2556 perm_rows = sorted(perm_rows, key=display_user_group_sort)
2558 2557 return perm_rows
2559 2558
2560 2559 def get_api_data(self):
2561 2560 """
2562 2561 Common function for generating api data
2563 2562
2564 2563 """
2565 2564 group = self
2566 2565 data = {
2567 2566 'group_id': group.group_id,
2568 2567 'group_name': group.group_name,
2569 2568 'group_description': group.description_safe,
2570 2569 'parent_group': group.parent_group.group_name if group.parent_group else None,
2571 2570 'repositories': [x.repo_name for x in group.repositories],
2572 2571 'owner': group.user.username,
2573 2572 }
2574 2573 return data
2575 2574
2576 2575
2577 2576 class Permission(Base, BaseModel):
2578 2577 __tablename__ = 'permissions'
2579 2578 __table_args__ = (
2580 2579 Index('p_perm_name_idx', 'permission_name'),
2581 2580 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2582 2581 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
2583 2582 )
2584 2583 PERMS = [
2585 2584 ('hg.admin', _('RhodeCode Super Administrator')),
2586 2585
2587 2586 ('repository.none', _('Repository no access')),
2588 2587 ('repository.read', _('Repository read access')),
2589 2588 ('repository.write', _('Repository write access')),
2590 2589 ('repository.admin', _('Repository admin access')),
2591 2590
2592 2591 ('group.none', _('Repository group no access')),
2593 2592 ('group.read', _('Repository group read access')),
2594 2593 ('group.write', _('Repository group write access')),
2595 2594 ('group.admin', _('Repository group admin access')),
2596 2595
2597 2596 ('usergroup.none', _('User group no access')),
2598 2597 ('usergroup.read', _('User group read access')),
2599 2598 ('usergroup.write', _('User group write access')),
2600 2599 ('usergroup.admin', _('User group admin access')),
2601 2600
2602 2601 ('hg.repogroup.create.false', _('Repository Group creation disabled')),
2603 2602 ('hg.repogroup.create.true', _('Repository Group creation enabled')),
2604 2603
2605 2604 ('hg.usergroup.create.false', _('User Group creation disabled')),
2606 2605 ('hg.usergroup.create.true', _('User Group creation enabled')),
2607 2606
2608 2607 ('hg.create.none', _('Repository creation disabled')),
2609 2608 ('hg.create.repository', _('Repository creation enabled')),
2610 2609 ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
2611 2610 ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),
2612 2611
2613 2612 ('hg.fork.none', _('Repository forking disabled')),
2614 2613 ('hg.fork.repository', _('Repository forking enabled')),
2615 2614
2616 2615 ('hg.register.none', _('Registration disabled')),
2617 2616 ('hg.register.manual_activate', _('User Registration with manual account activation')),
2618 2617 ('hg.register.auto_activate', _('User Registration with automatic account activation')),
2619 2618
2620 2619 ('hg.password_reset.enabled', _('Password reset enabled')),
2621 2620 ('hg.password_reset.hidden', _('Password reset hidden')),
2622 2621 ('hg.password_reset.disabled', _('Password reset disabled')),
2623 2622
2624 2623 ('hg.extern_activate.manual', _('Manual activation of external account')),
2625 2624 ('hg.extern_activate.auto', _('Automatic activation of external account')),
2626 2625
2627 2626 ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')),
2628 2627 ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')),
2629 2628 ]
2630 2629
2631 2630 # definition of system default permissions for DEFAULT user
2632 2631 DEFAULT_USER_PERMISSIONS = [
2633 2632 'repository.read',
2634 2633 'group.read',
2635 2634 'usergroup.read',
2636 2635 'hg.create.repository',
2637 2636 'hg.repogroup.create.false',
2638 2637 'hg.usergroup.create.false',
2639 2638 'hg.create.write_on_repogroup.true',
2640 2639 'hg.fork.repository',
2641 2640 'hg.register.manual_activate',
2642 2641 'hg.password_reset.enabled',
2643 2642 'hg.extern_activate.auto',
2644 2643 'hg.inherit_default_perms.true',
2645 2644 ]
2646 2645
2647 2646 # Weight defines which permissions are more important;
2648 2647 # the higher the number, the more important the permission.
2650 2649 PERM_WEIGHTS = {
2651 2650 'repository.none': 0,
2652 2651 'repository.read': 1,
2653 2652 'repository.write': 3,
2654 2653 'repository.admin': 4,
2655 2654
2656 2655 'group.none': 0,
2657 2656 'group.read': 1,
2658 2657 'group.write': 3,
2659 2658 'group.admin': 4,
2660 2659
2661 2660 'usergroup.none': 0,
2662 2661 'usergroup.read': 1,
2663 2662 'usergroup.write': 3,
2664 2663 'usergroup.admin': 4,
2665 2664
2666 2665 'hg.repogroup.create.false': 0,
2667 2666 'hg.repogroup.create.true': 1,
2668 2667
2669 2668 'hg.usergroup.create.false': 0,
2670 2669 'hg.usergroup.create.true': 1,
2671 2670
2672 2671 'hg.fork.none': 0,
2673 2672 'hg.fork.repository': 1,
2674 2673 'hg.create.none': 0,
2675 2674 'hg.create.repository': 1
2676 2675 }
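# Illustrative sketch, not part of the original module: PERM_WEIGHTS can be
# used to pick the strongest of several permissions, e.g.:
#
#   perms = ['repository.read', 'repository.write']
#   max(perms, key=PERM_WEIGHTS.get)   # -> 'repository.write'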
2677 2676
2678 2677 permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2679 2678 permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
2680 2679 permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)
2681 2680
2682 2681 def __unicode__(self):
2683 2682 return u"<%s('%s:%s')>" % (
2684 2683 self.__class__.__name__, self.permission_id, self.permission_name
2685 2684 )
2686 2685
2687 2686 @classmethod
2688 2687 def get_by_key(cls, key):
2689 2688 return cls.query().filter(cls.permission_name == key).scalar()
2690 2689
2691 2690 @classmethod
2692 2691 def get_default_repo_perms(cls, user_id, repo_id=None):
2693 2692 q = Session().query(UserRepoToPerm, Repository, Permission)\
2694 2693 .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\
2695 2694 .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
2696 2695 .filter(UserRepoToPerm.user_id == user_id)
2697 2696 if repo_id:
2698 2697 q = q.filter(UserRepoToPerm.repository_id == repo_id)
2699 2698 return q.all()
2700 2699
2701 2700 @classmethod
2702 2701 def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None):
2703 2702 q = Session().query(UserGroupRepoToPerm, Repository, Permission)\
2704 2703 .join(
2705 2704 Permission,
2706 2705 UserGroupRepoToPerm.permission_id == Permission.permission_id)\
2707 2706 .join(
2708 2707 Repository,
2709 2708 UserGroupRepoToPerm.repository_id == Repository.repo_id)\
2710 2709 .join(
2711 2710 UserGroup,
2712 2711 UserGroupRepoToPerm.users_group_id ==
2713 2712 UserGroup.users_group_id)\
2714 2713 .join(
2715 2714 UserGroupMember,
2716 2715 UserGroupRepoToPerm.users_group_id ==
2717 2716 UserGroupMember.users_group_id)\
2718 2717 .filter(
2719 2718 UserGroupMember.user_id == user_id,
2720 2719 UserGroup.users_group_active == true())
2721 2720 if repo_id:
2722 2721 q = q.filter(UserGroupRepoToPerm.repository_id == repo_id)
2723 2722 return q.all()
2724 2723
2725 2724 @classmethod
2726 2725 def get_default_group_perms(cls, user_id, repo_group_id=None):
2727 2726 q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\
2728 2727 .join((Permission, UserRepoGroupToPerm.permission_id == Permission.permission_id))\
2729 2728 .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id))\
2730 2729 .filter(UserRepoGroupToPerm.user_id == user_id)
2731 2730 if repo_group_id:
2732 2731 q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id)
2733 2732 return q.all()
2734 2733
2735 2734 @classmethod
2736 2735 def get_default_group_perms_from_user_group(
2737 2736 cls, user_id, repo_group_id=None):
2738 2737 q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\
2739 2738 .join(
2740 2739 Permission,
2741 2740 UserGroupRepoGroupToPerm.permission_id ==
2742 2741 Permission.permission_id)\
2743 2742 .join(
2744 2743 RepoGroup,
2745 2744 UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\
2746 2745 .join(
2747 2746 UserGroup,
2748 2747 UserGroupRepoGroupToPerm.users_group_id ==
2749 2748 UserGroup.users_group_id)\
2750 2749 .join(
2751 2750 UserGroupMember,
2752 2751 UserGroupRepoGroupToPerm.users_group_id ==
2753 2752 UserGroupMember.users_group_id)\
2754 2753 .filter(
2755 2754 UserGroupMember.user_id == user_id,
2756 2755 UserGroup.users_group_active == true())
2757 2756 if repo_group_id:
2758 2757 q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id)
2759 2758 return q.all()
2760 2759
2761 2760 @classmethod
2762 2761 def get_default_user_group_perms(cls, user_id, user_group_id=None):
2763 2762 q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\
2764 2763 .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\
2765 2764 .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
2766 2765 .filter(UserUserGroupToPerm.user_id == user_id)
2767 2766 if user_group_id:
2768 2767 q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id)
2769 2768 return q.all()
2770 2769
2771 2770 @classmethod
2772 2771 def get_default_user_group_perms_from_user_group(
2773 2772 cls, user_id, user_group_id=None):
2774 2773 TargetUserGroup = aliased(UserGroup, name='target_user_group')
2775 2774 q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
2776 2775 .join(
2777 2776 Permission,
2778 2777 UserGroupUserGroupToPerm.permission_id ==
2779 2778 Permission.permission_id)\
2780 2779 .join(
2781 2780 TargetUserGroup,
2782 2781 UserGroupUserGroupToPerm.target_user_group_id ==
2783 2782 TargetUserGroup.users_group_id)\
2784 2783 .join(
2785 2784 UserGroup,
2786 2785 UserGroupUserGroupToPerm.user_group_id ==
2787 2786 UserGroup.users_group_id)\
2788 2787 .join(
2789 2788 UserGroupMember,
2790 2789 UserGroupUserGroupToPerm.user_group_id ==
2791 2790 UserGroupMember.users_group_id)\
2792 2791 .filter(
2793 2792 UserGroupMember.user_id == user_id,
2794 2793 UserGroup.users_group_active == true())
2795 2794 if user_group_id:
2796 2795 q = q.filter(
2797 2796 UserGroupUserGroupToPerm.user_group_id == user_group_id)
2798 2797
2799 2798 return q.all()
2800 2799
2801 2800
2802 2801 class UserRepoToPerm(Base, BaseModel):
2803 2802 __tablename__ = 'repo_to_perm'
2804 2803 __table_args__ = (
2805 2804 UniqueConstraint('user_id', 'repository_id', 'permission_id'),
2806 2805 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2807 2806 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2808 2807 )
2809 2808 repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2810 2809 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2811 2810 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2812 2811 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
2813 2812
2814 2813 user = relationship('User')
2815 2814 repository = relationship('Repository')
2816 2815 permission = relationship('Permission')
2817 2816
2818 2817 @classmethod
2819 2818 def create(cls, user, repository, permission):
2820 2819 n = cls()
2821 2820 n.user = user
2822 2821 n.repository = repository
2823 2822 n.permission = permission
2824 2823 Session().add(n)
2825 2824 return n
2826 2825
2827 2826 def __unicode__(self):
2828 2827 return u'<%s => %s >' % (self.user, self.repository)
2829 2828
2830 2829
2831 2830 class UserUserGroupToPerm(Base, BaseModel):
2832 2831 __tablename__ = 'user_user_group_to_perm'
2833 2832 __table_args__ = (
2834 2833 UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
2835 2834 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2836 2835 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2837 2836 )
2838 2837 user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2839 2838 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2840 2839 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2841 2840 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2842 2841
2843 2842 user = relationship('User')
2844 2843 user_group = relationship('UserGroup')
2845 2844 permission = relationship('Permission')
2846 2845
2847 2846 @classmethod
2848 2847 def create(cls, user, user_group, permission):
2849 2848 n = cls()
2850 2849 n.user = user
2851 2850 n.user_group = user_group
2852 2851 n.permission = permission
2853 2852 Session().add(n)
2854 2853 return n
2855 2854
2856 2855 def __unicode__(self):
2857 2856 return u'<%s => %s >' % (self.user, self.user_group)
2858 2857
2859 2858
2860 2859 class UserToPerm(Base, BaseModel):
2861 2860 __tablename__ = 'user_to_perm'
2862 2861 __table_args__ = (
2863 2862 UniqueConstraint('user_id', 'permission_id'),
2864 2863 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2865 2864 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2866 2865 )
2867 2866 user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2868 2867 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2869 2868 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2870 2869
2871 2870 user = relationship('User')
2872 2871 permission = relationship('Permission', lazy='joined')
2873 2872
2874 2873 def __unicode__(self):
2875 2874 return u'<%s => %s >' % (self.user, self.permission)
2876 2875
2877 2876
2878 2877 class UserGroupRepoToPerm(Base, BaseModel):
2879 2878 __tablename__ = 'users_group_repo_to_perm'
2880 2879 __table_args__ = (
2881 2880 UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
2882 2881 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2883 2882 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2884 2883 )
2885 2884 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2886 2885 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2887 2886 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2888 2887 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
2889 2888
2890 2889 users_group = relationship('UserGroup')
2891 2890 permission = relationship('Permission')
2892 2891 repository = relationship('Repository')
2893 2892
2894 2893 @classmethod
2895 2894 def create(cls, users_group, repository, permission):
2896 2895 n = cls()
2897 2896 n.users_group = users_group
2898 2897 n.repository = repository
2899 2898 n.permission = permission
2900 2899 Session().add(n)
2901 2900 return n
2902 2901
2903 2902 def __unicode__(self):
2904 2903 return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)
2905 2904
2906 2905
2907 2906 class UserGroupUserGroupToPerm(Base, BaseModel):
2908 2907 __tablename__ = 'user_group_user_group_to_perm'
2909 2908 __table_args__ = (
2910 2909 UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
2911 2910 CheckConstraint('target_user_group_id != user_group_id'),
2912 2911 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2913 2912 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2914 2913 )
2915 2914 user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2916 2915 target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2917 2916 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2918 2917 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2919 2918
2920 2919 target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
2921 2920 user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
2922 2921 permission = relationship('Permission')
2923 2922
2924 2923 @classmethod
2925 2924 def create(cls, target_user_group, user_group, permission):
2926 2925 n = cls()
2927 2926 n.target_user_group = target_user_group
2928 2927 n.user_group = user_group
2929 2928 n.permission = permission
2930 2929 Session().add(n)
2931 2930 return n
2932 2931
2933 2932 def __unicode__(self):
2934 2933 return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)
2935 2934
2936 2935
2937 2936 class UserGroupToPerm(Base, BaseModel):
2938 2937 __tablename__ = 'users_group_to_perm'
2939 2938 __table_args__ = (
2940 2939 UniqueConstraint('users_group_id', 'permission_id',),
2941 2940 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2942 2941 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2943 2942 )
2944 2943 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2945 2944 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2946 2945 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2947 2946
2948 2947 users_group = relationship('UserGroup')
2949 2948 permission = relationship('Permission')
2950 2949
2951 2950
2952 2951 class UserRepoGroupToPerm(Base, BaseModel):
2953 2952 __tablename__ = 'user_repo_group_to_perm'
2954 2953 __table_args__ = (
2955 2954 UniqueConstraint('user_id', 'group_id', 'permission_id'),
2956 2955 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2957 2956 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2958 2957 )
2959 2958
2960 2959 group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2961 2960 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2962 2961 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
2963 2962 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2964 2963
2965 2964 user = relationship('User')
2966 2965 group = relationship('RepoGroup')
2967 2966 permission = relationship('Permission')
2968 2967
2969 2968 @classmethod
2970 2969 def create(cls, user, repository_group, permission):
2971 2970 n = cls()
2972 2971 n.user = user
2973 2972 n.group = repository_group
2974 2973 n.permission = permission
2975 2974 Session().add(n)
2976 2975 return n
2977 2976
2978 2977
2979 2978 class UserGroupRepoGroupToPerm(Base, BaseModel):
2980 2979 __tablename__ = 'users_group_repo_group_to_perm'
2981 2980 __table_args__ = (
2982 2981 UniqueConstraint('users_group_id', 'group_id'),
2983 2982 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2984 2983 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2985 2984 )
2986 2985
2987 2986 users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2988 2987 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2989 2988 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
2990 2989 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2991 2990
2992 2991 users_group = relationship('UserGroup')
2993 2992 permission = relationship('Permission')
2994 2993 group = relationship('RepoGroup')
2995 2994
2996 2995 @classmethod
2997 2996 def create(cls, user_group, repository_group, permission):
2998 2997 n = cls()
2999 2998 n.users_group = user_group
3000 2999 n.group = repository_group
3001 3000 n.permission = permission
3002 3001 Session().add(n)
3003 3002 return n
3004 3003
3005 3004 def __unicode__(self):
3006 3005 return u'<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group)
3007 3006
3008 3007
3009 3008 class Statistics(Base, BaseModel):
3010 3009 __tablename__ = 'statistics'
3011 3010 __table_args__ = (
3012 3011 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3013 3012 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3014 3013 )
3015 3014 stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3016 3015 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
3017 3016 stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
3018 3017 commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)#JSON data
3019 3018 commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)#JSON data
3020 3019 languages = Column("languages", LargeBinary(1000000), nullable=False)#JSON data
3021 3020
3022 3021 repository = relationship('Repository', single_parent=True)
3023 3022
3024 3023
3025 3024 class UserFollowing(Base, BaseModel):
3026 3025 __tablename__ = 'user_followings'
3027 3026 __table_args__ = (
3028 3027 UniqueConstraint('user_id', 'follows_repository_id'),
3029 3028 UniqueConstraint('user_id', 'follows_user_id'),
3030 3029 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3031 3030 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3032 3031 )
3033 3032
3034 3033 user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3035 3034 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3036 3035 follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
3037 3036 follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
3038 3037 follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
3039 3038
3040 3039 user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
3041 3040
3042 3041 follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
3043 3042 follows_repository = relationship('Repository', order_by='Repository.repo_name')
3044 3043
3045 3044 @classmethod
3046 3045 def get_repo_followers(cls, repo_id):
3047 3046 return cls.query().filter(cls.follows_repo_id == repo_id)
3048 3047
3049 3048
3050 3049 class CacheKey(Base, BaseModel):
3051 3050 __tablename__ = 'cache_invalidation'
3052 3051 __table_args__ = (
3053 3052 UniqueConstraint('cache_key'),
3054 3053 Index('key_idx', 'cache_key'),
3055 3054 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3056 3055 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3057 3056 )
3058 3057 CACHE_TYPE_ATOM = 'ATOM'
3059 3058 CACHE_TYPE_RSS = 'RSS'
3060 3059 CACHE_TYPE_README = 'README'
3061 3060
3062 3061 cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3063 3062 cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
3064 3063 cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
3065 3064 cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
3066 3065
3067 3066 def __init__(self, cache_key, cache_args=''):
3068 3067 self.cache_key = cache_key
3069 3068 self.cache_args = cache_args
3070 3069 self.cache_active = False
3071 3070
3072 3071 def __unicode__(self):
3073 3072 return u"<%s('%s:%s[%s]')>" % (
3074 3073 self.__class__.__name__,
3075 3074 self.cache_id, self.cache_key, self.cache_active)
3076 3075
3077 3076 def _cache_key_partition(self):
3078 3077 prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
3079 3078 return prefix, repo_name, suffix
3080 3079
3081 3080 def get_prefix(self):
3082 3081 """
3083 3082 Try to extract the prefix from an existing cache key. The key can consist
3084 3083 of a prefix, repo_name and suffix
3085 3084 """
3086 3085 # this returns prefix, repo_name, suffix
3087 3086 return self._cache_key_partition()[0]
3088 3087
3089 3088 def get_suffix(self):
3090 3089 """
3091 3090 get suffix that might have been used in _get_cache_key to
3092 3091 generate self.cache_key. Only used for informational purposes
3093 3092 in repo_edit.mako.
3094 3093 """
3095 3094 # prefix, repo_name, suffix
3096 3095 return self._cache_key_partition()[2]
3097 3096
3098 3097 @classmethod
3099 3098 def delete_all_cache(cls):
3100 3099 """
3101 3100 Delete all cache keys from database.
3102 3101 Should only be run when all instances are down and all entries
3103 3102 thus stale.
3104 3103 """
3105 3104 cls.query().delete()
3106 3105 Session().commit()
3107 3106
3108 3107 @classmethod
3109 3108 def get_cache_key(cls, repo_name, cache_type):
3110 3109 """
3111 3110
3112 3111 Generate a cache key for this process of the RhodeCode instance.
3113 3112 The prefix will most likely be the process id, or the instance_id
3114 3113 explicitly set in the .ini file.
3115 3114 """
3116 3115 import rhodecode
3117 3116 prefix = safe_unicode(rhodecode.CONFIG.get('instance_id') or '')
3118 3117
3119 3118 repo_as_unicode = safe_unicode(repo_name)
3120 3119 key = u'{}_{}'.format(repo_as_unicode, cache_type) \
3121 3120 if cache_type else repo_as_unicode
3122 3121
3123 3122 return u'{}{}'.format(prefix, key)
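# Illustrative sketch, not part of the original module: with instance_id set to
# 'node1_' in the .ini file (hypothetical value), the generated keys look like:
#
#   CacheKey.get_cache_key('some-repo', CacheKey.CACHE_TYPE_ATOM)
#   # -> u'node1_some-repo_ATOM'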
3124 3123
3125 3124 @classmethod
3126 3125 def set_invalidate(cls, repo_name, delete=False):
3127 3126 """
3128 3127 Mark all caches of a repo as invalid in the database.
3129 3128 """
3130 3129
3131 3130 try:
3132 3131 qry = Session().query(cls).filter(cls.cache_args == repo_name)
3133 3132 if delete:
3134 3133 log.debug('cache objects deleted for repo %s',
3135 3134 safe_str(repo_name))
3136 3135 qry.delete()
3137 3136 else:
3138 3137 log.debug('cache objects marked as invalid for repo %s',
3139 3138 safe_str(repo_name))
3140 3139 qry.update({"cache_active": False})
3141 3140
3142 3141 Session().commit()
3143 3142 except Exception:
3144 3143 log.exception(
3145 3144 'Cache key invalidation failed for repository %s',
3146 3145 safe_str(repo_name))
3147 3146 Session().rollback()
3148 3147
3149 3148 @classmethod
3150 3149 def get_active_cache(cls, cache_key):
3151 3150 inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
3152 3151 if inv_obj:
3153 3152 return inv_obj
3154 3153 return None
3155 3154
3156 3155
3157 3156 class ChangesetComment(Base, BaseModel):
3158 3157 __tablename__ = 'changeset_comments'
3159 3158 __table_args__ = (
3160 3159 Index('cc_revision_idx', 'revision'),
3161 3160 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3162 3161 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3163 3162 )
3164 3163
3165 3164 COMMENT_OUTDATED = u'comment_outdated'
3166 3165 COMMENT_TYPE_NOTE = u'note'
3167 3166 COMMENT_TYPE_TODO = u'todo'
3168 3167 COMMENT_TYPES = [COMMENT_TYPE_NOTE, COMMENT_TYPE_TODO]
3169 3168
3170 3169 comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
3171 3170 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
3172 3171 revision = Column('revision', String(40), nullable=True)
3173 3172 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
3174 3173 pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
3175 3174 line_no = Column('line_no', Unicode(10), nullable=True)
3176 3175 hl_lines = Column('hl_lines', Unicode(512), nullable=True)
3177 3176 f_path = Column('f_path', Unicode(1000), nullable=True)
3178 3177 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
3179 3178 text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
3180 3179 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3181 3180 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3182 3181 renderer = Column('renderer', Unicode(64), nullable=True)
3183 3182 display_state = Column('display_state', Unicode(128), nullable=True)
3184 3183
3185 3184 comment_type = Column('comment_type', Unicode(128), nullable=True, default=COMMENT_TYPE_NOTE)
3186 3185 resolved_comment_id = Column('resolved_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=True)
3187 3186 resolved_comment = relationship('ChangesetComment', remote_side=comment_id, backref='resolved_by')
3188 3187 author = relationship('User', lazy='joined')
3189 3188 repo = relationship('Repository')
3190 3189 status_change = relationship('ChangesetStatus', cascade="all, delete, delete-orphan", lazy='joined')
3191 3190 pull_request = relationship('PullRequest', lazy='joined')
3192 3191 pull_request_version = relationship('PullRequestVersion')
3193 3192
3194 3193 @classmethod
3195 3194 def get_users(cls, revision=None, pull_request_id=None):
3196 3195 """
3197 3196 Returns the users associated with this ChangesetComment, i.e. those
3198 3197 who actually commented
3199 3198
3200 3199 :param cls:
3201 3200 :param revision:
3202 3201 """
3203 3202 q = Session().query(User)\
3204 3203 .join(ChangesetComment.author)
3205 3204 if revision:
3206 3205 q = q.filter(cls.revision == revision)
3207 3206 elif pull_request_id:
3208 3207 q = q.filter(cls.pull_request_id == pull_request_id)
3209 3208 return q.all()
3210 3209
3211 3210 @classmethod
3212 3211 def get_index_from_version(cls, pr_version, versions):
3213 3212 num_versions = [x.pull_request_version_id for x in versions]
3214 3213 try:
3215 3214 return num_versions.index(pr_version) + 1
3216 3215 except (IndexError, ValueError):
3217 3216 return
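# Illustrative sketch, not part of the original module: given versions whose
# pull_request_version_id values are [10, 12, 15] (hypothetical), a comment
# made at version 12 maps to display index 2, i.e. list position + 1; an
# unknown version yields None.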
3218 3217
3219 3218 @property
3220 3219 def outdated(self):
3221 3220 return self.display_state == self.COMMENT_OUTDATED
3222 3221
3223 3222 def outdated_at_version(self, version):
3224 3223 """
3225 3224 Checks if comment is outdated for given pull request version
3226 3225 """
3227 3226 return self.outdated and self.pull_request_version_id != version
3228 3227
3229 3228 def older_than_version(self, version):
3230 3229 """
3231 3230 Checks if the comment was made on an earlier version than the given one
3232 3231 """
3233 3232 if version is None:
3234 3233 return self.pull_request_version_id is not None
3235 3234
3236 3235 return self.pull_request_version_id < version
3237 3236
3238 3237 @property
3239 3238 def resolved(self):
3240 3239 return self.resolved_by[0] if self.resolved_by else None
3241 3240
3242 3241 @property
3243 3242 def is_todo(self):
3244 3243 return self.comment_type == self.COMMENT_TYPE_TODO
3245 3244
3246 3245 @property
3247 3246 def is_inline(self):
3248 3247 return self.line_no and self.f_path
3249 3248
3250 3249 def get_index_version(self, versions):
3251 3250 return self.get_index_from_version(
3252 3251 self.pull_request_version_id, versions)
3253 3252
3254 3253 def __repr__(self):
3255 3254 if self.comment_id:
3256 3255 return '<DB:Comment #%s>' % self.comment_id
3257 3256 else:
3258 3257 return '<DB:Comment at %#x>' % id(self)
3259 3258
3260 3259 def get_api_data(self):
3261 3260 comment = self
3262 3261 data = {
3263 3262 'comment_id': comment.comment_id,
3264 3263 'comment_type': comment.comment_type,
3265 3264 'comment_text': comment.text,
3266 3265 'comment_status': comment.status_change,
3267 3266 'comment_f_path': comment.f_path,
3268 3267 'comment_lineno': comment.line_no,
3269 3268 'comment_author': comment.author,
3270 3269 'comment_created_on': comment.created_on
3271 3270 }
3272 3271 return data
3273 3272
3274 3273 def __json__(self):
3275 3274 data = dict()
3276 3275 data.update(self.get_api_data())
3277 3276 return data
3278 3277
3279 3278
3280 3279 class ChangesetStatus(Base, BaseModel):
3281 3280 __tablename__ = 'changeset_statuses'
3282 3281 __table_args__ = (
3283 3282 Index('cs_revision_idx', 'revision'),
3284 3283 Index('cs_version_idx', 'version'),
3285 3284 UniqueConstraint('repo_id', 'revision', 'version'),
3286 3285 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3287 3286 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3288 3287 )
3289 3288 STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
3290 3289 STATUS_APPROVED = 'approved'
3291 3290 STATUS_REJECTED = 'rejected'
3292 3291 STATUS_UNDER_REVIEW = 'under_review'
3293 3292
3294 3293 STATUSES = [
3295 3294 (STATUS_NOT_REVIEWED, _("Not Reviewed")), # (no icon) and default
3296 3295 (STATUS_APPROVED, _("Approved")),
3297 3296 (STATUS_REJECTED, _("Rejected")),
3298 3297 (STATUS_UNDER_REVIEW, _("Under Review")),
3299 3298 ]
3300 3299
3301 3300 changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
3302 3301 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
3303 3302 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
3304 3303 revision = Column('revision', String(40), nullable=False)
3305 3304 status = Column('status', String(128), nullable=False, default=DEFAULT)
3306 3305 changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
3307 3306 modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
3308 3307 version = Column('version', Integer(), nullable=False, default=0)
3309 3308 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
3310 3309
3311 3310 author = relationship('User', lazy='joined')
3312 3311 repo = relationship('Repository')
3313 3312 comment = relationship('ChangesetComment', lazy='joined')
3314 3313 pull_request = relationship('PullRequest', lazy='joined')
3315 3314
3316 3315 def __unicode__(self):
3317 3316 return u"<%s('%s[v%s]:%s')>" % (
3318 3317 self.__class__.__name__,
3319 3318 self.status, self.version, self.author
3320 3319 )
3321 3320
3322 3321 @classmethod
3323 3322 def get_status_lbl(cls, value):
3324 3323 return dict(cls.STATUSES).get(value)
3325 3324
3326 3325 @property
3327 3326 def status_lbl(self):
3328 3327 return ChangesetStatus.get_status_lbl(self.status)
3329 3328
3330 3329 def get_api_data(self):
3331 3330 status = self
3332 3331 data = {
3333 3332 'status_id': status.changeset_status_id,
3334 3333 'status': status.status,
3335 3334 }
3336 3335 return data
3337 3336
3338 3337 def __json__(self):
3339 3338 data = dict()
3340 3339 data.update(self.get_api_data())
3341 3340 return data
3342 3341
3343 3342
3344 3343 class _PullRequestBase(BaseModel):
3345 3344 """
3346 3345 Common attributes of pull request and version entries.
3347 3346 """
3348 3347
3349 3348 # .status values
3350 3349 STATUS_NEW = u'new'
3351 3350 STATUS_OPEN = u'open'
3352 3351 STATUS_CLOSED = u'closed'
3353 3352
3354 3353 title = Column('title', Unicode(255), nullable=True)
3355 3354 description = Column(
3356 3355 'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
3357 3356 nullable=True)
3358 3357 # new/open/closed status of pull request (not approve/reject/etc)
3359 3358 status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
3360 3359 created_on = Column(
3361 3360 'created_on', DateTime(timezone=False), nullable=False,
3362 3361 default=datetime.datetime.now)
3363 3362 updated_on = Column(
3364 3363 'updated_on', DateTime(timezone=False), nullable=False,
3365 3364 default=datetime.datetime.now)
3366 3365
3367 3366 @declared_attr
3368 3367 def user_id(cls):
3369 3368 return Column(
3370 3369 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
3371 3370 unique=None)
3372 3371
3373 3372 # 500 revisions max
3374 3373 _revisions = Column(
3375 3374 'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))
3376 3375
3377 3376 @declared_attr
3378 3377 def source_repo_id(cls):
3379 3378 # TODO: dan: rename column to source_repo_id
3380 3379 return Column(
3381 3380 'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3382 3381 nullable=False)
3383 3382
3384 3383 source_ref = Column('org_ref', Unicode(255), nullable=False)
3385 3384
3386 3385 @declared_attr
3387 3386 def target_repo_id(cls):
3388 3387 # TODO: dan: rename column to target_repo_id
3389 3388 return Column(
3390 3389 'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3391 3390 nullable=False)
3392 3391
3393 3392 target_ref = Column('other_ref', Unicode(255), nullable=False)
3394 3393 _shadow_merge_ref = Column('shadow_merge_ref', Unicode(255), nullable=True)
3395 3394
3396 3395 # TODO: dan: rename column to last_merge_source_rev
3397 3396 _last_merge_source_rev = Column(
3398 3397 'last_merge_org_rev', String(40), nullable=True)
3399 3398 # TODO: dan: rename column to last_merge_target_rev
3400 3399 _last_merge_target_rev = Column(
3401 3400 'last_merge_other_rev', String(40), nullable=True)
3402 3401 _last_merge_status = Column('merge_status', Integer(), nullable=True)
3403 3402 merge_rev = Column('merge_rev', String(40), nullable=True)
3404 3403
3405 3404 reviewer_data = Column(
3406 3405 'reviewer_data_json', MutationObj.as_mutable(
3407 3406 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
3408 3407
3409 3408 @property
3410 3409 def reviewer_data_json(self):
3411 3410 return json.dumps(self.reviewer_data)
3412 3411
3413 3412 @hybrid_property
3414 3413 def description_safe(self):
3415 3414 from rhodecode.lib import helpers as h
3416 3415 return h.escape(self.description)
3417 3416
3418 3417 @hybrid_property
3419 3418 def revisions(self):
3420 3419 return self._revisions.split(':') if self._revisions else []
3421 3420
3422 3421 @revisions.setter
3423 3422 def revisions(self, val):
3424 3423 self._revisions = ':'.join(val)
3425 3424
3426 3425 @hybrid_property
3427 3426 def last_merge_status(self):
3428 3427 return safe_int(self._last_merge_status)
3429 3428
3430 3429 @last_merge_status.setter
3431 3430 def last_merge_status(self, val):
3432 3431 self._last_merge_status = val
3433 3432
3434 3433 @declared_attr
3435 3434 def author(cls):
3436 3435 return relationship('User', lazy='joined')
3437 3436
3438 3437 @declared_attr
3439 3438 def source_repo(cls):
3440 3439 return relationship(
3441 3440 'Repository',
3442 3441 primaryjoin='%s.source_repo_id==Repository.repo_id' % cls.__name__)
3443 3442
3444 3443 @property
3445 3444 def source_ref_parts(self):
3446 3445 return self.unicode_to_reference(self.source_ref)
3447 3446
3448 3447 @declared_attr
3449 3448 def target_repo(cls):
3450 3449 return relationship(
3451 3450 'Repository',
3452 3451 primaryjoin='%s.target_repo_id==Repository.repo_id' % cls.__name__)
3453 3452
3454 3453 @property
3455 3454 def target_ref_parts(self):
3456 3455 return self.unicode_to_reference(self.target_ref)
3457 3456
3458 3457 @property
3459 3458 def shadow_merge_ref(self):
3460 3459 return self.unicode_to_reference(self._shadow_merge_ref)
3461 3460
3462 3461 @shadow_merge_ref.setter
3463 3462 def shadow_merge_ref(self, ref):
3464 3463 self._shadow_merge_ref = self.reference_to_unicode(ref)
3465 3464
3466 3465 def unicode_to_reference(self, raw):
3467 3466 """
3468 3467 Convert a unicode (or string) to a reference object.
3469 3468 If the value evaluates to False, it returns None.
3470 3469 """
3471 3470 if raw:
3472 3471 refs = raw.split(':')
3473 3472 return Reference(*refs)
3474 3473 else:
3475 3474 return None
3476 3475
3477 3476 def reference_to_unicode(self, ref):
3478 3477 """
3479 3478 Convert a reference object to unicode.
3480 3479 If reference is None it returns None.
3481 3480 """
3482 3481 if ref:
3483 3482 return u':'.join(ref)
3484 3483 else:
3485 3484 return None
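# Illustrative sketch, not part of the original module: references are stored
# as 'type:name:commit_id' strings and round-trip through the two helpers
# above (the hash is hypothetical, field names assumed from the Reference
# namedtuple):
#
#   ref = pr.unicode_to_reference(u'branch:default:abcdef0123456789')
#   (ref.type, ref.name, ref.commit_id)  # -> (u'branch', u'default', u'abcdef0123456789')
#   pr.reference_to_unicode(ref)         # -> u'branch:default:abcdef0123456789'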
3486 3485
3487 3486 def get_api_data(self, with_merge_state=True):
3488 3487 from rhodecode.model.pull_request import PullRequestModel
3489 3488
3490 3489 pull_request = self
3491 3490 if with_merge_state:
3492 3491 merge_status = PullRequestModel().merge_status(pull_request)
3493 3492 merge_state = {
3494 3493 'status': merge_status[0],
3495 3494 'message': safe_unicode(merge_status[1]),
3496 3495 }
3497 3496 else:
3498 3497 merge_state = {'status': 'not_available',
3499 3498 'message': 'not_available'}
3500 3499
3501 3500 merge_data = {
3502 3501 'clone_url': PullRequestModel().get_shadow_clone_url(pull_request),
3503 3502 'reference': (
3504 3503 pull_request.shadow_merge_ref._asdict()
3505 3504 if pull_request.shadow_merge_ref else None),
3506 3505 }
3507 3506
3508 3507 data = {
3509 3508 'pull_request_id': pull_request.pull_request_id,
3510 3509 'url': PullRequestModel().get_url(pull_request),
3511 3510 'title': pull_request.title,
3512 3511 'description': pull_request.description,
3513 3512 'status': pull_request.status,
3514 3513 'created_on': pull_request.created_on,
3515 3514 'updated_on': pull_request.updated_on,
3516 3515 'commit_ids': pull_request.revisions,
3517 3516 'review_status': pull_request.calculated_review_status(),
3518 3517 'mergeable': merge_state,
3519 3518 'source': {
3520 3519 'clone_url': pull_request.source_repo.clone_url(),
3521 3520 'repository': pull_request.source_repo.repo_name,
3522 3521 'reference': {
3523 3522 'name': pull_request.source_ref_parts.name,
3524 3523 'type': pull_request.source_ref_parts.type,
3525 3524 'commit_id': pull_request.source_ref_parts.commit_id,
3526 3525 },
3527 3526 },
3528 3527 'target': {
3529 3528 'clone_url': pull_request.target_repo.clone_url(),
3530 3529 'repository': pull_request.target_repo.repo_name,
3531 3530 'reference': {
3532 3531 'name': pull_request.target_ref_parts.name,
3533 3532 'type': pull_request.target_ref_parts.type,
3534 3533 'commit_id': pull_request.target_ref_parts.commit_id,
3535 3534 },
3536 3535 },
3537 3536 'merge': merge_data,
3538 3537 'author': pull_request.author.get_api_data(include_secrets=False,
3539 3538 details='basic'),
3540 3539 'reviewers': [
3541 3540 {
3542 3541 'user': reviewer.get_api_data(include_secrets=False,
3543 3542 details='basic'),
3544 3543 'reasons': reasons,
3545 3544 'review_status': st[0][1].status if st else 'not_reviewed',
3546 3545 }
3547 3546 for reviewer, reasons, mandatory, st in
3548 3547 pull_request.reviewers_statuses()
3549 3548 ]
3550 3549 }
3551 3550
3552 3551 return data
3553 3552
3554 3553
3555 3554 class PullRequest(Base, _PullRequestBase):
3556 3555 __tablename__ = 'pull_requests'
3557 3556 __table_args__ = (
3558 3557 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3559 3558 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3560 3559 )
3561 3560
3562 3561 pull_request_id = Column(
3563 3562 'pull_request_id', Integer(), nullable=False, primary_key=True)
3564 3563
3565 3564 def __repr__(self):
3566 3565 if self.pull_request_id:
3567 3566 return '<DB:PullRequest #%s>' % self.pull_request_id
3568 3567 else:
3569 3568 return '<DB:PullRequest at %#x>' % id(self)
3570 3569
3571 3570 reviewers = relationship('PullRequestReviewers',
3572 3571 cascade="all, delete, delete-orphan")
3573 3572 statuses = relationship('ChangesetStatus',
3574 3573 cascade="all, delete, delete-orphan")
3575 3574 comments = relationship('ChangesetComment',
3576 3575 cascade="all, delete, delete-orphan")
3577 3576 versions = relationship('PullRequestVersion',
3578 3577 cascade="all, delete, delete-orphan",
3579 3578 lazy='dynamic')
3580 3579
3581 3580 @classmethod
3582 3581 def get_pr_display_object(cls, pull_request_obj, org_pull_request_obj,
3583 3582 internal_methods=None):
3584 3583
3585 3584 class PullRequestDisplay(object):
3586 3585 """
3587 3586 Special object wrapper for showing PullRequest data via Versions
3588 3587 It mimics the PR object as closely as possible. This is a read-only object
3589 3588 intended only for display.
3590 3589 """
3591 3590
3592 3591 def __init__(self, attrs, internal=None):
3593 3592 self.attrs = attrs
3594 3593 # internal entries have priority over the ones given via attrs
3595 3594 self.internal = internal or ['versions']
3596 3595
3597 3596 def __getattr__(self, item):
3598 3597 if item in self.internal:
3599 3598 return getattr(self, item)
3600 3599 try:
3601 3600 return self.attrs[item]
3602 3601 except KeyError:
3603 3602 raise AttributeError(
3604 3603 '%s object has no attribute %s' % (self, item))
3605 3604
3606 3605 def __repr__(self):
3607 3606 return '<DB:PullRequestDisplay #%s>' % self.attrs.get('pull_request_id')
3608 3607
3609 3608 def versions(self):
3610 3609 return pull_request_obj.versions.order_by(
3611 3610 PullRequestVersion.pull_request_version_id).all()
3612 3611
3613 3612 def is_closed(self):
3614 3613 return pull_request_obj.is_closed()
3615 3614
3616 3615 @property
3617 3616 def pull_request_version_id(self):
3618 3617 return getattr(pull_request_obj, 'pull_request_version_id', None)
3619 3618
3620 3619 attrs = StrictAttributeDict(pull_request_obj.get_api_data())
3621 3620
3622 3621 attrs.author = StrictAttributeDict(
3623 3622 pull_request_obj.author.get_api_data())
3624 3623 if pull_request_obj.target_repo:
3625 3624 attrs.target_repo = StrictAttributeDict(
3626 3625 pull_request_obj.target_repo.get_api_data())
3627 3626 attrs.target_repo.clone_url = pull_request_obj.target_repo.clone_url
3628 3627
3629 3628 if pull_request_obj.source_repo:
3630 3629 attrs.source_repo = StrictAttributeDict(
3631 3630 pull_request_obj.source_repo.get_api_data())
3632 3631 attrs.source_repo.clone_url = pull_request_obj.source_repo.clone_url
3633 3632
3634 3633 attrs.source_ref_parts = pull_request_obj.source_ref_parts
3635 3634 attrs.target_ref_parts = pull_request_obj.target_ref_parts
3636 3635 attrs.revisions = pull_request_obj.revisions
3637 3636
3638 3637 attrs.shadow_merge_ref = org_pull_request_obj.shadow_merge_ref
3639 3638 attrs.reviewer_data = org_pull_request_obj.reviewer_data
3640 3639 attrs.reviewer_data_json = org_pull_request_obj.reviewer_data_json
3641 3640
3642 3641 return PullRequestDisplay(attrs, internal=internal_methods)
3643 3642
3644 3643 def is_closed(self):
3645 3644 return self.status == self.STATUS_CLOSED
3646 3645
3647 3646 def __json__(self):
3648 3647 return {
3649 3648 'revisions': self.revisions,
3650 3649 }
3651 3650
3652 3651 def calculated_review_status(self):
3653 3652 from rhodecode.model.changeset_status import ChangesetStatusModel
3654 3653 return ChangesetStatusModel().calculated_review_status(self)
3655 3654
3656 3655 def reviewers_statuses(self):
3657 3656 from rhodecode.model.changeset_status import ChangesetStatusModel
3658 3657 return ChangesetStatusModel().reviewers_statuses(self)
3659 3658
3660 3659 @property
3661 3660 def workspace_id(self):
3662 3661 from rhodecode.model.pull_request import PullRequestModel
3663 3662 return PullRequestModel()._workspace_id(self)
3664 3663
3665 3664 def get_shadow_repo(self):
3666 3665 workspace_id = self.workspace_id
3667 3666 vcs_obj = self.target_repo.scm_instance()
3668 3667 shadow_repository_path = vcs_obj._get_shadow_repository_path(
3669 3668 workspace_id)
3670 3669 return vcs_obj.get_shadow_instance(shadow_repository_path)
3671 3670
3672 3671
3673 3672 class PullRequestVersion(Base, _PullRequestBase):
3674 3673 __tablename__ = 'pull_request_versions'
3675 3674 __table_args__ = (
3676 3675 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3677 3676 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3678 3677 )
3679 3678
3680 3679 pull_request_version_id = Column(
3681 3680 'pull_request_version_id', Integer(), nullable=False, primary_key=True)
3682 3681 pull_request_id = Column(
3683 3682 'pull_request_id', Integer(),
3684 3683 ForeignKey('pull_requests.pull_request_id'), nullable=False)
3685 3684 pull_request = relationship('PullRequest')
3686 3685
3687 3686 def __repr__(self):
3688 3687 if self.pull_request_version_id:
3689 3688 return '<DB:PullRequestVersion #%s>' % self.pull_request_version_id
3690 3689 else:
3691 3690 return '<DB:PullRequestVersion at %#x>' % id(self)
3692 3691
3693 3692 @property
3694 3693 def reviewers(self):
3695 3694 return self.pull_request.reviewers
3696 3695
3697 3696 @property
3698 3697 def versions(self):
3699 3698 return self.pull_request.versions
3700 3699
3701 3700 def is_closed(self):
3702 3701 # calculate from original
3703 3702 return self.pull_request.status == self.STATUS_CLOSED
3704 3703
3705 3704 def calculated_review_status(self):
3706 3705 return self.pull_request.calculated_review_status()
3707 3706
3708 3707 def reviewers_statuses(self):
3709 3708 return self.pull_request.reviewers_statuses()
3710 3709
3711 3710
3712 3711 class PullRequestReviewers(Base, BaseModel):
3713 3712 __tablename__ = 'pull_request_reviewers'
3714 3713 __table_args__ = (
3715 3714 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3716 3715 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3717 3716 )
3718 3717
3719 3718 @hybrid_property
3720 3719 def reasons(self):
3721 3720 if not self._reasons:
3722 3721 return []
3723 3722 return self._reasons
3724 3723
3725 3724 @reasons.setter
3726 3725 def reasons(self, val):
3727 3726 val = val or []
3728 if any(not isinstance(x, compat.string_types) for x in val):
3727 if any(not isinstance(x, str) for x in val):
3729 3728 raise Exception('invalid reasons type, must be list of strings')
3730 3729 self._reasons = val
3731 3730
3732 3731 pull_requests_reviewers_id = Column(
3733 3732 'pull_requests_reviewers_id', Integer(), nullable=False,
3734 3733 primary_key=True)
3735 3734 pull_request_id = Column(
3736 3735 "pull_request_id", Integer(),
3737 3736 ForeignKey('pull_requests.pull_request_id'), nullable=False)
3738 3737 user_id = Column(
3739 3738 "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
3740 3739 _reasons = Column(
3741 3740 'reason', MutationList.as_mutable(
3742 3741 JsonType('list', dialect_map=dict(mysql=UnicodeText(16384)))))
3743 3742 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
3744 3743 user = relationship('User')
3745 3744 pull_request = relationship('PullRequest')
3746 3745
3747 3746
3748 3747 class Notification(Base, BaseModel):
3749 3748 __tablename__ = 'notifications'
3750 3749 __table_args__ = (
3751 3750 Index('notification_type_idx', 'type'),
3752 3751 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3753 3752 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3754 3753 )
3755 3754
3756 3755 TYPE_CHANGESET_COMMENT = u'cs_comment'
3757 3756 TYPE_MESSAGE = u'message'
3758 3757 TYPE_MENTION = u'mention'
3759 3758 TYPE_REGISTRATION = u'registration'
3760 3759 TYPE_PULL_REQUEST = u'pull_request'
3761 3760 TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
3762 3761
3763 3762 notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
3764 3763 subject = Column('subject', Unicode(512), nullable=True)
3765 3764 body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
3766 3765 created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
3767 3766 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3768 3767 type_ = Column('type', Unicode(255))
3769 3768
3770 3769 created_by_user = relationship('User')
3771 3770 notifications_to_users = relationship('UserNotification', lazy='joined',
3772 3771 cascade="all, delete, delete-orphan")
3773 3772
3774 3773 @property
3775 3774 def recipients(self):
3776 3775 return [x.user for x in UserNotification.query()\
3777 3776 .filter(UserNotification.notification == self)\
3778 3777 .order_by(UserNotification.user_id.asc()).all()]
3779 3778
3780 3779 @classmethod
3781 3780 def create(cls, created_by, subject, body, recipients, type_=None):
3782 3781 if type_ is None:
3783 3782 type_ = Notification.TYPE_MESSAGE
3784 3783
3785 3784 notification = cls()
3786 3785 notification.created_by_user = created_by
3787 3786 notification.subject = subject
3788 3787 notification.body = body
3789 3788 notification.type_ = type_
3790 3789 notification.created_on = datetime.datetime.now()
3791 3790
3792 3791 for u in recipients:
3793 3792 assoc = UserNotification()
3794 3793 assoc.notification = notification
3795 3794
3796 3795 # if created_by is among the recipients, mark their notification
3797 3796 # as read
3798 3797 if u.user_id == created_by.user_id:
3799 3798 assoc.read = True
3800 3799
3801 3800 u.notifications.append(assoc)
3802 3801 Session().add(notification)
3803 3802
3804 3803 return notification
3805 3804
3806 3805
3807 3806 class UserNotification(Base, BaseModel):
3808 3807 __tablename__ = 'user_to_notification'
3809 3808 __table_args__ = (
3810 3809 UniqueConstraint('user_id', 'notification_id'),
3811 3810 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3812 3811 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3813 3812 )
3814 3813 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
3815 3814 notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
3816 3815 read = Column('read', Boolean, default=False)
3817 3816 sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
3818 3817
3819 3818 user = relationship('User', lazy="joined")
3820 3819 notification = relationship('Notification', lazy="joined",
3821 3820 order_by=lambda: Notification.created_on.desc(),)
3822 3821
3823 3822 def mark_as_read(self):
3824 3823 self.read = True
3825 3824 Session().add(self)
3826 3825
3827 3826
3828 3827 class Gist(Base, BaseModel):
3829 3828 __tablename__ = 'gists'
3830 3829 __table_args__ = (
3831 3830 Index('g_gist_access_id_idx', 'gist_access_id'),
3832 3831 Index('g_created_on_idx', 'created_on'),
3833 3832 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3834 3833 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3835 3834 )
3836 3835 GIST_PUBLIC = u'public'
3837 3836 GIST_PRIVATE = u'private'
3838 3837 DEFAULT_FILENAME = u'gistfile1.txt'
3839 3838
3840 3839 ACL_LEVEL_PUBLIC = u'acl_public'
3841 3840 ACL_LEVEL_PRIVATE = u'acl_private'
3842 3841
3843 3842 gist_id = Column('gist_id', Integer(), primary_key=True)
3844 3843 gist_access_id = Column('gist_access_id', Unicode(250))
3845 3844 gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
3846 3845 gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
3847 3846 gist_expires = Column('gist_expires', Float(53), nullable=False)
3848 3847 gist_type = Column('gist_type', Unicode(128), nullable=False)
3849 3848 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3850 3849 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3851 3850 acl_level = Column('acl_level', Unicode(128), nullable=True)
3852 3851
3853 3852 owner = relationship('User')
3854 3853
3855 3854 def __repr__(self):
3856 3855 return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id)
3857 3856
3858 3857 @hybrid_property
3859 3858 def description_safe(self):
3860 3859 from rhodecode.lib import helpers as h
3861 3860 return h.escape(self.gist_description)
3862 3861
3863 3862 @classmethod
3864 3863 def get_or_404(cls, id_):
3865 3864 from pyramid.httpexceptions import HTTPNotFound
3866 3865
3867 3866 res = cls.query().filter(cls.gist_access_id == id_).scalar()
3868 3867 if not res:
3869 3868 raise HTTPNotFound()
3870 3869 return res
3871 3870
3872 3871 @classmethod
3873 3872 def get_by_access_id(cls, gist_access_id):
3874 3873 return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
3875 3874
3876 3875 def gist_url(self):
3877 3876 from rhodecode.model.gist import GistModel
3878 3877 return GistModel().get_url(self)
3879 3878
3880 3879 @classmethod
3881 3880 def base_path(cls):
3882 3881 """
3883 3882 Returns the base path where all gists are stored
3884 3883
3885 3884 :param cls:
3886 3885 """
3887 3886 from rhodecode.model.gist import GIST_STORE_LOC
3888 3887 q = Session().query(RhodeCodeUi)\
3889 3888 .filter(RhodeCodeUi.ui_key == URL_SEP)
3890 3889 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
3891 3890 return os.path.join(q.one().ui_value, GIST_STORE_LOC)
3892 3891
3893 3892 def get_api_data(self):
3894 3893 """
3895 3894 Common function for generating gist related data for API
3896 3895 """
3897 3896 gist = self
3898 3897 data = {
3899 3898 'gist_id': gist.gist_id,
3900 3899 'type': gist.gist_type,
3901 3900 'access_id': gist.gist_access_id,
3902 3901 'description': gist.gist_description,
3903 3902 'url': gist.gist_url(),
3904 3903 'expires': gist.gist_expires,
3905 3904 'created_on': gist.created_on,
3906 3905 'modified_at': gist.modified_at,
3907 3906 'content': None,
3908 3907 'acl_level': gist.acl_level,
3909 3908 }
3910 3909 return data
3911 3910
3912 3911 def __json__(self):
3913 3912 data = dict(
3914 3913 )
3915 3914 data.update(self.get_api_data())
3916 3915 return data
3917 3916 # SCM functions
3918 3917
3919 3918 def scm_instance(self, **kwargs):
3920 3919 full_repo_path = os.path.join(self.base_path(), self.gist_access_id)
3921 3920 return get_vcs_instance(
3922 3921 repo_path=safe_str(full_repo_path), create=False)
3923 3922
3924 3923
3925 3924 class ExternalIdentity(Base, BaseModel):
3926 3925 __tablename__ = 'external_identities'
3927 3926 __table_args__ = (
3928 3927 Index('local_user_id_idx', 'local_user_id'),
3929 3928 Index('external_id_idx', 'external_id'),
3930 3929 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3931 3930 'mysql_charset': 'utf8'})
3932 3931
3933 3932 external_id = Column('external_id', Unicode(255), default=u'',
3934 3933 primary_key=True)
3935 3934 external_username = Column('external_username', Unicode(1024), default=u'')
3936 3935 local_user_id = Column('local_user_id', Integer(),
3937 3936 ForeignKey('users.user_id'), primary_key=True)
3938 3937 provider_name = Column('provider_name', Unicode(255), default=u'',
3939 3938 primary_key=True)
3940 3939 access_token = Column('access_token', String(1024), default=u'')
3941 3940 alt_token = Column('alt_token', String(1024), default=u'')
3942 3941 token_secret = Column('token_secret', String(1024), default=u'')
3943 3942
3944 3943 @classmethod
3945 3944 def by_external_id_and_provider(cls, external_id, provider_name,
3946 3945 local_user_id=None):
3947 3946 """
3948 3947 Returns ExternalIdentity instance based on search params
3949 3948
3950 3949 :param external_id:
3951 3950 :param provider_name:
3952 3951 :return: ExternalIdentity
3953 3952 """
3954 3953 query = cls.query()
3955 3954 query = query.filter(cls.external_id == external_id)
3956 3955 query = query.filter(cls.provider_name == provider_name)
3957 3956 if local_user_id:
3958 3957 query = query.filter(cls.local_user_id == local_user_id)
3959 3958 return query.first()
3960 3959
3961 3960 @classmethod
3962 3961 def user_by_external_id_and_provider(cls, external_id, provider_name):
3963 3962 """
3964 3963 Returns User instance based on search params
3965 3964
3966 3965 :param external_id:
3967 3966 :param provider_name:
3968 3967 :return: User
3969 3968 """
3970 3969 query = User.query()
3971 3970 query = query.filter(cls.external_id == external_id)
3972 3971 query = query.filter(cls.provider_name == provider_name)
3973 3972 query = query.filter(User.user_id == cls.local_user_id)
3974 3973 return query.first()
3975 3974
3976 3975 @classmethod
3977 3976 def by_local_user_id(cls, local_user_id):
3978 3977 """
3979 3978 Returns all tokens for user
3980 3979
3981 3980 :param local_user_id:
3982 3981 :return: ExternalIdentity
3983 3982 """
3984 3983 query = cls.query()
3985 3984 query = query.filter(cls.local_user_id == local_user_id)
3986 3985 return query
3987 3986
3988 3987
3989 3988 class Integration(Base, BaseModel):
3990 3989 __tablename__ = 'integrations'
3991 3990 __table_args__ = (
3992 3991 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3993 3992 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3994 3993 )
3995 3994
3996 3995 integration_id = Column('integration_id', Integer(), primary_key=True)
3997 3996 integration_type = Column('integration_type', String(255))
3998 3997 enabled = Column('enabled', Boolean(), nullable=False)
3999 3998 name = Column('name', String(255), nullable=False)
4000 3999 child_repos_only = Column('child_repos_only', Boolean(), nullable=False,
4001 4000 default=False)
4002 4001
4003 4002 settings = Column(
4004 4003 'settings_json', MutationObj.as_mutable(
4005 4004 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
4006 4005 repo_id = Column(
4007 4006 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
4008 4007 nullable=True, unique=None, default=None)
4009 4008 repo = relationship('Repository', lazy='joined')
4010 4009
4011 4010 repo_group_id = Column(
4012 4011 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
4013 4012 nullable=True, unique=None, default=None)
4014 4013 repo_group = relationship('RepoGroup', lazy='joined')
4015 4014
4016 4015 @property
4017 4016 def scope(self):
4018 4017 if self.repo:
4019 4018 return repr(self.repo)
4020 4019 if self.repo_group:
4021 4020 if self.child_repos_only:
4022 4021 return repr(self.repo_group) + ' (child repos only)'
4023 4022 else:
4024 4023 return repr(self.repo_group) + ' (recursive)'
4025 4024 if self.child_repos_only:
4026 4025 return 'root_repos'
4027 4026 return 'global'
4028 4027
4029 4028 def __repr__(self):
4030 4029 return '<Integration(%r, %r)>' % (self.integration_type, self.scope)
4031 4030
4032 4031
4033 4032 class RepoReviewRuleUser(Base, BaseModel):
4034 4033 __tablename__ = 'repo_review_rules_users'
4035 4034 __table_args__ = (
4036 4035 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4037 4036 'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
4038 4037 )
4039 4038 repo_review_rule_user_id = Column('repo_review_rule_user_id', Integer(), primary_key=True)
4040 4039 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
4041 4040 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False)
4042 4041 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
4043 4042 user = relationship('User')
4044 4043
4045 4044 def rule_data(self):
4046 4045 return {
4047 4046 'mandatory': self.mandatory
4048 4047 }
4049 4048
4050 4049
4051 4050 class RepoReviewRuleUserGroup(Base, BaseModel):
4052 4051 __tablename__ = 'repo_review_rules_users_groups'
4053 4052 __table_args__ = (
4054 4053 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4055 4054 'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
4056 4055 )
4057 4056 repo_review_rule_users_group_id = Column('repo_review_rule_users_group_id', Integer(), primary_key=True)
4058 4057 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
4059 4058 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False)
4060 4059 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
4061 4060 users_group = relationship('UserGroup')
4062 4061
4063 4062 def rule_data(self):
4064 4063 return {
4065 4064 'mandatory': self.mandatory
4066 4065 }
4067 4066
4068 4067
4069 4068 class RepoReviewRule(Base, BaseModel):
4070 4069 __tablename__ = 'repo_review_rules'
4071 4070 __table_args__ = (
4072 4071 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4073 4072 'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
4074 4073 )
4075 4074
4076 4075 repo_review_rule_id = Column(
4077 4076 'repo_review_rule_id', Integer(), primary_key=True)
4078 4077 repo_id = Column(
4079 4078 "repo_id", Integer(), ForeignKey('repositories.repo_id'))
4080 4079 repo = relationship('Repository', backref='review_rules')
4081 4080
4082 4081 _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4083 4082 _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4084 4083
4085 4084 use_authors_for_review = Column("use_authors_for_review", Boolean(), nullable=False, default=False)
4086 4085 forbid_author_to_review = Column("forbid_author_to_review", Boolean(), nullable=False, default=False)
4087 4086 forbid_commit_author_to_review = Column("forbid_commit_author_to_review", Boolean(), nullable=False, default=False)
4088 4087 forbid_adding_reviewers = Column("forbid_adding_reviewers", Boolean(), nullable=False, default=False)
4089 4088
4090 4089 rule_users = relationship('RepoReviewRuleUser')
4091 4090 rule_user_groups = relationship('RepoReviewRuleUserGroup')
4092 4091
4093 4092 @hybrid_property
4094 4093 def branch_pattern(self):
4095 4094 return self._branch_pattern or '*'
4096 4095
4097 4096 def _validate_glob(self, value):
4098 4097 re.compile('^' + glob2re(value) + '$')
4099 4098
4100 4099 @branch_pattern.setter
4101 4100 def branch_pattern(self, value):
4102 4101 self._validate_glob(value)
4103 4102 self._branch_pattern = value or '*'
4104 4103
4105 4104 @hybrid_property
4106 4105 def file_pattern(self):
4107 4106 return self._file_pattern or '*'
4108 4107
4109 4108 @file_pattern.setter
4110 4109 def file_pattern(self, value):
4111 4110 self._validate_glob(value)
4112 4111 self._file_pattern = value or '*'
4113 4112
4114 4113 def matches(self, branch, files_changed):
4115 4114 """
4116 4115 Check if this review rule matches a branch/files in a pull request
4117 4116
4118 4117 :param branch: branch name for the commit
4119 4118 :param files_changed: list of file paths changed in the pull request
4120 4119 """
4121 4120
4122 4121 branch = branch or ''
4123 4122 files_changed = files_changed or []
4124 4123
4125 4124 branch_matches = True
4126 4125 if branch:
4127 4126 branch_regex = re.compile('^' + glob2re(self.branch_pattern) + '$')
4128 4127 branch_matches = bool(branch_regex.search(branch))
4129 4128
4130 4129 files_matches = True
4131 4130 if self.file_pattern != '*':
4132 4131 files_matches = False
4133 4132 file_regex = re.compile(glob2re(self.file_pattern))
4134 4133 for filename in files_changed:
4135 4134 if file_regex.search(filename):
4136 4135 files_matches = True
4137 4136 break
4138 4137
4139 4138 return branch_matches and files_matches
4140 4139
4141 4140 @property
4142 4141 def review_users(self):
4143 4142 """ Returns the users which this rule applies to """
4144 4143
4145 4144 users = collections.OrderedDict()
4146 4145
4147 4146 for rule_user in self.rule_users:
4148 4147 if rule_user.user.active:
4149 4148 if rule_user.user not in users:
4150 4149 users[rule_user.user.username] = {
4151 4150 'user': rule_user.user,
4152 4151 'source': 'user',
4153 4152 'source_data': {},
4154 4153 'data': rule_user.rule_data()
4155 4154 }
4156 4155
4157 4156 for rule_user_group in self.rule_user_groups:
4158 4157 source_data = {
4159 4158 'name': rule_user_group.users_group.users_group_name,
4160 4159 'members': len(rule_user_group.users_group.members)
4161 4160 }
4162 4161 for member in rule_user_group.users_group.members:
4163 4162 if member.user.active:
4164 4163 users[member.user.username] = {
4165 4164 'user': member.user,
4166 4165 'source': 'user_group',
4167 4166 'source_data': source_data,
4168 4167 'data': rule_user_group.rule_data()
4169 4168 }
4170 4169
4171 4170 return users
4172 4171
4173 4172 def __repr__(self):
4174 4173 return '<RepoReviewerRule(id=%r, repo=%r)>' % (
4175 4174 self.repo_review_rule_id, self.repo)
4176 4175
4177 4176
4178 4177 class ScheduleEntry(Base, BaseModel):
4179 4178 __tablename__ = 'schedule_entries'
4180 4179 __table_args__ = (
4181 4180 UniqueConstraint('schedule_name', name='s_schedule_name_idx'),
4182 4181 UniqueConstraint('task_uid', name='s_task_uid_idx'),
4183 4182 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4184 4183 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
4185 4184 )
4186 4185 schedule_types = ['crontab', 'timedelta', 'integer']
4187 4186 schedule_entry_id = Column('schedule_entry_id', Integer(), primary_key=True)
4188 4187
4189 4188 schedule_name = Column("schedule_name", String(255), nullable=False, unique=None, default=None)
4190 4189 schedule_description = Column("schedule_description", String(10000), nullable=True, unique=None, default=None)
4191 4190 schedule_enabled = Column("schedule_enabled", Boolean(), nullable=False, unique=None, default=True)
4192 4191
4193 4192 _schedule_type = Column("schedule_type", String(255), nullable=False, unique=None, default=None)
4194 4193 schedule_definition = Column('schedule_definition_json', MutationObj.as_mutable(JsonType(default=lambda: "", dialect_map=dict(mysql=LONGTEXT()))))
4195 4194
4196 4195 schedule_last_run = Column('schedule_last_run', DateTime(timezone=False), nullable=True, unique=None, default=None)
4197 4196 schedule_total_run_count = Column('schedule_total_run_count', Integer(), nullable=True, unique=None, default=0)
4198 4197
4199 4198 # task
4200 4199 task_uid = Column("task_uid", String(255), nullable=False, unique=None, default=None)
4201 4200 task_dot_notation = Column("task_dot_notation", String(4096), nullable=False, unique=None, default=None)
4202 4201 task_args = Column('task_args_json', MutationObj.as_mutable(JsonType(default=list, dialect_map=dict(mysql=LONGTEXT()))))
4203 4202 task_kwargs = Column('task_kwargs_json', MutationObj.as_mutable(JsonType(default=dict, dialect_map=dict(mysql=LONGTEXT()))))
4204 4203
4205 4204 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4206 4205 updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=None)
4207 4206
4208 4207 @hybrid_property
4209 4208 def schedule_type(self):
4210 4209 return self._schedule_type
4211 4210
4212 4211 @schedule_type.setter
4213 4212 def schedule_type(self, val):
4214 4213 if val not in self.schedule_types:
4215 4214 raise ValueError('Value must be one of `{}` and got `{}`'.format(
4216 4215 self.schedule_types, val))
4217 4216
4218 4217 self._schedule_type = val
4219 4218
4220 4219 @classmethod
4221 4220 def get_uid(cls, obj):
4222 4221 args = obj.task_args
4223 4222 kwargs = obj.task_kwargs
4224 4223 if isinstance(args, JsonRaw):
4225 4224 try:
4226 4225 args = json.loads(args)
4227 4226 except ValueError:
4228 4227 args = tuple()
4229 4228
4230 4229 if isinstance(kwargs, JsonRaw):
4231 4230 try:
4232 4231 kwargs = json.loads(kwargs)
4233 4232 except ValueError:
4234 4233 kwargs = dict()
4235 4234
4236 4235 dot_notation = obj.task_dot_notation
4237 4236 val = '.'.join(map(safe_str, [
4238 4237 sorted(dot_notation), args, sorted(kwargs.items())]))
4239 4238 return hashlib.sha1(val).hexdigest()
4240 4239
4241 4240 @classmethod
4242 4241 def get_by_schedule_name(cls, schedule_name):
4243 4242 return cls.query().filter(cls.schedule_name == schedule_name).scalar()
4244 4243
4245 4244 @classmethod
4246 4245 def get_by_schedule_id(cls, schedule_id):
4247 4246 return cls.query().filter(cls.schedule_entry_id == schedule_id).scalar()
4248 4247
4249 4248 @property
4250 4249 def task(self):
4251 4250 return self.task_dot_notation
4252 4251
4253 4252 @property
4254 4253 def schedule(self):
4255 4254 from rhodecode.lib.celerylib.utils import raw_2_schedule
4256 4255 schedule = raw_2_schedule(self.schedule_definition, self.schedule_type)
4257 4256 return schedule
4258 4257
4259 4258 @property
4260 4259 def args(self):
4261 4260 try:
4262 4261 return list(self.task_args or [])
4263 4262 except ValueError:
4264 4263 return list()
4265 4264
4266 4265 @property
4267 4266 def kwargs(self):
4268 4267 try:
4269 4268 return dict(self.task_kwargs or {})
4270 4269 except ValueError:
4271 4270 return dict()
4272 4271
4273 4272 def _as_raw(self, val):
4274 4273 if hasattr(val, 'de_coerce'):
4275 4274 val = val.de_coerce()
4276 4275 if val:
4277 4276 val = json.dumps(val)
4278 4277
4279 4278 return val
4280 4279
4281 4280 @property
4282 4281 def schedule_definition_raw(self):
4283 4282 return self._as_raw(self.schedule_definition)
4284 4283
4285 4284 @property
4286 4285 def args_raw(self):
4287 4286 return self._as_raw(self.task_args)
4288 4287
4289 4288 @property
4290 4289 def kwargs_raw(self):
4291 4290 return self._as_raw(self.task_kwargs)
4292 4291
4293 4292 def __repr__(self):
4294 4293 return '<DB:ScheduleEntry({}:{})>'.format(
4295 4294 self.schedule_entry_id, self.schedule_name)
4296 4295
4297 4296
4298 4297 @event.listens_for(ScheduleEntry, 'before_update')
4299 4298 def update_task_uid(mapper, connection, target):
4300 4299 target.task_uid = ScheduleEntry.get_uid(target)
4301 4300
4302 4301
4303 4302 @event.listens_for(ScheduleEntry, 'before_insert')
4304 4303 def set_task_uid(mapper, connection, target):
4305 4304 target.task_uid = ScheduleEntry.get_uid(target)
4306 4305
4307 4306
4308 4307 class DbMigrateVersion(Base, BaseModel):
4309 4308 __tablename__ = 'db_migrate_version'
4310 4309 __table_args__ = (
4311 4310 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4312 4311 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
4313 4312 )
4314 4313 repository_id = Column('repository_id', String(250), primary_key=True)
4315 4314 repository_path = Column('repository_path', Text)
4316 4315 version = Column('version', Integer)
4317 4316
4318 4317
4319 4318 class DbSession(Base, BaseModel):
4320 4319 __tablename__ = 'db_session'
4321 4320 __table_args__ = (
4322 4321 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4323 4322 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
4324 4323 )
4325 4324
4326 4325 def __repr__(self):
4327 4326 return '<DB:DbSession({})>'.format(self.id)
4328 4327
4329 4328 id = Column('id', Integer())
4330 4329 namespace = Column('namespace', String(255), primary_key=True)
4331 4330 accessed = Column('accessed', DateTime, nullable=False)
4332 4331 created = Column('created', DateTime, nullable=False)
4333 4332 data = Column('data', PickleType, nullable=False)
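Editor's note: the hunk in the PullRequestReviewers.reasons setter above is the only place in this file where the changeset touches pyramid.compat. A minimal sketch of the same pattern follows, assuming a hypothetical standalone validator; the function name and usage shown are illustrative and are not part of the commit.

def validate_reasons(val):
    """Reject anything that is not a list of plain strings.

    Mirrors the check in the reasons setter after the py3 change:
    pyramid.compat.string_types is replaced by the builtin str.
    """
    val = val or []
    if any(not isinstance(x, str) for x in val):
        raise Exception('invalid reasons type, must be list of strings')
    return val

# illustrative usage (hypothetical):
# validate_reasons(['added as reviewer'])  -> ['added as reviewer']
# validate_reasons([42])                   -> raises Exception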