The requested changes are too big and content was truncated.
@@ -1,563 +1,564 @@
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2011-2019 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import logging |
|
22 | from pyramid import compat | |
|
22 | 23 | |
|
23 | 24 | from rhodecode.api import ( |
|
24 | 25 | jsonrpc_method, JSONRPCError, JSONRPCForbidden, JSONRPCValidationError) |
|
25 | 26 | from rhodecode.api.utils import ( |
|
26 | 27 | Optional, OAttr, has_superadmin_permission, get_user_or_error, store_update) |
|
27 | 28 | from rhodecode.lib import audit_logger |
|
28 | 29 | from rhodecode.lib.auth import AuthUser, PasswordGenerator |
|
29 | 30 | from rhodecode.lib.exceptions import DefaultUserException |
|
30 | 31 | from rhodecode.lib.utils2 import safe_int, str2bool |
|
31 | 32 | from rhodecode.model.db import Session, User, Repository |
|
32 | 33 | from rhodecode.model.user import UserModel |
|
33 | 34 | from rhodecode.model import validation_schema |
|
34 | 35 | from rhodecode.model.validation_schema.schemas import user_schema |
|
35 | 36 | |
|
36 | 37 | log = logging.getLogger(__name__) |
|
37 | 38 | |
|
38 | 39 | |
|
39 | 40 | @jsonrpc_method() |
|
40 | 41 | def get_user(request, apiuser, userid=Optional(OAttr('apiuser'))): |
|
41 | 42 | """ |
|
42 | 43 | Returns the information associated with a username or userid. |
|
43 | 44 | |
|
44 | 45 | * If the ``userid`` is not set, this command returns the information |
|
45 | 46 | for the ``userid`` calling the method. |
|
46 | 47 | |
|
47 | 48 | .. note:: |
|
48 | 49 | |
|
49 | 50 | Normal users may only run this command against their ``userid``. For |
|
50 | 51 | full privileges you must run this command using an |authtoken| with |
|
51 | 52 | admin rights. |
|
52 | 53 | |
|
53 | 54 | :param apiuser: This is filled automatically from the |authtoken|. |
|
54 | 55 | :type apiuser: AuthUser |
|
55 | 56 | :param userid: Sets the userid for which data will be returned. |
|
56 | 57 | :type userid: Optional(str or int) |
|
57 | 58 | |
|
58 | 59 | Example output: |
|
59 | 60 | |
|
60 | 61 | .. code-block:: bash |
|
61 | 62 | |
|
62 | 63 | { |
|
63 | 64 | "error": null, |
|
64 | 65 | "id": <id>, |
|
65 | 66 | "result": { |
|
66 | 67 | "active": true, |
|
67 | 68 | "admin": false, |
|
68 | 69 | "api_keys": [ list of keys ], |
|
69 | 70 | "auth_tokens": [ list of tokens with details ], |
|
70 | 71 | "email": "user@example.com", |
|
71 | 72 | "emails": [ |
|
72 | 73 | "user@example.com" |
|
73 | 74 | ], |
|
74 | 75 | "extern_name": "rhodecode", |
|
75 | 76 | "extern_type": "rhodecode", |
|
76 | 77 | "firstname": "username", |
|
77 | 78 | "ip_addresses": [], |
|
78 | 79 | "language": null, |
|
79 | 80 | "last_login": "Timestamp", |
|
80 | 81 | "last_activity": "Timestamp", |
|
81 | 82 | "lastname": "surname", |
|
82 | 83 | "permissions": <deprecated>, |
|
83 | 84 | "permissions_summary": { |
|
84 | 85 | "global": [ |
|
85 | 86 | "hg.inherit_default_perms.true", |
|
86 | 87 | "usergroup.read", |
|
87 | 88 | "hg.repogroup.create.false", |
|
88 | 89 | "hg.create.none", |
|
89 | 90 | "hg.password_reset.enabled", |
|
90 | 91 | "hg.extern_activate.manual", |
|
91 | 92 | "hg.create.write_on_repogroup.false", |
|
92 | 93 | "hg.usergroup.create.false", |
|
93 | 94 | "group.none", |
|
94 | 95 | "repository.none", |
|
95 | 96 | "hg.register.none", |
|
96 | 97 | "hg.fork.repository" |
|
97 | 98 | ], |
|
98 | 99 | "repositories": { "username/example": "repository.write"}, |
|
99 | 100 | "repositories_groups": { "user-group/repo": "group.none" }, |
|
100 | 101 | "user_groups": { "user_group_name": "usergroup.read" } |
|
101 | 102 | } |
|
102 | 103 | "user_id": 32, |
|
103 | 104 | "username": "username" |
|
104 | 105 | } |
|
105 | 106 | } |
|
106 | 107 | """ |
|
107 | 108 | |
|
108 | 109 | if not has_superadmin_permission(apiuser): |
|
109 | 110 | # make sure normal user does not pass someone else userid, |
|
110 | 111 | # he is not allowed to do that |
|
111 | 112 | if not isinstance(userid, Optional) and userid != apiuser.user_id: |
|
112 | 113 | raise JSONRPCError('userid is not the same as your user') |
|
113 | 114 | |
|
114 | 115 | userid = Optional.extract(userid, evaluate_locals=locals()) |
|
115 | 116 | userid = getattr(userid, 'user_id', userid) |
|
116 | 117 | |
|
117 | 118 | user = get_user_or_error(userid) |
|
118 | 119 | data = user.get_api_data(include_secrets=True) |
|
119 | 120 | permissions = AuthUser(user_id=user.user_id).permissions |
|
120 | 121 | data['permissions'] = permissions # TODO(marcink): should be deprecated |
|
121 | 122 | data['permissions_summary'] = permissions |
|
122 | 123 | return data |
|
123 | 124 | |
|
124 | 125 | |
|
125 | 126 | @jsonrpc_method() |
|
126 | 127 | def get_users(request, apiuser): |
|
127 | 128 | """ |
|
128 | 129 | Lists all users in the |RCE| user database. |
|
129 | 130 | |
|
130 | 131 | This command can only be run using an |authtoken| with admin rights to |
|
131 | 132 | the specified repository. |
|
132 | 133 | |
|
133 | 134 | This command takes the following options: |
|
134 | 135 | |
|
135 | 136 | :param apiuser: This is filled automatically from the |authtoken|. |
|
136 | 137 | :type apiuser: AuthUser |
|
137 | 138 | |
|
138 | 139 | Example output: |
|
139 | 140 | |
|
140 | 141 | .. code-block:: bash |
|
141 | 142 | |
|
142 | 143 | id : <id_given_in_input> |
|
143 | 144 | result: [<user_object>, ...] |
|
144 | 145 | error: null |
|
145 | 146 | """ |
|
146 | 147 | |
|
147 | 148 | if not has_superadmin_permission(apiuser): |
|
148 | 149 | raise JSONRPCForbidden() |
|
149 | 150 | |
|
150 | 151 | result = [] |
|
151 | 152 | users_list = User.query().order_by(User.username) \ |
|
152 | 153 | .filter(User.username != User.DEFAULT_USER) \ |
|
153 | 154 | .all() |
|
154 | 155 | for user in users_list: |
|
155 | 156 | result.append(user.get_api_data(include_secrets=True)) |
|
156 | 157 | return result |
|
157 | 158 | |
|
158 | 159 | |
|
159 | 160 | @jsonrpc_method() |
|
160 | 161 | def create_user(request, apiuser, username, email, password=Optional(''), |
|
161 | 162 | firstname=Optional(''), lastname=Optional(''), |
|
162 | 163 | active=Optional(True), admin=Optional(False), |
|
163 | 164 | extern_name=Optional('rhodecode'), |
|
164 | 165 | extern_type=Optional('rhodecode'), |
|
165 | 166 | force_password_change=Optional(False), |
|
166 | 167 | create_personal_repo_group=Optional(None)): |
|
167 | 168 | """ |
|
168 | 169 | Creates a new user and returns the new user object. |
|
169 | 170 | |
|
170 | 171 | This command can only be run using an |authtoken| with admin rights to |
|
171 | 172 | the specified repository. |
|
172 | 173 | |
|
173 | 174 | This command takes the following options: |
|
174 | 175 | |
|
175 | 176 | :param apiuser: This is filled automatically from the |authtoken|. |
|
176 | 177 | :type apiuser: AuthUser |
|
177 | 178 | :param username: Set the new username. |
|
178 | 179 | :type username: str or int |
|
179 | 180 | :param email: Set the user email address. |
|
180 | 181 | :type email: str |
|
181 | 182 | :param password: Set the new user password. |
|
182 | 183 | :type password: Optional(str) |
|
183 | 184 | :param firstname: Set the new user firstname. |
|
184 | 185 | :type firstname: Optional(str) |
|
185 | 186 | :param lastname: Set the new user surname. |
|
186 | 187 | :type lastname: Optional(str) |
|
187 | 188 | :param active: Set the user as active. |
|
188 | 189 | :type active: Optional(``True`` | ``False``) |
|
189 | 190 | :param admin: Give the new user admin rights. |
|
190 | 191 | :type admin: Optional(``True`` | ``False``) |
|
191 | 192 | :param extern_name: Set the authentication plugin name. |
|
192 | 193 | Using LDAP this is filled with LDAP UID. |
|
193 | 194 | :type extern_name: Optional(str) |
|
194 | 195 | :param extern_type: Set the new user authentication plugin. |
|
195 | 196 | :type extern_type: Optional(str) |
|
196 | 197 | :param force_password_change: Force the new user to change password |
|
197 | 198 | on next login. |
|
198 | 199 | :type force_password_change: Optional(``True`` | ``False``) |
|
199 | 200 | :param create_personal_repo_group: Create personal repo group for this user |
|
200 | 201 | :type create_personal_repo_group: Optional(``True`` | ``False``) |
|
201 | 202 | |
|
202 | 203 | Example output: |
|
203 | 204 | |
|
204 | 205 | .. code-block:: bash |
|
205 | 206 | |
|
206 | 207 | id : <id_given_in_input> |
|
207 | 208 | result: { |
|
208 | 209 | "msg" : "created new user `<username>`", |
|
209 | 210 | "user": <user_obj> |
|
210 | 211 | } |
|
211 | 212 | error: null |
|
212 | 213 | |
|
213 | 214 | Example error output: |
|
214 | 215 | |
|
215 | 216 | .. code-block:: bash |
|
216 | 217 | |
|
217 | 218 | id : <id_given_in_input> |
|
218 | 219 | result : null |
|
219 | 220 | error : { |
|
220 | 221 | "user `<username>` already exist" |
|
221 | 222 | or |
|
222 | 223 | "email `<email>` already exist" |
|
223 | 224 | or |
|
224 | 225 | "failed to create user `<username>`" |
|
225 | 226 | } |
|
226 | 227 | |
|
227 | 228 | """ |
|
228 | 229 | if not has_superadmin_permission(apiuser): |
|
229 | 230 | raise JSONRPCForbidden() |
|
230 | 231 | |
|
231 | 232 | if UserModel().get_by_username(username): |
|
232 | 233 | raise JSONRPCError("user `%s` already exist" % (username,)) |
|
233 | 234 | |
|
234 | 235 | if UserModel().get_by_email(email, case_insensitive=True): |
|
235 | 236 | raise JSONRPCError("email `%s` already exist" % (email,)) |
|
236 | 237 | |
|
237 | 238 | # generate random password if we actually given the |
|
238 | 239 | # extern_name and it's not rhodecode |
|
239 | 240 | if (not isinstance(extern_name, Optional) and |
|
240 | 241 | Optional.extract(extern_name) != 'rhodecode'): |
|
241 | 242 | # generate temporary password if user is external |
|
242 | 243 | password = PasswordGenerator().gen_password(length=16) |
|
243 | 244 | create_repo_group = Optional.extract(create_personal_repo_group) |
|
244 | if isinstance(create_repo_group, basestring): | |
|
245 | if isinstance(create_repo_group, compat.string_types): | |
|
245 | 246 | create_repo_group = str2bool(create_repo_group) |
|
246 | 247 | |
|
247 | 248 | username = Optional.extract(username) |
|
248 | 249 | password = Optional.extract(password) |
|
249 | 250 | email = Optional.extract(email) |
|
250 | 251 | first_name = Optional.extract(firstname) |
|
251 | 252 | last_name = Optional.extract(lastname) |
|
252 | 253 | active = Optional.extract(active) |
|
253 | 254 | admin = Optional.extract(admin) |
|
254 | 255 | extern_type = Optional.extract(extern_type) |
|
255 | 256 | extern_name = Optional.extract(extern_name) |
|
256 | 257 | |
|
257 | 258 | schema = user_schema.UserSchema().bind( |
|
258 | 259 | # user caller |
|
259 | 260 | user=apiuser) |
|
260 | 261 | try: |
|
261 | 262 | schema_data = schema.deserialize(dict( |
|
262 | 263 | username=username, |
|
263 | 264 | email=email, |
|
264 | 265 | password=password, |
|
265 | 266 | first_name=first_name, |
|
266 | 267 | last_name=last_name, |
|
267 | 268 | active=active, |
|
268 | 269 | admin=admin, |
|
269 | 270 | extern_type=extern_type, |
|
270 | 271 | extern_name=extern_name, |
|
271 | 272 | )) |
|
272 | 273 | except validation_schema.Invalid as err: |
|
273 | 274 | raise JSONRPCValidationError(colander_exc=err) |
|
274 | 275 | |
|
275 | 276 | try: |
|
276 | 277 | user = UserModel().create_or_update( |
|
277 | 278 | username=schema_data['username'], |
|
278 | 279 | password=schema_data['password'], |
|
279 | 280 | email=schema_data['email'], |
|
280 | 281 | firstname=schema_data['first_name'], |
|
281 | 282 | lastname=schema_data['last_name'], |
|
282 | 283 | active=schema_data['active'], |
|
283 | 284 | admin=schema_data['admin'], |
|
284 | 285 | extern_type=schema_data['extern_type'], |
|
285 | 286 | extern_name=schema_data['extern_name'], |
|
286 | 287 | force_password_change=Optional.extract(force_password_change), |
|
287 | 288 | create_repo_group=create_repo_group |
|
288 | 289 | ) |
|
289 | 290 | Session().flush() |
|
290 | 291 | creation_data = user.get_api_data() |
|
291 | 292 | audit_logger.store_api( |
|
292 | 293 | 'user.create', action_data={'data': creation_data}, |
|
293 | 294 | user=apiuser) |
|
294 | 295 | |
|
295 | 296 | Session().commit() |
|
296 | 297 | return { |
|
297 | 298 | 'msg': 'created new user `%s`' % username, |
|
298 | 299 | 'user': user.get_api_data(include_secrets=True) |
|
299 | 300 | } |
|
300 | 301 | except Exception: |
|
301 | 302 | log.exception('Error occurred during creation of user') |
|
302 | 303 | raise JSONRPCError('failed to create user `%s`' % (username,)) |
|
303 | 304 | |
|
304 | 305 | |
|
305 | 306 | @jsonrpc_method() |
|
306 | 307 | def update_user(request, apiuser, userid, username=Optional(None), |
|
307 | 308 | email=Optional(None), password=Optional(None), |
|
308 | 309 | firstname=Optional(None), lastname=Optional(None), |
|
309 | 310 | active=Optional(None), admin=Optional(None), |
|
310 | 311 | extern_type=Optional(None), extern_name=Optional(None), ): |
|
311 | 312 | """ |
|
312 | 313 | Updates the details for the specified user, if that user exists. |
|
313 | 314 | |
|
314 | 315 | This command can only be run using an |authtoken| with admin rights to |
|
315 | 316 | the specified repository. |
|
316 | 317 | |
|
317 | 318 | This command takes the following options: |
|
318 | 319 | |
|
319 | 320 | :param apiuser: This is filled automatically from |authtoken|. |
|
320 | 321 | :type apiuser: AuthUser |
|
321 | 322 | :param userid: Set the ``userid`` to update. |
|
322 | 323 | :type userid: str or int |
|
323 | 324 | :param username: Set the new username. |
|
324 | 325 | :type username: str or int |
|
325 | 326 | :param email: Set the new email. |
|
326 | 327 | :type email: str |
|
327 | 328 | :param password: Set the new password. |
|
328 | 329 | :type password: Optional(str) |
|
329 | 330 | :param firstname: Set the new first name. |
|
330 | 331 | :type firstname: Optional(str) |
|
331 | 332 | :param lastname: Set the new surname. |
|
332 | 333 | :type lastname: Optional(str) |
|
333 | 334 | :param active: Set the new user as active. |
|
334 | 335 | :type active: Optional(``True`` | ``False``) |
|
335 | 336 | :param admin: Give the user admin rights. |
|
336 | 337 | :type admin: Optional(``True`` | ``False``) |
|
337 | 338 | :param extern_name: Set the authentication plugin user name. |
|
338 | 339 | Using LDAP this is filled with LDAP UID. |
|
339 | 340 | :type extern_name: Optional(str) |
|
340 | 341 | :param extern_type: Set the authentication plugin type. |
|
341 | 342 | :type extern_type: Optional(str) |
|
342 | 343 | |
|
343 | 344 | |
|
344 | 345 | Example output: |
|
345 | 346 | |
|
346 | 347 | .. code-block:: bash |
|
347 | 348 | |
|
348 | 349 | id : <id_given_in_input> |
|
349 | 350 | result: { |
|
350 | 351 | "msg" : "updated user ID:<userid> <username>", |
|
351 | 352 | "user": <user_object>, |
|
352 | 353 | } |
|
353 | 354 | error: null |
|
354 | 355 | |
|
355 | 356 | Example error output: |
|
356 | 357 | |
|
357 | 358 | .. code-block:: bash |
|
358 | 359 | |
|
359 | 360 | id : <id_given_in_input> |
|
360 | 361 | result : null |
|
361 | 362 | error : { |
|
362 | 363 | "failed to update user `<username>`" |
|
363 | 364 | } |
|
364 | 365 | |
|
365 | 366 | """ |
|
366 | 367 | if not has_superadmin_permission(apiuser): |
|
367 | 368 | raise JSONRPCForbidden() |
|
368 | 369 | |
|
369 | 370 | user = get_user_or_error(userid) |
|
370 | 371 | old_data = user.get_api_data() |
|
371 | 372 | # only non optional arguments will be stored in updates |
|
372 | 373 | updates = {} |
|
373 | 374 | |
|
374 | 375 | try: |
|
375 | 376 | |
|
376 | 377 | store_update(updates, username, 'username') |
|
377 | 378 | store_update(updates, password, 'password') |
|
378 | 379 | store_update(updates, email, 'email') |
|
379 | 380 | store_update(updates, firstname, 'name') |
|
380 | 381 | store_update(updates, lastname, 'lastname') |
|
381 | 382 | store_update(updates, active, 'active') |
|
382 | 383 | store_update(updates, admin, 'admin') |
|
383 | 384 | store_update(updates, extern_name, 'extern_name') |
|
384 | 385 | store_update(updates, extern_type, 'extern_type') |
|
385 | 386 | |
|
386 | 387 | user = UserModel().update_user(user, **updates) |
|
387 | 388 | audit_logger.store_api( |
|
388 | 389 | 'user.edit', action_data={'old_data': old_data}, |
|
389 | 390 | user=apiuser) |
|
390 | 391 | Session().commit() |
|
391 | 392 | return { |
|
392 | 393 | 'msg': 'updated user ID:%s %s' % (user.user_id, user.username), |
|
393 | 394 | 'user': user.get_api_data(include_secrets=True) |
|
394 | 395 | } |
|
395 | 396 | except DefaultUserException: |
|
396 | 397 | log.exception("Default user edit exception") |
|
397 | 398 | raise JSONRPCError('editing default user is forbidden') |
|
398 | 399 | except Exception: |
|
399 | 400 | log.exception("Error occurred during update of user") |
|
400 | 401 | raise JSONRPCError('failed to update user `%s`' % (userid,)) |
|
401 | 402 | |
|
402 | 403 | |
|
403 | 404 | @jsonrpc_method() |
|
404 | 405 | def delete_user(request, apiuser, userid): |
|
405 | 406 | """ |
|
406 | 407 | Deletes the specified user from the |RCE| user database. |
|
407 | 408 | |
|
408 | 409 | This command can only be run using an |authtoken| with admin rights to |
|
409 | 410 | the specified repository. |
|
410 | 411 | |
|
411 | 412 | .. important:: |
|
412 | 413 | |
|
413 | 414 | Ensure all open pull requests and open code review |
|
414 | 415 | requests to this user are close. |
|
415 | 416 | |
|
416 | 417 | Also ensure all repositories, or repository groups owned by this |
|
417 | 418 | user are reassigned before deletion. |
|
418 | 419 | |
|
419 | 420 | This command takes the following options: |
|
420 | 421 | |
|
421 | 422 | :param apiuser: This is filled automatically from the |authtoken|. |
|
422 | 423 | :type apiuser: AuthUser |
|
423 | 424 | :param userid: Set the user to delete. |
|
424 | 425 | :type userid: str or int |
|
425 | 426 | |
|
426 | 427 | Example output: |
|
427 | 428 | |
|
428 | 429 | .. code-block:: bash |
|
429 | 430 | |
|
430 | 431 | id : <id_given_in_input> |
|
431 | 432 | result: { |
|
432 | 433 | "msg" : "deleted user ID:<userid> <username>", |
|
433 | 434 | "user": null |
|
434 | 435 | } |
|
435 | 436 | error: null |
|
436 | 437 | |
|
437 | 438 | Example error output: |
|
438 | 439 | |
|
439 | 440 | .. code-block:: bash |
|
440 | 441 | |
|
441 | 442 | id : <id_given_in_input> |
|
442 | 443 | result : null |
|
443 | 444 | error : { |
|
444 | 445 | "failed to delete user ID:<userid> <username>" |
|
445 | 446 | } |
|
446 | 447 | |
|
447 | 448 | """ |
|
448 | 449 | if not has_superadmin_permission(apiuser): |
|
449 | 450 | raise JSONRPCForbidden() |
|
450 | 451 | |
|
451 | 452 | user = get_user_or_error(userid) |
|
452 | 453 | old_data = user.get_api_data() |
|
453 | 454 | try: |
|
454 | 455 | UserModel().delete(userid) |
|
455 | 456 | audit_logger.store_api( |
|
456 | 457 | 'user.delete', action_data={'old_data': old_data}, |
|
457 | 458 | user=apiuser) |
|
458 | 459 | |
|
459 | 460 | Session().commit() |
|
460 | 461 | return { |
|
461 | 462 | 'msg': 'deleted user ID:%s %s' % (user.user_id, user.username), |
|
462 | 463 | 'user': None |
|
463 | 464 | } |
|
464 | 465 | except Exception: |
|
465 | 466 | log.exception("Error occurred during deleting of user") |
|
466 | 467 | raise JSONRPCError( |
|
467 | 468 | 'failed to delete user ID:%s %s' % (user.user_id, user.username)) |
|
468 | 469 | |
|
469 | 470 | |
|
470 | 471 | @jsonrpc_method() |
|
471 | 472 | def get_user_locks(request, apiuser, userid=Optional(OAttr('apiuser'))): |
|
472 | 473 | """ |
|
473 | 474 | Displays all repositories locked by the specified user. |
|
474 | 475 | |
|
475 | 476 | * If this command is run by a non-admin user, it returns |
|
476 | 477 | a list of |repos| locked by that user. |
|
477 | 478 | |
|
478 | 479 | This command takes the following options: |
|
479 | 480 | |
|
480 | 481 | :param apiuser: This is filled automatically from the |authtoken|. |
|
481 | 482 | :type apiuser: AuthUser |
|
482 | 483 | :param userid: Sets the userid whose list of locked |repos| will be |
|
483 | 484 | displayed. |
|
484 | 485 | :type userid: Optional(str or int) |
|
485 | 486 | |
|
486 | 487 | Example output: |
|
487 | 488 | |
|
488 | 489 | .. code-block:: bash |
|
489 | 490 | |
|
490 | 491 | id : <id_given_in_input> |
|
491 | 492 | result : { |
|
492 | 493 | [repo_object, repo_object,...] |
|
493 | 494 | } |
|
494 | 495 | error : null |
|
495 | 496 | """ |
|
496 | 497 | |
|
497 | 498 | include_secrets = False |
|
498 | 499 | if not has_superadmin_permission(apiuser): |
|
499 | 500 | # make sure normal user does not pass someone else userid, |
|
500 | 501 | # he is not allowed to do that |
|
501 | 502 | if not isinstance(userid, Optional) and userid != apiuser.user_id: |
|
502 | 503 | raise JSONRPCError('userid is not the same as your user') |
|
503 | 504 | else: |
|
504 | 505 | include_secrets = True |
|
505 | 506 | |
|
506 | 507 | userid = Optional.extract(userid, evaluate_locals=locals()) |
|
507 | 508 | userid = getattr(userid, 'user_id', userid) |
|
508 | 509 | user = get_user_or_error(userid) |
|
509 | 510 | |
|
510 | 511 | ret = [] |
|
511 | 512 | |
|
512 | 513 | # show all locks |
|
513 | 514 | for r in Repository.getAll(): |
|
514 | 515 | _user_id, _time, _reason = r.locked |
|
515 | 516 | if _user_id and _time: |
|
516 | 517 | _api_data = r.get_api_data(include_secrets=include_secrets) |
|
517 | 518 | # if we use user filter just show the locks for this user |
|
518 | 519 | if safe_int(_user_id) == user.user_id: |
|
519 | 520 | ret.append(_api_data) |
|
520 | 521 | |
|
521 | 522 | return ret |
|
522 | 523 | |
|
523 | 524 | |
|
524 | 525 | @jsonrpc_method() |
|
525 | 526 | def get_user_audit_logs(request, apiuser, userid=Optional(OAttr('apiuser'))): |
|
526 | 527 | """ |
|
527 | 528 | Fetches all action logs made by the specified user. |
|
528 | 529 | |
|
529 | 530 | This command takes the following options: |
|
530 | 531 | |
|
531 | 532 | :param apiuser: This is filled automatically from the |authtoken|. |
|
532 | 533 | :type apiuser: AuthUser |
|
533 | 534 | :param userid: Sets the userid whose list of locked |repos| will be |
|
534 | 535 | displayed. |
|
535 | 536 | :type userid: Optional(str or int) |
|
536 | 537 | |
|
537 | 538 | Example output: |
|
538 | 539 | |
|
539 | 540 | .. code-block:: bash |
|
540 | 541 | |
|
541 | 542 | id : <id_given_in_input> |
|
542 | 543 | result : { |
|
543 | 544 | [action, action,...] |
|
544 | 545 | } |
|
545 | 546 | error : null |
|
546 | 547 | """ |
|
547 | 548 | |
|
548 | 549 | if not has_superadmin_permission(apiuser): |
|
549 | 550 | # make sure normal user does not pass someone else userid, |
|
550 | 551 | # he is not allowed to do that |
|
551 | 552 | if not isinstance(userid, Optional) and userid != apiuser.user_id: |
|
552 | 553 | raise JSONRPCError('userid is not the same as your user') |
|
553 | 554 | |
|
554 | 555 | userid = Optional.extract(userid, evaluate_locals=locals()) |
|
555 | 556 | userid = getattr(userid, 'user_id', userid) |
|
556 | 557 | user = get_user_or_error(userid) |
|
557 | 558 | |
|
558 | 559 | ret = [] |
|
559 | 560 | |
|
560 | 561 | # show all user actions |
|
561 | 562 | for entry in UserModel().get_user_log(user, filter_term=None): |
|
562 | 563 | ret.append(entry) |
|
563 | 564 | return ret |
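The hunk above applies the same pattern as the rest of this change: import Pyramid's compat shim and replace direct basestring checks with compat.string_types, so the isinstance test behaves the same on Python 2 and Python 3. A minimal, self-contained sketch of that pattern (illustrative only, not the project's code; str2bool below is a simplified stand-in for rhodecode.lib.utils2.str2bool):

    from pyramid import compat  # string_types is (basestring,) on Python 2 and (str,) on Python 3

    def str2bool(value):
        # simplified stand-in for rhodecode.lib.utils2.str2bool
        return str(value).strip().lower() in ('true', 'yes', 'on', 'y', '1')

    def normalize_flag(value):
        # Accept real booleans as well as their string forms, mirroring how
        # create_user() treats create_personal_repo_group in the hunk above.
        if isinstance(value, compat.string_types):
            return str2bool(value)
        return bool(value)

    normalize_flag('true')   # -> True
    normalize_flag(False)    # -> False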
@@ -1,686 +1,687 @@
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2019 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import time |
|
22 | 22 | import logging |
|
23 | 23 | import operator |
|
24 | 24 | |
|
25 | from pyramid import compat | |
|
25 | 26 | from pyramid.httpexceptions import HTTPFound, HTTPForbidden, HTTPBadRequest |
|
26 | 27 | |
|
27 | 28 | from rhodecode.lib import helpers as h, diffs |
|
28 | 29 | from rhodecode.lib.utils2 import ( |
|
29 | 30 | StrictAttributeDict, safe_int, datetime_to_time, safe_unicode) |
|
30 | 31 | from rhodecode.lib.vcs.exceptions import RepositoryRequirementError |
|
31 | 32 | from rhodecode.model import repo |
|
32 | 33 | from rhodecode.model import repo_group |
|
33 | 34 | from rhodecode.model import user_group |
|
34 | 35 | from rhodecode.model import user |
|
35 | 36 | from rhodecode.model.db import User |
|
36 | 37 | from rhodecode.model.scm import ScmModel |
|
37 | 38 | from rhodecode.model.settings import VcsSettingsModel |
|
38 | 39 | |
|
39 | 40 | log = logging.getLogger(__name__) |
|
40 | 41 | |
|
41 | 42 | |
|
42 | 43 | ADMIN_PREFIX = '/_admin' |
|
43 | 44 | STATIC_FILE_PREFIX = '/_static' |
|
44 | 45 | |
|
45 | 46 | URL_NAME_REQUIREMENTS = { |
|
46 | 47 | # group name can have a slash in them, but they must not end with a slash |
|
47 | 48 | 'group_name': r'.*?[^/]', |
|
48 | 49 | 'repo_group_name': r'.*?[^/]', |
|
49 | 50 | # repo names can have a slash in them, but they must not end with a slash |
|
50 | 51 | 'repo_name': r'.*?[^/]', |
|
51 | 52 | # file path eats up everything at the end |
|
52 | 53 | 'f_path': r'.*', |
|
53 | 54 | # reference types |
|
54 | 55 | 'source_ref_type': '(branch|book|tag|rev|\%\(source_ref_type\)s)', |
|
55 | 56 | 'target_ref_type': '(branch|book|tag|rev|\%\(target_ref_type\)s)', |
|
56 | 57 | } |
|
57 | 58 | |
|
58 | 59 | |
|
59 | 60 | def add_route_with_slash(config,name, pattern, **kw): |
|
60 | 61 | config.add_route(name, pattern, **kw) |
|
61 | 62 | if not pattern.endswith('/'): |
|
62 | 63 | config.add_route(name + '_slash', pattern + '/', **kw) |
|
63 | 64 | |
|
64 | 65 | |
|
65 | 66 | def add_route_requirements(route_path, requirements=None): |
|
66 | 67 | """ |
|
67 | 68 | Adds regex requirements to pyramid routes using a mapping dict |
|
68 | 69 | e.g:: |
|
69 | 70 | add_route_requirements('{repo_name}/settings') |
|
70 | 71 | """ |
|
71 | 72 | requirements = requirements or URL_NAME_REQUIREMENTS |
|
72 | 73 | for key, regex in requirements.items(): |
|
73 | 74 | route_path = route_path.replace('{%s}' % key, '{%s:%s}' % (key, regex)) |
|
74 | 75 | return route_path |
|
75 | 76 | |
|
76 | 77 | |
|
77 | 78 | def get_format_ref_id(repo): |
|
78 | 79 | """Returns a `repo` specific reference formatter function""" |
|
79 | 80 | if h.is_svn(repo): |
|
80 | 81 | return _format_ref_id_svn |
|
81 | 82 | else: |
|
82 | 83 | return _format_ref_id |
|
83 | 84 | |
|
84 | 85 | |
|
85 | 86 | def _format_ref_id(name, raw_id): |
|
86 | 87 | """Default formatting of a given reference `name`""" |
|
87 | 88 | return name |
|
88 | 89 | |
|
89 | 90 | |
|
90 | 91 | def _format_ref_id_svn(name, raw_id): |
|
91 | 92 | """Special way of formatting a reference for Subversion including path""" |
|
92 | 93 | return '%s@%s' % (name, raw_id) |
|
93 | 94 | |
|
94 | 95 | |
|
95 | 96 | class TemplateArgs(StrictAttributeDict): |
|
96 | 97 | pass |
|
97 | 98 | |
|
98 | 99 | |
|
99 | 100 | class BaseAppView(object): |
|
100 | 101 | |
|
101 | 102 | def __init__(self, context, request): |
|
102 | 103 | self.request = request |
|
103 | 104 | self.context = context |
|
104 | 105 | self.session = request.session |
|
105 | 106 | if not hasattr(request, 'user'): |
|
106 | 107 | # NOTE(marcink): edge case, we ended up in matched route |
|
107 | 108 | # but probably of web-app context, e.g API CALL/VCS CALL |
|
108 | 109 | if hasattr(request, 'vcs_call') or hasattr(request, 'rpc_method'): |
|
109 | 110 | log.warning('Unable to process request `%s` in this scope', request) |
|
110 | 111 | raise HTTPBadRequest() |
|
111 | 112 | |
|
112 | 113 | self._rhodecode_user = request.user # auth user |
|
113 | 114 | self._rhodecode_db_user = self._rhodecode_user.get_instance() |
|
114 | 115 | self._maybe_needs_password_change( |
|
115 | 116 | request.matched_route.name, self._rhodecode_db_user) |
|
116 | 117 | |
|
117 | 118 | def _maybe_needs_password_change(self, view_name, user_obj): |
|
118 | 119 | log.debug('Checking if user %s needs password change on view %s', |
|
119 | 120 | user_obj, view_name) |
|
120 | 121 | skip_user_views = [ |
|
121 | 122 | 'logout', 'login', |
|
122 | 123 | 'my_account_password', 'my_account_password_update' |
|
123 | 124 | ] |
|
124 | 125 | |
|
125 | 126 | if not user_obj: |
|
126 | 127 | return |
|
127 | 128 | |
|
128 | 129 | if user_obj.username == User.DEFAULT_USER: |
|
129 | 130 | return |
|
130 | 131 | |
|
131 | 132 | now = time.time() |
|
132 | 133 | should_change = user_obj.user_data.get('force_password_change') |
|
133 | 134 | change_after = safe_int(should_change) or 0 |
|
134 | 135 | if should_change and now > change_after: |
|
135 | 136 | log.debug('User %s requires password change', user_obj) |
|
136 | 137 | h.flash('You are required to change your password', 'warning', |
|
137 | 138 | ignore_duplicate=True) |
|
138 | 139 | |
|
139 | 140 | if view_name not in skip_user_views: |
|
140 | 141 | raise HTTPFound( |
|
141 | 142 | self.request.route_path('my_account_password')) |
|
142 | 143 | |
|
143 | 144 | def _log_creation_exception(self, e, repo_name): |
|
144 | 145 | _ = self.request.translate |
|
145 | 146 | reason = None |
|
146 | 147 | if len(e.args) == 2: |
|
147 | 148 | reason = e.args[1] |
|
148 | 149 | |
|
149 | 150 | if reason == 'INVALID_CERTIFICATE': |
|
150 | 151 | log.exception( |
|
151 | 152 | 'Exception creating a repository: invalid certificate') |
|
152 | 153 | msg = (_('Error creating repository %s: invalid certificate') |
|
153 | 154 | % repo_name) |
|
154 | 155 | else: |
|
155 | 156 | log.exception("Exception creating a repository") |
|
156 | 157 | msg = (_('Error creating repository %s') |
|
157 | 158 | % repo_name) |
|
158 | 159 | return msg |
|
159 | 160 | |
|
160 | 161 | def _get_local_tmpl_context(self, include_app_defaults=True): |
|
161 | 162 | c = TemplateArgs() |
|
162 | 163 | c.auth_user = self.request.user |
|
163 | 164 | # TODO(marcink): migrate the usage of c.rhodecode_user to c.auth_user |
|
164 | 165 | c.rhodecode_user = self.request.user |
|
165 | 166 | |
|
166 | 167 | if include_app_defaults: |
|
167 | 168 | from rhodecode.lib.base import attach_context_attributes |
|
168 | 169 | attach_context_attributes(c, self.request, self.request.user.user_id) |
|
169 | 170 | |
|
170 | 171 | return c |
|
171 | 172 | |
|
172 | 173 | def _get_template_context(self, tmpl_args, **kwargs): |
|
173 | 174 | |
|
174 | 175 | local_tmpl_args = { |
|
175 | 176 | 'defaults': {}, |
|
176 | 177 | 'errors': {}, |
|
177 | 178 | 'c': tmpl_args |
|
178 | 179 | } |
|
179 | 180 | local_tmpl_args.update(kwargs) |
|
180 | 181 | return local_tmpl_args |
|
181 | 182 | |
|
182 | 183 | def load_default_context(self): |
|
183 | 184 | """ |
|
184 | 185 | example: |
|
185 | 186 | |
|
186 | 187 | def load_default_context(self): |
|
187 | 188 | c = self._get_local_tmpl_context() |
|
188 | 189 | c.custom_var = 'foobar' |
|
189 | 190 | |
|
190 | 191 | return c |
|
191 | 192 | """ |
|
192 | 193 | raise NotImplementedError('Needs implementation in view class') |
|
193 | 194 | |
|
194 | 195 | |
|
195 | 196 | class RepoAppView(BaseAppView): |
|
196 | 197 | |
|
197 | 198 | def __init__(self, context, request): |
|
198 | 199 | super(RepoAppView, self).__init__(context, request) |
|
199 | 200 | self.db_repo = request.db_repo |
|
200 | 201 | self.db_repo_name = self.db_repo.repo_name |
|
201 | 202 | self.db_repo_pull_requests = ScmModel().get_pull_requests(self.db_repo) |
|
202 | 203 | |
|
203 | 204 | def _handle_missing_requirements(self, error): |
|
204 | 205 | log.error( |
|
205 | 206 | 'Requirements are missing for repository %s: %s', |
|
206 | 207 | self.db_repo_name, safe_unicode(error)) |
|
207 | 208 | |
|
208 | 209 | def _get_local_tmpl_context(self, include_app_defaults=True): |
|
209 | 210 | _ = self.request.translate |
|
210 | 211 | c = super(RepoAppView, self)._get_local_tmpl_context( |
|
211 | 212 | include_app_defaults=include_app_defaults) |
|
212 | 213 | |
|
213 | 214 | # register common vars for this type of view |
|
214 | 215 | c.rhodecode_db_repo = self.db_repo |
|
215 | 216 | c.repo_name = self.db_repo_name |
|
216 | 217 | c.repository_pull_requests = self.db_repo_pull_requests |
|
217 | 218 | self.path_filter = PathFilter(None) |
|
218 | 219 | |
|
219 | 220 | c.repository_requirements_missing = {} |
|
220 | 221 | try: |
|
221 | 222 | self.rhodecode_vcs_repo = self.db_repo.scm_instance() |
|
222 | 223 | if self.rhodecode_vcs_repo: |
|
223 | 224 | path_perms = self.rhodecode_vcs_repo.get_path_permissions( |
|
224 | 225 | c.auth_user.username) |
|
225 | 226 | self.path_filter = PathFilter(path_perms) |
|
226 | 227 | except RepositoryRequirementError as e: |
|
227 | 228 | c.repository_requirements_missing = {'error': str(e)} |
|
228 | 229 | self._handle_missing_requirements(e) |
|
229 | 230 | self.rhodecode_vcs_repo = None |
|
230 | 231 | |
|
231 | 232 | c.path_filter = self.path_filter # used by atom_feed_entry.mako |
|
232 | 233 | |
|
233 | 234 | if self.rhodecode_vcs_repo is None: |
|
234 | 235 | # unable to fetch this repo as vcs instance, report back to user |
|
235 | 236 | h.flash(_( |
|
236 | 237 | "The repository `%(repo_name)s` cannot be loaded in filesystem. " |
|
237 | 238 | "Please check if it exist, or is not damaged.") % |
|
238 | 239 | {'repo_name': c.repo_name}, |
|
239 | 240 | category='error', ignore_duplicate=True) |
|
240 | 241 | if c.repository_requirements_missing: |
|
241 | 242 | route = self.request.matched_route.name |
|
242 | 243 | if route.startswith(('edit_repo', 'repo_summary')): |
|
243 | 244 | # allow summary and edit repo on missing requirements |
|
244 | 245 | return c |
|
245 | 246 | |
|
246 | 247 | raise HTTPFound( |
|
247 | 248 | h.route_path('repo_summary', repo_name=self.db_repo_name)) |
|
248 | 249 | |
|
249 | 250 | else: # redirect if we don't show missing requirements |
|
250 | 251 | raise HTTPFound(h.route_path('home')) |
|
251 | 252 | |
|
252 | 253 | c.has_origin_repo_read_perm = False |
|
253 | 254 | if self.db_repo.fork: |
|
254 | 255 | c.has_origin_repo_read_perm = h.HasRepoPermissionAny( |
|
255 | 256 | 'repository.write', 'repository.read', 'repository.admin')( |
|
256 | 257 | self.db_repo.fork.repo_name, 'summary fork link') |
|
257 | 258 | |
|
258 | 259 | return c |
|
259 | 260 | |
|
260 | 261 | def _get_f_path_unchecked(self, matchdict, default=None): |
|
261 | 262 | """ |
|
262 | 263 | Should only be used by redirects, everything else should call _get_f_path |
|
263 | 264 | """ |
|
264 | 265 | f_path = matchdict.get('f_path') |
|
265 | 266 | if f_path: |
|
266 | 267 | # fix for multiple initial slashes that causes errors for GIT |
|
267 | 268 | return f_path.lstrip('/') |
|
268 | 269 | |
|
269 | 270 | return default |
|
270 | 271 | |
|
271 | 272 | def _get_f_path(self, matchdict, default=None): |
|
272 | 273 | f_path_match = self._get_f_path_unchecked(matchdict, default) |
|
273 | 274 | return self.path_filter.assert_path_permissions(f_path_match) |
|
274 | 275 | |
|
275 | 276 | def _get_general_setting(self, target_repo, settings_key, default=False): |
|
276 | 277 | settings_model = VcsSettingsModel(repo=target_repo) |
|
277 | 278 | settings = settings_model.get_general_settings() |
|
278 | 279 | return settings.get(settings_key, default) |
|
279 | 280 | |
|
280 | 281 | |
|
281 | 282 | class PathFilter(object): |
|
282 | 283 | |
|
283 | 284 | # Expects an instance of BasePathPermissionChecker or None |
|
284 | 285 | def __init__(self, permission_checker): |
|
285 | 286 | self.permission_checker = permission_checker |
|
286 | 287 | |
|
287 | 288 | def assert_path_permissions(self, path): |
|
288 | 289 | if path and self.permission_checker and not self.permission_checker.has_access(path): |
|
289 | 290 | raise HTTPForbidden() |
|
290 | 291 | return path |
|
291 | 292 | |
|
292 | 293 | def filter_patchset(self, patchset): |
|
293 | 294 | if not self.permission_checker or not patchset: |
|
294 | 295 | return patchset, False |
|
295 | 296 | had_filtered = False |
|
296 | 297 | filtered_patchset = [] |
|
297 | 298 | for patch in patchset: |
|
298 | 299 | filename = patch.get('filename', None) |
|
299 | 300 | if not filename or self.permission_checker.has_access(filename): |
|
300 | 301 | filtered_patchset.append(patch) |
|
301 | 302 | else: |
|
302 | 303 | had_filtered = True |
|
303 | 304 | if had_filtered: |
|
304 | 305 | if isinstance(patchset, diffs.LimitedDiffContainer): |
|
305 | 306 | filtered_patchset = diffs.LimitedDiffContainer(patchset.diff_limit, patchset.cur_diff_size, filtered_patchset) |
|
306 | 307 | return filtered_patchset, True |
|
307 | 308 | else: |
|
308 | 309 | return patchset, False |
|
309 | 310 | |
|
310 | 311 | def render_patchset_filtered(self, diffset, patchset, source_ref=None, target_ref=None): |
|
311 | 312 | filtered_patchset, has_hidden_changes = self.filter_patchset(patchset) |
|
312 | 313 | result = diffset.render_patchset( |
|
313 | 314 | filtered_patchset, source_ref=source_ref, target_ref=target_ref) |
|
314 | 315 | result.has_hidden_changes = has_hidden_changes |
|
315 | 316 | return result |
|
316 | 317 | |
|
317 | 318 | def get_raw_patch(self, diff_processor): |
|
318 | 319 | if self.permission_checker is None: |
|
319 | 320 | return diff_processor.as_raw() |
|
320 | 321 | elif self.permission_checker.has_full_access: |
|
321 | 322 | return diff_processor.as_raw() |
|
322 | 323 | else: |
|
323 | 324 | return '# Repository has user-specific filters, raw patch generation is disabled.' |
|
324 | 325 | |
|
325 | 326 | @property |
|
326 | 327 | def is_enabled(self): |
|
327 | 328 | return self.permission_checker is not None |
|
328 | 329 | |
|
329 | 330 | |
|
330 | 331 | class RepoGroupAppView(BaseAppView): |
|
331 | 332 | def __init__(self, context, request): |
|
332 | 333 | super(RepoGroupAppView, self).__init__(context, request) |
|
333 | 334 | self.db_repo_group = request.db_repo_group |
|
334 | 335 | self.db_repo_group_name = self.db_repo_group.group_name |
|
335 | 336 | |
|
336 | 337 | def _revoke_perms_on_yourself(self, form_result): |
|
337 | 338 | _updates = filter(lambda u: self._rhodecode_user.user_id == int(u[0]), |
|
338 | 339 | form_result['perm_updates']) |
|
339 | 340 | _additions = filter(lambda u: self._rhodecode_user.user_id == int(u[0]), |
|
340 | 341 | form_result['perm_additions']) |
|
341 | 342 | _deletions = filter(lambda u: self._rhodecode_user.user_id == int(u[0]), |
|
342 | 343 | form_result['perm_deletions']) |
|
343 | 344 | admin_perm = 'group.admin' |
|
344 | 345 | if _updates and _updates[0][1] != admin_perm or \ |
|
345 | 346 | _additions and _additions[0][1] != admin_perm or \ |
|
346 | 347 | _deletions and _deletions[0][1] != admin_perm: |
|
347 | 348 | return True |
|
348 | 349 | return False |
|
349 | 350 | |
|
350 | 351 | |
|
351 | 352 | class UserGroupAppView(BaseAppView): |
|
352 | 353 | def __init__(self, context, request): |
|
353 | 354 | super(UserGroupAppView, self).__init__(context, request) |
|
354 | 355 | self.db_user_group = request.db_user_group |
|
355 | 356 | self.db_user_group_name = self.db_user_group.users_group_name |
|
356 | 357 | |
|
357 | 358 | |
|
358 | 359 | class UserAppView(BaseAppView): |
|
359 | 360 | def __init__(self, context, request): |
|
360 | 361 | super(UserAppView, self).__init__(context, request) |
|
361 | 362 | self.db_user = request.db_user |
|
362 | 363 | self.db_user_id = self.db_user.user_id |
|
363 | 364 | |
|
364 | 365 | _ = self.request.translate |
|
365 | 366 | if not request.db_user_supports_default: |
|
366 | 367 | if self.db_user.username == User.DEFAULT_USER: |
|
367 | 368 | h.flash(_("Editing user `{}` is disabled.".format( |
|
368 | 369 | User.DEFAULT_USER)), category='warning') |
|
369 | 370 | raise HTTPFound(h.route_path('users')) |
|
370 | 371 | |
|
371 | 372 | |
|
372 | 373 | class DataGridAppView(object): |
|
373 | 374 | """ |
|
374 | 375 | Common class to have re-usable grid rendering components |
|
375 | 376 | """ |
|
376 | 377 | |
|
377 | 378 | def _extract_ordering(self, request, column_map=None): |
|
378 | 379 | column_map = column_map or {} |
|
379 | 380 | column_index = safe_int(request.GET.get('order[0][column]')) |
|
380 | 381 | order_dir = request.GET.get( |
|
381 | 382 | 'order[0][dir]', 'desc') |
|
382 | 383 | order_by = request.GET.get( |
|
383 | 384 | 'columns[%s][data][sort]' % column_index, 'name_raw') |
|
384 | 385 | |
|
385 | 386 | # translate datatable to DB columns |
|
386 | 387 | order_by = column_map.get(order_by) or order_by |
|
387 | 388 | |
|
388 | 389 | search_q = request.GET.get('search[value]') |
|
389 | 390 | return search_q, order_by, order_dir |
|
390 | 391 | |
|
391 | 392 | def _extract_chunk(self, request): |
|
392 | 393 | start = safe_int(request.GET.get('start'), 0) |
|
393 | 394 | length = safe_int(request.GET.get('length'), 25) |
|
394 | 395 | draw = safe_int(request.GET.get('draw')) |
|
395 | 396 | return draw, start, length |
|
396 | 397 | |
|
397 | 398 | def _get_order_col(self, order_by, model): |
|
398 | if isinstance(order_by, basestring): | |
|
399 | if isinstance(order_by, compat.string_types): | |
|
399 | 400 | try: |
|
400 | 401 | return operator.attrgetter(order_by)(model) |
|
401 | 402 | except AttributeError: |
|
402 | 403 | return None |
|
403 | 404 | else: |
|
404 | 405 | return order_by |
|
405 | 406 | |
|
406 | 407 | |
|
407 | 408 | class BaseReferencesView(RepoAppView): |
|
408 | 409 | """ |
|
409 | 410 | Base for reference view for branches, tags and bookmarks. |
|
410 | 411 | """ |
|
411 | 412 | def load_default_context(self): |
|
412 | 413 | c = self._get_local_tmpl_context() |
|
413 | 414 | |
|
414 | 415 | |
|
415 | 416 | return c |
|
416 | 417 | |
|
417 | 418 | def load_refs_context(self, ref_items, partials_template): |
|
418 | 419 | _render = self.request.get_partial_renderer(partials_template) |
|
419 | 420 | pre_load = ["author", "date", "message"] |
|
420 | 421 | |
|
421 | 422 | is_svn = h.is_svn(self.rhodecode_vcs_repo) |
|
422 | 423 | is_hg = h.is_hg(self.rhodecode_vcs_repo) |
|
423 | 424 | |
|
424 | 425 | format_ref_id = get_format_ref_id(self.rhodecode_vcs_repo) |
|
425 | 426 | |
|
426 | 427 | closed_refs = {} |
|
427 | 428 | if is_hg: |
|
428 | 429 | closed_refs = self.rhodecode_vcs_repo.branches_closed |
|
429 | 430 | |
|
430 | 431 | data = [] |
|
431 | 432 | for ref_name, commit_id in ref_items: |
|
432 | 433 | commit = self.rhodecode_vcs_repo.get_commit( |
|
433 | 434 | commit_id=commit_id, pre_load=pre_load) |
|
434 | 435 | closed = ref_name in closed_refs |
|
435 | 436 | |
|
436 | 437 | # TODO: johbo: Unify generation of reference links |
|
437 | 438 | use_commit_id = '/' in ref_name or is_svn |
|
438 | 439 | |
|
439 | 440 | if use_commit_id: |
|
440 | 441 | files_url = h.route_path( |
|
441 | 442 | 'repo_files', |
|
442 | 443 | repo_name=self.db_repo_name, |
|
443 | 444 | f_path=ref_name if is_svn else '', |
|
444 | 445 | commit_id=commit_id) |
|
445 | 446 | |
|
446 | 447 | else: |
|
447 | 448 | files_url = h.route_path( |
|
448 | 449 | 'repo_files', |
|
449 | 450 | repo_name=self.db_repo_name, |
|
450 | 451 | f_path=ref_name if is_svn else '', |
|
451 | 452 | commit_id=ref_name, |
|
452 | 453 | _query=dict(at=ref_name)) |
|
453 | 454 | |
|
454 | 455 | data.append({ |
|
455 | 456 | "name": _render('name', ref_name, files_url, closed), |
|
456 | 457 | "name_raw": ref_name, |
|
457 | 458 | "date": _render('date', commit.date), |
|
458 | 459 | "date_raw": datetime_to_time(commit.date), |
|
459 | 460 | "author": _render('author', commit.author), |
|
460 | 461 | "commit": _render( |
|
461 | 462 | 'commit', commit.message, commit.raw_id, commit.idx), |
|
462 | 463 | "commit_raw": commit.idx, |
|
463 | 464 | "compare": _render( |
|
464 | 465 | 'compare', format_ref_id(ref_name, commit.raw_id)), |
|
465 | 466 | }) |
|
466 | 467 | |
|
467 | 468 | return data |
|
468 | 469 | |
|
469 | 470 | |
|
470 | 471 | class RepoRoutePredicate(object): |
|
471 | 472 | def __init__(self, val, config): |
|
472 | 473 | self.val = val |
|
473 | 474 | |
|
474 | 475 | def text(self): |
|
475 | 476 | return 'repo_route = %s' % self.val |
|
476 | 477 | |
|
477 | 478 | phash = text |
|
478 | 479 | |
|
479 | 480 | def __call__(self, info, request): |
|
480 | 481 | if hasattr(request, 'vcs_call'): |
|
481 | 482 | # skip vcs calls |
|
482 | 483 | return |
|
483 | 484 | |
|
484 | 485 | repo_name = info['match']['repo_name'] |
|
485 | 486 | repo_model = repo.RepoModel() |
|
486 | 487 | |
|
487 | 488 | by_name_match = repo_model.get_by_repo_name(repo_name, cache=False) |
|
488 | 489 | |
|
489 | 490 | def redirect_if_creating(route_info, db_repo): |
|
490 | 491 | skip_views = ['edit_repo_advanced_delete'] |
|
491 | 492 | route = route_info['route'] |
|
492 | 493 | # we should skip delete view so we can actually "remove" repositories |
|
493 | 494 | # if they get stuck in creating state. |
|
494 | 495 | if route.name in skip_views: |
|
495 | 496 | return |
|
496 | 497 | |
|
497 | 498 | if db_repo.repo_state in [repo.Repository.STATE_PENDING]: |
|
498 | 499 | repo_creating_url = request.route_path( |
|
499 | 500 | 'repo_creating', repo_name=db_repo.repo_name) |
|
500 | 501 | raise HTTPFound(repo_creating_url) |
|
501 | 502 | |
|
502 | 503 | if by_name_match: |
|
503 | 504 | # register this as request object we can re-use later |
|
504 | 505 | request.db_repo = by_name_match |
|
505 | 506 | redirect_if_creating(info, by_name_match) |
|
506 | 507 | return True |
|
507 | 508 | |
|
508 | 509 | by_id_match = repo_model.get_repo_by_id(repo_name) |
|
509 | 510 | if by_id_match: |
|
510 | 511 | request.db_repo = by_id_match |
|
511 | 512 | redirect_if_creating(info, by_id_match) |
|
512 | 513 | return True |
|
513 | 514 | |
|
514 | 515 | return False |
|
515 | 516 | |
|
516 | 517 | |
|
517 | 518 | class RepoForbidArchivedRoutePredicate(object): |
|
518 | 519 | def __init__(self, val, config): |
|
519 | 520 | self.val = val |
|
520 | 521 | |
|
521 | 522 | def text(self): |
|
522 | 523 | return 'repo_forbid_archived = %s' % self.val |
|
523 | 524 | |
|
524 | 525 | phash = text |
|
525 | 526 | |
|
526 | 527 | def __call__(self, info, request): |
|
527 | 528 | _ = request.translate |
|
528 | 529 | rhodecode_db_repo = request.db_repo |
|
529 | 530 | |
|
530 | 531 | log.debug( |
|
531 | 532 | '%s checking if archived flag for repo for %s', |
|
532 | 533 | self.__class__.__name__, rhodecode_db_repo.repo_name) |
|
533 | 534 | |
|
534 | 535 | if rhodecode_db_repo.archived: |
|
535 | 536 | log.warning('Current view is not supported for archived repo:%s', |
|
536 | 537 | rhodecode_db_repo.repo_name) |
|
537 | 538 | |
|
538 | 539 | h.flash( |
|
539 | 540 | h.literal(_('Action not supported for archived repository.')), |
|
540 | 541 | category='warning') |
|
541 | 542 | summary_url = request.route_path( |
|
542 | 543 | 'repo_summary', repo_name=rhodecode_db_repo.repo_name) |
|
543 | 544 | raise HTTPFound(summary_url) |
|
544 | 545 | return True |
|
545 | 546 | |
|
546 | 547 | |
|
547 | 548 | class RepoTypeRoutePredicate(object): |
|
548 | 549 | def __init__(self, val, config): |
|
549 | 550 | self.val = val or ['hg', 'git', 'svn'] |
|
550 | 551 | |
|
551 | 552 | def text(self): |
|
552 | 553 | return 'repo_accepted_type = %s' % self.val |
|
553 | 554 | |
|
554 | 555 | phash = text |
|
555 | 556 | |
|
556 | 557 | def __call__(self, info, request): |
|
557 | 558 | if hasattr(request, 'vcs_call'): |
|
558 | 559 | # skip vcs calls |
|
559 | 560 | return |
|
560 | 561 | |
|
561 | 562 | rhodecode_db_repo = request.db_repo |
|
562 | 563 | |
|
563 | 564 | log.debug( |
|
564 | 565 | '%s checking repo type for %s in %s', |
|
565 | 566 | self.__class__.__name__, rhodecode_db_repo.repo_type, self.val) |
|
566 | 567 | |
|
567 | 568 | if rhodecode_db_repo.repo_type in self.val: |
|
568 | 569 | return True |
|
569 | 570 | else: |
|
570 | 571 | log.warning('Current view is not supported for repo type:%s', |
|
571 | 572 | rhodecode_db_repo.repo_type) |
|
572 | 573 | return False |
|
573 | 574 | |
|
574 | 575 | |
|
575 | 576 | class RepoGroupRoutePredicate(object): |
|
576 | 577 | def __init__(self, val, config): |
|
577 | 578 | self.val = val |
|
578 | 579 | |
|
579 | 580 | def text(self): |
|
580 | 581 | return 'repo_group_route = %s' % self.val |
|
581 | 582 | |
|
582 | 583 | phash = text |
|
583 | 584 | |
|
584 | 585 | def __call__(self, info, request): |
|
585 | 586 | if hasattr(request, 'vcs_call'): |
|
586 | 587 | # skip vcs calls |
|
587 | 588 | return |
|
588 | 589 | |
|
589 | 590 | repo_group_name = info['match']['repo_group_name'] |
|
590 | 591 | repo_group_model = repo_group.RepoGroupModel() |
|
591 | 592 | by_name_match = repo_group_model.get_by_group_name(repo_group_name, cache=False) |
|
592 | 593 | |
|
593 | 594 | if by_name_match: |
|
594 | 595 | # register this as request object we can re-use later |
|
595 | 596 | request.db_repo_group = by_name_match |
|
596 | 597 | return True |
|
597 | 598 | |
|
598 | 599 | return False |
|
599 | 600 | |
|
600 | 601 | |
|
601 | 602 | class UserGroupRoutePredicate(object): |
|
602 | 603 | def __init__(self, val, config): |
|
603 | 604 | self.val = val |
|
604 | 605 | |
|
605 | 606 | def text(self): |
|
606 | 607 | return 'user_group_route = %s' % self.val |
|
607 | 608 | |
|
608 | 609 | phash = text |
|
609 | 610 | |
|
610 | 611 | def __call__(self, info, request): |
|
611 | 612 | if hasattr(request, 'vcs_call'): |
|
612 | 613 | # skip vcs calls |
|
613 | 614 | return |
|
614 | 615 | |
|
615 | 616 | user_group_id = info['match']['user_group_id'] |
|
616 | 617 | user_group_model = user_group.UserGroup() |
|
617 | 618 | by_id_match = user_group_model.get(user_group_id, cache=False) |
|
618 | 619 | |
|
619 | 620 | if by_id_match: |
|
620 | 621 | # register this as request object we can re-use later |
|
621 | 622 | request.db_user_group = by_id_match |
|
622 | 623 | return True |
|
623 | 624 | |
|
624 | 625 | return False |
|
625 | 626 | |
|
626 | 627 | |
|
627 | 628 | class UserRoutePredicateBase(object): |
|
628 | 629 | supports_default = None |
|
629 | 630 | |
|
630 | 631 | def __init__(self, val, config): |
|
631 | 632 | self.val = val |
|
632 | 633 | |
|
633 | 634 | def text(self): |
|
634 | 635 | raise NotImplementedError() |
|
635 | 636 | |
|
636 | 637 | def __call__(self, info, request): |
|
637 | 638 | if hasattr(request, 'vcs_call'): |
|
638 | 639 | # skip vcs calls |
|
639 | 640 | return |
|
640 | 641 | |
|
641 | 642 | user_id = info['match']['user_id'] |
|
642 | 643 | user_model = user.User() |
|
643 | 644 | by_id_match = user_model.get(user_id, cache=False) |
|
644 | 645 | |
|
645 | 646 | if by_id_match: |
|
646 | 647 | # register this as request object we can re-use later |
|
647 | 648 | request.db_user = by_id_match |
|
648 | 649 | request.db_user_supports_default = self.supports_default |
|
649 | 650 | return True |
|
650 | 651 | |
|
651 | 652 | return False |
|
652 | 653 | |
|
653 | 654 | |
|
654 | 655 | class UserRoutePredicate(UserRoutePredicateBase): |
|
655 | 656 | supports_default = False |
|
656 | 657 | |
|
657 | 658 | def text(self): |
|
658 | 659 | return 'user_route = %s' % self.val |
|
659 | 660 | |
|
660 | 661 | phash = text |
|
661 | 662 | |
|
662 | 663 | |
|
663 | 664 | class UserRouteWithDefaultPredicate(UserRoutePredicateBase): |
|
664 | 665 | supports_default = True |
|
665 | 666 | |
|
666 | 667 | def text(self): |
|
667 | 668 | return 'user_with_default_route = %s' % self.val |
|
668 | 669 | |
|
669 | 670 | phash = text |
|
670 | 671 | |
|
671 | 672 | |
|
672 | 673 | def includeme(config): |
|
673 | 674 | config.add_route_predicate( |
|
674 | 675 | 'repo_route', RepoRoutePredicate) |
|
675 | 676 | config.add_route_predicate( |
|
676 | 677 | 'repo_accepted_types', RepoTypeRoutePredicate) |
|
677 | 678 | config.add_route_predicate( |
|
678 | 679 | 'repo_forbid_when_archived', RepoForbidArchivedRoutePredicate) |
|
679 | 680 | config.add_route_predicate( |
|
680 | 681 | 'repo_group_route', RepoGroupRoutePredicate) |
|
681 | 682 | config.add_route_predicate( |
|
682 | 683 | 'user_group_route', UserGroupRoutePredicate) |
|
683 | 684 | config.add_route_predicate( |
|
684 | 685 | 'user_route_with_default', UserRouteWithDefaultPredicate) |
|
685 | 686 | config.add_route_predicate( |
|
686 | 687 | 'user_route', UserRoutePredicate) |
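For context, add_route_requirements() defined near the top of this file only interpolates the URL_NAME_REQUIREMENTS regexes into a route pattern before it is handed to config.add_route(); placeholders without an entry in the mapping are left untouched. A small usage sketch (the route pattern is an illustrative example, not taken from the project's route configuration):

    # assumes this module is importable as rhodecode.apps._base
    from rhodecode.apps._base import add_route_requirements

    pattern = add_route_requirements('/{repo_name}/files/{commit_id}/{f_path}')
    # '{repo_name}' and '{f_path}' pick up their regexes from URL_NAME_REQUIREMENTS,
    # while '{commit_id}' has no entry and stays as written:
    print(pattern)  # /{repo_name:.*?[^/]}/files/{commit_id}/{f_path:.*}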
@@ -1,90 +1,90 @@
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2019 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | ||
|
20 | import os | |
|
21 | 21 | import logging |
|
22 | import os | |
|
23 | 22 | import shlex |
|
23 | from pyramid import compat | |
|
24 | 24 | |
|
25 | 25 | # Do not use `from rhodecode import events` here, it will be overridden by the |
|
26 | 26 | # events module in this package due to pythons import mechanism. |
|
27 | 27 | from rhodecode.events import RepoGroupEvent |
|
28 | 28 | from rhodecode.subscribers import AsyncSubprocessSubscriber |
|
29 | 29 | from rhodecode.config.middleware import ( |
|
30 | 30 | _bool_setting, _string_setting, _int_setting) |
|
31 | 31 | |
|
32 | 32 | from .events import ModDavSvnConfigChange |
|
33 | 33 | from .subscribers import generate_config_subscriber |
|
34 | 34 | from . import config_keys |
|
35 | 35 | |
|
36 | 36 | |
|
37 | 37 | log = logging.getLogger(__name__) |
|
38 | 38 | |
|
39 | 39 | |
|
40 | 40 | def includeme(config): |
|
41 | 41 | settings = config.registry.settings |
|
42 | 42 | _sanitize_settings_and_apply_defaults(settings) |
|
43 | 43 | |
|
44 | 44 | if settings[config_keys.generate_config]: |
|
45 | 45 | # Add subscriber to generate the Apache mod dav svn configuration on |
|
46 | 46 | # repository group events. |
|
47 | 47 | config.add_subscriber(generate_config_subscriber, RepoGroupEvent) |
|
48 | 48 | |
|
49 | 49 | # If a reload command is set add a subscriber to execute it on |
|
50 | 50 | # configuration changes. |
|
51 | 51 | reload_cmd = shlex.split(settings[config_keys.reload_command]) |
|
52 | 52 | if reload_cmd: |
|
53 | 53 | reload_timeout = settings[config_keys.reload_timeout] or None |
|
54 | 54 | reload_subscriber = AsyncSubprocessSubscriber( |
|
55 | 55 | cmd=reload_cmd, timeout=reload_timeout) |
|
56 | 56 | config.add_subscriber(reload_subscriber, ModDavSvnConfigChange) |
|
57 | 57 | |
|
58 | 58 | |
|
59 | 59 | def _sanitize_settings_and_apply_defaults(settings): |
|
60 | 60 | """ |
|
61 | 61 | Set defaults, convert to python types and validate settings. |
|
62 | 62 | """ |
|
63 | 63 | _bool_setting(settings, config_keys.generate_config, 'false') |
|
64 | 64 | _bool_setting(settings, config_keys.list_parent_path, 'true') |
|
65 | 65 | _int_setting(settings, config_keys.reload_timeout, 10) |
|
66 | 66 | _string_setting(settings, config_keys.config_file_path, '', lower=False) |
|
67 | 67 | _string_setting(settings, config_keys.location_root, '/', lower=False) |
|
68 | 68 | _string_setting(settings, config_keys.reload_command, '', lower=False) |
|
69 | 69 | _string_setting(settings, config_keys.template, '', lower=False) |
|
70 | 70 | |
|
71 | 71 | # Convert negative timeout values to zero. |
|
72 | 72 | if settings[config_keys.reload_timeout] < 0: |
|
73 | 73 | settings[config_keys.reload_timeout] = 0 |
|
74 | 74 | |
|
75 | 75 | # Append path separator to location root. |
|
76 | 76 | settings[config_keys.location_root] = _append_path_sep( |
|
77 | 77 | settings[config_keys.location_root]) |
|
78 | 78 | |
|
79 | 79 | # Validate settings. |
|
80 | 80 | if settings[config_keys.generate_config]: |
|
81 | 81 | assert len(settings[config_keys.config_file_path]) > 0 |
|
82 | 82 | |
|
83 | 83 | |
|
84 | 84 | def _append_path_sep(path): |
|
85 | 85 | """ |
|
86 | 86 | Append the path separator if missing. |
|
87 | 87 | """ |
|
88 | if isinstance(path, |

88 | if isinstance(path, compat.string_types) and not path.endswith(os.path.sep): |
|
89 | 89 | path += os.path.sep |
|
90 | 90 | return path |
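
The `_append_path_sep` change above shows the pattern applied throughout this diff: a bare string type check is replaced with `pyramid.compat.string_types`, which in Pyramid 1.x resolves to `(basestring,)` on Python 2 and `(str,)` on Python 3, so the same `isinstance` call works under either interpreter (the module was removed in Pyramid 2.0, so this assumes a 1.x install). A self-contained sketch of the helper's behaviour, with the function name local to this example:

    import os
    from pyramid import compat

    def append_path_sep(path):
        # Strings get a trailing separator when missing; non-string values
        # (e.g. None) pass through untouched.
        if isinstance(path, compat.string_types) and not path.endswith(os.path.sep):
            path += os.path.sep
        return path

    assert append_path_sep('/srv/svn') == '/srv/svn' + os.path.sep
    assert append_path_sep(None) is None
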
@@ -1,339 +1,340 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2012-2019 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | RhodeCode task modules, containing all task that suppose to be run |
|
23 | 23 | by celery daemon |
|
24 | 24 | """ |
|
25 | 25 | |
|
26 | 26 | import os |
|
27 | 27 | import time |
|
28 | 28 | |
|
29 | from pyramid import compat | |
|
29 | 30 | from pyramid_mailer.mailer import Mailer |
|
30 | 31 | from pyramid_mailer.message import Message |
|
31 | 32 | |
|
32 | 33 | import rhodecode |
|
33 | 34 | from rhodecode.lib import audit_logger |
|
34 | 35 | from rhodecode.lib.celerylib import get_logger, async_task, RequestContextTask |
|
35 | 36 | from rhodecode.lib.hooks_base import log_create_repository |
|
36 | 37 | from rhodecode.lib.utils2 import safe_int, str2bool |
|
37 | 38 | from rhodecode.model.db import Session, IntegrityError, Repository, User, true |
|
38 | 39 | |
|
39 | 40 | |
|
40 | 41 | @async_task(ignore_result=True, base=RequestContextTask) |
|
41 | 42 | def send_email(recipients, subject, body='', html_body='', email_config=None): |
|
42 | 43 | """ |
|
43 | 44 | Sends an email with defined parameters from the .ini files. |
|
44 | 45 | |
|
45 | 46 | :param recipients: list of recipients, it this is empty the defined email |
|
46 | 47 | address from field 'email_to' is used instead |
|
47 | 48 | :param subject: subject of the mail |
|
48 | 49 | :param body: body of the mail |
|
49 | 50 | :param html_body: html version of body |
|
50 | 51 | """ |
|
51 | 52 | log = get_logger(send_email) |
|
52 | 53 | |
|
53 | 54 | email_config = email_config or rhodecode.CONFIG |
|
54 | 55 | |
|
55 | 56 | mail_server = email_config.get('smtp_server') or None |
|
56 | 57 | if mail_server is None: |
|
57 | 58 | log.error("SMTP server information missing. Sending email failed. " |
|
58 | 59 | "Make sure that `smtp_server` variable is configured " |
|
59 | 60 | "inside the .ini file") |
|
60 | 61 | return False |
|
61 | 62 | |
|
62 | 63 | subject = "%s %s" % (email_config.get('email_prefix', ''), subject) |
|
63 | 64 | |
|
64 | 65 | if recipients: |
|
65 | if isinstance(recipients, |

66 | if isinstance(recipients, compat.string_types): |
|
66 | 67 | recipients = recipients.split(',') |
|
67 | 68 | else: |
|
68 | 69 | # if recipients are not defined we send to email_config + all admins |
|
69 | 70 | admins = [] |
|
70 | 71 | for u in User.query().filter(User.admin == true()).all(): |
|
71 | 72 | if u.email: |
|
72 | 73 | admins.append(u.email) |
|
73 | 74 | recipients = [] |
|
74 | 75 | config_email = email_config.get('email_to') |
|
75 | 76 | if config_email: |
|
76 | 77 | recipients += [config_email] |
|
77 | 78 | recipients += admins |
|
78 | 79 | |
|
79 | 80 | # translate our LEGACY config into the one that pyramid_mailer supports |
|
80 | 81 | email_conf = dict( |
|
81 | 82 | host=mail_server, |
|
82 | 83 | port=email_config.get('smtp_port', 25), |
|
83 | 84 | username=email_config.get('smtp_username'), |
|
84 | 85 | password=email_config.get('smtp_password'), |
|
85 | 86 | |
|
86 | 87 | tls=str2bool(email_config.get('smtp_use_tls')), |
|
87 | 88 | ssl=str2bool(email_config.get('smtp_use_ssl')), |
|
88 | 89 | |
|
89 | 90 | # SSL key file |
|
90 | 91 | # keyfile='', |
|
91 | 92 | |
|
92 | 93 | # SSL certificate file |
|
93 | 94 | # certfile='', |
|
94 | 95 | |
|
95 | 96 | # Location of maildir |
|
96 | 97 | # queue_path='', |
|
97 | 98 | |
|
98 | 99 | default_sender=email_config.get('app_email_from', 'RhodeCode'), |
|
99 | 100 | |
|
100 | 101 | debug=str2bool(email_config.get('smtp_debug')), |
|
101 | 102 | # /usr/sbin/sendmail Sendmail executable |
|
102 | 103 | # sendmail_app='', |
|
103 | 104 | |
|
104 | 105 | # {sendmail_app} -t -i -f {sender} Template for sendmail execution |
|
105 | 106 | # sendmail_template='', |
|
106 | 107 | ) |
|
107 | 108 | |
|
108 | 109 | try: |
|
109 | 110 | mailer = Mailer(**email_conf) |
|
110 | 111 | |
|
111 | 112 | message = Message(subject=subject, |
|
112 | 113 | sender=email_conf['default_sender'], |
|
113 | 114 | recipients=recipients, |
|
114 | 115 | body=body, html=html_body) |
|
115 | 116 | mailer.send_immediately(message) |
|
116 | 117 | |
|
117 | 118 | except Exception: |
|
118 | 119 | log.exception('Mail sending failed') |
|
119 | 120 | return False |
|
120 | 121 | return True |
|
121 | 122 | |
|
122 | 123 | |
|
123 | 124 | @async_task(ignore_result=True, base=RequestContextTask) |
|
124 | 125 | def create_repo(form_data, cur_user): |
|
125 | 126 | from rhodecode.model.repo import RepoModel |
|
126 | 127 | from rhodecode.model.user import UserModel |
|
127 | 128 | from rhodecode.model.settings import SettingsModel |
|
128 | 129 | |
|
129 | 130 | log = get_logger(create_repo) |
|
130 | 131 | |
|
131 | 132 | cur_user = UserModel()._get_user(cur_user) |
|
132 | 133 | owner = cur_user |
|
133 | 134 | |
|
134 | 135 | repo_name = form_data['repo_name'] |
|
135 | 136 | repo_name_full = form_data['repo_name_full'] |
|
136 | 137 | repo_type = form_data['repo_type'] |
|
137 | 138 | description = form_data['repo_description'] |
|
138 | 139 | private = form_data['repo_private'] |
|
139 | 140 | clone_uri = form_data.get('clone_uri') |
|
140 | 141 | repo_group = safe_int(form_data['repo_group']) |
|
141 | 142 | landing_rev = form_data['repo_landing_rev'] |
|
142 | 143 | copy_fork_permissions = form_data.get('copy_permissions') |
|
143 | 144 | copy_group_permissions = form_data.get('repo_copy_permissions') |
|
144 | 145 | fork_of = form_data.get('fork_parent_id') |
|
145 | 146 | state = form_data.get('repo_state', Repository.STATE_PENDING) |
|
146 | 147 | |
|
147 | 148 | # repo creation defaults, private and repo_type are filled in form |
|
148 | 149 | defs = SettingsModel().get_default_repo_settings(strip_prefix=True) |
|
149 | 150 | enable_statistics = form_data.get( |
|
150 | 151 | 'enable_statistics', defs.get('repo_enable_statistics')) |
|
151 | 152 | enable_locking = form_data.get( |
|
152 | 153 | 'enable_locking', defs.get('repo_enable_locking')) |
|
153 | 154 | enable_downloads = form_data.get( |
|
154 | 155 | 'enable_downloads', defs.get('repo_enable_downloads')) |
|
155 | 156 | |
|
156 | 157 | try: |
|
157 | 158 | RepoModel()._create_repo( |
|
158 | 159 | repo_name=repo_name_full, |
|
159 | 160 | repo_type=repo_type, |
|
160 | 161 | description=description, |
|
161 | 162 | owner=owner, |
|
162 | 163 | private=private, |
|
163 | 164 | clone_uri=clone_uri, |
|
164 | 165 | repo_group=repo_group, |
|
165 | 166 | landing_rev=landing_rev, |
|
166 | 167 | fork_of=fork_of, |
|
167 | 168 | copy_fork_permissions=copy_fork_permissions, |
|
168 | 169 | copy_group_permissions=copy_group_permissions, |
|
169 | 170 | enable_statistics=enable_statistics, |
|
170 | 171 | enable_locking=enable_locking, |
|
171 | 172 | enable_downloads=enable_downloads, |
|
172 | 173 | state=state |
|
173 | 174 | ) |
|
174 | 175 | Session().commit() |
|
175 | 176 | |
|
176 | 177 | # now create this repo on Filesystem |
|
177 | 178 | RepoModel()._create_filesystem_repo( |
|
178 | 179 | repo_name=repo_name, |
|
179 | 180 | repo_type=repo_type, |
|
180 | 181 | repo_group=RepoModel()._get_repo_group(repo_group), |
|
181 | 182 | clone_uri=clone_uri, |
|
182 | 183 | ) |
|
183 | 184 | repo = Repository.get_by_repo_name(repo_name_full) |
|
184 | 185 | log_create_repository(created_by=owner.username, **repo.get_dict()) |
|
185 | 186 | |
|
186 | 187 | # update repo commit caches initially |
|
187 | 188 | repo.update_commit_cache() |
|
188 | 189 | |
|
189 | 190 | # set new created state |
|
190 | 191 | repo.set_state(Repository.STATE_CREATED) |
|
191 | 192 | repo_id = repo.repo_id |
|
192 | 193 | repo_data = repo.get_api_data() |
|
193 | 194 | |
|
194 | 195 | audit_logger.store( |
|
195 | 196 | 'repo.create', action_data={'data': repo_data}, |
|
196 | 197 | user=cur_user, |
|
197 | 198 | repo=audit_logger.RepoWrap(repo_name=repo_name, repo_id=repo_id)) |
|
198 | 199 | |
|
199 | 200 | Session().commit() |
|
200 | 201 | except Exception as e: |
|
201 | 202 | log.warning('Exception occurred when creating repository, ' |
|
202 | 203 | 'doing cleanup...', exc_info=True) |
|
203 | 204 | if isinstance(e, IntegrityError): |
|
204 | 205 | Session().rollback() |
|
205 | 206 | |
|
206 | 207 | # rollback things manually ! |
|
207 | 208 | repo = Repository.get_by_repo_name(repo_name_full) |
|
208 | 209 | if repo: |
|
209 | 210 | Repository.delete(repo.repo_id) |
|
210 | 211 | Session().commit() |
|
211 | 212 | RepoModel()._delete_filesystem_repo(repo) |
|
212 | 213 | log.info('Cleanup of repo %s finished', repo_name_full) |
|
213 | 214 | raise |
|
214 | 215 | |
|
215 | 216 | return True |
|
216 | 217 | |
|
217 | 218 | |
|
218 | 219 | @async_task(ignore_result=True, base=RequestContextTask) |
|
219 | 220 | def create_repo_fork(form_data, cur_user): |
|
220 | 221 | """ |
|
221 | 222 | Creates a fork of repository using internal VCS methods |
|
222 | 223 | """ |
|
223 | 224 | from rhodecode.model.repo import RepoModel |
|
224 | 225 | from rhodecode.model.user import UserModel |
|
225 | 226 | |
|
226 | 227 | log = get_logger(create_repo_fork) |
|
227 | 228 | |
|
228 | 229 | cur_user = UserModel()._get_user(cur_user) |
|
229 | 230 | owner = cur_user |
|
230 | 231 | |
|
231 | 232 | repo_name = form_data['repo_name'] # fork in this case |
|
232 | 233 | repo_name_full = form_data['repo_name_full'] |
|
233 | 234 | repo_type = form_data['repo_type'] |
|
234 | 235 | description = form_data['description'] |
|
235 | 236 | private = form_data['private'] |
|
236 | 237 | clone_uri = form_data.get('clone_uri') |
|
237 | 238 | repo_group = safe_int(form_data['repo_group']) |
|
238 | 239 | landing_rev = form_data['landing_rev'] |
|
239 | 240 | copy_fork_permissions = form_data.get('copy_permissions') |
|
240 | 241 | fork_id = safe_int(form_data.get('fork_parent_id')) |
|
241 | 242 | |
|
242 | 243 | try: |
|
243 | 244 | fork_of = RepoModel()._get_repo(fork_id) |
|
244 | 245 | RepoModel()._create_repo( |
|
245 | 246 | repo_name=repo_name_full, |
|
246 | 247 | repo_type=repo_type, |
|
247 | 248 | description=description, |
|
248 | 249 | owner=owner, |
|
249 | 250 | private=private, |
|
250 | 251 | clone_uri=clone_uri, |
|
251 | 252 | repo_group=repo_group, |
|
252 | 253 | landing_rev=landing_rev, |
|
253 | 254 | fork_of=fork_of, |
|
254 | 255 | copy_fork_permissions=copy_fork_permissions |
|
255 | 256 | ) |
|
256 | 257 | |
|
257 | 258 | Session().commit() |
|
258 | 259 | |
|
259 | 260 | base_path = Repository.base_path() |
|
260 | 261 | source_repo_path = os.path.join(base_path, fork_of.repo_name) |
|
261 | 262 | |
|
262 | 263 | # now create this repo on Filesystem |
|
263 | 264 | RepoModel()._create_filesystem_repo( |
|
264 | 265 | repo_name=repo_name, |
|
265 | 266 | repo_type=repo_type, |
|
266 | 267 | repo_group=RepoModel()._get_repo_group(repo_group), |
|
267 | 268 | clone_uri=source_repo_path, |
|
268 | 269 | ) |
|
269 | 270 | repo = Repository.get_by_repo_name(repo_name_full) |
|
270 | 271 | log_create_repository(created_by=owner.username, **repo.get_dict()) |
|
271 | 272 | |
|
272 | 273 | # update repo commit caches initially |
|
273 | 274 | config = repo._config |
|
274 | 275 | config.set('extensions', 'largefiles', '') |
|
275 | 276 | repo.update_commit_cache(config=config) |
|
276 | 277 | |
|
277 | 278 | # set new created state |
|
278 | 279 | repo.set_state(Repository.STATE_CREATED) |
|
279 | 280 | |
|
280 | 281 | repo_id = repo.repo_id |
|
281 | 282 | repo_data = repo.get_api_data() |
|
282 | 283 | audit_logger.store( |
|
283 | 284 | 'repo.fork', action_data={'data': repo_data}, |
|
284 | 285 | user=cur_user, |
|
285 | 286 | repo=audit_logger.RepoWrap(repo_name=repo_name, repo_id=repo_id)) |
|
286 | 287 | |
|
287 | 288 | Session().commit() |
|
288 | 289 | except Exception as e: |
|
289 | 290 | log.warning('Exception occurred when forking repository, ' |
|
290 | 291 | 'doing cleanup...', exc_info=True) |
|
291 | 292 | if isinstance(e, IntegrityError): |
|
292 | 293 | Session().rollback() |
|
293 | 294 | |
|
294 | 295 | # rollback things manually ! |
|
295 | 296 | repo = Repository.get_by_repo_name(repo_name_full) |
|
296 | 297 | if repo: |
|
297 | 298 | Repository.delete(repo.repo_id) |
|
298 | 299 | Session().commit() |
|
299 | 300 | RepoModel()._delete_filesystem_repo(repo) |
|
300 | 301 | log.info('Cleanup of repo %s finished', repo_name_full) |
|
301 | 302 | raise |
|
302 | 303 | |
|
303 | 304 | return True |
|
304 | 305 | |
|
305 | 306 | |
|
306 | 307 | @async_task(ignore_result=True) |
|
307 | 308 | def repo_maintenance(repoid): |
|
308 | 309 | from rhodecode.lib import repo_maintenance as repo_maintenance_lib |
|
309 | 310 | log = get_logger(repo_maintenance) |
|
310 | 311 | repo = Repository.get_by_id_or_repo_name(repoid) |
|
311 | 312 | if repo: |
|
312 | 313 | maintenance = repo_maintenance_lib.RepoMaintenance() |
|
313 | 314 | tasks = maintenance.get_tasks_for_repo(repo) |
|
314 | 315 | log.debug('Executing %s tasks on repo `%s`', tasks, repoid) |
|
315 | 316 | executed_types = maintenance.execute(repo) |
|
316 | 317 | log.debug('Got execution results %s', executed_types) |
|
317 | 318 | else: |
|
318 | 319 | log.debug('Repo `%s` not found or without a clone_url', repoid) |
|
319 | 320 | |
|
320 | 321 | |
|
321 | 322 | @async_task(ignore_result=True) |
|
322 | 323 | def check_for_update(): |
|
323 | 324 | from rhodecode.model.update import UpdateModel |
|
324 | 325 | update_url = UpdateModel().get_update_url() |
|
325 | 326 | cur_ver = rhodecode.__version__ |
|
326 | 327 | |
|
327 | 328 | try: |
|
328 | 329 | data = UpdateModel().get_update_data(update_url) |
|
329 | 330 | latest = data['versions'][0] |
|
330 | 331 | UpdateModel().store_version(latest['version']) |
|
331 | 332 | except Exception: |
|
332 | 333 | pass |
|
333 | 334 | |
|
334 | 335 | |
|
335 | 336 | @async_task(ignore_result=False) |
|
336 | 337 | def beat_check(*args, **kwargs): |
|
337 | 338 | log = get_logger(beat_check) |
|
338 | 339 | log.info('Got args: %r and kwargs %r', args, kwargs) |
|
339 | 340 | return time.time() |
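
In `send_email` above, the same compat check lets callers pass recipients either as a list or as a single comma-separated string. A minimal sketch of that normalization, with the admin/`email_to` fallback reduced to a plain argument since the real branch needs a database session:

    from pyramid import compat

    def normalize_recipients(recipients, fallback=None):
        # Mirrors the branch in send_email(): strings are split on commas,
        # an empty value falls back to the configured/admin addresses.
        if recipients:
            if isinstance(recipients, compat.string_types):
                recipients = recipients.split(',')
            return list(recipients)
        return list(fallback or [])

    assert normalize_recipients('a@example.com,b@example.com') == \
        ['a@example.com', 'b@example.com']
    assert normalize_recipients(None, fallback=['admin@example.com']) == \
        ['admin@example.com']
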
@@ -1,775 +1,776 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2011-2019 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import logging |
|
22 | 22 | import difflib |
|
23 | 23 | from itertools import groupby |
|
24 | 24 | |
|
25 | 25 | from pygments import lex |
|
26 | 26 | from pygments.formatters.html import _get_ttype_class as pygment_token_class |
|
27 | 27 | from pygments.lexers.special import TextLexer, Token |
|
28 | 28 | from pygments.lexers import get_lexer_by_name |
|
29 | from pyramid import compat | |
|
29 | 30 | |
|
30 | 31 | from rhodecode.lib.helpers import ( |
|
31 | 32 | get_lexer_for_filenode, html_escape, get_custom_lexer) |
|
32 | 33 | from rhodecode.lib.utils2 import AttributeDict, StrictAttributeDict, safe_unicode |
|
33 | 34 | from rhodecode.lib.vcs.nodes import FileNode |
|
34 | 35 | from rhodecode.lib.vcs.exceptions import VCSError, NodeDoesNotExistError |
|
35 | 36 | from rhodecode.lib.diff_match_patch import diff_match_patch |
|
36 | 37 | from rhodecode.lib.diffs import LimitedDiffContainer, DEL_FILENODE, BIN_FILENODE |
|
37 | 38 | |
|
38 | 39 | |
|
39 | 40 | plain_text_lexer = get_lexer_by_name( |
|
40 | 41 | 'text', stripall=False, stripnl=False, ensurenl=False) |
|
41 | 42 | |
|
42 | 43 | |
|
43 | 44 | log = logging.getLogger(__name__) |
|
44 | 45 | |
|
45 | 46 | |
|
46 | 47 | def filenode_as_lines_tokens(filenode, lexer=None): |
|
47 | 48 | org_lexer = lexer |
|
48 | 49 | lexer = lexer or get_lexer_for_filenode(filenode) |
|
49 | 50 | log.debug('Generating file node pygment tokens for %s, %s, org_lexer:%s', |
|
50 | 51 | lexer, filenode, org_lexer) |
|
51 | 52 | tokens = tokenize_string(filenode.content, lexer) |
|
52 | 53 | lines = split_token_stream(tokens) |
|
53 | 54 | rv = list(lines) |
|
54 | 55 | return rv |
|
55 | 56 | |
|
56 | 57 | |
|
57 | 58 | def tokenize_string(content, lexer): |
|
58 | 59 | """ |
|
59 | 60 | Use pygments to tokenize some content based on a lexer |
|
60 | 61 | ensuring all original new lines and whitespace is preserved |
|
61 | 62 | """ |
|
62 | 63 | |
|
63 | 64 | lexer.stripall = False |
|
64 | 65 | lexer.stripnl = False |
|
65 | 66 | lexer.ensurenl = False |
|
66 | 67 | |
|
67 | 68 | if isinstance(lexer, TextLexer): |
|
68 | 69 | lexed = [(Token.Text, content)] |
|
69 | 70 | else: |
|
70 | 71 | lexed = lex(content, lexer) |
|
71 | 72 | |
|
72 | 73 | for token_type, token_text in lexed: |
|
73 | 74 | yield pygment_token_class(token_type), token_text |
|
74 | 75 | |
|
75 | 76 | |
|
76 | 77 | def split_token_stream(tokens): |
|
77 | 78 | """ |
|
78 | 79 | Take a list of (TokenType, text) tuples and split them by a string |
|
79 | 80 | |
|
80 | 81 | split_token_stream([(TEXT, 'some\ntext'), (TEXT, 'more\n')]) |
|
81 | 82 | [(TEXT, 'some'), (TEXT, 'text'), |
|
82 | 83 | (TEXT, 'more'), (TEXT, 'text')] |
|
83 | 84 | """ |
|
84 | 85 | |
|
85 | 86 | buffer = [] |
|
86 | 87 | for token_class, token_text in tokens: |
|
87 | 88 | parts = token_text.split('\n') |
|
88 | 89 | for part in parts[:-1]: |
|
89 | 90 | buffer.append((token_class, part)) |
|
90 | 91 | yield buffer |
|
91 | 92 | buffer = [] |
|
92 | 93 | |
|
93 | 94 | buffer.append((token_class, parts[-1])) |
|
94 | 95 | |
|
95 | 96 | if buffer: |
|
96 | 97 | yield buffer |
|
97 | 98 | |
|
98 | 99 | |
|
99 | 100 | def filenode_as_annotated_lines_tokens(filenode): |
|
100 | 101 | """ |
|
101 | 102 | Take a file node and return a list of annotations => lines, if no annotation |
|
102 | 103 | is found, it will be None. |
|
103 | 104 | |
|
104 | 105 | eg: |
|
105 | 106 | |
|
106 | 107 | [ |
|
107 | 108 | (annotation1, [ |
|
108 | 109 | (1, line1_tokens_list), |
|
109 | 110 | (2, line2_tokens_list), |
|
110 | 111 | ]), |
|
111 | 112 | (annotation2, [ |
|
112 | 113 | (3, line1_tokens_list), |
|
113 | 114 | ]), |
|
114 | 115 | (None, [ |
|
115 | 116 | (4, line1_tokens_list), |
|
116 | 117 | ]), |
|
117 | 118 | (annotation1, [ |
|
118 | 119 | (5, line1_tokens_list), |
|
119 | 120 | (6, line2_tokens_list), |
|
120 | 121 | ]) |
|
121 | 122 | ] |
|
122 | 123 | """ |
|
123 | 124 | |
|
124 | 125 | commit_cache = {} # cache commit_getter lookups |
|
125 | 126 | |
|
126 | 127 | def _get_annotation(commit_id, commit_getter): |
|
127 | 128 | if commit_id not in commit_cache: |
|
128 | 129 | commit_cache[commit_id] = commit_getter() |
|
129 | 130 | return commit_cache[commit_id] |
|
130 | 131 | |
|
131 | 132 | annotation_lookup = { |
|
132 | 133 | line_no: _get_annotation(commit_id, commit_getter) |
|
133 | 134 | for line_no, commit_id, commit_getter, line_content |
|
134 | 135 | in filenode.annotate |
|
135 | 136 | } |
|
136 | 137 | |
|
137 | 138 | annotations_lines = ((annotation_lookup.get(line_no), line_no, tokens) |
|
138 | 139 | for line_no, tokens |
|
139 | 140 | in enumerate(filenode_as_lines_tokens(filenode), 1)) |
|
140 | 141 | |
|
141 | 142 | grouped_annotations_lines = groupby(annotations_lines, lambda x: x[0]) |
|
142 | 143 | |
|
143 | 144 | for annotation, group in grouped_annotations_lines: |
|
144 | 145 | yield ( |
|
145 | 146 | annotation, [(line_no, tokens) |
|
146 | 147 | for (_, line_no, tokens) in group] |
|
147 | 148 | ) |
|
148 | 149 | |
|
149 | 150 | |
|
150 | 151 | def render_tokenstream(tokenstream): |
|
151 | 152 | result = [] |
|
152 | 153 | for token_class, token_ops_texts in rollup_tokenstream(tokenstream): |
|
153 | 154 | |
|
154 | 155 | if token_class: |
|
155 | 156 | result.append(u'<span class="%s">' % token_class) |
|
156 | 157 | else: |
|
157 | 158 | result.append(u'<span>') |
|
158 | 159 | |
|
159 | 160 | for op_tag, token_text in token_ops_texts: |
|
160 | 161 | |
|
161 | 162 | if op_tag: |
|
162 | 163 | result.append(u'<%s>' % op_tag) |
|
163 | 164 | |
|
164 | 165 | escaped_text = html_escape(token_text) |
|
165 | 166 | |
|
166 | 167 | # TODO: dan: investigate showing hidden characters like space/nl/tab |
|
167 | 168 | # escaped_text = escaped_text.replace(' ', '<sp> </sp>') |
|
168 | 169 | # escaped_text = escaped_text.replace('\n', '<nl>\n</nl>') |
|
169 | 170 | # escaped_text = escaped_text.replace('\t', '<tab>\t</tab>') |
|
170 | 171 | |
|
171 | 172 | result.append(escaped_text) |
|
172 | 173 | |
|
173 | 174 | if op_tag: |
|
174 | 175 | result.append(u'</%s>' % op_tag) |
|
175 | 176 | |
|
176 | 177 | result.append(u'</span>') |
|
177 | 178 | |
|
178 | 179 | html = ''.join(result) |
|
179 | 180 | return html |
|
180 | 181 | |
|
181 | 182 | |
|
182 | 183 | def rollup_tokenstream(tokenstream): |
|
183 | 184 | """ |
|
184 | 185 | Group a token stream of the format: |
|
185 | 186 | |
|
186 | 187 | ('class', 'op', 'text') |
|
187 | 188 | or |
|
188 | 189 | ('class', 'text') |
|
189 | 190 | |
|
190 | 191 | into |
|
191 | 192 | |
|
192 | 193 | [('class1', |
|
193 | 194 | [('op1', 'text'), |
|
194 | 195 | ('op2', 'text')]), |
|
195 | 196 | ('class2', |
|
196 | 197 | [('op3', 'text')])] |
|
197 | 198 | |
|
198 | 199 | This is used to get the minimal tags necessary when |
|
199 | 200 | rendering to html eg for a token stream ie. |
|
200 | 201 | |
|
201 | 202 | <span class="A"><ins>he</ins>llo</span> |
|
202 | 203 | vs |
|
203 | 204 | <span class="A"><ins>he</ins></span><span class="A">llo</span> |
|
204 | 205 | |
|
205 | 206 | If a 2 tuple is passed in, the output op will be an empty string. |
|
206 | 207 | |
|
207 | 208 | eg: |
|
208 | 209 | |
|
209 | 210 | >>> rollup_tokenstream([('classA', '', 'h'), |
|
210 | 211 | ('classA', 'del', 'ell'), |
|
211 | 212 | ('classA', '', 'o'), |
|
212 | 213 | ('classB', '', ' '), |
|
213 | 214 | ('classA', '', 'the'), |
|
214 | 215 | ('classA', '', 're'), |
|
215 | 216 | ]) |
|
216 | 217 | |
|
217 | 218 | [('classA', [('', 'h'), ('del', 'ell'), ('', 'o')], |
|
218 | 219 | ('classB', [('', ' ')], |
|
219 | 220 | ('classA', [('', 'there')]] |
|
220 | 221 | |
|
221 | 222 | """ |
|
222 | 223 | if tokenstream and len(tokenstream[0]) == 2: |
|
223 | 224 | tokenstream = ((t[0], '', t[1]) for t in tokenstream) |
|
224 | 225 | |
|
225 | 226 | result = [] |
|
226 | 227 | for token_class, op_list in groupby(tokenstream, lambda t: t[0]): |
|
227 | 228 | ops = [] |
|
228 | 229 | for token_op, token_text_list in groupby(op_list, lambda o: o[1]): |
|
229 | 230 | text_buffer = [] |
|
230 | 231 | for t_class, t_op, t_text in token_text_list: |
|
231 | 232 | text_buffer.append(t_text) |
|
232 | 233 | ops.append((token_op, ''.join(text_buffer))) |
|
233 | 234 | result.append((token_class, ops)) |
|
234 | 235 | return result |
|
235 | 236 | |
|
236 | 237 | |
|
237 | 238 | def tokens_diff(old_tokens, new_tokens, use_diff_match_patch=True): |
|
238 | 239 | """ |
|
239 | 240 | Converts a list of (token_class, token_text) tuples to a list of |
|
240 | 241 | (token_class, token_op, token_text) tuples where token_op is one of |
|
241 | 242 | ('ins', 'del', '') |
|
242 | 243 | |
|
243 | 244 | :param old_tokens: list of (token_class, token_text) tuples of old line |
|
244 | 245 | :param new_tokens: list of (token_class, token_text) tuples of new line |
|
245 | 246 | :param use_diff_match_patch: boolean, will use google's diff match patch |
|
246 | 247 | library which has options to 'smooth' out the character by character |
|
247 | 248 | differences making nicer ins/del blocks |
|
248 | 249 | """ |
|
249 | 250 | |
|
250 | 251 | old_tokens_result = [] |
|
251 | 252 | new_tokens_result = [] |
|
252 | 253 | |
|
253 | 254 | similarity = difflib.SequenceMatcher(None, |
|
254 | 255 | ''.join(token_text for token_class, token_text in old_tokens), |
|
255 | 256 | ''.join(token_text for token_class, token_text in new_tokens) |
|
256 | 257 | ).ratio() |
|
257 | 258 | |
|
258 | 259 | if similarity < 0.6: # return, the blocks are too different |
|
259 | 260 | for token_class, token_text in old_tokens: |
|
260 | 261 | old_tokens_result.append((token_class, '', token_text)) |
|
261 | 262 | for token_class, token_text in new_tokens: |
|
262 | 263 | new_tokens_result.append((token_class, '', token_text)) |
|
263 | 264 | return old_tokens_result, new_tokens_result, similarity |
|
264 | 265 | |
|
265 | 266 | token_sequence_matcher = difflib.SequenceMatcher(None, |
|
266 | 267 | [x[1] for x in old_tokens], |
|
267 | 268 | [x[1] for x in new_tokens]) |
|
268 | 269 | |
|
269 | 270 | for tag, o1, o2, n1, n2 in token_sequence_matcher.get_opcodes(): |
|
270 | 271 | # check the differences by token block types first to give a more |
|
271 | 272 | # nicer "block" level replacement vs character diffs |
|
272 | 273 | |
|
273 | 274 | if tag == 'equal': |
|
274 | 275 | for token_class, token_text in old_tokens[o1:o2]: |
|
275 | 276 | old_tokens_result.append((token_class, '', token_text)) |
|
276 | 277 | for token_class, token_text in new_tokens[n1:n2]: |
|
277 | 278 | new_tokens_result.append((token_class, '', token_text)) |
|
278 | 279 | elif tag == 'delete': |
|
279 | 280 | for token_class, token_text in old_tokens[o1:o2]: |
|
280 | 281 | old_tokens_result.append((token_class, 'del', token_text)) |
|
281 | 282 | elif tag == 'insert': |
|
282 | 283 | for token_class, token_text in new_tokens[n1:n2]: |
|
283 | 284 | new_tokens_result.append((token_class, 'ins', token_text)) |
|
284 | 285 | elif tag == 'replace': |
|
285 | 286 | # if same type token blocks must be replaced, do a diff on the |
|
286 | 287 | # characters in the token blocks to show individual changes |
|
287 | 288 | |
|
288 | 289 | old_char_tokens = [] |
|
289 | 290 | new_char_tokens = [] |
|
290 | 291 | for token_class, token_text in old_tokens[o1:o2]: |
|
291 | 292 | for char in token_text: |
|
292 | 293 | old_char_tokens.append((token_class, char)) |
|
293 | 294 | |
|
294 | 295 | for token_class, token_text in new_tokens[n1:n2]: |
|
295 | 296 | for char in token_text: |
|
296 | 297 | new_char_tokens.append((token_class, char)) |
|
297 | 298 | |
|
298 | 299 | old_string = ''.join([token_text for |
|
299 | 300 | token_class, token_text in old_char_tokens]) |
|
300 | 301 | new_string = ''.join([token_text for |
|
301 | 302 | token_class, token_text in new_char_tokens]) |
|
302 | 303 | |
|
303 | 304 | char_sequence = difflib.SequenceMatcher( |
|
304 | 305 | None, old_string, new_string) |
|
305 | 306 | copcodes = char_sequence.get_opcodes() |
|
306 | 307 | obuffer, nbuffer = [], [] |
|
307 | 308 | |
|
308 | 309 | if use_diff_match_patch: |
|
309 | 310 | dmp = diff_match_patch() |
|
310 | 311 | dmp.Diff_EditCost = 11 # TODO: dan: extract this to a setting |
|
311 | 312 | reps = dmp.diff_main(old_string, new_string) |
|
312 | 313 | dmp.diff_cleanupEfficiency(reps) |
|
313 | 314 | |
|
314 | 315 | a, b = 0, 0 |
|
315 | 316 | for op, rep in reps: |
|
316 | 317 | l = len(rep) |
|
317 | 318 | if op == 0: |
|
318 | 319 | for i, c in enumerate(rep): |
|
319 | 320 | obuffer.append((old_char_tokens[a+i][0], '', c)) |
|
320 | 321 | nbuffer.append((new_char_tokens[b+i][0], '', c)) |
|
321 | 322 | a += l |
|
322 | 323 | b += l |
|
323 | 324 | elif op == -1: |
|
324 | 325 | for i, c in enumerate(rep): |
|
325 | 326 | obuffer.append((old_char_tokens[a+i][0], 'del', c)) |
|
326 | 327 | a += l |
|
327 | 328 | elif op == 1: |
|
328 | 329 | for i, c in enumerate(rep): |
|
329 | 330 | nbuffer.append((new_char_tokens[b+i][0], 'ins', c)) |
|
330 | 331 | b += l |
|
331 | 332 | else: |
|
332 | 333 | for ctag, co1, co2, cn1, cn2 in copcodes: |
|
333 | 334 | if ctag == 'equal': |
|
334 | 335 | for token_class, token_text in old_char_tokens[co1:co2]: |
|
335 | 336 | obuffer.append((token_class, '', token_text)) |
|
336 | 337 | for token_class, token_text in new_char_tokens[cn1:cn2]: |
|
337 | 338 | nbuffer.append((token_class, '', token_text)) |
|
338 | 339 | elif ctag == 'delete': |
|
339 | 340 | for token_class, token_text in old_char_tokens[co1:co2]: |
|
340 | 341 | obuffer.append((token_class, 'del', token_text)) |
|
341 | 342 | elif ctag == 'insert': |
|
342 | 343 | for token_class, token_text in new_char_tokens[cn1:cn2]: |
|
343 | 344 | nbuffer.append((token_class, 'ins', token_text)) |
|
344 | 345 | elif ctag == 'replace': |
|
345 | 346 | for token_class, token_text in old_char_tokens[co1:co2]: |
|
346 | 347 | obuffer.append((token_class, 'del', token_text)) |
|
347 | 348 | for token_class, token_text in new_char_tokens[cn1:cn2]: |
|
348 | 349 | nbuffer.append((token_class, 'ins', token_text)) |
|
349 | 350 | |
|
350 | 351 | old_tokens_result.extend(obuffer) |
|
351 | 352 | new_tokens_result.extend(nbuffer) |
|
352 | 353 | |
|
353 | 354 | return old_tokens_result, new_tokens_result, similarity |
|
354 | 355 | |
|
355 | 356 | |
|
356 | 357 | def diffset_node_getter(commit): |
|
357 | 358 | def get_node(fname): |
|
358 | 359 | try: |
|
359 | 360 | return commit.get_node(fname) |
|
360 | 361 | except NodeDoesNotExistError: |
|
361 | 362 | return None |
|
362 | 363 | |
|
363 | 364 | return get_node |
|
364 | 365 | |
|
365 | 366 | |
|
366 | 367 | class DiffSet(object): |
|
367 | 368 | """ |
|
368 | 369 | An object for parsing the diff result from diffs.DiffProcessor and |
|
369 | 370 | adding highlighting, side by side/unified renderings and line diffs |
|
370 | 371 | """ |
|
371 | 372 | |
|
372 | 373 | HL_REAL = 'REAL' # highlights using original file, slow |
|
373 | 374 | HL_FAST = 'FAST' # highlights using just the line, fast but not correct |
|
374 | 375 | # in the case of multiline code |
|
375 | 376 | HL_NONE = 'NONE' # no highlighting, fastest |
|
376 | 377 | |
|
377 | 378 | def __init__(self, highlight_mode=HL_REAL, repo_name=None, |
|
378 | 379 | source_repo_name=None, |
|
379 | 380 | source_node_getter=lambda filename: None, |
|
380 | 381 | target_repo_name=None, |
|
381 | 382 | target_node_getter=lambda filename: None, |
|
382 | 383 | source_nodes=None, target_nodes=None, |
|
383 | 384 | # files over this size will use fast highlighting |
|
384 | 385 | max_file_size_limit=150 * 1024, |
|
385 | 386 | ): |
|
386 | 387 | |
|
387 | 388 | self.highlight_mode = highlight_mode |
|
388 | 389 | self.highlighted_filenodes = {} |
|
389 | 390 | self.source_node_getter = source_node_getter |
|
390 | 391 | self.target_node_getter = target_node_getter |
|
391 | 392 | self.source_nodes = source_nodes or {} |
|
392 | 393 | self.target_nodes = target_nodes or {} |
|
393 | 394 | self.repo_name = repo_name |
|
394 | 395 | self.target_repo_name = target_repo_name or repo_name |
|
395 | 396 | self.source_repo_name = source_repo_name or repo_name |
|
396 | 397 | self.max_file_size_limit = max_file_size_limit |
|
397 | 398 | |
|
398 | 399 | def render_patchset(self, patchset, source_ref=None, target_ref=None): |
|
399 | 400 | diffset = AttributeDict(dict( |
|
400 | 401 | lines_added=0, |
|
401 | 402 | lines_deleted=0, |
|
402 | 403 | changed_files=0, |
|
403 | 404 | files=[], |
|
404 | 405 | file_stats={}, |
|
405 | 406 | limited_diff=isinstance(patchset, LimitedDiffContainer), |
|
406 | 407 | repo_name=self.repo_name, |
|
407 | 408 | target_repo_name=self.target_repo_name, |
|
408 | 409 | source_repo_name=self.source_repo_name, |
|
409 | 410 | source_ref=source_ref, |
|
410 | 411 | target_ref=target_ref, |
|
411 | 412 | )) |
|
412 | 413 | for patch in patchset: |
|
413 | 414 | diffset.file_stats[patch['filename']] = patch['stats'] |
|
414 | 415 | filediff = self.render_patch(patch) |
|
415 | 416 | filediff.diffset = StrictAttributeDict(dict( |
|
416 | 417 | source_ref=diffset.source_ref, |
|
417 | 418 | target_ref=diffset.target_ref, |
|
418 | 419 | repo_name=diffset.repo_name, |
|
419 | 420 | source_repo_name=diffset.source_repo_name, |
|
420 | 421 | target_repo_name=diffset.target_repo_name, |
|
421 | 422 | )) |
|
422 | 423 | diffset.files.append(filediff) |
|
423 | 424 | diffset.changed_files += 1 |
|
424 | 425 | if not patch['stats']['binary']: |
|
425 | 426 | diffset.lines_added += patch['stats']['added'] |
|
426 | 427 | diffset.lines_deleted += patch['stats']['deleted'] |
|
427 | 428 | |
|
428 | 429 | return diffset |
|
429 | 430 | |
|
430 | 431 | _lexer_cache = {} |
|
431 | 432 | |
|
432 | 433 | def _get_lexer_for_filename(self, filename, filenode=None): |
|
433 | 434 | # cached because we might need to call it twice for source/target |
|
434 | 435 | if filename not in self._lexer_cache: |
|
435 | 436 | if filenode: |
|
436 | 437 | lexer = filenode.lexer |
|
437 | 438 | extension = filenode.extension |
|
438 | 439 | else: |
|
439 | 440 | lexer = FileNode.get_lexer(filename=filename) |
|
440 | 441 | extension = filename.split('.')[-1] |
|
441 | 442 | |
|
442 | 443 | lexer = get_custom_lexer(extension) or lexer |
|
443 | 444 | self._lexer_cache[filename] = lexer |
|
444 | 445 | return self._lexer_cache[filename] |
|
445 | 446 | |
|
446 | 447 | def render_patch(self, patch): |
|
447 | 448 | log.debug('rendering diff for %r', patch['filename']) |
|
448 | 449 | |
|
449 | 450 | source_filename = patch['original_filename'] |
|
450 | 451 | target_filename = patch['filename'] |
|
451 | 452 | |
|
452 | 453 | source_lexer = plain_text_lexer |
|
453 | 454 | target_lexer = plain_text_lexer |
|
454 | 455 | |
|
455 | 456 | if not patch['stats']['binary']: |
|
456 | 457 | node_hl_mode = self.HL_NONE if patch['chunks'] == [] else None |
|
457 | 458 | hl_mode = node_hl_mode or self.highlight_mode |
|
458 | 459 | |
|
459 | 460 | if hl_mode == self.HL_REAL: |
|
460 | 461 | if (source_filename and patch['operation'] in ('D', 'M') |
|
461 | 462 | and source_filename not in self.source_nodes): |
|
462 | 463 | self.source_nodes[source_filename] = ( |
|
463 | 464 | self.source_node_getter(source_filename)) |
|
464 | 465 | |
|
465 | 466 | if (target_filename and patch['operation'] in ('A', 'M') |
|
466 | 467 | and target_filename not in self.target_nodes): |
|
467 | 468 | self.target_nodes[target_filename] = ( |
|
468 | 469 | self.target_node_getter(target_filename)) |
|
469 | 470 | |
|
470 | 471 | elif hl_mode == self.HL_FAST: |
|
471 | 472 | source_lexer = self._get_lexer_for_filename(source_filename) |
|
472 | 473 | target_lexer = self._get_lexer_for_filename(target_filename) |
|
473 | 474 | |
|
474 | 475 | source_file = self.source_nodes.get(source_filename, source_filename) |
|
475 | 476 | target_file = self.target_nodes.get(target_filename, target_filename) |
|
476 | 477 | raw_id_uid = '' |
|
477 | 478 | if self.source_nodes.get(source_filename): |
|
478 | 479 | raw_id_uid = self.source_nodes[source_filename].commit.raw_id |
|
479 | 480 | |
|
480 | 481 | if not raw_id_uid and self.target_nodes.get(target_filename): |
|
481 | 482 | # in case this is a new file we only have it in target |
|
482 | 483 | raw_id_uid = self.target_nodes[target_filename].commit.raw_id |
|
483 | 484 | |
|
484 | 485 | source_filenode, target_filenode = None, None |
|
485 | 486 | |
|
486 | 487 | # TODO: dan: FileNode.lexer works on the content of the file - which |
|
487 | 488 | # can be slow - issue #4289 explains a lexer clean up - which once |
|
488 | 489 | # done can allow caching a lexer for a filenode to avoid the file lookup |
|
489 | 490 | if isinstance(source_file, FileNode): |
|
490 | 491 | source_filenode = source_file |
|
491 | 492 | #source_lexer = source_file.lexer |
|
492 | 493 | source_lexer = self._get_lexer_for_filename(source_filename) |
|
493 | 494 | source_file.lexer = source_lexer |
|
494 | 495 | |
|
495 | 496 | if isinstance(target_file, FileNode): |
|
496 | 497 | target_filenode = target_file |
|
497 | 498 | #target_lexer = target_file.lexer |
|
498 | 499 | target_lexer = self._get_lexer_for_filename(target_filename) |
|
499 | 500 | target_file.lexer = target_lexer |
|
500 | 501 | |
|
501 | 502 | source_file_path, target_file_path = None, None |
|
502 | 503 | |
|
503 | 504 | if source_filename != '/dev/null': |
|
504 | 505 | source_file_path = source_filename |
|
505 | 506 | if target_filename != '/dev/null': |
|
506 | 507 | target_file_path = target_filename |
|
507 | 508 | |
|
508 | 509 | source_file_type = source_lexer.name |
|
509 | 510 | target_file_type = target_lexer.name |
|
510 | 511 | |
|
511 | 512 | filediff = AttributeDict({ |
|
512 | 513 | 'source_file_path': source_file_path, |
|
513 | 514 | 'target_file_path': target_file_path, |
|
514 | 515 | 'source_filenode': source_filenode, |
|
515 | 516 | 'target_filenode': target_filenode, |
|
516 | 517 | 'source_file_type': target_file_type, |
|
517 | 518 | 'target_file_type': source_file_type, |
|
518 | 519 | 'patch': {'filename': patch['filename'], 'stats': patch['stats']}, |
|
519 | 520 | 'operation': patch['operation'], |
|
520 | 521 | 'source_mode': patch['stats']['old_mode'], |
|
521 | 522 | 'target_mode': patch['stats']['new_mode'], |
|
522 | 523 | 'limited_diff': patch['is_limited_diff'], |
|
523 | 524 | 'hunks': [], |
|
524 | 525 | 'hunk_ops': None, |
|
525 | 526 | 'diffset': self, |
|
526 | 527 | 'raw_id': raw_id_uid, |
|
527 | 528 | }) |
|
528 | 529 | |
|
529 | 530 | file_chunks = patch['chunks'][1:] |
|
530 | 531 | for hunk in file_chunks: |
|
531 | 532 | hunkbit = self.parse_hunk(hunk, source_file, target_file) |
|
532 | 533 | hunkbit.source_file_path = source_file_path |
|
533 | 534 | hunkbit.target_file_path = target_file_path |
|
534 | 535 | filediff.hunks.append(hunkbit) |
|
535 | 536 | |
|
536 | 537 | # Simulate hunk on OPS type line which doesn't really contain any diff |
|
537 | 538 | # this allows commenting on those |
|
538 | 539 | if not file_chunks: |
|
539 | 540 | actions = [] |
|
540 | 541 | for op_id, op_text in filediff.patch['stats']['ops'].items(): |
|
541 | 542 | if op_id == DEL_FILENODE: |
|
542 | 543 | actions.append(u'file was removed') |
|
543 | 544 | elif op_id == BIN_FILENODE: |
|
544 | 545 | actions.append(u'binary diff hidden') |
|
545 | 546 | else: |
|
546 | 547 | actions.append(safe_unicode(op_text)) |
|
547 | 548 | action_line = u'NO CONTENT: ' + \ |
|
548 | 549 | u', '.join(actions) or u'UNDEFINED_ACTION' |
|
549 | 550 | |
|
550 | 551 | hunk_ops = {'source_length': 0, 'source_start': 0, |
|
551 | 552 | 'lines': [ |
|
552 | 553 | {'new_lineno': 0, 'old_lineno': 1, |
|
553 | 554 | 'action': 'unmod-no-hl', 'line': action_line} |
|
554 | 555 | ], |
|
555 | 556 | 'section_header': u'', 'target_start': 1, 'target_length': 1} |
|
556 | 557 | |
|
557 | 558 | hunkbit = self.parse_hunk(hunk_ops, source_file, target_file) |
|
558 | 559 | hunkbit.source_file_path = source_file_path |
|
559 | 560 | hunkbit.target_file_path = target_file_path |
|
560 | 561 | filediff.hunk_ops = hunkbit |
|
561 | 562 | return filediff |
|
562 | 563 | |
|
563 | 564 | def parse_hunk(self, hunk, source_file, target_file): |
|
564 | 565 | result = AttributeDict(dict( |
|
565 | 566 | source_start=hunk['source_start'], |
|
566 | 567 | source_length=hunk['source_length'], |
|
567 | 568 | target_start=hunk['target_start'], |
|
568 | 569 | target_length=hunk['target_length'], |
|
569 | 570 | section_header=hunk['section_header'], |
|
570 | 571 | lines=[], |
|
571 | 572 | )) |
|
572 | 573 | before, after = [], [] |
|
573 | 574 | |
|
574 | 575 | for line in hunk['lines']: |
|
575 | 576 | if line['action'] in ['unmod', 'unmod-no-hl']: |
|
576 | 577 | no_hl = line['action'] == 'unmod-no-hl' |
|
577 | 578 | result.lines.extend( |
|
578 | 579 | self.parse_lines(before, after, source_file, target_file, no_hl=no_hl)) |
|
579 | 580 | after.append(line) |
|
580 | 581 | before.append(line) |
|
581 | 582 | elif line['action'] == 'add': |
|
582 | 583 | after.append(line) |
|
583 | 584 | elif line['action'] == 'del': |
|
584 | 585 | before.append(line) |
|
585 | 586 | elif line['action'] == 'old-no-nl': |
|
586 | 587 | before.append(line) |
|
587 | 588 | elif line['action'] == 'new-no-nl': |
|
588 | 589 | after.append(line) |
|
589 | 590 | |
|
590 | 591 | all_actions = [x['action'] for x in after] + [x['action'] for x in before] |
|
591 | 592 | no_hl = {x for x in all_actions} == {'unmod-no-hl'} |
|
592 | 593 | result.lines.extend( |
|
593 | 594 | self.parse_lines(before, after, source_file, target_file, no_hl=no_hl)) |
|
594 | 595 | # NOTE(marcink): we must keep list() call here so we can cache the result... |
|
595 | 596 | result.unified = list(self.as_unified(result.lines)) |
|
596 | 597 | result.sideside = result.lines |
|
597 | 598 | |
|
598 | 599 | return result |
|
599 | 600 | |
|
600 | 601 | def parse_lines(self, before_lines, after_lines, source_file, target_file, |
|
601 | 602 | no_hl=False): |
|
602 | 603 | # TODO: dan: investigate doing the diff comparison and fast highlighting |
|
603 | 604 | # on the entire before and after buffered block lines rather than by |
|
604 | 605 | # line, this means we can get better 'fast' highlighting if the context |
|
605 | 606 | # allows it - eg. |
|
606 | 607 | # line 4: """ |
|
607 | 608 | # line 5: this gets highlighted as a string |
|
608 | 609 | # line 6: """ |
|
609 | 610 | |
|
610 | 611 | lines = [] |
|
611 | 612 | |
|
612 | 613 | before_newline = AttributeDict() |
|
613 | 614 | after_newline = AttributeDict() |
|
614 | 615 | if before_lines and before_lines[-1]['action'] == 'old-no-nl': |
|
615 | 616 | before_newline_line = before_lines.pop(-1) |
|
616 | 617 | before_newline.content = '\n {}'.format( |
|
617 | 618 | render_tokenstream( |
|
618 | 619 | [(x[0], '', x[1]) |
|
619 | 620 | for x in [('nonl', before_newline_line['line'])]])) |
|
620 | 621 | |
|
621 | 622 | if after_lines and after_lines[-1]['action'] == 'new-no-nl': |
|
622 | 623 | after_newline_line = after_lines.pop(-1) |
|
623 | 624 | after_newline.content = '\n {}'.format( |
|
624 | 625 | render_tokenstream( |
|
625 | 626 | [(x[0], '', x[1]) |
|
626 | 627 | for x in [('nonl', after_newline_line['line'])]])) |
|
627 | 628 | |
|
628 | 629 | while before_lines or after_lines: |
|
629 | 630 | before, after = None, None |
|
630 | 631 | before_tokens, after_tokens = None, None |
|
631 | 632 | |
|
632 | 633 | if before_lines: |
|
633 | 634 | before = before_lines.pop(0) |
|
634 | 635 | if after_lines: |
|
635 | 636 | after = after_lines.pop(0) |
|
636 | 637 | |
|
637 | 638 | original = AttributeDict() |
|
638 | 639 | modified = AttributeDict() |
|
639 | 640 | |
|
640 | 641 | if before: |
|
641 | 642 | if before['action'] == 'old-no-nl': |
|
642 | 643 | before_tokens = [('nonl', before['line'])] |
|
643 | 644 | else: |
|
644 | 645 | before_tokens = self.get_line_tokens( |
|
645 | 646 | line_text=before['line'], line_number=before['old_lineno'], |
|
646 | 647 | input_file=source_file, no_hl=no_hl) |
|
647 | 648 | original.lineno = before['old_lineno'] |
|
648 | 649 | original.content = before['line'] |
|
649 | 650 | original.action = self.action_to_op(before['action']) |
|
650 | 651 | |
|
651 | 652 | original.get_comment_args = ( |
|
652 | 653 | source_file, 'o', before['old_lineno']) |
|
653 | 654 | |
|
654 | 655 | if after: |
|
655 | 656 | if after['action'] == 'new-no-nl': |
|
656 | 657 | after_tokens = [('nonl', after['line'])] |
|
657 | 658 | else: |
|
658 | 659 | after_tokens = self.get_line_tokens( |
|
659 | 660 | line_text=after['line'], line_number=after['new_lineno'], |
|
660 | 661 | input_file=target_file, no_hl=no_hl) |
|
661 | 662 | modified.lineno = after['new_lineno'] |
|
662 | 663 | modified.content = after['line'] |
|
663 | 664 | modified.action = self.action_to_op(after['action']) |
|
664 | 665 | |
|
665 | 666 | modified.get_comment_args = (target_file, 'n', after['new_lineno']) |
|
666 | 667 | |
|
667 | 668 | # diff the lines |
|
668 | 669 | if before_tokens and after_tokens: |
|
669 | 670 | o_tokens, m_tokens, similarity = tokens_diff( |
|
670 | 671 | before_tokens, after_tokens) |
|
671 | 672 | original.content = render_tokenstream(o_tokens) |
|
672 | 673 | modified.content = render_tokenstream(m_tokens) |
|
673 | 674 | elif before_tokens: |
|
674 | 675 | original.content = render_tokenstream( |
|
675 | 676 | [(x[0], '', x[1]) for x in before_tokens]) |
|
676 | 677 | elif after_tokens: |
|
677 | 678 | modified.content = render_tokenstream( |
|
678 | 679 | [(x[0], '', x[1]) for x in after_tokens]) |
|
679 | 680 | |
|
680 | 681 | if not before_lines and before_newline: |
|
681 | 682 | original.content += before_newline.content |
|
682 | 683 | before_newline = None |
|
683 | 684 | if not after_lines and after_newline: |
|
684 | 685 | modified.content += after_newline.content |
|
685 | 686 | after_newline = None |
|
686 | 687 | |
|
687 | 688 | lines.append(AttributeDict({ |
|
688 | 689 | 'original': original, |
|
689 | 690 | 'modified': modified, |
|
690 | 691 | })) |
|
691 | 692 | |
|
692 | 693 | return lines |
|
693 | 694 | |
|
694 | 695 | def get_line_tokens(self, line_text, line_number, input_file=None, no_hl=False): |
|
695 | 696 | filenode = None |
|
696 | 697 | filename = None |
|
697 | 698 | |
|
698 | if isinstance(input_file, |

699 | if isinstance(input_file, compat.string_types): |
|
699 | 700 | filename = input_file |
|
700 | 701 | elif isinstance(input_file, FileNode): |
|
701 | 702 | filenode = input_file |
|
702 | 703 | filename = input_file.unicode_path |
|
703 | 704 | |
|
704 | 705 | hl_mode = self.HL_NONE if no_hl else self.highlight_mode |
|
705 | 706 | if hl_mode == self.HL_REAL and filenode: |
|
706 | 707 | lexer = self._get_lexer_for_filename(filename) |
|
707 | 708 | file_size_allowed = input_file.size < self.max_file_size_limit |
|
708 | 709 | if line_number and file_size_allowed: |
|
709 | 710 | return self.get_tokenized_filenode_line( |
|
710 | 711 | input_file, line_number, lexer) |
|
711 | 712 | |
|
712 | 713 | if hl_mode in (self.HL_REAL, self.HL_FAST) and filename: |
|
713 | 714 | lexer = self._get_lexer_for_filename(filename) |
|
714 | 715 | return list(tokenize_string(line_text, lexer)) |
|
715 | 716 | |
|
716 | 717 | return list(tokenize_string(line_text, plain_text_lexer)) |
|
717 | 718 | |
|
718 | 719 | def get_tokenized_filenode_line(self, filenode, line_number, lexer=None): |
|
719 | 720 | |
|
720 | 721 | if filenode not in self.highlighted_filenodes: |
|
721 | 722 | tokenized_lines = filenode_as_lines_tokens(filenode, lexer) |
|
722 | 723 | self.highlighted_filenodes[filenode] = tokenized_lines |
|
723 | 724 | return self.highlighted_filenodes[filenode][line_number - 1] |
|
724 | 725 | |
|
725 | 726 | def action_to_op(self, action): |
|
726 | 727 | return { |
|
727 | 728 | 'add': '+', |
|
728 | 729 | 'del': '-', |
|
729 | 730 | 'unmod': ' ', |
|
730 | 731 | 'unmod-no-hl': ' ', |
|
731 | 732 | 'old-no-nl': ' ', |
|
732 | 733 | 'new-no-nl': ' ', |
|
733 | 734 | }.get(action, action) |
|
734 | 735 | |
|
735 | 736 | def as_unified(self, lines): |
|
736 | 737 | """ |
|
737 | 738 | Return a generator that yields the lines of a diff in unified order |
|
738 | 739 | """ |
|
739 | 740 | def generator(): |
|
740 | 741 | buf = [] |
|
741 | 742 | for line in lines: |
|
742 | 743 | |
|
743 | 744 | if buf and not line.original or line.original.action == ' ': |
|
744 | 745 | for b in buf: |
|
745 | 746 | yield b |
|
746 | 747 | buf = [] |
|
747 | 748 | |
|
748 | 749 | if line.original: |
|
749 | 750 | if line.original.action == ' ': |
|
750 | 751 | yield (line.original.lineno, line.modified.lineno, |
|
751 | 752 | line.original.action, line.original.content, |
|
752 | 753 | line.original.get_comment_args) |
|
753 | 754 | continue |
|
754 | 755 | |
|
755 | 756 | if line.original.action == '-': |
|
756 | 757 | yield (line.original.lineno, None, |
|
757 | 758 | line.original.action, line.original.content, |
|
758 | 759 | line.original.get_comment_args) |
|
759 | 760 | |
|
760 | 761 | if line.modified.action == '+': |
|
761 | 762 | buf.append(( |
|
762 | 763 | None, line.modified.lineno, |
|
763 | 764 | line.modified.action, line.modified.content, |
|
764 | 765 | line.modified.get_comment_args)) |
|
765 | 766 | continue |
|
766 | 767 | |
|
767 | 768 | if line.modified: |
|
768 | 769 | yield (None, line.modified.lineno, |
|
769 | 770 | line.modified.action, line.modified.content, |
|
770 | 771 | line.modified.get_comment_args) |
|
771 | 772 | |
|
772 | 773 | for b in buf: |
|
773 | 774 | yield b |
|
774 | 775 | |
|
775 | 776 | return generator() |
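
`DiffSet.get_line_tokens` above uses the compat check to decide whether it was handed a bare filename or a FileNode; only the latter allows the slower whole-file (`HL_REAL`) highlighting. A rough sketch of that dispatch in isolation, duck-typing the node through `unicode_path` as the real code does (names here are local to the example):

    from pyramid import compat

    def resolve_input(input_file):
        # Returns (filenode, filename): plain strings are just a filename,
        # node-like objects expose their path and are kept for highlighting.
        if isinstance(input_file, compat.string_types):
            return None, input_file
        filename = getattr(input_file, 'unicode_path', None)
        return (input_file if filename else None), filename

    assert resolve_input('setup.py') == (None, 'setup.py')
    assert resolve_input(object()) == (None, None)
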
@@ -1,30 +1,32 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2019 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | from pyramid import compat | |
|
22 | ||
|
21 | 23 | |
|
22 | 24 | def strip_whitespace(value): |
|
23 | 25 | """ |
|
24 | 26 | Removes leading/trailing whitespace, newlines, and tabs from the value. |
|
25 | 27 | Implements the `colander.interface.Preparer` interface. |
|
26 | 28 | """ |
|
27 | if isinstance(value, |

29 | if isinstance(value, compat.string_types): |
|
28 | 30 | return value.strip(' \t\n\r') |
|
29 | 31 | else: |
|
30 | 32 | return value |
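
`strip_whitespace` keeps its pass-through behaviour for non-string values, so it stays safe to attach as a colander preparer to nodes that may receive None or already-converted values. A standalone illustration (the function is restated locally so the snippet runs on its own):

    from pyramid import compat

    def strip_whitespace(value):
        # Strip strings; pass everything else through unchanged.
        if isinstance(value, compat.string_types):
            return value.strip(' \t\n\r')
        return value

    assert strip_whitespace('  user name \n') == 'user name'
    assert strip_whitespace(None) is None
    assert strip_whitespace(42) == 42
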
@@ -1,665 +1,666 b'' | |||
|
1 | 1 | """ |
|
2 | 2 | Schema module providing common schema operations. |
|
3 | 3 | """ |
|
4 | 4 | import warnings |
|
5 | 5 | |
|
6 | 6 | from UserDict import DictMixin |
|
7 | 7 | |
|
8 | 8 | import sqlalchemy |
|
9 | 9 | |
|
10 | 10 | from sqlalchemy.schema import ForeignKeyConstraint |
|
11 | 11 | from sqlalchemy.schema import UniqueConstraint |
|
12 | from pyramid import compat | |
|
12 | 13 | |
|
13 | 14 | from rhodecode.lib.dbmigrate.migrate.exceptions import * |
|
14 | 15 | from rhodecode.lib.dbmigrate.migrate.changeset import SQLA_07, SQLA_08 |
|
15 | 16 | from rhodecode.lib.dbmigrate.migrate.changeset import util |
|
16 | 17 | from rhodecode.lib.dbmigrate.migrate.changeset.databases.visitor import ( |
|
17 | 18 | get_engine_visitor, run_single_visitor) |
|
18 | 19 | |
|
19 | 20 | |
|
20 | 21 | __all__ = [ |
|
21 | 22 | 'create_column', |
|
22 | 23 | 'drop_column', |
|
23 | 24 | 'alter_column', |
|
24 | 25 | 'rename_table', |
|
25 | 26 | 'rename_index', |
|
26 | 27 | 'ChangesetTable', |
|
27 | 28 | 'ChangesetColumn', |
|
28 | 29 | 'ChangesetIndex', |
|
29 | 30 | 'ChangesetDefaultClause', |
|
30 | 31 | 'ColumnDelta', |
|
31 | 32 | ] |
|
32 | 33 | |
|
33 | 34 | def create_column(column, table=None, *p, **kw): |
|
34 | 35 | """Create a column, given the table. |
|
35 | 36 | |
|
36 | 37 | API to :meth:`ChangesetColumn.create`. |
|
37 | 38 | """ |
|
38 | 39 | if table is not None: |
|
39 | 40 | return table.create_column(column, *p, **kw) |
|
40 | 41 | return column.create(*p, **kw) |
|
41 | 42 | |
|
42 | 43 | |
|
43 | 44 | def drop_column(column, table=None, *p, **kw): |
|
44 | 45 | """Drop a column, given the table. |
|
45 | 46 | |
|
46 | 47 | API to :meth:`ChangesetColumn.drop`. |
|
47 | 48 | """ |
|
48 | 49 | if table is not None: |
|
49 | 50 | return table.drop_column(column, *p, **kw) |
|
50 | 51 | return column.drop(*p, **kw) |
|
51 | 52 | |
|
52 | 53 | |
|
53 | 54 | def rename_table(table, name, engine=None, **kw): |
|
54 | 55 | """Rename a table. |
|
55 | 56 | |
|
56 | 57 | If Table instance is given, engine is not used. |
|
57 | 58 | |
|
58 | 59 | API to :meth:`ChangesetTable.rename`. |
|
59 | 60 | |
|
60 | 61 | :param table: Table to be renamed. |
|
61 | 62 | :param name: New name for Table. |
|
62 | 63 | :param engine: Engine instance. |
|
63 | 64 | :type table: string or Table instance |
|
64 | 65 | :type name: string |
|
65 | 66 | :type engine: obj |
|
66 | 67 | """ |
|
67 | 68 | table = _to_table(table, engine) |
|
68 | 69 | table.rename(name, **kw) |
|
69 | 70 | |
|
70 | 71 | |
|
71 | 72 | def rename_index(index, name, table=None, engine=None, **kw): |
|
72 | 73 | """Rename an index. |
|
73 | 74 | |
|
74 | 75 | If Index instance is given, |
|
75 | 76 | table and engine are not used. |
|
76 | 77 | |
|
77 | 78 | API to :meth:`ChangesetIndex.rename`. |
|
78 | 79 | |
|
79 | 80 | :param index: Index to be renamed. |
|
80 | 81 | :param name: New name for index. |
|
81 | 82 | :param table: Table to which Index is reffered. |
|
82 | 83 | :param engine: Engine instance. |
|
83 | 84 | :type index: string or Index instance |
|
84 | 85 | :type name: string |
|
85 | 86 | :type table: string or Table instance |
|
86 | 87 | :type engine: obj |
|
87 | 88 | """ |
|
88 | 89 | index = _to_index(index, table, engine) |
|
89 | 90 | index.rename(name, **kw) |
|
90 | 91 | |
|
91 | 92 | |
|
92 | 93 | def alter_column(*p, **k): |
|
93 | 94 | """Alter a column. |
|
94 | 95 | |
|
95 | 96 | This is a helper function that creates a :class:`ColumnDelta` and |
|
96 | 97 | runs it. |
|
97 | 98 | |
|
98 | 99 | :argument column: |
|
99 | 100 | The name of the column to be altered or a |
|
100 | 101 | :class:`ChangesetColumn` column representing it. |
|
101 | 102 | |
|
102 | 103 | :param table: |
|
103 | 104 | A :class:`~sqlalchemy.schema.Table` or table name
|
104 | 105 | for the table where the column will be changed. |
|
105 | 106 | |
|
106 | 107 | :param engine: |
|
107 | 108 | The :class:`~sqlalchemy.engine.base.Engine` to use for table |
|
108 | 109 | reflection and schema alterations. |
|
109 | 110 | |
|
110 | 111 | :returns: A :class:`ColumnDelta` instance representing the change. |
|
111 | 112 | |
|
112 | 113 | |
|
113 | 114 | """ |
|
114 | 115 | |
|
115 | 116 | if 'table' not in k and isinstance(p[0], sqlalchemy.Column): |
|
116 | 117 | k['table'] = p[0].table |
|
117 | 118 | if 'engine' not in k: |
|
118 | 119 | k['engine'] = k['table'].bind |
|
119 | 120 | |
|
120 | 121 | # deprecation |
|
121 | 122 | if len(p) >= 2 and isinstance(p[1], sqlalchemy.Column): |
|
122 | 123 | warnings.warn( |
|
123 | 124 | "Passing a Column object to alter_column is deprecated." |
|
124 | 125 | " Just pass in keyword parameters instead.", |
|
125 | 126 | MigrateDeprecationWarning |
|
126 | 127 | ) |
|
127 | 128 | engine = k['engine'] |
|
128 | 129 | |
|
129 | 130 | # enough tests seem to break when metadata is always altered |
|
130 | 131 | # that this crutch has to be left in until they can be sorted |
|
131 | 132 | # out |
|
132 | 133 | k['alter_metadata']=True |
|
133 | 134 | |
|
134 | 135 | delta = ColumnDelta(*p, **k) |
|
135 | 136 | |
|
136 | 137 | visitorcallable = get_engine_visitor(engine, 'schemachanger') |
|
137 | 138 | engine._run_visitor(visitorcallable, delta) |
|
138 | 139 | |
|
139 | 140 | return delta |
|
140 | 141 | |
|
141 | 142 | |
|
142 | 143 | def _to_table(table, engine=None): |
|
143 | 144 | """Return if instance of Table, else construct new with metadata""" |
|
144 | 145 | if isinstance(table, sqlalchemy.Table): |
|
145 | 146 | return table |
|
146 | 147 | |
|
147 | 148 | # Given: table name, maybe an engine |
|
148 | 149 | meta = sqlalchemy.MetaData() |
|
149 | 150 | if engine is not None: |
|
150 | 151 | meta.bind = engine |
|
151 | 152 | return sqlalchemy.Table(table, meta) |
|
152 | 153 | |
|
153 | 154 | |
|
154 | 155 | def _to_index(index, table=None, engine=None): |
|
155 | 156 | """Return if instance of Index, else construct new with metadata""" |
|
156 | 157 | if isinstance(index, sqlalchemy.Index): |
|
157 | 158 | return index |
|
158 | 159 | |
|
159 | 160 | # Given: index name; table name required |
|
160 | 161 | table = _to_table(table, engine) |
|
161 | 162 | ret = sqlalchemy.Index(index) |
|
162 | 163 | ret.table = table |
|
163 | 164 | return ret |
|
164 | 165 | |
|
165 | 166 | |
|
166 | 167 | class ColumnDelta(DictMixin, sqlalchemy.schema.SchemaItem): |
|
167 | 168 | """Extracts the differences between two columns/column-parameters |
|
168 | 169 | |
|
169 | 170 | May receive parameters arranged in several different ways: |
|
170 | 171 | |
|
171 | 172 | * **current_column, new_column, \*p, \*\*kw** |
|
172 | 173 | Additional parameters can be specified to override column |
|
173 | 174 | differences. |
|
174 | 175 | |
|
175 | 176 | * **current_column, \*p, \*\*kw** |
|
176 | 177 | Additional parameters alter current_column. Table name is extracted |
|
177 | 178 | from current_column object. |
|
178 | 179 | Name is changed to current_column.name from current_name, |
|
179 | 180 | if current_name is specified. |
|
180 | 181 | |
|
181 | 182 | * **current_col_name, \*p, \*\*kw** |
|
182 | 183 | Table kw must be specified.
|
183 | 184 | |
|
184 | 185 | :param table: Table to which the current Column should be bound.\
|
185 | 186 | If table name is given, reflection will be used. |
|
186 | 187 | :type table: string or Table instance |
|
187 | 188 | |
|
188 | 189 | :param metadata: A :class:`MetaData` instance to store |
|
189 | 190 | reflected table names |
|
190 | 191 | |
|
191 | 192 | :param engine: When reflecting tables, either engine or metadata must \ |
|
192 | 193 | be specified to acquire engine object. |
|
193 | 194 | :type engine: :class:`Engine` instance |
|
194 | 195 | :returns: :class:`ColumnDelta` instance provides interface for altered attributes to \ |
|
195 | 196 | `result_column` through a :func:`dict`-like object.
|
196 | 197 | |
|
197 | 198 | * :class:`ColumnDelta`.result_column is altered column with new attributes |
|
198 | 199 | |
|
199 | 200 | * :class:`ColumnDelta`.current_name is current name of column in db |
|
200 | 201 | |
|
201 | 202 | |
|
202 | 203 | """ |
|
203 | 204 | |
|
204 | 205 | # Column attributes that can be altered |
|
205 | 206 | diff_keys = ('name', 'type', 'primary_key', 'nullable', |
|
206 | 207 | 'server_onupdate', 'server_default', 'autoincrement') |
|
207 | 208 | diffs = dict() |
|
208 | 209 | __visit_name__ = 'column' |
|
209 | 210 | |
|
210 | 211 | def __init__(self, *p, **kw): |
|
211 | 212 | # 'alter_metadata' is not a public api. It exists purely |
|
212 | 213 | # as a crutch until the tests that fail when 'alter_metadata' |
|
213 | 214 | # behaviour always happens can be sorted out |
|
214 | 215 | self.alter_metadata = kw.pop("alter_metadata", False) |
|
215 | 216 | |
|
216 | 217 | self.meta = kw.pop("metadata", None) |
|
217 | 218 | self.engine = kw.pop("engine", None) |
|
218 | 219 | |
|
219 | 220 | # Things are initialized differently depending on how many column |
|
220 | 221 | # parameters are given. Figure out how many and call the appropriate |
|
221 | 222 | # method. |
|
222 | 223 | if len(p) >= 1 and isinstance(p[0], sqlalchemy.Column): |
|
223 | 224 | # At least one column specified |
|
224 | 225 | if len(p) >= 2 and isinstance(p[1], sqlalchemy.Column): |
|
225 | 226 | # Two columns specified |
|
226 | 227 | diffs = self.compare_2_columns(*p, **kw) |
|
227 | 228 | else: |
|
228 | 229 | # Exactly one column specified |
|
229 | 230 | diffs = self.compare_1_column(*p, **kw) |
|
230 | 231 | else: |
|
231 | 232 | # Zero columns specified |
|
232 | if not len(p) or not isinstance(p[0],

233 | if not len(p) or not isinstance(p[0], compat.string_types):
|
233 | 234 | raise ValueError("First argument must be column name") |
|
234 | 235 | diffs = self.compare_parameters(*p, **kw) |
|
235 | 236 | |
|
236 | 237 | self.apply_diffs(diffs) |
|
237 | 238 | |
|
238 | 239 | def __repr__(self): |
|
239 | 240 | return '<ColumnDelta altermetadata=%r, %s>' % ( |
|
240 | 241 | self.alter_metadata, |
|
241 | 242 | super(ColumnDelta, self).__repr__() |
|
242 | 243 | ) |
|
243 | 244 | |
|
244 | 245 | def __getitem__(self, key): |
|
245 | 246 | if key not in self.keys(): |
|
246 | 247 | raise KeyError("No such diff key, available: %s" % self.diffs ) |
|
247 | 248 | return getattr(self.result_column, key) |
|
248 | 249 | |
|
249 | 250 | def __setitem__(self, key, value): |
|
250 | 251 | if key not in self.keys(): |
|
251 | 252 | raise KeyError("No such diff key, available: %s" % self.diffs ) |
|
252 | 253 | setattr(self.result_column, key, value) |
|
253 | 254 | |
|
254 | 255 | def __delitem__(self, key): |
|
255 | 256 | raise NotImplementedError |
|
256 | 257 | |
|
257 | 258 | def __len__(self): |
|
258 | 259 | raise NotImplementedError |
|
259 | 260 | |
|
260 | 261 | def __iter__(self): |
|
261 | 262 | raise NotImplementedError |
|
262 | 263 | |
|
263 | 264 | def keys(self): |
|
264 | 265 | return self.diffs.keys() |
|
265 | 266 | |
|
266 | 267 | def compare_parameters(self, current_name, *p, **k): |
|
267 | 268 | """Compares Column objects with reflection""" |
|
268 | 269 | self.table = k.pop('table') |
|
269 | 270 | self.result_column = self._table.c.get(current_name) |
|
270 | 271 | if len(p): |
|
271 | 272 | k = self._extract_parameters(p, k, self.result_column) |
|
272 | 273 | return k |
|
273 | 274 | |
|
274 | 275 | def compare_1_column(self, col, *p, **k): |
|
275 | 276 | """Compares one Column object""" |
|
276 | 277 | self.table = k.pop('table', None) |
|
277 | 278 | if self.table is None: |
|
278 | 279 | self.table = col.table |
|
279 | 280 | self.result_column = col |
|
280 | 281 | if len(p): |
|
281 | 282 | k = self._extract_parameters(p, k, self.result_column) |
|
282 | 283 | return k |
|
283 | 284 | |
|
284 | 285 | def compare_2_columns(self, old_col, new_col, *p, **k): |
|
285 | 286 | """Compares two Column objects""" |
|
286 | 287 | self.process_column(new_col) |
|
287 | 288 | self.table = k.pop('table', None) |
|
288 | 289 | # we cannot use bool() on table in SA06 |
|
289 | 290 | if self.table is None: |
|
290 | 291 | self.table = old_col.table |
|
291 | 292 | if self.table is None: |
|
292 | 293 | new_col.table |
|
293 | 294 | self.result_column = old_col |
|
294 | 295 | |
|
295 | 296 | # set differences |
|
296 | 297 | # leave out some stuff for later comp |
|
297 | 298 | for key in (set(self.diff_keys) - set(('type',))): |
|
298 | 299 | val = getattr(new_col, key, None) |
|
299 | 300 | if getattr(self.result_column, key, None) != val: |
|
300 | 301 | k.setdefault(key, val) |
|
301 | 302 | |
|
302 | 303 | # inspect types |
|
303 | 304 | if not self.are_column_types_eq(self.result_column.type, new_col.type): |
|
304 | 305 | k.setdefault('type', new_col.type) |
|
305 | 306 | |
|
306 | 307 | if len(p): |
|
307 | 308 | k = self._extract_parameters(p, k, self.result_column) |
|
308 | 309 | return k |
|
309 | 310 | |
|
310 | 311 | def apply_diffs(self, diffs): |
|
311 | 312 | """Populate dict and column object with new values""" |
|
312 | 313 | self.diffs = diffs |
|
313 | 314 | for key in self.diff_keys: |
|
314 | 315 | if key in diffs: |
|
315 | 316 | setattr(self.result_column, key, diffs[key]) |
|
316 | 317 | |
|
317 | 318 | self.process_column(self.result_column) |
|
318 | 319 | |
|
319 | 320 | # create an instance of class type if not yet |
|
320 | 321 | if 'type' in diffs and callable(self.result_column.type): |
|
321 | 322 | self.result_column.type = self.result_column.type() |
|
322 | 323 | |
|
323 | 324 | # add column to the table |
|
324 | 325 | if self.table is not None and self.alter_metadata: |
|
325 | 326 | self.result_column.add_to_table(self.table) |
|
326 | 327 | |
|
327 | 328 | def are_column_types_eq(self, old_type, new_type): |
|
328 | 329 | """Compares two types to be equal""" |
|
329 | 330 | ret = old_type.__class__ == new_type.__class__ |
|
330 | 331 | |
|
331 | 332 | # String length is a special case |
|
332 | 333 | if ret and isinstance(new_type, sqlalchemy.types.String): |
|
333 | 334 | ret = (getattr(old_type, 'length', None) == \ |
|
334 | 335 | getattr(new_type, 'length', None)) |
|
335 | 336 | return ret |
|
336 | 337 | |
|
337 | 338 | def _extract_parameters(self, p, k, column): |
|
338 | 339 | """Extracts data from p and modifies diffs""" |
|
339 | 340 | p = list(p) |
|
340 | 341 | while len(p): |
|
341 | if isinstance(p[0],

342 | if isinstance(p[0], compat.string_types):
|
342 | 343 | k.setdefault('name', p.pop(0)) |
|
343 | 344 | elif isinstance(p[0], sqlalchemy.types.TypeEngine): |
|
344 | 345 | k.setdefault('type', p.pop(0)) |
|
345 | 346 | elif callable(p[0]): |
|
346 | 347 | p[0] = p[0]() |
|
347 | 348 | else: |
|
348 | 349 | break |
|
349 | 350 | |
|
350 | 351 | if len(p): |
|
351 | 352 | new_col = column.copy_fixed() |
|
352 | 353 | new_col._init_items(*p) |
|
353 | 354 | k = self.compare_2_columns(column, new_col, **k) |
|
354 | 355 | return k |
|
355 | 356 | |
|
356 | 357 | def process_column(self, column): |
|
357 | 358 | """Processes default values for column""" |
|
358 | 359 | # XXX: this is a snippet from SA processing of positional parameters |
|
359 | 360 | toinit = list() |
|
360 | 361 | |
|
361 | 362 | if column.server_default is not None: |
|
362 | 363 | if isinstance(column.server_default, sqlalchemy.FetchedValue): |
|
363 | 364 | toinit.append(column.server_default) |
|
364 | 365 | else: |
|
365 | 366 | toinit.append(sqlalchemy.DefaultClause(column.server_default)) |
|
366 | 367 | if column.server_onupdate is not None: |
|
367 | 368 | if isinstance(column.server_onupdate, FetchedValue): |
|
368 | 369 | toinit.append(column.server_default) |
|
369 | 370 | else: |
|
370 | 371 | toinit.append(sqlalchemy.DefaultClause(column.server_onupdate, |
|
371 | 372 | for_update=True)) |
|
372 | 373 | if toinit: |
|
373 | 374 | column._init_items(*toinit) |
|
374 | 375 | |
|
375 | 376 | def _get_table(self): |
|
376 | 377 | return getattr(self, '_table', None) |
|
377 | 378 | |
|
378 | 379 | def _set_table(self, table): |
|
379 | if isinstance(table,

380 | if isinstance(table, compat.string_types):
|
380 | 381 | if self.alter_metadata: |
|
381 | 382 | if not self.meta: |
|
382 | 383 | raise ValueError("metadata must be specified for table" |
|
383 | 384 | " reflection when using alter_metadata") |
|
384 | 385 | meta = self.meta |
|
385 | 386 | if self.engine: |
|
386 | 387 | meta.bind = self.engine |
|
387 | 388 | else: |
|
388 | 389 | if not self.engine and not self.meta: |
|
389 | 390 | raise ValueError("engine or metadata must be specified" |
|
390 | 391 | " to reflect tables") |
|
391 | 392 | if not self.engine: |
|
392 | 393 | self.engine = self.meta.bind |
|
393 | 394 | meta = sqlalchemy.MetaData(bind=self.engine) |
|
394 | 395 | self._table = sqlalchemy.Table(table, meta, autoload=True) |
|
395 | 396 | elif isinstance(table, sqlalchemy.Table): |
|
396 | 397 | self._table = table |
|
397 | 398 | if not self.alter_metadata: |
|
398 | 399 | self._table.meta = sqlalchemy.MetaData(bind=self._table.bind) |
|
399 | 400 | def _get_result_column(self): |
|
400 | 401 | return getattr(self, '_result_column', None) |
|
401 | 402 | |
|
402 | 403 | def _set_result_column(self, column): |
|
403 | 404 | """Set Column to Table based on alter_metadata evaluation.""" |
|
404 | 405 | self.process_column(column) |
|
405 | 406 | if not hasattr(self, 'current_name'): |
|
406 | 407 | self.current_name = column.name |
|
407 | 408 | if self.alter_metadata: |
|
408 | 409 | self._result_column = column |
|
409 | 410 | else: |
|
410 | 411 | self._result_column = column.copy_fixed() |
|
411 | 412 | |
|
412 | 413 | table = property(_get_table, _set_table) |
|
413 | 414 | result_column = property(_get_result_column, _set_result_column) |
|
414 | 415 | |
|
415 | 416 | |
|
416 | 417 | class ChangesetTable(object): |
|
417 | 418 | """Changeset extensions to SQLAlchemy tables.""" |
|
418 | 419 | |
|
419 | 420 | def create_column(self, column, *p, **kw): |
|
420 | 421 | """Creates a column. |
|
421 | 422 | |
|
422 | 423 | The column parameter may be a column definition or the name of |
|
423 | 424 | a column in this table. |
|
424 | 425 | |
|
425 | 426 | API to :meth:`ChangesetColumn.create` |
|
426 | 427 | |
|
427 | 428 | :param column: Column to be created |
|
428 | 429 | :type column: Column instance or string |
|
429 | 430 | """ |
|
430 | 431 | if not isinstance(column, sqlalchemy.Column): |
|
431 | 432 | # It's a column name |
|
432 | 433 | column = getattr(self.c, str(column)) |
|
433 | 434 | column.create(table=self, *p, **kw) |
|
434 | 435 | |
|
435 | 436 | def drop_column(self, column, *p, **kw): |
|
436 | 437 | """Drop a column, given its name or definition. |
|
437 | 438 | |
|
438 | 439 | API to :meth:`ChangesetColumn.drop` |
|
439 | 440 | |
|
440 | 441 | :param column: Column to be dropped
|
441 | 442 | :type column: Column instance or string |
|
442 | 443 | """ |
|
443 | 444 | if not isinstance(column, sqlalchemy.Column): |
|
444 | 445 | # It's a column name |
|
445 | 446 | try: |
|
446 | 447 | column = getattr(self.c, str(column)) |
|
447 | 448 | except AttributeError: |
|
448 | 449 | # That column isn't part of the table. We don't need |
|
449 | 450 | # its entire definition to drop the column, just its |
|
450 | 451 | # name, so create a dummy column with the same name. |
|
451 | 452 | column = sqlalchemy.Column(str(column), sqlalchemy.Integer()) |
|
452 | 453 | column.drop(table=self, *p, **kw) |
|
453 | 454 | |
|
454 | 455 | def rename(self, name, connection=None, **kwargs): |
|
455 | 456 | """Rename this table. |
|
456 | 457 | |
|
457 | 458 | :param name: New name of the table. |
|
458 | 459 | :type name: string |
|
459 | 460 | :param connection: reuse connection instead of creating a new one.
|
460 | 461 | :type connection: :class:`sqlalchemy.engine.base.Connection` instance |
|
461 | 462 | """ |
|
462 | 463 | engine = self.bind |
|
463 | 464 | self.new_name = name |
|
464 | 465 | visitorcallable = get_engine_visitor(engine, 'schemachanger') |
|
465 | 466 | run_single_visitor(engine, visitorcallable, self, connection, **kwargs) |
|
466 | 467 | |
|
467 | 468 | # Fix metadata registration |
|
468 | 469 | self.name = name |
|
469 | 470 | self.deregister() |
|
470 | 471 | self._set_parent(self.metadata) |
|
471 | 472 | |
|
472 | 473 | def _meta_key(self): |
|
473 | 474 | """Get the meta key for this table.""" |
|
474 | 475 | return sqlalchemy.schema._get_table_key(self.name, self.schema) |
|
475 | 476 | |
|
476 | 477 | def deregister(self): |
|
477 | 478 | """Remove this table from its metadata""" |
|
478 | 479 | if SQLA_07: |
|
479 | 480 | self.metadata._remove_table(self.name, self.schema) |
|
480 | 481 | else: |
|
481 | 482 | key = self._meta_key() |
|
482 | 483 | meta = self.metadata |
|
483 | 484 | if key in meta.tables: |
|
484 | 485 | del meta.tables[key] |
|
485 | 486 | |
|
486 | 487 | |
|
487 | 488 | class ChangesetColumn(object): |
|
488 | 489 | """Changeset extensions to SQLAlchemy columns.""" |
|
489 | 490 | |
|
490 | 491 | def alter(self, *p, **k): |
|
491 | 492 | """Makes a call to :func:`alter_column` for the column this |
|
492 | 493 | method is called on. |
|
493 | 494 | """ |
|
494 | 495 | if 'table' not in k: |
|
495 | 496 | k['table'] = self.table |
|
496 | 497 | if 'engine' not in k: |
|
497 | 498 | k['engine'] = k['table'].bind |
|
498 | 499 | return alter_column(self, *p, **k) |
|
499 | 500 | |
|
500 | 501 | def create(self, table=None, index_name=None, unique_name=None, |
|
501 | 502 | primary_key_name=None, populate_default=True, connection=None, **kwargs): |
|
502 | 503 | """Create this column in the database. |
|
503 | 504 | |
|
504 | 505 | Assumes the given table exists. ``ALTER TABLE ADD COLUMN``, |
|
505 | 506 | for most databases. |
|
506 | 507 | |
|
507 | 508 | :param table: Table instance to create on. |
|
508 | 509 | :param index_name: Creates :class:`ChangesetIndex` on this column. |
|
509 | 510 | :param unique_name: Creates :class:\ |
|
510 | 511 | `~migrate.changeset.constraint.UniqueConstraint` on this column. |
|
511 | 512 | :param primary_key_name: Creates :class:\ |
|
512 | 513 | `~migrate.changeset.constraint.PrimaryKeyConstraint` on this column. |
|
513 | 514 | :param populate_default: If True, created column will be \ |
|
514 | 515 | populated with defaults |
|
515 | 516 | :param connection: reuse connection instead of creating a new one.
|
516 | 517 | :type table: Table instance |
|
517 | 518 | :type index_name: string |
|
518 | 519 | :type unique_name: string |
|
519 | 520 | :type primary_key_name: string |
|
520 | 521 | :type populate_default: bool |
|
521 | 522 | :type connection: :class:`sqlalchemy.engine.base.Connection` instance |
|
522 | 523 | |
|
523 | 524 | :returns: self |
|
524 | 525 | """ |
|
525 | 526 | self.populate_default = populate_default |
|
526 | 527 | self.index_name = index_name |
|
527 | 528 | self.unique_name = unique_name |
|
528 | 529 | self.primary_key_name = primary_key_name |
|
529 | 530 | for cons in ('index_name', 'unique_name', 'primary_key_name'): |
|
530 | 531 | self._check_sanity_constraints(cons) |
|
531 | 532 | |
|
532 | 533 | self.add_to_table(table) |
|
533 | 534 | engine = self.table.bind |
|
534 | 535 | visitorcallable = get_engine_visitor(engine, 'columngenerator') |
|
535 | 536 | engine._run_visitor(visitorcallable, self, connection, **kwargs) |
|
536 | 537 | |
|
537 | 538 | # TODO: reuse existing connection |
|
538 | 539 | if self.populate_default and self.default is not None: |
|
539 | 540 | stmt = table.update().values({self: engine._execute_default(self.default)}) |
|
540 | 541 | engine.execute(stmt) |
|
541 | 542 | |
|
542 | 543 | return self |
|
543 | 544 | |
|
544 | 545 | def drop(self, table=None, connection=None, **kwargs): |
|
545 | 546 | """Drop this column from the database, leaving its table intact. |
|
546 | 547 | |
|
547 | 548 | ``ALTER TABLE DROP COLUMN``, for most databases. |
|
548 | 549 | |
|
549 | 550 | :param connection: reuse connection instead of creating a new one.
|
550 | 551 | :type connection: :class:`sqlalchemy.engine.base.Connection` instance |
|
551 | 552 | """ |
|
552 | 553 | if table is not None: |
|
553 | 554 | self.table = table |
|
554 | 555 | engine = self.table.bind |
|
555 | 556 | visitorcallable = get_engine_visitor(engine, 'columndropper') |
|
556 | 557 | engine._run_visitor(visitorcallable, self, connection, **kwargs) |
|
557 | 558 | self.remove_from_table(self.table, unset_table=False) |
|
558 | 559 | self.table = None |
|
559 | 560 | return self |
|
560 | 561 | |
|
561 | 562 | def add_to_table(self, table): |
|
562 | 563 | if table is not None and self.table is None: |
|
563 | 564 | if SQLA_07: |
|
564 | 565 | table.append_column(self) |
|
565 | 566 | else: |
|
566 | 567 | self._set_parent(table) |
|
567 | 568 | |
|
568 | 569 | def _col_name_in_constraint(self,cons,name): |
|
569 | 570 | return False |
|
570 | 571 | |
|
571 | 572 | def remove_from_table(self, table, unset_table=True): |
|
572 | 573 | # TODO: remove primary keys, constraints, etc |
|
573 | 574 | if unset_table: |
|
574 | 575 | self.table = None |
|
575 | 576 | |
|
576 | 577 | to_drop = set() |
|
577 | 578 | for index in table.indexes: |
|
578 | 579 | columns = [] |
|
579 | 580 | for col in index.columns: |
|
580 | 581 | if col.name!=self.name: |
|
581 | 582 | columns.append(col) |
|
582 | 583 | if columns: |
|
583 | 584 | index.columns = columns |
|
584 | 585 | if SQLA_08: |
|
585 | 586 | index.expressions = columns |
|
586 | 587 | else: |
|
587 | 588 | to_drop.add(index) |
|
588 | 589 | table.indexes = table.indexes - to_drop |
|
589 | 590 | |
|
590 | 591 | to_drop = set() |
|
591 | 592 | for cons in table.constraints: |
|
592 | 593 | # TODO: deal with other types of constraint |
|
593 | 594 | if isinstance(cons,(ForeignKeyConstraint, |
|
594 | 595 | UniqueConstraint)): |
|
595 | 596 | for col_name in cons.columns: |
|
596 | if not isinstance(col_name,

597 | if not isinstance(col_name, compat.string_types):
|
597 | 598 | col_name = col_name.name |
|
598 | 599 | if self.name==col_name: |
|
599 | 600 | to_drop.add(cons) |
|
600 | 601 | table.constraints = table.constraints - to_drop |
|
601 | 602 | |
|
602 | 603 | if table.c.contains_column(self): |
|
603 | 604 | if SQLA_07: |
|
604 | 605 | table._columns.remove(self) |
|
605 | 606 | else: |
|
606 | 607 | table.c.remove(self) |
|
607 | 608 | |
|
608 | 609 | # TODO: this is fixed in 0.6 |
|
609 | 610 | def copy_fixed(self, **kw): |
|
610 | 611 | """Create a copy of this ``Column``, with all attributes.""" |
|
611 | 612 | q = util.safe_quote(self) |
|
612 | 613 | return sqlalchemy.Column(self.name, self.type, self.default, |
|
613 | 614 | key=self.key, |
|
614 | 615 | primary_key=self.primary_key, |
|
615 | 616 | nullable=self.nullable, |
|
616 | 617 | quote=q, |
|
617 | 618 | index=self.index, |
|
618 | 619 | unique=self.unique, |
|
619 | 620 | onupdate=self.onupdate, |
|
620 | 621 | autoincrement=self.autoincrement, |
|
621 | 622 | server_default=self.server_default, |
|
622 | 623 | server_onupdate=self.server_onupdate, |
|
623 | 624 | *[c.copy(**kw) for c in self.constraints]) |
|
624 | 625 | |
|
625 | 626 | def _check_sanity_constraints(self, name): |
|
626 | 627 | """Check if constraints names are correct""" |
|
627 | 628 | obj = getattr(self, name) |
|
628 | 629 | if (getattr(self, name[:-5]) and not obj): |
|
629 | 630 | raise InvalidConstraintError("Column.create() accepts index_name," |
|
630 | 631 | " primary_key_name and unique_name to generate constraints") |
|
631 | if not isinstance(obj,

632 | if not isinstance(obj, compat.string_types) and obj is not None:
|
632 | 633 | raise InvalidConstraintError( |
|
633 | 634 | "%s argument for column must be constraint name" % name) |
|
634 | 635 | |
|
635 | 636 | |
|
636 | 637 | class ChangesetIndex(object): |
|
637 | 638 | """Changeset extensions to SQLAlchemy Indexes.""" |
|
638 | 639 | |
|
639 | 640 | __visit_name__ = 'index' |
|
640 | 641 | |
|
641 | 642 | def rename(self, name, connection=None, **kwargs): |
|
642 | 643 | """Change the name of an index. |
|
643 | 644 | |
|
644 | 645 | :param name: New name of the Index. |
|
645 | 646 | :type name: string |
|
646 | 647 | :param connection: reuse connection instead of creating a new one.
|
647 | 648 | :type connection: :class:`sqlalchemy.engine.base.Connection` instance |
|
648 | 649 | """ |
|
649 | 650 | engine = self.table.bind |
|
650 | 651 | self.new_name = name |
|
651 | 652 | visitorcallable = get_engine_visitor(engine, 'schemachanger') |
|
652 | 653 | engine._run_visitor(visitorcallable, self, connection, **kwargs) |
|
653 | 654 | self.name = name |
|
654 | 655 | |
|
655 | 656 | |
|
656 | 657 | class ChangesetDefaultClause(object): |
|
657 | 658 | """Implements comparison between :class:`DefaultClause` instances""" |
|
658 | 659 | |
|
659 | 660 | def __eq__(self, other): |
|
660 | 661 | if isinstance(other, self.__class__): |
|
661 | 662 | if self.arg == other.arg: |
|
662 | 663 | return True |
|
663 | 664 | |
|
664 | 665 | def __ne__(self, other): |
|
665 | 666 | return not self.__eq__(other) |
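For orientation, the alter_column helper documented near the top of this hunk is normally driven with keyword parameters that describe the desired end state of the column. A hypothetical usage sketch (the table and column names are made up, and the import path assumes this vendored copy of sqlalchemy-migrate):

    from sqlalchemy import MetaData, String, Table, create_engine
    from rhodecode.lib.dbmigrate.migrate.changeset.schema import alter_column

    engine = create_engine('sqlite:///example.db')
    meta = MetaData(bind=engine)
    users = Table('users', meta, autoload=True)  # reflect an existing table

    # Rename the column and widen its type in one call; the returned
    # ColumnDelta describes the change that was applied.
    delta = alter_column('fullname', name='full_name', type=String(255),
                         table=users, engine=engine)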
@@ -1,221 +1,222 b'' | |||
|
1 | 1 | """ |
|
2 | 2 | Database schema version management. |
|
3 | 3 | """ |
|
4 | 4 | import sys |
|
5 | 5 | import logging |
|
6 | 6 | |
|
7 | 7 | from sqlalchemy import (Table, Column, MetaData, String, Text, Integer, |
|
8 | 8 | create_engine) |
|
9 | 9 | from sqlalchemy.sql import and_ |
|
10 | 10 | from sqlalchemy import exc as sa_exceptions |
|
11 | 11 | from sqlalchemy.sql import bindparam |
|
12 | from pyramid import compat | |
|
12 | 13 | |
|
13 | 14 | from rhodecode.lib.dbmigrate.migrate import exceptions |
|
14 | 15 | from rhodecode.lib.dbmigrate.migrate.changeset import SQLA_07 |
|
15 | 16 | from rhodecode.lib.dbmigrate.migrate.versioning import genmodel, schemadiff |
|
16 | 17 | from rhodecode.lib.dbmigrate.migrate.versioning.repository import Repository |
|
17 | 18 | from rhodecode.lib.dbmigrate.migrate.versioning.util import load_model |
|
18 | 19 | from rhodecode.lib.dbmigrate.migrate.versioning.version import VerNum |
|
19 | 20 | |
|
20 | 21 | |
|
21 | 22 | log = logging.getLogger(__name__) |
|
22 | 23 | |
|
23 | 24 | |
|
24 | 25 | class ControlledSchema(object): |
|
25 | 26 | """A database under version control""" |
|
26 | 27 | |
|
27 | 28 | def __init__(self, engine, repository): |
|
28 | if isinstance(repository,

29 | if isinstance(repository, compat.string_types):
|
29 | 30 | repository = Repository(repository) |
|
30 | 31 | self.engine = engine |
|
31 | 32 | self.repository = repository |
|
32 | 33 | self.meta = MetaData(engine) |
|
33 | 34 | self.load() |
|
34 | 35 | |
|
35 | 36 | def __eq__(self, other): |
|
36 | 37 | """Compare two schemas by repositories and versions""" |
|
37 | 38 | return (self.repository is other.repository \ |
|
38 | 39 | and self.version == other.version) |
|
39 | 40 | |
|
40 | 41 | def load(self): |
|
41 | 42 | """Load controlled schema version info from DB""" |
|
42 | 43 | tname = self.repository.version_table |
|
43 | 44 | try: |
|
44 | 45 | if not hasattr(self, 'table') or self.table is None: |
|
45 | 46 | self.table = Table(tname, self.meta, autoload=True) |
|
46 | 47 | |
|
47 | 48 | result = self.engine.execute(self.table.select( |
|
48 | 49 | self.table.c.repository_id == str(self.repository.id))) |
|
49 | 50 | |
|
50 | 51 | data = list(result)[0] |
|
51 | 52 | except: |
|
52 | 53 | cls, exc, tb = sys.exc_info() |
|
53 | 54 | raise exceptions.DatabaseNotControlledError, exc.__str__(), tb |
|
54 | 55 | |
|
55 | 56 | self.version = data['version'] |
|
56 | 57 | return data |
|
57 | 58 | |
|
58 | 59 | def drop(self): |
|
59 | 60 | """ |
|
60 | 61 | Remove version control from a database. |
|
61 | 62 | """ |
|
62 | 63 | if SQLA_07: |
|
63 | 64 | try: |
|
64 | 65 | self.table.drop() |
|
65 | 66 | except sa_exceptions.DatabaseError: |
|
66 | 67 | raise exceptions.DatabaseNotControlledError(str(self.table)) |
|
67 | 68 | else: |
|
68 | 69 | try: |
|
69 | 70 | self.table.drop() |
|
70 | 71 | except (sa_exceptions.SQLError): |
|
71 | 72 | raise exceptions.DatabaseNotControlledError(str(self.table)) |
|
72 | 73 | |
|
73 | 74 | def changeset(self, version=None): |
|
74 | 75 | """API to Changeset creation. |
|
75 | 76 | |
|
76 | 77 | Uses self.version for start version and engine.name |
|
77 | 78 | to get database name. |
|
78 | 79 | """ |
|
79 | 80 | database = self.engine.name |
|
80 | 81 | start_ver = self.version |
|
81 | 82 | changeset = self.repository.changeset(database, start_ver, version) |
|
82 | 83 | return changeset |
|
83 | 84 | |
|
84 | 85 | def runchange(self, ver, change, step): |
|
85 | 86 | startver = ver |
|
86 | 87 | endver = ver + step |
|
87 | 88 | # Current database version must be correct! Don't run if corrupt! |
|
88 | 89 | if self.version != startver: |
|
89 | 90 | raise exceptions.InvalidVersionError("%s is not %s" % \ |
|
90 | 91 | (self.version, startver)) |
|
91 | 92 | # Run the change |
|
92 | 93 | change.run(self.engine, step) |
|
93 | 94 | |
|
94 | 95 | # Update/refresh database version |
|
95 | 96 | self.update_repository_table(startver, endver) |
|
96 | 97 | self.load() |
|
97 | 98 | |
|
98 | 99 | def update_repository_table(self, startver, endver): |
|
99 | 100 | """Update version_table with new information""" |
|
100 | 101 | update = self.table.update(and_(self.table.c.version == int(startver), |
|
101 | 102 | self.table.c.repository_id == str(self.repository.id))) |
|
102 | 103 | self.engine.execute(update, version=int(endver)) |
|
103 | 104 | |
|
104 | 105 | def upgrade(self, version=None): |
|
105 | 106 | """ |
|
106 | 107 | Upgrade (or downgrade) to a specified version, or latest version. |
|
107 | 108 | """ |
|
108 | 109 | changeset = self.changeset(version) |
|
109 | 110 | for ver, change in changeset: |
|
110 | 111 | self.runchange(ver, change, changeset.step) |
|
111 | 112 | |
|
112 | 113 | def update_db_from_model(self, model): |
|
113 | 114 | """ |
|
114 | 115 | Modify the database to match the structure of the current Python model. |
|
115 | 116 | """ |
|
116 | 117 | model = load_model(model) |
|
117 | 118 | |
|
118 | 119 | diff = schemadiff.getDiffOfModelAgainstDatabase( |
|
119 | 120 | model, self.engine, excludeTables=[self.repository.version_table] |
|
120 | 121 | ) |
|
121 | 122 | genmodel.ModelGenerator(diff,self.engine).runB2A() |
|
122 | 123 | |
|
123 | 124 | self.update_repository_table(self.version, int(self.repository.latest)) |
|
124 | 125 | |
|
125 | 126 | self.load() |
|
126 | 127 | |
|
127 | 128 | @classmethod |
|
128 | 129 | def create(cls, engine, repository, version=None): |
|
129 | 130 | """ |
|
130 | 131 | Declare a database to be under a repository's version control. |
|
131 | 132 | |
|
132 | 133 | :raises: :exc:`DatabaseAlreadyControlledError` |
|
133 | 134 | :returns: :class:`ControlledSchema` |
|
134 | 135 | """ |
|
135 | 136 | # Confirm that the version # is valid: positive, integer, |
|
136 | 137 | # exists in repos |
|
137 | if isinstance(repository,

138 | if isinstance(repository, compat.string_types):
|
138 | 139 | repository = Repository(repository) |
|
139 | 140 | version = cls._validate_version(repository, version) |
|
140 | 141 | table = cls._create_table_version(engine, repository, version) |
|
141 | 142 | # TODO: history table |
|
142 | 143 | # Load repository information and return |
|
143 | 144 | return cls(engine, repository) |
|
144 | 145 | |
|
145 | 146 | @classmethod |
|
146 | 147 | def _validate_version(cls, repository, version): |
|
147 | 148 | """ |
|
148 | 149 | Ensures this is a valid version number for this repository. |
|
149 | 150 | |
|
150 | 151 | :raises: :exc:`InvalidVersionError` if invalid |
|
151 | 152 | :return: valid version number |
|
152 | 153 | """ |
|
153 | 154 | if version is None: |
|
154 | 155 | version = 0 |
|
155 | 156 | try: |
|
156 | 157 | version = VerNum(version) # raises valueerror |
|
157 | 158 | if version < 0 or version > repository.latest: |
|
158 | 159 | raise ValueError() |
|
159 | 160 | except ValueError: |
|
160 | 161 | raise exceptions.InvalidVersionError(version) |
|
161 | 162 | return version |
|
162 | 163 | |
|
163 | 164 | @classmethod |
|
164 | 165 | def _create_table_version(cls, engine, repository, version): |
|
165 | 166 | """ |
|
166 | 167 | Creates the versioning table in a database. |
|
167 | 168 | |
|
168 | 169 | :raises: :exc:`DatabaseAlreadyControlledError` |
|
169 | 170 | """ |
|
170 | 171 | # Create tables |
|
171 | 172 | tname = repository.version_table |
|
172 | 173 | meta = MetaData(engine) |
|
173 | 174 | |
|
174 | 175 | table = Table( |
|
175 | 176 | tname, meta, |
|
176 | 177 | Column('repository_id', String(250), primary_key=True), |
|
177 | 178 | Column('repository_path', Text), |
|
178 | 179 | Column('version', Integer), ) |
|
179 | 180 | |
|
180 | 181 | # there can be multiple repositories/schemas in the same db |
|
181 | 182 | if not table.exists(): |
|
182 | 183 | table.create() |
|
183 | 184 | |
|
184 | 185 | # test for existing repository_id |
|
185 | 186 | s = table.select(table.c.repository_id == bindparam("repository_id")) |
|
186 | 187 | result = engine.execute(s, repository_id=repository.id) |
|
187 | 188 | if result.fetchone(): |
|
188 | 189 | raise exceptions.DatabaseAlreadyControlledError |
|
189 | 190 | |
|
190 | 191 | # Insert data |
|
191 | 192 | engine.execute(table.insert().values( |
|
192 | 193 | repository_id=repository.id, |
|
193 | 194 | repository_path=repository.path, |
|
194 | 195 | version=int(version))) |
|
195 | 196 | return table |
|
196 | 197 | |
|
197 | 198 | @classmethod |
|
198 | 199 | def compare_model_to_db(cls, engine, model, repository): |
|
199 | 200 | """ |
|
200 | 201 | Compare the current model against the current database. |
|
201 | 202 | """ |
|
202 | if isinstance(repository,

203 | if isinstance(repository, compat.string_types):
|
203 | 204 | repository = Repository(repository) |
|
204 | 205 | model = load_model(model) |
|
205 | 206 | |
|
206 | 207 | diff = schemadiff.getDiffOfModelAgainstDatabase( |
|
207 | 208 | model, engine, excludeTables=[repository.version_table]) |
|
208 | 209 | return diff |
|
209 | 210 | |
|
210 | 211 | @classmethod |
|
211 | 212 | def create_model(cls, engine, repository, declarative=False): |
|
212 | 213 | """ |
|
213 | 214 | Dump the current database as a Python model. |
|
214 | 215 | """ |
|
215 | if isinstance(repository,

216 | if isinstance(repository, compat.string_types):
|
216 | 217 | repository = Repository(repository) |
|
217 | 218 | |
|
218 | 219 | diff = schemadiff.getDiffOfModelAgainstDatabase( |
|
219 | 220 | MetaData(), engine, excludeTables=[repository.version_table] |
|
220 | 221 | ) |
|
221 | 222 | return genmodel.ModelGenerator(diff, engine, declarative).genBDefinition() |
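Roughly, ControlledSchema is the piece that ties a live database to a migrate repository. A hypothetical sketch of how it is used (the database URL and repository path are placeholders, and the import path assumes this vendored module layout):

    from sqlalchemy import create_engine
    from rhodecode.lib.dbmigrate.migrate.versioning.schema import ControlledSchema

    engine = create_engine('sqlite:///example.db')

    # Create the version table and record version 0 for this repository.
    schema = ControlledSchema.create(engine, '/path/to/migrate_repository', version=0)

    # Apply every changeset up to the repository's latest version,
    # updating the version table after each step.
    schema.upgrade()
    print(schema.version)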
@@ -1,159 +1,160 b'' | |||
|
1 | 1 | #!/usr/bin/env python |
|
2 | 2 | # -*- coding: utf-8 -*- |
|
3 | 3 | |
|
4 | 4 | import shutil |
|
5 | 5 | import warnings |
|
6 | 6 | import logging |
|
7 | 7 | import inspect |
|
8 | 8 | from StringIO import StringIO |
|
9 | 9 | |
|
10 | from pyramid import compat | |
|
10 | 11 | from rhodecode.lib.dbmigrate import migrate |
|
11 | 12 | from rhodecode.lib.dbmigrate.migrate.versioning import genmodel, schemadiff |
|
12 | 13 | from rhodecode.lib.dbmigrate.migrate.versioning.config import operations |
|
13 | 14 | from rhodecode.lib.dbmigrate.migrate.versioning.template import Template |
|
14 | 15 | from rhodecode.lib.dbmigrate.migrate.versioning.script import base |
|
15 | 16 | from rhodecode.lib.dbmigrate.migrate.versioning.util import import_path, load_model, with_engine |
|
16 | 17 | from rhodecode.lib.dbmigrate.migrate.exceptions import MigrateDeprecationWarning, InvalidScriptError, ScriptError |
|
17 | 18 | |
|
18 | 19 | log = logging.getLogger(__name__) |
|
19 | 20 | __all__ = ['PythonScript'] |
|
20 | 21 | |
|
21 | 22 | |
|
22 | 23 | class PythonScript(base.BaseScript): |
|
23 | 24 | """Base for Python scripts""" |
|
24 | 25 | |
|
25 | 26 | @classmethod |
|
26 | 27 | def create(cls, path, **opts): |
|
27 | 28 | """Create an empty migration script at specified path |
|
28 | 29 | |
|
29 | 30 | :returns: :class:`PythonScript instance <migrate.versioning.script.py.PythonScript>`""" |
|
30 | 31 | cls.require_notfound(path) |
|
31 | 32 | |
|
32 | 33 | src = Template(opts.pop('templates_path', None)).get_script(theme=opts.pop('templates_theme', None)) |
|
33 | 34 | shutil.copy(src, path) |
|
34 | 35 | |
|
35 | 36 | return cls(path) |
|
36 | 37 | |
|
37 | 38 | @classmethod |
|
38 | 39 | def make_update_script_for_model(cls, engine, oldmodel, |
|
39 | 40 | model, repository, **opts): |
|
40 | 41 | """Create a migration script based on difference between two SA models. |
|
41 | 42 | |
|
42 | 43 | :param repository: path to migrate repository |
|
43 | 44 | :param oldmodel: dotted.module.name:SAClass or SAClass object |
|
44 | 45 | :param model: dotted.module.name:SAClass or SAClass object |
|
45 | 46 | :param engine: SQLAlchemy engine |
|
46 | 47 | :type repository: string or :class:`Repository instance <migrate.versioning.repository.Repository>` |
|
47 | 48 | :type oldmodel: string or Class |
|
48 | 49 | :type model: string or Class |
|
49 | 50 | :type engine: Engine instance |
|
50 | 51 | :returns: Upgrade / Downgrade script |
|
51 | 52 | :rtype: string |
|
52 | 53 | """ |
|
53 | 54 | |
|
54 | if isinstance(repository,

55 | if isinstance(repository, compat.string_types):
|
55 | 56 | # oh dear, an import cycle! |
|
56 | 57 | from rhodecode.lib.dbmigrate.migrate.versioning.repository import Repository |
|
57 | 58 | repository = Repository(repository) |
|
58 | 59 | |
|
59 | 60 | oldmodel = load_model(oldmodel) |
|
60 | 61 | model = load_model(model) |
|
61 | 62 | |
|
62 | 63 | # Compute differences. |
|
63 | 64 | diff = schemadiff.getDiffOfModelAgainstModel( |
|
64 | 65 | model, |
|
65 | 66 | oldmodel, |
|
66 | 67 | excludeTables=[repository.version_table]) |
|
67 | 68 | # TODO: diff can be False (there is no difference?) |
|
68 | 69 | decls, upgradeCommands, downgradeCommands = \ |
|
69 | 70 | genmodel.ModelGenerator(diff,engine).genB2AMigration() |
|
70 | 71 | |
|
71 | 72 | # Store differences into file. |
|
72 | 73 | src = Template(opts.pop('templates_path', None)).get_script(opts.pop('templates_theme', None)) |
|
73 | 74 | with open(src) as f: |
|
74 | 75 | contents = f.read() |
|
75 | 76 | |
|
76 | 77 | # generate source |
|
77 | 78 | search = 'def upgrade(migrate_engine):' |
|
78 | 79 | contents = contents.replace(search, '\n\n'.join((decls, search)), 1) |
|
79 | 80 | if upgradeCommands: |
|
80 | 81 | contents = contents.replace(' pass', upgradeCommands, 1) |
|
81 | 82 | if downgradeCommands: |
|
82 | 83 | contents = contents.replace(' pass', downgradeCommands, 1) |
|
83 | 84 | return contents |
|
84 | 85 | |
|
85 | 86 | @classmethod |
|
86 | 87 | def verify_module(cls, path): |
|
87 | 88 | """Ensure path is a valid script |
|
88 | 89 | |
|
89 | 90 | :param path: Script location |
|
90 | 91 | :type path: string |
|
91 | 92 | :raises: :exc:`InvalidScriptError <migrate.exceptions.InvalidScriptError>` |
|
92 | 93 | :returns: Python module |
|
93 | 94 | """ |
|
94 | 95 | # Try to import and get the upgrade() func |
|
95 | 96 | module = import_path(path) |
|
96 | 97 | try: |
|
97 | 98 | assert callable(module.upgrade) |
|
98 | 99 | except Exception as e: |
|
99 | 100 | raise InvalidScriptError(path + ': %s' % str(e)) |
|
100 | 101 | return module |
|
101 | 102 | |
|
102 | 103 | def preview_sql(self, url, step, **args): |
|
103 | 104 | """Mocks SQLAlchemy Engine to store all executed calls in a string |
|
104 | 105 | and runs :meth:`PythonScript.run <migrate.versioning.script.py.PythonScript.run>` |
|
105 | 106 | |
|
106 | 107 | :returns: SQL file |
|
107 | 108 | """ |
|
108 | 109 | buf = StringIO() |
|
109 | 110 | args['engine_arg_strategy'] = 'mock' |
|
110 | 111 | args['engine_arg_executor'] = lambda s, p = '': buf.write(str(s) + p) |
|
111 | 112 | |
|
112 | 113 | @with_engine |
|
113 | 114 | def go(url, step, **kw): |
|
114 | 115 | engine = kw.pop('engine') |
|
115 | 116 | self.run(engine, step) |
|
116 | 117 | return buf.getvalue() |
|
117 | 118 | |
|
118 | 119 | return go(url, step, **args) |
|
119 | 120 | |
|
120 | 121 | def run(self, engine, step): |
|
121 | 122 | """Core method of Script file. |
|
122 | 123 | Executes :func:`upgrade` or :func:`downgrade` functions
|
123 | 124 | |
|
124 | 125 | :param engine: SQLAlchemy Engine |
|
125 | 126 | :param step: Operation to run |
|
126 | 127 | :type engine: string |
|
127 | 128 | :type step: int |
|
128 | 129 | """ |
|
129 | 130 | if step > 0: |
|
130 | 131 | op = 'upgrade' |
|
131 | 132 | elif step < 0: |
|
132 | 133 | op = 'downgrade' |
|
133 | 134 | else: |
|
134 | 135 | raise ScriptError("%d is not a valid step" % step) |
|
135 | 136 | |
|
136 | 137 | funcname = base.operations[op] |
|
137 | 138 | script_func = self._func(funcname) |
|
138 | 139 | |
|
139 | 140 | # check for old way of using engine |
|
140 | 141 | if not inspect.getargspec(script_func)[0]: |
|
141 | 142 | raise TypeError("upgrade/downgrade functions must accept engine" |
|
142 | 143 | " parameter (since version 0.5.4)") |
|
143 | 144 | |
|
144 | 145 | script_func(engine) |
|
145 | 146 | |
|
146 | 147 | @property |
|
147 | 148 | def module(self): |
|
148 | 149 | """Calls :meth:`migrate.versioning.script.py.verify_module` |
|
149 | 150 | and returns it. |
|
150 | 151 | """ |
|
151 | 152 | if not hasattr(self, '_module'): |
|
152 | 153 | self._module = self.verify_module(self.path) |
|
153 | 154 | return self._module |
|
154 | 155 | |
|
155 | 156 | def _func(self, funcname): |
|
156 | 157 | if not hasattr(self.module, funcname): |
|
157 | 158 | msg = "Function '%s' is not defined in this script" |
|
158 | 159 | raise ScriptError(msg % funcname) |
|
159 | 160 | return getattr(self.module, funcname) |
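PythonScript.run() above dispatches to module-level upgrade()/downgrade() functions and, since migrate 0.5.4, insists that both accept the engine argument. A minimal script of the expected shape (the table below is purely illustrative) might look like:

    from sqlalchemy import Column, Integer, MetaData, String, Table

    meta = MetaData()
    example = Table('example', meta,
                    Column('id', Integer, primary_key=True),
                    Column('name', String(100)))

    def upgrade(migrate_engine):
        # Called for step > 0: bind the metadata and create the table.
        meta.bind = migrate_engine
        example.create()

    def downgrade(migrate_engine):
        # Called for step < 0: undo whatever upgrade() did.
        meta.bind = migrate_engine
        example.drop()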
@@ -1,179 +1,181 b'' | |||
|
1 | 1 | #!/usr/bin/env python |
|
2 | 2 | # -*- coding: utf-8 -*- |
|
3 | 3 | """.. currentmodule:: migrate.versioning.util""" |
|
4 | 4 | |
|
5 | 5 | import warnings |
|
6 | 6 | import logging |
|
7 | 7 | from decorator import decorator |
|
8 | 8 | from pkg_resources import EntryPoint |
|
9 | 9 | |
|
10 | 10 | from sqlalchemy import create_engine |
|
11 | 11 | from sqlalchemy.engine import Engine |
|
12 | 12 | from sqlalchemy.pool import StaticPool |
|
13 | 13 | |
|
14 | from pyramid import compat | |
|
14 | 15 | from rhodecode.lib.dbmigrate.migrate import exceptions |
|
15 | 16 | from rhodecode.lib.dbmigrate.migrate.versioning.util.keyedinstance import KeyedInstance |
|
16 | 17 | from rhodecode.lib.dbmigrate.migrate.versioning.util.importpath import import_path |
|
17 | 18 | |
|
18 | 19 | |
|
19 | 20 | log = logging.getLogger(__name__) |
|
20 | 21 | |
|
22 | ||
|
21 | 23 | def load_model(dotted_name): |
|
22 | 24 | """Import module and use module-level variable.
|
23 | 25 | |
|
24 | 26 | :param dotted_name: path to model in form of string: ``some.python.module:Class`` |
|
25 | 27 | |
|
26 | 28 | .. versionchanged:: 0.5.4 |
|
27 | 29 | |
|
28 | 30 | """ |
|
29 | if isinstance(dotted_name,

31 | if isinstance(dotted_name, compat.string_types):
|
30 | 32 | if ':' not in dotted_name: |
|
31 | 33 | # backwards compatibility |
|
32 | 34 | warnings.warn('model should be in form of module.model:User ' |
|
33 | 35 | 'and not module.model.User', exceptions.MigrateDeprecationWarning) |
|
34 | 36 | dotted_name = ':'.join(dotted_name.rsplit('.', 1)) |
|
35 | 37 | return EntryPoint.parse('x=%s' % dotted_name).load(False) |
|
36 | 38 | else: |
|
37 | 39 | # Assume it's already loaded. |
|
38 | 40 | return dotted_name |
|
39 | 41 | |
|
40 | 42 | def asbool(obj): |
|
41 | 43 | """Do everything to use object as bool""" |
|
42 | if isinstance(obj,

44 | if isinstance(obj, compat.string_types):
|
43 | 45 | obj = obj.strip().lower() |
|
44 | 46 | if obj in ['true', 'yes', 'on', 'y', 't', '1']: |
|
45 | 47 | return True |
|
46 | 48 | elif obj in ['false', 'no', 'off', 'n', 'f', '0']: |
|
47 | 49 | return False |
|
48 | 50 | else: |
|
49 | 51 | raise ValueError("String is not true/false: %r" % obj) |
|
50 | 52 | if obj in (True, False): |
|
51 | 53 | return bool(obj) |
|
52 | 54 | else: |
|
53 | 55 | raise ValueError("String is not true/false: %r" % obj) |
|
54 | 56 | |
|
55 | 57 | def guess_obj_type(obj): |
|
56 | 58 | """Do everything to guess object type from string |
|
57 | 59 | |
|
58 | 60 | Tries to convert to `int`, `bool` and finally returns if not succeded. |
|
59 | 61 | |
|
60 | 62 | .. versionadded: 0.5.4 |
|
61 | 63 | """ |
|
62 | 64 | |
|
63 | 65 | result = None |
|
64 | 66 | |
|
65 | 67 | try: |
|
66 | 68 | result = int(obj) |
|
67 | 69 | except: |
|
68 | 70 | pass |
|
69 | 71 | |
|
70 | 72 | if result is None: |
|
71 | 73 | try: |
|
72 | 74 | result = asbool(obj) |
|
73 | 75 | except: |
|
74 | 76 | pass |
|
75 | 77 | |
|
76 | 78 | if result is not None: |
|
77 | 79 | return result |
|
78 | 80 | else: |
|
79 | 81 | return obj |
|
80 | 82 | |
|
81 | 83 | @decorator |
|
82 | 84 | def catch_known_errors(f, *a, **kw): |
|
83 | 85 | """Decorator that catches known api errors |
|
84 | 86 | |
|
85 | 87 | .. versionadded: 0.5.4 |
|
86 | 88 | """ |
|
87 | 89 | |
|
88 | 90 | try: |
|
89 | 91 | return f(*a, **kw) |
|
90 | 92 | except exceptions.PathFoundError as e: |
|
91 | 93 | raise exceptions.KnownError("The path %s already exists" % e.args[0]) |
|
92 | 94 | |
|
93 | 95 | def construct_engine(engine, **opts): |
|
94 | 96 | """.. versionadded:: 0.5.4 |
|
95 | 97 | |
|
96 | 98 | Constructs and returns SQLAlchemy engine. |
|
97 | 99 | |
|
98 | 100 | Currently, there are 2 ways to pass create_engine options to :mod:`migrate.versioning.api` functions: |
|
99 | 101 | |
|
100 | 102 | :param engine: connection string or a existing engine |
|
101 | 103 | :param engine_dict: python dictionary of options to pass to `create_engine` |
|
102 | 104 | :param engine_arg_*: keyword parameters to pass to `create_engine` (evaluated with :func:`migrate.versioning.util.guess_obj_type`) |
|
103 | 105 | :type engine_dict: dict |
|
104 | 106 | :type engine: string or Engine instance |
|
105 | 107 | :type engine_arg_*: string |
|
106 | 108 | :returns: SQLAlchemy Engine |
|
107 | 109 | |
|
108 | 110 | .. note:: |
|
109 | 111 | |
|
110 | 112 | keyword parameters override ``engine_dict`` values. |
|
111 | 113 | |
|
112 | 114 | """ |
|
113 | 115 | if isinstance(engine, Engine): |
|
114 | 116 | return engine |
|
115 | elif not isinstance(engine,

117 | elif not isinstance(engine, compat.string_types):
|
116 | 118 | raise ValueError("you need to pass either an existing engine or a database uri") |
|
117 | 119 | |
|
118 | 120 | # get options for create_engine |
|
119 | 121 | if opts.get('engine_dict') and isinstance(opts['engine_dict'], dict): |
|
120 | 122 | kwargs = opts['engine_dict'] |
|
121 | 123 | else: |
|
122 | 124 | kwargs = {} |
|
123 | 125 | |
|
124 | 126 | # DEPRECATED: handle echo the old way |
|
125 | 127 | echo = asbool(opts.get('echo', False)) |
|
126 | 128 | if echo: |
|
127 | 129 | warnings.warn('echo=True parameter is deprecated, pass ' |
|
128 | 130 | 'engine_arg_echo=True or engine_dict={"echo": True}', |
|
129 | 131 | exceptions.MigrateDeprecationWarning) |
|
130 | 132 | kwargs['echo'] = echo |
|
131 | 133 | |
|
132 | 134 | # parse keyword arguments |
|
133 | 135 | for key, value in opts.iteritems(): |
|
134 | 136 | if key.startswith('engine_arg_'): |
|
135 | 137 | kwargs[key[11:]] = guess_obj_type(value) |
|
136 | 138 | |
|
137 | 139 | log.debug('Constructing engine') |
|
138 | 140 | # TODO: return create_engine(engine, poolclass=StaticPool, **kwargs) |
|
139 | 141 | # seems like 0.5.x branch does not work with engine.dispose and staticpool |
|
140 | 142 | return create_engine(engine, **kwargs) |
|
141 | 143 | |
|
142 | 144 | @decorator |
|
143 | 145 | def with_engine(f, *a, **kw): |
|
144 | 146 | """Decorator for :mod:`migrate.versioning.api` functions |
|
145 | 147 | to safely close resources after function usage. |
|
146 | 148 | |
|
147 | 149 | Passes engine parameters to :func:`construct_engine` and |
|
148 | 150 | resulting parameter is available as kw['engine']. |
|
149 | 151 | |
|
150 | 152 | Engine is disposed after wrapped function is executed. |
|
151 | 153 | |
|
152 | 154 | .. versionadded: 0.6.0 |
|
153 | 155 | """ |
|
154 | 156 | url = a[0] |
|
155 | 157 | engine = construct_engine(url, **kw) |
|
156 | 158 | |
|
157 | 159 | try: |
|
158 | 160 | kw['engine'] = engine |
|
159 | 161 | return f(*a, **kw) |
|
160 | 162 | finally: |
|
161 | 163 | if isinstance(engine, Engine) and engine is not url: |
|
162 | 164 | log.debug('Disposing SQLAlchemy engine %s', engine) |
|
163 | 165 | engine.dispose() |
|
164 | 166 | |
|
165 | 167 | |
|
166 | 168 | class Memoize: |
|
167 | 169 | """Memoize(fn) - an instance which acts like fn but memoizes its arguments |
|
168 | 170 | Will only work on functions with non-mutable arguments |
|
169 | 171 | |
|
170 | 172 | ActiveState Code 52201 |
|
171 | 173 | """ |
|
172 | 174 | def __init__(self, fn): |
|
173 | 175 | self.fn = fn |
|
174 | 176 | self.memo = {} |
|
175 | 177 | |
|
176 | 178 | def __call__(self, *args): |
|
177 | 179 | if args not in self.memo: |
|
178 | 180 | self.memo[args] = self.fn(*args) |
|
179 | 181 | return self.memo[args] |
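A few of the helpers in this hunk are easiest to understand from their behaviour; an informal sketch based on the code above (the import path assumes this vendored module layout):

    from rhodecode.lib.dbmigrate.migrate.versioning.util import (
        Memoize, asbool, guess_obj_type)

    asbool('Yes')             # True
    asbool('off')             # False
    asbool('maybe')           # raises ValueError

    guess_obj_type('42')      # 42, converted to int
    guess_obj_type('false')   # False, converted via asbool
    guess_obj_type('sqlite')  # 'sqlite', left untouched

    add = Memoize(lambda a, b: a + b)
    add(2, 3)                 # computed and stored in the memo dict
    add(2, 3)                 # served from the memo dict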
@@ -1,1043 +1,1044 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2019 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import os |
|
22 | 22 | import logging |
|
23 | 23 | import datetime |
|
24 | 24 | import traceback |
|
25 | 25 | from datetime import date |
|
26 | 26 | |
|
27 | 27 | from sqlalchemy import * |
|
28 | 28 | from sqlalchemy.ext.hybrid import hybrid_property |
|
29 | 29 | from sqlalchemy.orm import relationship, joinedload, class_mapper, validates |
|
30 | 30 | from beaker.cache import cache_region, region_invalidate |
|
31 | from pyramid import compat | |
|
31 | 32 | |
|
32 | 33 | from rhodecode.lib.vcs import get_backend |
|
33 | 34 | from rhodecode.lib.vcs.utils.helpers import get_scm |
|
34 | 35 | from rhodecode.lib.vcs.exceptions import VCSError |
|
35 | 36 | from zope.cachedescriptors.property import Lazy as LazyProperty |
|
36 | 37 | from rhodecode.lib.auth import generate_auth_token |
|
37 | 38 | from rhodecode.lib.utils2 import str2bool, safe_str, get_commit_safe, safe_unicode |
|
38 | 39 | from rhodecode.lib.exceptions import UserGroupAssignedException |
|
39 | 40 | from rhodecode.lib.ext_json import json |
|
40 | 41 | |
|
41 | 42 | from rhodecode.model.meta import Base, Session |
|
42 | 43 | from rhodecode.lib.caching_query import FromCache |
|
43 | 44 | |
|
44 | 45 | |
|
45 | 46 | log = logging.getLogger(__name__) |
|
46 | 47 | |
|
47 | 48 | #============================================================================== |
|
48 | 49 | # BASE CLASSES |
|
49 | 50 | #============================================================================== |
|
50 | 51 | |
|
51 | 52 | class ModelSerializer(json.JSONEncoder): |
|
52 | 53 | """ |
|
53 | 54 | Simple Serializer for JSON, |
|
54 | 55 | |
|
55 | 56 | usage:: |
|
56 | 57 | |
|
57 | 58 | to make object customized for serialization implement a __json__ |
|
58 | 59 | method that will return a dict for serialization into json |
|
59 | 60 | |
|
60 | 61 | example:: |
|
61 | 62 | |
|
62 | 63 | class Task(object): |
|
63 | 64 | |
|
64 | 65 | def __init__(self, name, value): |
|
65 | 66 | self.name = name |
|
66 | 67 | self.value = value |
|
67 | 68 | |
|
68 | 69 | def __json__(self): |
|
69 | 70 | return dict(name=self.name, |
|
70 | 71 | value=self.value) |
|
71 | 72 | |
|
72 | 73 | """ |
|
73 | 74 | |
|
74 | 75 | def default(self, obj): |
|
75 | 76 | |
|
76 | 77 | if hasattr(obj, '__json__'): |
|
77 | 78 | return obj.__json__() |
|
78 | 79 | else: |
|
79 | 80 | return json.JSONEncoder.default(self, obj) |
|
80 | 81 | |
|
81 | 82 | class BaseModel(object): |
|
82 | 83 | """Base Model for all classes
|
83 | 84 | |
|
84 | 85 | """ |
|
85 | 86 | |
|
86 | 87 | @classmethod |
|
87 | 88 | def _get_keys(cls): |
|
88 | 89 | """return column names for this model """ |
|
89 | 90 | return class_mapper(cls).c.keys() |
|
90 | 91 | |
|
91 | 92 | def get_dict(self): |
|
92 | 93 | """return dict with keys and values corresponding |
|
93 | 94 | to this model data """ |
|
94 | 95 | |
|
95 | 96 | d = {} |
|
96 | 97 | for k in self._get_keys(): |
|
97 | 98 | d[k] = getattr(self, k) |
|
98 | 99 | return d |
|
99 | 100 | |
|
100 | 101 | def get_appstruct(self): |
|
101 | 102 | """return list with keys and values tuples corresponding
|
102 | 103 | to this model data """ |
|
103 | 104 | |
|
104 | 105 | l = [] |
|
105 | 106 | for k in self._get_keys(): |
|
106 | 107 | l.append((k, getattr(self, k),)) |
|
107 | 108 | return l |
|
108 | 109 | |
|
109 | 110 | def populate_obj(self, populate_dict): |
|
110 | 111 | """populate model with data from given populate_dict""" |
|
111 | 112 | |
|
112 | 113 | for k in self._get_keys(): |
|
113 | 114 | if k in populate_dict: |
|
114 | 115 | setattr(self, k, populate_dict[k]) |
|
115 | 116 | |
|
116 | 117 | @classmethod |
|
117 | 118 | def query(cls): |
|
118 | 119 | return Session.query(cls) |
|
119 | 120 | |
|
120 | 121 | @classmethod |
|
121 | 122 | def get(cls, id_): |
|
122 | 123 | if id_: |
|
123 | 124 | return cls.query().get(id_) |
|
124 | 125 | |
|
125 | 126 | @classmethod |
|
126 | 127 | def getAll(cls): |
|
127 | 128 | return cls.query().all() |
|
128 | 129 | |
|
129 | 130 | @classmethod |
|
130 | 131 | def delete(cls, id_): |
|
131 | 132 | obj = cls.query().get(id_) |
|
132 | 133 | Session.delete(obj) |
|
133 | 134 | Session.commit() |
|
134 | 135 | |
|
135 | 136 | |
|
136 | 137 | class RhodeCodeSetting(Base, BaseModel): |
|
137 | 138 | __tablename__ = 'rhodecode_settings' |
|
138 | 139 | __table_args__ = (UniqueConstraint('app_settings_name'), {'extend_existing':True}) |
|
139 | 140 | app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
140 | 141 | app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None) |
|
141 | 142 | _app_settings_value = Column("app_settings_value", String(255), nullable=True, unique=None, default=None) |
|
142 | 143 | |
|
143 | 144 | def __init__(self, k='', v=''): |
|
144 | 145 | self.app_settings_name = k |
|
145 | 146 | self.app_settings_value = v |
|
146 | 147 | |
|
147 | 148 | |
|
148 | 149 | @validates('_app_settings_value') |
|
149 | 150 | def validate_settings_value(self, key, val): |
|
150 | 151 | assert type(val) == unicode |
|
151 | 152 | return val |
|
152 | 153 | |
|
153 | 154 | @hybrid_property |
|
154 | 155 | def app_settings_value(self): |
|
155 | 156 | v = self._app_settings_value |
|
156 | 157 | if v == 'ldap_active': |
|
157 | 158 | v = str2bool(v) |
|
158 | 159 | return v |
|
159 | 160 | |
|
160 | 161 | @app_settings_value.setter |
|
161 | 162 | def app_settings_value(self, val): |
|
162 | 163 | """ |
|
163 | 164 | Setter that will always make sure we use unicode in app_settings_value |
|
164 | 165 | |
|
165 | 166 | :param val: |
|
166 | 167 | """ |
|
167 | 168 | self._app_settings_value = safe_unicode(val) |
|
168 | 169 | |
|
169 | 170 | def __repr__(self): |
|
170 | 171 | return "<%s('%s:%s')>" % (self.__class__.__name__, |
|
171 | 172 | self.app_settings_name, self.app_settings_value) |
|
172 | 173 | |
|
173 | 174 | |
|
174 | 175 | @classmethod |
|
175 | 176 | def get_by_name(cls, ldap_key): |
|
176 | 177 | return cls.query()\ |
|
177 | 178 | .filter(cls.app_settings_name == ldap_key).scalar() |
|
178 | 179 | |
|
179 | 180 | @classmethod |
|
180 | 181 | def get_app_settings(cls, cache=False): |
|
181 | 182 | |
|
182 | 183 | ret = cls.query() |
|
183 | 184 | |
|
184 | 185 | if cache: |
|
185 | 186 | ret = ret.options(FromCache("sql_cache_short", "get_hg_settings")) |
|
186 | 187 | |
|
187 | 188 | if not ret: |
|
188 | 189 | raise Exception('Could not get application settings !') |
|
189 | 190 | settings = {} |
|
190 | 191 | for each in ret: |
|
191 | 192 | settings['rhodecode_' + each.app_settings_name] = \ |
|
192 | 193 | each.app_settings_value |
|
193 | 194 | |
|
194 | 195 | return settings |
|
195 | 196 | |
|
196 | 197 | @classmethod |
|
197 | 198 | def get_ldap_settings(cls, cache=False): |
|
198 | 199 | ret = cls.query()\ |
|
199 | 200 | .filter(cls.app_settings_name.startswith('ldap_')).all() |
|
200 | 201 | fd = {} |
|
201 | 202 | for row in ret: |
|
202 | 203 | fd.update({row.app_settings_name:row.app_settings_value}) |
|
203 | 204 | |
|
204 | 205 | return fd |
|
205 | 206 | |
|
206 | 207 | |
|
207 | 208 | class RhodeCodeUi(Base, BaseModel): |
|
208 | 209 | __tablename__ = 'rhodecode_ui' |
|
209 | 210 | __table_args__ = (UniqueConstraint('ui_key'), {'extend_existing':True}) |
|
210 | 211 | |
|
211 | 212 | HOOK_REPO_SIZE = 'changegroup.repo_size' |
|
212 | 213 | HOOK_PUSH = 'pretxnchangegroup.push_logger' |
|
213 | 214 | HOOK_PULL = 'preoutgoing.pull_logger' |
|
214 | 215 | |
|
215 | 216 | ui_id = Column("ui_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
216 | 217 | ui_section = Column("ui_section", String(255), nullable=True, unique=None, default=None) |
|
217 | 218 | ui_key = Column("ui_key", String(255), nullable=True, unique=None, default=None) |
|
218 | 219 | ui_value = Column("ui_value", String(255), nullable=True, unique=None, default=None) |
|
219 | 220 | ui_active = Column("ui_active", Boolean(), nullable=True, unique=None, default=True) |
|
220 | 221 | |
|
221 | 222 | |
|
222 | 223 | @classmethod |
|
223 | 224 | def get_by_key(cls, key): |
|
224 | 225 | return cls.query().filter(cls.ui_key == key) |
|
225 | 226 | |
|
226 | 227 | |
|
227 | 228 | @classmethod |
|
228 | 229 | def get_builtin_hooks(cls): |
|
229 | 230 | q = cls.query() |
|
230 | 231 | q = q.filter(cls.ui_key.in_([cls.HOOK_REPO_SIZE, |
|
231 | 232 | cls.HOOK_PUSH, cls.HOOK_PULL])) |
|
232 | 233 | return q.all() |
|
233 | 234 | |
|
234 | 235 | @classmethod |
|
235 | 236 | def get_custom_hooks(cls): |
|
236 | 237 | q = cls.query() |
|
237 | 238 | q = q.filter(~cls.ui_key.in_([cls.HOOK_REPO_SIZE, |
|
238 | 239 | cls.HOOK_PUSH, cls.HOOK_PULL])) |
|
239 | 240 | q = q.filter(cls.ui_section == 'hooks') |
|
240 | 241 | return q.all() |
|
241 | 242 | |
|
242 | 243 | @classmethod |
|
243 | 244 | def create_or_update_hook(cls, key, val): |
|
244 | 245 | new_ui = cls.get_by_key(key).scalar() or cls() |
|
245 | 246 | new_ui.ui_section = 'hooks' |
|
246 | 247 | new_ui.ui_active = True |
|
247 | 248 | new_ui.ui_key = key |
|
248 | 249 | new_ui.ui_value = val |
|
249 | 250 | |
|
250 | 251 | Session.add(new_ui) |
|
251 | 252 | Session.commit() |
|
252 | 253 | |
|
253 | 254 | |
|
254 | 255 | class User(Base, BaseModel): |
|
255 | 256 | __tablename__ = 'users' |
|
256 | 257 | __table_args__ = (UniqueConstraint('username'), UniqueConstraint('email'), {'extend_existing':True}) |
|
257 | 258 | user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
258 | 259 | username = Column("username", String(255), nullable=True, unique=None, default=None) |
|
259 | 260 | password = Column("password", String(255), nullable=True, unique=None, default=None) |
|
260 | 261 | active = Column("active", Boolean(), nullable=True, unique=None, default=None) |
|
261 | 262 | admin = Column("admin", Boolean(), nullable=True, unique=None, default=False) |
|
262 | 263 | name = Column("name", String(255), nullable=True, unique=None, default=None) |
|
263 | 264 | lastname = Column("lastname", String(255), nullable=True, unique=None, default=None) |
|
264 | 265 | email = Column("email", String(255), nullable=True, unique=None, default=None) |
|
265 | 266 | last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None) |
|
266 | 267 | ldap_dn = Column("ldap_dn", String(255), nullable=True, unique=None, default=None) |
|
267 | 268 | api_key = Column("api_key", String(255), nullable=True, unique=None, default=None) |
|
268 | 269 | |
|
269 | 270 | user_log = relationship('UserLog', cascade='all') |
|
270 | 271 | user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all') |
|
271 | 272 | |
|
272 | 273 | repositories = relationship('Repository') |
|
273 | 274 | user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all') |
|
274 | 275 | repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all') |
|
275 | 276 | |
|
276 | 277 | group_member = relationship('UserGroupMember', cascade='all') |
|
277 | 278 | |
|
278 | 279 | @property |
|
279 | 280 | def full_contact(self): |
|
280 | 281 | return '%s %s <%s>' % (self.name, self.lastname, self.email) |
|
281 | 282 | |
|
282 | 283 | @property |
|
283 | 284 | def short_contact(self): |
|
284 | 285 | return '%s %s' % (self.name, self.lastname) |
|
285 | 286 | |
|
286 | 287 | @property |
|
287 | 288 | def is_admin(self): |
|
288 | 289 | return self.admin |
|
289 | 290 | |
|
290 | 291 | def __repr__(self): |
|
291 | 292 | try: |
|
292 | 293 | return "<%s('id:%s:%s')>" % (self.__class__.__name__, |
|
293 | 294 | self.user_id, self.username) |
|
294 | 295 | except: |
|
295 | 296 | return self.__class__.__name__ |
|
296 | 297 | |
|
297 | 298 | @classmethod |
|
298 | 299 | def get_by_username(cls, username, case_insensitive=False): |
|
299 | 300 | if case_insensitive: |
|
300 | 301 | return Session.query(cls).filter(cls.username.ilike(username)).scalar() |
|
301 | 302 | else: |
|
302 | 303 | return Session.query(cls).filter(cls.username == username).scalar() |
|
303 | 304 | |
|
304 | 305 | @classmethod |
|
305 | 306 | def get_by_auth_token(cls, auth_token): |
|
306 | 307 | return cls.query().filter(cls.api_key == auth_token).one() |
|
307 | 308 | |
|
308 | 309 | def update_lastlogin(self): |
|
309 | 310 | """Update user lastlogin""" |
|
310 | 311 | |
|
311 | 312 | self.last_login = datetime.datetime.now() |
|
312 | 313 | Session.add(self) |
|
313 | 314 | Session.commit() |
|
314 | 315 | log.debug('updated user %s lastlogin', self.username) |
|
315 | 316 | |
|
316 | 317 | @classmethod |
|
317 | 318 | def create(cls, form_data): |
|
318 | 319 | from rhodecode.lib.auth import get_crypt_password |
|
319 | 320 | |
|
320 | 321 | try: |
|
321 | 322 | new_user = cls() |
|
322 | 323 | for k, v in form_data.items(): |
|
323 | 324 | if k == 'password': |
|
324 | 325 | v = get_crypt_password(v) |
|
325 | 326 | setattr(new_user, k, v) |
|
326 | 327 | |
|
327 | 328 | new_user.api_key = generate_auth_token(form_data['username']) |
|
328 | 329 | Session.add(new_user) |
|
329 | 330 | Session.commit() |
|
330 | 331 | return new_user |
|
331 | 332 | except: |
|
332 | 333 | log.error(traceback.format_exc()) |
|
333 | 334 | Session.rollback() |
|
334 | 335 | raise |
|
335 | 336 | |
|
336 | 337 | class UserLog(Base, BaseModel): |
|
337 | 338 | __tablename__ = 'user_logs' |
|
338 | 339 | __table_args__ = {'extend_existing':True} |
|
339 | 340 | user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
340 | 341 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) |
|
341 | 342 | repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None) |
|
342 | 343 | repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None) |
|
343 | 344 | user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None) |
|
344 | 345 | action = Column("action", String(1200000), nullable=True, unique=None, default=None) |
|
345 | 346 | action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None) |
|
346 | 347 | |
|
347 | 348 | @property |
|
348 | 349 | def action_as_day(self): |
|
349 | 350 | return date(*self.action_date.timetuple()[:3]) |
|
350 | 351 | |
|
351 | 352 | user = relationship('User') |
|
352 | 353 | repository = relationship('Repository') |
|
353 | 354 | |
|
354 | 355 | |
|
355 | 356 | class UserGroup(Base, BaseModel): |
|
356 | 357 | __tablename__ = 'users_groups' |
|
357 | 358 | __table_args__ = {'extend_existing':True} |
|
358 | 359 | |
|
359 | 360 | users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
360 | 361 | users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None) |
|
361 | 362 | users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None) |
|
362 | 363 | |
|
363 | 364 | members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined") |
|
364 | 365 | |
|
365 | 366 | def __repr__(self): |
|
366 | 367 | return '<userGroup(%s)>' % (self.users_group_name) |
|
367 | 368 | |
|
368 | 369 | @classmethod |
|
369 | 370 | def get_by_group_name(cls, group_name, cache=False, case_insensitive=False): |
|
370 | 371 | if case_insensitive: |
|
371 | 372 | gr = cls.query()\ |
|
372 | 373 | .filter(cls.users_group_name.ilike(group_name)) |
|
373 | 374 | else: |
|
374 | 375 | gr = cls.query()\ |
|
375 | 376 | .filter(cls.users_group_name == group_name) |
|
376 | 377 | if cache: |
|
377 | 378 | gr = gr.options(FromCache("sql_cache_short", |
|
378 | 379 | "get_user_%s" % group_name)) |
|
379 | 380 | return gr.scalar() |
|
380 | 381 | |
|
381 | 382 | @classmethod |
|
382 | 383 | def get(cls, users_group_id, cache=False): |
|
383 | 384 | users_group = cls.query() |
|
384 | 385 | if cache: |
|
385 | 386 | users_group = users_group.options(FromCache("sql_cache_short", |
|
386 | 387 | "get_users_group_%s" % users_group_id)) |
|
387 | 388 | return users_group.get(users_group_id) |
|
388 | 389 | |
|
389 | 390 | @classmethod |
|
390 | 391 | def create(cls, form_data): |
|
391 | 392 | try: |
|
392 | 393 | new_user_group = cls() |
|
393 | 394 | for k, v in form_data.items(): |
|
394 | 395 | setattr(new_user_group, k, v) |
|
395 | 396 | |
|
396 | 397 | Session.add(new_user_group) |
|
397 | 398 | Session.commit() |
|
398 | 399 | return new_user_group |
|
399 | 400 | except: |
|
400 | 401 | log.error(traceback.format_exc()) |
|
401 | 402 | Session.rollback() |
|
402 | 403 | raise |
|
403 | 404 | |
|
404 | 405 | @classmethod |
|
405 | 406 | def update(cls, users_group_id, form_data): |
|
406 | 407 | |
|
407 | 408 | try: |
|
408 | 409 | users_group = cls.get(users_group_id, cache=False) |
|
409 | 410 | |
|
410 | 411 | for k, v in form_data.items(): |
|
411 | 412 | if k == 'users_group_members': |
|
412 | 413 | users_group.members = [] |
|
413 | 414 | Session.flush() |
|
414 | 415 | members_list = [] |
|
415 | 416 | if v: |
|
416 | v = [v] if isinstance(v, basestring) else v

417 | v = [v] if isinstance(v, compat.string_types) else v
|
417 | 418 | for u_id in set(v): |
|
418 | 419 | member = UserGroupMember(users_group_id, u_id) |
|
419 | 420 | members_list.append(member) |
|
420 | 421 | setattr(users_group, 'members', members_list) |
|
421 | 422 | setattr(users_group, k, v) |
|
422 | 423 | |
|
423 | 424 | Session.add(users_group) |
|
424 | 425 | Session.commit() |
|
425 | 426 | except: |
|
426 | 427 | log.error(traceback.format_exc()) |
|
427 | 428 | Session.rollback() |
|
428 | 429 | raise |
|
429 | 430 | |
|
430 | 431 | @classmethod |
|
431 | 432 | def delete(cls, user_group_id): |
|
432 | 433 | try: |
|
433 | 434 | |
|
434 | 435 | # check if this group is not assigned to repo |
|
435 | 436 | assigned_groups = UserGroupRepoToPerm.query()\ |
|
436 | 437 | .filter(UserGroupRepoToPerm.users_group_id == |
|
437 | 438 | user_group_id).all() |
|
438 | 439 | |
|
439 | 440 | if assigned_groups: |
|
440 | 441 | raise UserGroupAssignedException( |
|
441 | 442 | 'UserGroup assigned to %s' % assigned_groups) |
|
442 | 443 | |
|
443 | 444 | users_group = cls.get(user_group_id, cache=False) |
|
444 | 445 | Session.delete(users_group) |
|
445 | 446 | Session.commit() |
|
446 | 447 | except: |
|
447 | 448 | log.error(traceback.format_exc()) |
|
448 | 449 | Session.rollback() |
|
449 | 450 | raise |
|
450 | 451 | |
|
451 | 452 | class UserGroupMember(Base, BaseModel): |
|
452 | 453 | __tablename__ = 'users_groups_members' |
|
453 | 454 | __table_args__ = {'extend_existing':True} |
|
454 | 455 | |
|
455 | 456 | users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
456 | 457 | users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) |
|
457 | 458 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) |
|
458 | 459 | |
|
459 | 460 | user = relationship('User', lazy='joined') |
|
460 | 461 | users_group = relationship('UserGroup') |
|
461 | 462 | |
|
462 | 463 | def __init__(self, gr_id='', u_id=''): |
|
463 | 464 | self.users_group_id = gr_id |
|
464 | 465 | self.user_id = u_id |
|
465 | 466 | |
|
466 | 467 | @staticmethod |
|
467 | 468 | def add_user_to_group(group, user): |
|
468 | 469 | ugm = UserGroupMember() |
|
469 | 470 | ugm.users_group = group |
|
470 | 471 | ugm.user = user |
|
471 | 472 | Session.add(ugm) |
|
472 | 473 | Session.commit() |
|
473 | 474 | return ugm |
|
474 | 475 | |
|
475 | 476 | class Repository(Base, BaseModel): |
|
476 | 477 | __tablename__ = 'repositories' |
|
477 | 478 | __table_args__ = (UniqueConstraint('repo_name'), {'extend_existing':True},) |
|
478 | 479 | |
|
479 | 480 | repo_id = Column("repo_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
480 | 481 | repo_name = Column("repo_name", String(255), nullable=False, unique=True, default=None) |
|
481 | 482 | clone_uri = Column("clone_uri", String(255), nullable=True, unique=False, default=None) |
|
482 | 483 | repo_type = Column("repo_type", String(255), nullable=False, unique=False, default='hg') |
|
483 | 484 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None) |
|
484 | 485 | private = Column("private", Boolean(), nullable=True, unique=None, default=None) |
|
485 | 486 | enable_statistics = Column("statistics", Boolean(), nullable=True, unique=None, default=True) |
|
486 | 487 | enable_downloads = Column("downloads", Boolean(), nullable=True, unique=None, default=True) |
|
487 | 488 | description = Column("description", String(10000), nullable=True, unique=None, default=None) |
|
488 | 489 | created_on = Column('created_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now) |
|
489 | 490 | |
|
490 | 491 | fork_id = Column("fork_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=False, default=None) |
|
491 | 492 | group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=False, default=None) |
|
492 | 493 | |
|
493 | 494 | |
|
494 | 495 | user = relationship('User') |
|
495 | 496 | fork = relationship('Repository', remote_side=repo_id) |
|
496 | 497 | group = relationship('RepoGroup') |
|
497 | 498 | repo_to_perm = relationship('UserRepoToPerm', cascade='all', order_by='UserRepoToPerm.repo_to_perm_id') |
|
498 | 499 | users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all') |
|
499 | 500 | stats = relationship('Statistics', cascade='all', uselist=False) |
|
500 | 501 | |
|
501 | 502 | followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id', cascade='all') |
|
502 | 503 | |
|
503 | 504 | logs = relationship('UserLog', cascade='all') |
|
504 | 505 | |
|
505 | 506 | def __repr__(self): |
|
506 | 507 | return "<%s('%s:%s')>" % (self.__class__.__name__, |
|
507 | 508 | self.repo_id, self.repo_name) |
|
508 | 509 | |
|
509 | 510 | @classmethod |
|
510 | 511 | def url_sep(cls): |
|
511 | 512 | return '/' |
|
512 | 513 | |
|
513 | 514 | @classmethod |
|
514 | 515 | def get_by_repo_name(cls, repo_name): |
|
515 | 516 | q = Session.query(cls).filter(cls.repo_name == repo_name) |
|
516 | 517 | q = q.options(joinedload(Repository.fork))\ |
|
517 | 518 | .options(joinedload(Repository.user))\ |
|
518 | 519 | .options(joinedload(Repository.group)) |
|
519 | 520 | return q.one() |
|
520 | 521 | |
|
521 | 522 | @classmethod |
|
522 | 523 | def get_repo_forks(cls, repo_id): |
|
523 | 524 | return cls.query().filter(Repository.fork_id == repo_id) |
|
524 | 525 | |
|
525 | 526 | @classmethod |
|
526 | 527 | def base_path(cls): |
|
527 | 528 | """ |
|
528 | 529 | Returns the base path where all repos are stored
|
529 | 530 | |
|
530 | 531 | :param cls: |
|
531 | 532 | """ |
|
532 | 533 | q = Session.query(RhodeCodeUi).filter(RhodeCodeUi.ui_key == |
|
533 | 534 | cls.url_sep()) |
|
534 | 535 | q.options(FromCache("sql_cache_short", "repository_repo_path")) |
|
535 | 536 | return q.one().ui_value |
|
536 | 537 | |
|
537 | 538 | @property |
|
538 | 539 | def just_name(self): |
|
539 | 540 | return self.repo_name.split(Repository.url_sep())[-1] |
|
540 | 541 | |
|
541 | 542 | @property |
|
542 | 543 | def groups_with_parents(self): |
|
543 | 544 | groups = [] |
|
544 | 545 | if self.group is None: |
|
545 | 546 | return groups |
|
546 | 547 | |
|
547 | 548 | cur_gr = self.group |
|
548 | 549 | groups.insert(0, cur_gr) |
|
549 | 550 | while 1: |
|
550 | 551 | gr = getattr(cur_gr, 'parent_group', None) |
|
551 | 552 | cur_gr = cur_gr.parent_group |
|
552 | 553 | if gr is None: |
|
553 | 554 | break |
|
554 | 555 | groups.insert(0, gr) |
|
555 | 556 | |
|
556 | 557 | return groups |
|
557 | 558 | |
|
558 | 559 | @property |
|
559 | 560 | def groups_and_repo(self): |
|
560 | 561 | return self.groups_with_parents, self.just_name |
|
561 | 562 | |
|
562 | 563 | @LazyProperty |
|
563 | 564 | def repo_path(self): |
|
564 | 565 | """ |
|
565 | 566 | Returns the full base path for this repository, i.e. where it actually

566 | 567 | exists on the filesystem
|
567 | 568 | """ |
|
568 | 569 | q = Session.query(RhodeCodeUi).filter(RhodeCodeUi.ui_key == |
|
569 | 570 | Repository.url_sep()) |
|
570 | 571 | q.options(FromCache("sql_cache_short", "repository_repo_path")) |
|
571 | 572 | return q.one().ui_value |
|
572 | 573 | |
|
573 | 574 | @property |
|
574 | 575 | def repo_full_path(self): |
|
575 | 576 | p = [self.repo_path] |
|
576 | 577 | # we need to split the name by / since this is how we store the |
|
577 | 578 | # names in the database, but that eventually needs to be converted |
|
578 | 579 | # into a valid system path |
|
579 | 580 | p += self.repo_name.split(Repository.url_sep()) |
|
580 | 581 | return os.path.join(*p) |
|
581 | 582 | |
|
582 | 583 | def get_new_name(self, repo_name): |
|
583 | 584 | """ |
|
584 | 585 | returns new full repository name based on assigned group and new name
|
585 | 586 | |
|
586 | 587 | :param repo_name:
|
587 | 588 | """ |
|
588 | 589 | path_prefix = self.group.full_path_splitted if self.group else [] |
|
589 | 590 | return Repository.url_sep().join(path_prefix + [repo_name]) |
|
590 | 591 | |
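A quick worked example of the composition done in get_new_name() above, with an assumed parent group path of ['docs', 'web'] (illustrative values only):

path_prefix = ['docs', 'web']               # what group.full_path_splitted would yield
repo_name = 'manuals'
print('/'.join(path_prefix + [repo_name]))  # docs/web/manuals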
|
591 | 592 | @property |
|
592 | 593 | def _config(self): |
|
593 | 594 | """ |
|
594 | 595 | Returns db based config object. |
|
595 | 596 | """ |
|
596 | 597 | from rhodecode.lib.utils import make_db_config |
|
597 | 598 | return make_db_config(clear_session=False) |
|
598 | 599 | |
|
599 | 600 | @classmethod |
|
600 | 601 | def is_valid(cls, repo_name): |
|
601 | 602 | """ |
|
602 | 603 | returns True if given repo name is a valid filesystem repository |
|
603 | 604 | |
|
604 | 605 | :param cls: |
|
605 | 606 | :param repo_name: |
|
606 | 607 | """ |
|
607 | 608 | from rhodecode.lib.utils import is_valid_repo |
|
608 | 609 | |
|
609 | 610 | return is_valid_repo(repo_name, cls.base_path()) |
|
610 | 611 | |
|
611 | 612 | |
|
612 | 613 | #========================================================================== |
|
613 | 614 | # SCM PROPERTIES |
|
614 | 615 | #========================================================================== |
|
615 | 616 | |
|
616 | 617 | def get_commit(self, rev): |
|
617 | 618 | return get_commit_safe(self.scm_instance, rev) |
|
618 | 619 | |
|
619 | 620 | @property |
|
620 | 621 | def tip(self): |
|
621 | 622 | return self.get_commit('tip') |
|
622 | 623 | |
|
623 | 624 | @property |
|
624 | 625 | def author(self): |
|
625 | 626 | return self.tip.author |
|
626 | 627 | |
|
627 | 628 | @property |
|
628 | 629 | def last_change(self): |
|
629 | 630 | return self.scm_instance.last_change |
|
630 | 631 | |
|
631 | 632 | #========================================================================== |
|
632 | 633 | # SCM CACHE INSTANCE |
|
633 | 634 | #========================================================================== |
|
634 | 635 | |
|
635 | 636 | @property |
|
636 | 637 | def invalidate(self): |
|
637 | 638 | return CacheInvalidation.invalidate(self.repo_name) |
|
638 | 639 | |
|
639 | 640 | def set_invalidate(self): |
|
640 | 641 | """ |
|
641 | 642 | mark the cache for this instance as needing invalidation
|
642 | 643 | """ |
|
643 | 644 | CacheInvalidation.set_invalidate(self.repo_name) |
|
644 | 645 | |
|
645 | 646 | @LazyProperty |
|
646 | 647 | def scm_instance(self): |
|
647 | 648 | return self.__get_instance() |
|
648 | 649 | |
|
649 | 650 | @property |
|
650 | 651 | def scm_instance_cached(self): |
|
651 | 652 | return self.__get_instance() |
|
652 | 653 | |
|
653 | 654 | def __get_instance(self): |
|
654 | 655 | |
|
655 | 656 | repo_full_path = self.repo_full_path |
|
656 | 657 | |
|
657 | 658 | try: |
|
658 | 659 | alias = get_scm(repo_full_path)[0] |
|
659 | 660 | log.debug('Creating instance of %s repository', alias) |
|
660 | 661 | backend = get_backend(alias) |
|
661 | 662 | except VCSError: |
|
662 | 663 | log.error(traceback.format_exc()) |
|
663 | 664 | log.error('Perhaps this repository is in the db but not on the '

664 | 665 | 'filesystem. Run "rescan repositories" with the '

665 | 666 | '"destroy old data" option from the admin panel')
|
666 | 667 | return |
|
667 | 668 | |
|
668 | 669 | if alias == 'hg': |
|
669 | 670 | |
|
670 | 671 | repo = backend(safe_str(repo_full_path), create=False, |
|
671 | 672 | config=self._config) |
|
672 | 673 | |
|
673 | 674 | else: |
|
674 | 675 | repo = backend(repo_full_path, create=False) |
|
675 | 676 | |
|
676 | 677 | return repo |
|
677 | 678 | |
|
678 | 679 | |
|
679 | 680 | class Group(Base, BaseModel): |
|
680 | 681 | __tablename__ = 'groups' |
|
681 | 682 | __table_args__ = (UniqueConstraint('group_name', 'group_parent_id'), |
|
682 | 683 | CheckConstraint('group_id != group_parent_id'), {'extend_existing':True},) |
|
683 | 684 | __mapper_args__ = {'order_by':'group_name'} |
|
684 | 685 | |
|
685 | 686 | group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
686 | 687 | group_name = Column("group_name", String(255), nullable=False, unique=True, default=None) |
|
687 | 688 | group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None) |
|
688 | 689 | group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None) |
|
689 | 690 | |
|
690 | 691 | parent_group = relationship('Group', remote_side=group_id) |
|
691 | 692 | |
|
692 | 693 | def __init__(self, group_name='', parent_group=None): |
|
693 | 694 | self.group_name = group_name |
|
694 | 695 | self.parent_group = parent_group |
|
695 | 696 | |
|
696 | 697 | def __repr__(self): |
|
697 | 698 | return "<%s('%s:%s')>" % (self.__class__.__name__, self.group_id, |
|
698 | 699 | self.group_name) |
|
699 | 700 | |
|
700 | 701 | @classmethod |
|
701 | 702 | def url_sep(cls): |
|
702 | 703 | return '/' |
|
703 | 704 | |
|
704 | 705 | @classmethod |
|
705 | 706 | def get_by_group_name(cls, group_name, cache=False, case_insensitive=False): |
|
706 | 707 | if case_insensitive: |
|
707 | 708 | gr = cls.query()\ |
|
708 | 709 | .filter(cls.group_name.ilike(group_name)) |
|
709 | 710 | else: |
|
710 | 711 | gr = cls.query()\ |
|
711 | 712 | .filter(cls.group_name == group_name) |
|
712 | 713 | if cache: |
|
713 | 714 | gr = gr.options(FromCache("sql_cache_short", |
|
714 | 715 | "get_group_%s" % group_name)) |
|
715 | 716 | return gr.scalar() |
|
716 | 717 | |
|
717 | 718 | @property |
|
718 | 719 | def parents(self): |
|
719 | 720 | parents_recursion_limit = 5 |
|
720 | 721 | groups = [] |
|
721 | 722 | if self.parent_group is None: |
|
722 | 723 | return groups |
|
723 | 724 | cur_gr = self.parent_group |
|
724 | 725 | groups.insert(0, cur_gr) |
|
725 | 726 | cnt = 0 |
|
726 | 727 | while 1: |
|
727 | 728 | cnt += 1 |
|
728 | 729 | gr = getattr(cur_gr, 'parent_group', None) |
|
729 | 730 | cur_gr = cur_gr.parent_group |
|
730 | 731 | if gr is None: |
|
731 | 732 | break |
|
732 | 733 | if cnt == parents_recursion_limit: |
|
733 | 734 | # this will prevent accidental infinite loops
|
734 | 735 | log.error('group nested more than %s', |
|
735 | 736 | parents_recursion_limit) |
|
736 | 737 | break |
|
737 | 738 | |
|
738 | 739 | groups.insert(0, gr) |
|
739 | 740 | return groups |
|
740 | 741 | |
|
741 | 742 | @property |
|
742 | 743 | def children(self): |
|
743 | 744 | return Group.query().filter(Group.parent_group == self) |
|
744 | 745 | |
|
745 | 746 | @property |
|
746 | 747 | def name(self): |
|
747 | 748 | return self.group_name.split(Group.url_sep())[-1] |
|
748 | 749 | |
|
749 | 750 | @property |
|
750 | 751 | def full_path(self): |
|
751 | 752 | return self.group_name |
|
752 | 753 | |
|
753 | 754 | @property |
|
754 | 755 | def full_path_splitted(self): |
|
755 | 756 | return self.group_name.split(Group.url_sep()) |
|
756 | 757 | |
|
757 | 758 | @property |
|
758 | 759 | def repositories(self): |
|
759 | 760 | return Repository.query().filter(Repository.group == self) |
|
760 | 761 | |
|
761 | 762 | @property |
|
762 | 763 | def repositories_recursive_count(self): |
|
763 | 764 | cnt = self.repositories.count() |
|
764 | 765 | |
|
765 | 766 | def children_count(group): |
|
766 | 767 | cnt = 0 |
|
767 | 768 | for child in group.children: |
|
768 | 769 | cnt += child.repositories.count() |
|
769 | 770 | cnt += children_count(child) |
|
770 | 771 | return cnt |
|
771 | 772 | |
|
772 | 773 | return cnt + children_count(self) |
|
773 | 774 | |
|
774 | 775 | |
|
775 | 776 | def get_new_name(self, group_name): |
|
776 | 777 | """ |
|
777 | 778 | returns new full group name based on parent and new name |
|
778 | 779 | |
|
779 | 780 | :param group_name: |
|
780 | 781 | """ |
|
781 | 782 | path_prefix = (self.parent_group.full_path_splitted if |
|
782 | 783 | self.parent_group else []) |
|
783 | 784 | return Group.url_sep().join(path_prefix + [group_name]) |
|
784 | 785 | |
|
785 | 786 | |
|
786 | 787 | class Permission(Base, BaseModel): |
|
787 | 788 | __tablename__ = 'permissions' |
|
788 | 789 | __table_args__ = {'extend_existing':True} |
|
789 | 790 | permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
790 | 791 | permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None) |
|
791 | 792 | permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None) |
|
792 | 793 | |
|
793 | 794 | def __repr__(self): |
|
794 | 795 | return "<%s('%s:%s')>" % (self.__class__.__name__, |
|
795 | 796 | self.permission_id, self.permission_name) |
|
796 | 797 | |
|
797 | 798 | @classmethod |
|
798 | 799 | def get_by_key(cls, key): |
|
799 | 800 | return cls.query().filter(cls.permission_name == key).scalar() |
|
800 | 801 | |
|
801 | 802 | class UserRepoToPerm(Base, BaseModel): |
|
802 | 803 | __tablename__ = 'repo_to_perm' |
|
803 | 804 | __table_args__ = (UniqueConstraint('user_id', 'repository_id'), {'extend_existing':True}) |
|
804 | 805 | repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
805 | 806 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) |
|
806 | 807 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) |
|
807 | 808 | repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None) |
|
808 | 809 | |
|
809 | 810 | user = relationship('User') |
|
810 | 811 | permission = relationship('Permission') |
|
811 | 812 | repository = relationship('Repository') |
|
812 | 813 | |
|
813 | 814 | class UserToPerm(Base, BaseModel): |
|
814 | 815 | __tablename__ = 'user_to_perm' |
|
815 | 816 | __table_args__ = (UniqueConstraint('user_id', 'permission_id'), {'extend_existing':True}) |
|
816 | 817 | user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
817 | 818 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) |
|
818 | 819 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) |
|
819 | 820 | |
|
820 | 821 | user = relationship('User') |
|
821 | 822 | permission = relationship('Permission') |
|
822 | 823 | |
|
823 | 824 | @classmethod |
|
824 | 825 | def has_perm(cls, user_id, perm): |
|
825 | 826 | if not isinstance(perm, Permission): |
|
826 | 827 | raise Exception('perm needs to be an instance of Permission class') |
|
827 | 828 | |
|
828 | 829 | return cls.query().filter(cls.user_id == user_id)\ |
|
829 | 830 | .filter(cls.permission == perm).scalar() is not None |
|
830 | 831 | |
|
831 | 832 | @classmethod |
|
832 | 833 | def grant_perm(cls, user_id, perm): |
|
833 | 834 | if not isinstance(perm, Permission): |
|
834 | 835 | raise Exception('perm needs to be an instance of Permission class') |
|
835 | 836 | |
|
836 | 837 | new = cls() |
|
837 | 838 | new.user_id = user_id |
|
838 | 839 | new.permission = perm |
|
839 | 840 | try: |
|
840 | 841 | Session.add(new) |
|
841 | 842 | Session.commit() |
|
842 | 843 | except: |
|
843 | 844 | Session.rollback() |
|
844 | 845 | |
|
845 | 846 | |
|
846 | 847 | @classmethod |
|
847 | 848 | def revoke_perm(cls, user_id, perm): |
|
848 | 849 | if not isinstance(perm, Permission): |
|
849 | 850 | raise Exception('perm needs to be an instance of Permission class') |
|
850 | 851 | |
|
851 | 852 | try: |
|
852 | 853 | cls.query().filter(cls.user_id == user_id) \ |
|
853 | 854 | .filter(cls.permission == perm).delete() |
|
854 | 855 | Session.commit() |
|
855 | 856 | except: |
|
856 | 857 | Session.rollback() |
|
857 | 858 | |
|
858 | 859 | class UserGroupRepoToPerm(Base, BaseModel): |
|
859 | 860 | __tablename__ = 'users_group_repo_to_perm' |
|
860 | 861 | __table_args__ = (UniqueConstraint('repository_id', 'users_group_id', 'permission_id'), {'extend_existing':True}) |
|
861 | 862 | users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
862 | 863 | users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) |
|
863 | 864 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) |
|
864 | 865 | repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None) |
|
865 | 866 | |
|
866 | 867 | users_group = relationship('UserGroup') |
|
867 | 868 | permission = relationship('Permission') |
|
868 | 869 | repository = relationship('Repository') |
|
869 | 870 | |
|
870 | 871 | def __repr__(self): |
|
871 | 872 | return '<userGroup:%s => %s >' % (self.users_group, self.repository) |
|
872 | 873 | |
|
873 | 874 | class UserGroupToPerm(Base, BaseModel): |
|
874 | 875 | __tablename__ = 'users_group_to_perm' |
|
875 | 876 | __table_args__ = {'extend_existing':True} |
|
876 | 877 | users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
877 | 878 | users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) |
|
878 | 879 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) |
|
879 | 880 | |
|
880 | 881 | users_group = relationship('UserGroup') |
|
881 | 882 | permission = relationship('Permission') |
|
882 | 883 | |
|
883 | 884 | |
|
884 | 885 | @classmethod |
|
885 | 886 | def has_perm(cls, users_group_id, perm): |
|
886 | 887 | if not isinstance(perm, Permission): |
|
887 | 888 | raise Exception('perm needs to be an instance of Permission class') |
|
888 | 889 | |
|
889 | 890 | return cls.query().filter(cls.users_group_id == |
|
890 | 891 | users_group_id)\ |
|
891 | 892 | .filter(cls.permission == perm)\ |
|
892 | 893 | .scalar() is not None |
|
893 | 894 | |
|
894 | 895 | @classmethod |
|
895 | 896 | def grant_perm(cls, users_group_id, perm): |
|
896 | 897 | if not isinstance(perm, Permission): |
|
897 | 898 | raise Exception('perm needs to be an instance of Permission class') |
|
898 | 899 | |
|
899 | 900 | new = cls() |
|
900 | 901 | new.users_group_id = users_group_id |
|
901 | 902 | new.permission = perm |
|
902 | 903 | try: |
|
903 | 904 | Session.add(new) |
|
904 | 905 | Session.commit() |
|
905 | 906 | except: |
|
906 | 907 | Session.rollback() |
|
907 | 908 | |
|
908 | 909 | |
|
909 | 910 | @classmethod |
|
910 | 911 | def revoke_perm(cls, users_group_id, perm): |
|
911 | 912 | if not isinstance(perm, Permission): |
|
912 | 913 | raise Exception('perm needs to be an instance of Permission class') |
|
913 | 914 | |
|
914 | 915 | try: |
|
915 | 916 | cls.query().filter(cls.users_group_id == users_group_id) \ |
|
916 | 917 | .filter(cls.permission == perm).delete() |
|
917 | 918 | Session.commit() |
|
918 | 919 | except: |
|
919 | 920 | Session.rollback() |
|
920 | 921 | |
|
921 | 922 | |
|
922 | 923 | class UserRepoGroupToPerm(Base, BaseModel): |
|
923 | 924 | __tablename__ = 'group_to_perm' |
|
924 | 925 | __table_args__ = (UniqueConstraint('group_id', 'permission_id'), {'extend_existing':True}) |
|
925 | 926 | |
|
926 | 927 | group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
927 | 928 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) |
|
928 | 929 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) |
|
929 | 930 | group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None) |
|
930 | 931 | |
|
931 | 932 | user = relationship('User') |
|
932 | 933 | permission = relationship('Permission') |
|
933 | 934 | group = relationship('RepoGroup') |
|
934 | 935 | |
|
935 | 936 | class Statistics(Base, BaseModel): |
|
936 | 937 | __tablename__ = 'statistics' |
|
937 | 938 | __table_args__ = (UniqueConstraint('repository_id'), {'extend_existing':True}) |
|
938 | 939 | stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
939 | 940 | repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None) |
|
940 | 941 | stat_on_revision = Column("stat_on_revision", Integer(), nullable=False) |
|
941 | 942 | commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)#JSON data |
|
942 | 943 | commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)#JSON data |
|
943 | 944 | languages = Column("languages", LargeBinary(1000000), nullable=False)#JSON data |
|
944 | 945 | |
|
945 | 946 | repository = relationship('Repository', single_parent=True) |
|
946 | 947 | |
|
947 | 948 | class UserFollowing(Base, BaseModel): |
|
948 | 949 | __tablename__ = 'user_followings' |
|
949 | 950 | __table_args__ = (UniqueConstraint('user_id', 'follows_repository_id'), |
|
950 | 951 | UniqueConstraint('user_id', 'follows_user_id') |
|
951 | 952 | , {'extend_existing':True}) |
|
952 | 953 | |
|
953 | 954 | user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
954 | 955 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) |
|
955 | 956 | follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None) |
|
956 | 957 | follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None) |
|
957 | 958 | follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now) |
|
958 | 959 | |
|
959 | 960 | user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id') |
|
960 | 961 | |
|
961 | 962 | follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id') |
|
962 | 963 | follows_repository = relationship('Repository', order_by='Repository.repo_name') |
|
963 | 964 | |
|
964 | 965 | |
|
965 | 966 | @classmethod |
|
966 | 967 | def get_repo_followers(cls, repo_id): |
|
967 | 968 | return cls.query().filter(cls.follows_repo_id == repo_id) |
|
968 | 969 | |
|
969 | 970 | class CacheInvalidation(Base, BaseModel): |
|
970 | 971 | __tablename__ = 'cache_invalidation' |
|
971 | 972 | __table_args__ = (UniqueConstraint('cache_key'), {'extend_existing':True}) |
|
972 | 973 | cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
973 | 974 | cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None) |
|
974 | 975 | cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None) |
|
975 | 976 | cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False) |
|
976 | 977 | |
|
977 | 978 | |
|
978 | 979 | def __init__(self, cache_key, cache_args=''): |
|
979 | 980 | self.cache_key = cache_key |
|
980 | 981 | self.cache_args = cache_args |
|
981 | 982 | self.cache_active = False |
|
982 | 983 | |
|
983 | 984 | def __repr__(self): |
|
984 | 985 | return "<%s('%s:%s')>" % (self.__class__.__name__, |
|
985 | 986 | self.cache_id, self.cache_key) |
|
986 | 987 | |
|
987 | 988 | @classmethod |
|
988 | 989 | def invalidate(cls, key): |
|
989 | 990 | """ |
|
990 | 991 | Returns the Invalidation object if the given key should be invalidated,
|
991 | 992 | None otherwise. `cache_active = False` means that this cache |
|
992 | 993 | state is not valid and needs to be invalidated |
|
993 | 994 | |
|
994 | 995 | :param key: |
|
995 | 996 | """ |
|
996 | 997 | return cls.query()\ |
|
997 | 998 | .filter(CacheInvalidation.cache_key == key)\ |
|
998 | 999 | .filter(CacheInvalidation.cache_active == False)\ |
|
999 | 1000 | .scalar() |
|
1000 | 1001 | |
|
1001 | 1002 | @classmethod |
|
1002 | 1003 | def set_invalidate(cls, key): |
|
1003 | 1004 | """ |
|
1004 | 1005 | Mark this Cache key for invalidation |
|
1005 | 1006 | |
|
1006 | 1007 | :param key: |
|
1007 | 1008 | """ |
|
1008 | 1009 | |
|
1009 | 1010 | log.debug('marking %s for invalidation', key) |
|
1010 | 1011 | inv_obj = Session.query(cls)\ |
|
1011 | 1012 | .filter(cls.cache_key == key).scalar() |
|
1012 | 1013 | if inv_obj: |
|
1013 | 1014 | inv_obj.cache_active = False |
|
1014 | 1015 | else: |
|
1015 | 1016 | log.debug('cache key not found in invalidation db -> creating one') |
|
1016 | 1017 | inv_obj = CacheInvalidation(key) |
|
1017 | 1018 | |
|
1018 | 1019 | try: |
|
1019 | 1020 | Session.add(inv_obj) |
|
1020 | 1021 | Session.commit() |
|
1021 | 1022 | except Exception: |
|
1022 | 1023 | log.error(traceback.format_exc()) |
|
1023 | 1024 | Session.rollback() |
|
1024 | 1025 | |
|
1025 | 1026 | @classmethod |
|
1026 | 1027 | def set_valid(cls, key): |
|
1027 | 1028 | """ |
|
1028 | 1029 | Mark this cache key as active and currently cached |
|
1029 | 1030 | |
|
1030 | 1031 | :param key: |
|
1031 | 1032 | """ |
|
1032 | 1033 | inv_obj = Session.query(CacheInvalidation)\ |
|
1033 | 1034 | .filter(CacheInvalidation.cache_key == key).scalar() |
|
1034 | 1035 | inv_obj.cache_active = True |
|
1035 | 1036 | Session.add(inv_obj) |
|
1036 | 1037 | Session.commit() |
|
1037 | 1038 | |
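invalidate(), set_invalidate() and set_valid() above form a simple stale-marker protocol keyed by name. The sketch below replays that call order against an in-memory stand-in; FakeMarker and the module-level dict are illustrative assumptions, not the real table-backed class.

class FakeMarker(object):
    # in-memory stand-in for one CacheInvalidation row
    def __init__(self, key):
        self.cache_key = key
        self.cache_active = False

_markers = {}

def set_invalidate(key):
    # flag the key as stale (cache_active = False), creating the marker if needed
    _markers.setdefault(key, FakeMarker(key)).cache_active = False

def invalidate(key):
    # return the marker only while the cached data is stale, else None
    obj = _markers.get(key)
    return obj if obj is not None and not obj.cache_active else None

def set_valid(key):
    # record that the cache for this key has been rebuilt
    _markers[key].cache_active = True

set_invalidate('repo1')               # e.g. after a push to 'repo1'
if invalidate('repo1'):               # a reader notices the stale marker...
    set_valid('repo1')                # ...rebuilds its cache, then marks it valid
assert invalidate('repo1') is None    # fresh again until the next set_invalidate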
|
1038 | 1039 | class DbMigrateVersion(Base, BaseModel): |
|
1039 | 1040 | __tablename__ = 'db_migrate_version' |
|
1040 | 1041 | __table_args__ = {'extend_existing':True} |
|
1041 | 1042 | repository_id = Column('repository_id', String(250), primary_key=True) |
|
1042 | 1043 | repository_path = Column('repository_path', Text) |
|
1043 | 1044 | version = Column('version', Integer) |
|