##// END OF EJS Templates
py3: remove usage of basestring
dan -
r3425:d577b778 default
parent child Browse files
Show More

The requested changes are too big and content was truncated. Show full diff

@@ -1,563 +1,564 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2011-2019 RhodeCode GmbH
3 # Copyright (C) 2011-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import logging
21 import logging
22 from pyramid import compat
22
23
23 from rhodecode.api import (
24 from rhodecode.api import (
24 jsonrpc_method, JSONRPCError, JSONRPCForbidden, JSONRPCValidationError)
25 jsonrpc_method, JSONRPCError, JSONRPCForbidden, JSONRPCValidationError)
25 from rhodecode.api.utils import (
26 from rhodecode.api.utils import (
26 Optional, OAttr, has_superadmin_permission, get_user_or_error, store_update)
27 Optional, OAttr, has_superadmin_permission, get_user_or_error, store_update)
27 from rhodecode.lib import audit_logger
28 from rhodecode.lib import audit_logger
28 from rhodecode.lib.auth import AuthUser, PasswordGenerator
29 from rhodecode.lib.auth import AuthUser, PasswordGenerator
29 from rhodecode.lib.exceptions import DefaultUserException
30 from rhodecode.lib.exceptions import DefaultUserException
30 from rhodecode.lib.utils2 import safe_int, str2bool
31 from rhodecode.lib.utils2 import safe_int, str2bool
31 from rhodecode.model.db import Session, User, Repository
32 from rhodecode.model.db import Session, User, Repository
32 from rhodecode.model.user import UserModel
33 from rhodecode.model.user import UserModel
33 from rhodecode.model import validation_schema
34 from rhodecode.model import validation_schema
34 from rhodecode.model.validation_schema.schemas import user_schema
35 from rhodecode.model.validation_schema.schemas import user_schema
35
36
36 log = logging.getLogger(__name__)
37 log = logging.getLogger(__name__)
37
38
38
39
@jsonrpc_method()
def get_user(request, apiuser, userid=Optional(OAttr('apiuser'))):
    """
    Returns the information associated with a username or userid.

    * If the ``userid`` is not set, this command returns the information
      for the ``userid`` calling the method.

    .. note::

       Normal users may only run this command against their ``userid``. For
       full privileges you must run this command using an |authtoken| with
       admin rights.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param userid: Sets the userid for which data will be returned.
    :type userid: Optional(str or int)

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result: <user_object with permissions_summary>
        error:  null
    """
    if not has_superadmin_permission(apiuser):
        # a regular user must not query data for someone else's account
        if not isinstance(userid, Optional) and userid != apiuser.user_id:
            raise JSONRPCError('userid is not the same as your user')

    # resolve the Optional default ('apiuser') against the local scope,
    # then unwrap a possible user object down to its numeric id
    resolved_id = Optional.extract(userid, evaluate_locals=locals())
    resolved_id = getattr(resolved_id, 'user_id', resolved_id)

    user_obj = get_user_or_error(resolved_id)
    perms = AuthUser(user_id=user_obj.user_id).permissions

    data = user_obj.get_api_data(include_secrets=True)
    data['permissions'] = perms  # TODO(marcink): should be deprecated
    data['permissions_summary'] = perms
    return data
123
124
124
125
@jsonrpc_method()
def get_users(request, apiuser):
    """
    Lists all users in the |RCE| user database.

    This command can only be run using an |authtoken| with admin rights to
    the specified repository.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result: [<user_object>, ...]
        error:  null
    """
    if not has_superadmin_permission(apiuser):
        raise JSONRPCForbidden()

    # every account except the built-in default user, ordered by name
    all_users = (
        User.query()
        .order_by(User.username)
        .filter(User.username != User.DEFAULT_USER)
        .all())
    return [u.get_api_data(include_secrets=True) for u in all_users]
157
158
158
159
@jsonrpc_method()
def create_user(request, apiuser, username, email, password=Optional(''),
                firstname=Optional(''), lastname=Optional(''),
                active=Optional(True), admin=Optional(False),
                extern_name=Optional('rhodecode'),
                extern_type=Optional('rhodecode'),
                force_password_change=Optional(False),
                create_personal_repo_group=Optional(None)):
    """
    Creates a new user and returns the new user object.

    This command can only be run using an |authtoken| with admin rights to
    the specified repository.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param username: Set the new username.
    :type username: str or int
    :param email: Set the user email address.
    :type email: str
    :param password: Set the new user password.
    :type password: Optional(str)
    :param firstname: Set the new user firstname.
    :type firstname: Optional(str)
    :param lastname: Set the new user surname.
    :type lastname: Optional(str)
    :param active: Set the user as active.
    :type active: Optional(``True`` | ``False``)
    :param admin: Give the new user admin rights.
    :type admin: Optional(``True`` | ``False``)
    :param extern_name: Set the authentication plugin name.
        Using LDAP this is filled with LDAP UID.
    :type extern_name: Optional(str)
    :param extern_type: Set the new user authentication plugin.
    :type extern_type: Optional(str)
    :param force_password_change: Force the new user to change password
        on next login.
    :type force_password_change: Optional(``True`` | ``False``)
    :param create_personal_repo_group: Create personal repo group for this user
    :type create_personal_repo_group: Optional(``True`` | ``False``)

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result: {
                  "msg" : "created new user `<username>`",
                  "user": <user_obj>
                }
        error:  null

    Example error output:

    .. code-block:: bash

      id : <id_given_in_input>
      result : null
      error :  {
        "user `<username>` already exist"
        or
        "email `<email>` already exist"
        or
        "failed to create user `<username>`"
      }

    """
    if not has_superadmin_permission(apiuser):
        raise JSONRPCForbidden()

    if UserModel().get_by_username(username):
        raise JSONRPCError("user `%s` already exist" % (username,))

    if UserModel().get_by_email(email, case_insensitive=True):
        raise JSONRPCError("email `%s` already exist" % (email,))

    # generate random password if we actually given the
    # extern_name and it's not rhodecode
    if (not isinstance(extern_name, Optional) and
            Optional.extract(extern_name) != 'rhodecode'):
        # generate temporary password if user is external
        password = PasswordGenerator().gen_password(length=16)

    create_repo_group = Optional.extract(create_personal_repo_group)
    # py3 fix: `basestring` no longer exists; compat.string_types matches
    # str on py3 and basestring on py2, keeping string 'true'/'false' inputs
    # from the API coerced into proper booleans
    if isinstance(create_repo_group, compat.string_types):
        create_repo_group = str2bool(create_repo_group)

    username = Optional.extract(username)
    password = Optional.extract(password)
    email = Optional.extract(email)
    first_name = Optional.extract(firstname)
    last_name = Optional.extract(lastname)
    active = Optional.extract(active)
    admin = Optional.extract(admin)
    extern_type = Optional.extract(extern_type)
    extern_name = Optional.extract(extern_name)

    schema = user_schema.UserSchema().bind(
        # user caller
        user=apiuser)
    try:
        schema_data = schema.deserialize(dict(
            username=username,
            email=email,
            password=password,
            first_name=first_name,
            last_name=last_name,
            active=active,
            admin=admin,
            extern_type=extern_type,
            extern_name=extern_name,
        ))
    except validation_schema.Invalid as err:
        raise JSONRPCValidationError(colander_exc=err)

    try:
        user = UserModel().create_or_update(
            username=schema_data['username'],
            password=schema_data['password'],
            email=schema_data['email'],
            firstname=schema_data['first_name'],
            lastname=schema_data['last_name'],
            active=schema_data['active'],
            admin=schema_data['admin'],
            extern_type=schema_data['extern_type'],
            extern_name=schema_data['extern_name'],
            force_password_change=Optional.extract(force_password_change),
            create_repo_group=create_repo_group
        )
        # flush so the audit entry records the generated user id
        Session().flush()
        creation_data = user.get_api_data()
        audit_logger.store_api(
            'user.create', action_data={'data': creation_data},
            user=apiuser)

        Session().commit()
        return {
            'msg': 'created new user `%s`' % username,
            'user': user.get_api_data(include_secrets=True)
        }
    except Exception:
        log.exception('Error occurred during creation of user')
        raise JSONRPCError('failed to create user `%s`' % (username,))
303
304
304
305
@jsonrpc_method()
def update_user(request, apiuser, userid, username=Optional(None),
                email=Optional(None), password=Optional(None),
                firstname=Optional(None), lastname=Optional(None),
                active=Optional(None), admin=Optional(None),
                extern_type=Optional(None), extern_name=Optional(None), ):
    """
    Updates the details for the specified user, if that user exists.

    This command can only be run using an |authtoken| with admin rights to
    the specified repository.

    :param apiuser: This is filled automatically from |authtoken|.
    :type apiuser: AuthUser
    :param userid: Set the ``userid`` to update.
    :type userid: str or int
    :param username: Set the new username.
    :type username: str or int
    :param email: Set the new email.
    :type email: str
    :param password: Set the new password.
    :type password: Optional(str)
    :param firstname: Set the new first name.
    :type firstname: Optional(str)
    :param lastname: Set the new surname.
    :type lastname: Optional(str)
    :param active: Set the new user as active.
    :type active: Optional(``True`` | ``False``)
    :param admin: Give the user admin rights.
    :type admin: Optional(``True`` | ``False``)
    :param extern_name: Set the authentication plugin user name.
        Using LDAP this is filled with LDAP UID.
    :type extern_name: Optional(str)
    :param extern_type: Set the authentication plugin type.
    :type extern_type: Optional(str)

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result: {
                  "msg" : "updated user ID:<userid> <username>",
                  "user": <user_object>,
                }
        error:  null

    Example error output:

    .. code-block:: bash

      id : <id_given_in_input>
      result : null
      error :  {
        "failed to update user `<username>`"
      }

    """
    if not has_superadmin_permission(apiuser):
        raise JSONRPCForbidden()

    user = get_user_or_error(userid)
    old_data = user.get_api_data()
    # only arguments explicitly provided by the caller (i.e. no longer
    # wrapped in Optional) end up in this dict
    updates = {}

    try:
        for value, attr_name in (
                (username, 'username'),
                (password, 'password'),
                (email, 'email'),
                (firstname, 'name'),
                (lastname, 'lastname'),
                (active, 'active'),
                (admin, 'admin'),
                (extern_name, 'extern_name'),
                (extern_type, 'extern_type')):
            store_update(updates, value, attr_name)

        user = UserModel().update_user(user, **updates)
        audit_logger.store_api(
            'user.edit', action_data={'old_data': old_data},
            user=apiuser)
        Session().commit()
        return {
            'msg': 'updated user ID:%s %s' % (user.user_id, user.username),
            'user': user.get_api_data(include_secrets=True)
        }
    except DefaultUserException:
        log.exception("Default user edit exception")
        raise JSONRPCError('editing default user is forbidden')
    except Exception:
        log.exception("Error occurred during update of user")
        raise JSONRPCError('failed to update user `%s`' % (userid,))
401
402
402
403
@jsonrpc_method()
def delete_user(request, apiuser, userid):
    """
    Deletes the specified user from the |RCE| user database.

    This command can only be run using an |authtoken| with admin rights to
    the specified repository.

    .. important::

       Ensure all open pull requests and open code review
       requests to this user are close.

       Also ensure all repositories, or repository groups owned by this
       user are reassigned before deletion.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param userid: Set the user to delete.
    :type userid: str or int

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result: {
                  "msg" : "deleted user ID:<userid> <username>",
                  "user": null
                }
        error:  null

    Example error output:

    .. code-block:: bash

      id : <id_given_in_input>
      result : null
      error :  {
        "failed to delete user ID:<userid> <username>"
      }

    """
    if not has_superadmin_permission(apiuser):
        raise JSONRPCForbidden()

    user = get_user_or_error(userid)
    # snapshot the account data before removal for the audit trail
    old_data = user.get_api_data()

    try:
        UserModel().delete(userid)
        audit_logger.store_api(
            'user.delete', action_data={'old_data': old_data},
            user=apiuser)
        Session().commit()
    except Exception:
        log.exception("Error occurred during deleting of user")
        raise JSONRPCError(
            'failed to delete user ID:%s %s' % (user.user_id, user.username))

    return {
        'msg': 'deleted user ID:%s %s' % (user.user_id, user.username),
        'user': None
    }
468
469
469
470
@jsonrpc_method()
def get_user_locks(request, apiuser, userid=Optional(OAttr('apiuser'))):
    """
    Displays all repositories locked by the specified user.

    * If this command is run by a non-admin user, it returns
      a list of |repos| locked by that user.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param userid: Sets the userid whose list of locked |repos| will be
        displayed.
    :type userid: Optional(str or int)

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : {
                   [repo_object, repo_object,...]
                 }
        error :  null
    """
    include_secrets = False
    if has_superadmin_permission(apiuser):
        # admins get the secret fields in the repo payload
        include_secrets = True
    else:
        # a regular user must not query locks of another account
        if not isinstance(userid, Optional) and userid != apiuser.user_id:
            raise JSONRPCError('userid is not the same as your user')

    resolved_id = Optional.extract(userid, evaluate_locals=locals())
    resolved_id = getattr(resolved_id, 'user_id', resolved_id)
    user = get_user_or_error(resolved_id)

    locked_repos = []
    # walk every repository and collect the active locks
    for repo in Repository.getAll():
        lock_user_id, lock_time, _lock_reason = repo.locked
        if lock_user_id and lock_time:
            api_data = repo.get_api_data(include_secrets=include_secrets)
            # with a user filter, only include locks held by that user
            if safe_int(lock_user_id) == user.user_id:
                locked_repos.append(api_data)

    return locked_repos
522
523
523
524
@jsonrpc_method()
def get_user_audit_logs(request, apiuser, userid=Optional(OAttr('apiuser'))):
    """
    Fetches all action logs made by the specified user.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param userid: Sets the userid whose list of locked |repos| will be
        displayed.
    :type userid: Optional(str or int)

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : {
                   [action, action,...]
                 }
        error :  null
    """
    if not has_superadmin_permission(apiuser):
        # a regular user must not query logs for someone else's account
        if not isinstance(userid, Optional) and userid != apiuser.user_id:
            raise JSONRPCError('userid is not the same as your user')

    resolved_id = Optional.extract(userid, evaluate_locals=locals())
    resolved_id = getattr(resolved_id, 'user_id', resolved_id)
    user = get_user_or_error(resolved_id)

    # materialize every audit-log entry for this user (no term filtering)
    return list(UserModel().get_user_log(user, filter_term=None))
@@ -1,686 +1,687 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2019 RhodeCode GmbH
3 # Copyright (C) 2016-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import time
21 import time
22 import logging
22 import logging
23 import operator
23 import operator
24
24
25 from pyramid import compat
25 from pyramid.httpexceptions import HTTPFound, HTTPForbidden, HTTPBadRequest
26 from pyramid.httpexceptions import HTTPFound, HTTPForbidden, HTTPBadRequest
26
27
27 from rhodecode.lib import helpers as h, diffs
28 from rhodecode.lib import helpers as h, diffs
28 from rhodecode.lib.utils2 import (
29 from rhodecode.lib.utils2 import (
29 StrictAttributeDict, safe_int, datetime_to_time, safe_unicode)
30 StrictAttributeDict, safe_int, datetime_to_time, safe_unicode)
30 from rhodecode.lib.vcs.exceptions import RepositoryRequirementError
31 from rhodecode.lib.vcs.exceptions import RepositoryRequirementError
31 from rhodecode.model import repo
32 from rhodecode.model import repo
32 from rhodecode.model import repo_group
33 from rhodecode.model import repo_group
33 from rhodecode.model import user_group
34 from rhodecode.model import user_group
34 from rhodecode.model import user
35 from rhodecode.model import user
35 from rhodecode.model.db import User
36 from rhodecode.model.db import User
36 from rhodecode.model.scm import ScmModel
37 from rhodecode.model.scm import ScmModel
37 from rhodecode.model.settings import VcsSettingsModel
38 from rhodecode.model.settings import VcsSettingsModel
38
39
39 log = logging.getLogger(__name__)
40 log = logging.getLogger(__name__)
40
41
41
42
42 ADMIN_PREFIX = '/_admin'
43 ADMIN_PREFIX = '/_admin'
43 STATIC_FILE_PREFIX = '/_static'
44 STATIC_FILE_PREFIX = '/_static'
44
45
45 URL_NAME_REQUIREMENTS = {
46 URL_NAME_REQUIREMENTS = {
46 # group name can have a slash in them, but they must not end with a slash
47 # group name can have a slash in them, but they must not end with a slash
47 'group_name': r'.*?[^/]',
48 'group_name': r'.*?[^/]',
48 'repo_group_name': r'.*?[^/]',
49 'repo_group_name': r'.*?[^/]',
49 # repo names can have a slash in them, but they must not end with a slash
50 # repo names can have a slash in them, but they must not end with a slash
50 'repo_name': r'.*?[^/]',
51 'repo_name': r'.*?[^/]',
51 # file path eats up everything at the end
52 # file path eats up everything at the end
52 'f_path': r'.*',
53 'f_path': r'.*',
53 # reference types
54 # reference types
54 'source_ref_type': '(branch|book|tag|rev|\%\(source_ref_type\)s)',
55 'source_ref_type': '(branch|book|tag|rev|\%\(source_ref_type\)s)',
55 'target_ref_type': '(branch|book|tag|rev|\%\(target_ref_type\)s)',
56 'target_ref_type': '(branch|book|tag|rev|\%\(target_ref_type\)s)',
56 }
57 }
57
58
58
59
59 def add_route_with_slash(config,name, pattern, **kw):
60 def add_route_with_slash(config,name, pattern, **kw):
60 config.add_route(name, pattern, **kw)
61 config.add_route(name, pattern, **kw)
61 if not pattern.endswith('/'):
62 if not pattern.endswith('/'):
62 config.add_route(name + '_slash', pattern + '/', **kw)
63 config.add_route(name + '_slash', pattern + '/', **kw)
63
64
64
65
65 def add_route_requirements(route_path, requirements=None):
66 def add_route_requirements(route_path, requirements=None):
66 """
67 """
67 Adds regex requirements to pyramid routes using a mapping dict
68 Adds regex requirements to pyramid routes using a mapping dict
68 e.g::
69 e.g::
69 add_route_requirements('{repo_name}/settings')
70 add_route_requirements('{repo_name}/settings')
70 """
71 """
71 requirements = requirements or URL_NAME_REQUIREMENTS
72 requirements = requirements or URL_NAME_REQUIREMENTS
72 for key, regex in requirements.items():
73 for key, regex in requirements.items():
73 route_path = route_path.replace('{%s}' % key, '{%s:%s}' % (key, regex))
74 route_path = route_path.replace('{%s}' % key, '{%s:%s}' % (key, regex))
74 return route_path
75 return route_path
75
76
76
77
77 def get_format_ref_id(repo):
78 def get_format_ref_id(repo):
78 """Returns a `repo` specific reference formatter function"""
79 """Returns a `repo` specific reference formatter function"""
79 if h.is_svn(repo):
80 if h.is_svn(repo):
80 return _format_ref_id_svn
81 return _format_ref_id_svn
81 else:
82 else:
82 return _format_ref_id
83 return _format_ref_id
83
84
84
85
85 def _format_ref_id(name, raw_id):
86 def _format_ref_id(name, raw_id):
86 """Default formatting of a given reference `name`"""
87 """Default formatting of a given reference `name`"""
87 return name
88 return name
88
89
89
90
90 def _format_ref_id_svn(name, raw_id):
91 def _format_ref_id_svn(name, raw_id):
91 """Special way of formatting a reference for Subversion including path"""
92 """Special way of formatting a reference for Subversion including path"""
92 return '%s@%s' % (name, raw_id)
93 return '%s@%s' % (name, raw_id)
93
94
94
95
95 class TemplateArgs(StrictAttributeDict):
96 class TemplateArgs(StrictAttributeDict):
96 pass
97 pass
97
98
98
99
99 class BaseAppView(object):
100 class BaseAppView(object):
100
101
101 def __init__(self, context, request):
102 def __init__(self, context, request):
102 self.request = request
103 self.request = request
103 self.context = context
104 self.context = context
104 self.session = request.session
105 self.session = request.session
105 if not hasattr(request, 'user'):
106 if not hasattr(request, 'user'):
106 # NOTE(marcink): edge case, we ended up in matched route
107 # NOTE(marcink): edge case, we ended up in matched route
107 # but probably of web-app context, e.g API CALL/VCS CALL
108 # but probably of web-app context, e.g API CALL/VCS CALL
108 if hasattr(request, 'vcs_call') or hasattr(request, 'rpc_method'):
109 if hasattr(request, 'vcs_call') or hasattr(request, 'rpc_method'):
109 log.warning('Unable to process request `%s` in this scope', request)
110 log.warning('Unable to process request `%s` in this scope', request)
110 raise HTTPBadRequest()
111 raise HTTPBadRequest()
111
112
112 self._rhodecode_user = request.user # auth user
113 self._rhodecode_user = request.user # auth user
113 self._rhodecode_db_user = self._rhodecode_user.get_instance()
114 self._rhodecode_db_user = self._rhodecode_user.get_instance()
114 self._maybe_needs_password_change(
115 self._maybe_needs_password_change(
115 request.matched_route.name, self._rhodecode_db_user)
116 request.matched_route.name, self._rhodecode_db_user)
116
117
117 def _maybe_needs_password_change(self, view_name, user_obj):
118 def _maybe_needs_password_change(self, view_name, user_obj):
118 log.debug('Checking if user %s needs password change on view %s',
119 log.debug('Checking if user %s needs password change on view %s',
119 user_obj, view_name)
120 user_obj, view_name)
120 skip_user_views = [
121 skip_user_views = [
121 'logout', 'login',
122 'logout', 'login',
122 'my_account_password', 'my_account_password_update'
123 'my_account_password', 'my_account_password_update'
123 ]
124 ]
124
125
125 if not user_obj:
126 if not user_obj:
126 return
127 return
127
128
128 if user_obj.username == User.DEFAULT_USER:
129 if user_obj.username == User.DEFAULT_USER:
129 return
130 return
130
131
131 now = time.time()
132 now = time.time()
132 should_change = user_obj.user_data.get('force_password_change')
133 should_change = user_obj.user_data.get('force_password_change')
133 change_after = safe_int(should_change) or 0
134 change_after = safe_int(should_change) or 0
134 if should_change and now > change_after:
135 if should_change and now > change_after:
135 log.debug('User %s requires password change', user_obj)
136 log.debug('User %s requires password change', user_obj)
136 h.flash('You are required to change your password', 'warning',
137 h.flash('You are required to change your password', 'warning',
137 ignore_duplicate=True)
138 ignore_duplicate=True)
138
139
139 if view_name not in skip_user_views:
140 if view_name not in skip_user_views:
140 raise HTTPFound(
141 raise HTTPFound(
141 self.request.route_path('my_account_password'))
142 self.request.route_path('my_account_password'))
142
143
143 def _log_creation_exception(self, e, repo_name):
144 def _log_creation_exception(self, e, repo_name):
144 _ = self.request.translate
145 _ = self.request.translate
145 reason = None
146 reason = None
146 if len(e.args) == 2:
147 if len(e.args) == 2:
147 reason = e.args[1]
148 reason = e.args[1]
148
149
149 if reason == 'INVALID_CERTIFICATE':
150 if reason == 'INVALID_CERTIFICATE':
150 log.exception(
151 log.exception(
151 'Exception creating a repository: invalid certificate')
152 'Exception creating a repository: invalid certificate')
152 msg = (_('Error creating repository %s: invalid certificate')
153 msg = (_('Error creating repository %s: invalid certificate')
153 % repo_name)
154 % repo_name)
154 else:
155 else:
155 log.exception("Exception creating a repository")
156 log.exception("Exception creating a repository")
156 msg = (_('Error creating repository %s')
157 msg = (_('Error creating repository %s')
157 % repo_name)
158 % repo_name)
158 return msg
159 return msg
159
160
160 def _get_local_tmpl_context(self, include_app_defaults=True):
161 def _get_local_tmpl_context(self, include_app_defaults=True):
161 c = TemplateArgs()
162 c = TemplateArgs()
162 c.auth_user = self.request.user
163 c.auth_user = self.request.user
163 # TODO(marcink): migrate the usage of c.rhodecode_user to c.auth_user
164 # TODO(marcink): migrate the usage of c.rhodecode_user to c.auth_user
164 c.rhodecode_user = self.request.user
165 c.rhodecode_user = self.request.user
165
166
166 if include_app_defaults:
167 if include_app_defaults:
167 from rhodecode.lib.base import attach_context_attributes
168 from rhodecode.lib.base import attach_context_attributes
168 attach_context_attributes(c, self.request, self.request.user.user_id)
169 attach_context_attributes(c, self.request, self.request.user.user_id)
169
170
170 return c
171 return c
171
172
172 def _get_template_context(self, tmpl_args, **kwargs):
173 def _get_template_context(self, tmpl_args, **kwargs):
173
174
174 local_tmpl_args = {
175 local_tmpl_args = {
175 'defaults': {},
176 'defaults': {},
176 'errors': {},
177 'errors': {},
177 'c': tmpl_args
178 'c': tmpl_args
178 }
179 }
179 local_tmpl_args.update(kwargs)
180 local_tmpl_args.update(kwargs)
180 return local_tmpl_args
181 return local_tmpl_args
181
182
182 def load_default_context(self):
183 def load_default_context(self):
183 """
184 """
184 example:
185 example:
185
186
186 def load_default_context(self):
187 def load_default_context(self):
187 c = self._get_local_tmpl_context()
188 c = self._get_local_tmpl_context()
188 c.custom_var = 'foobar'
189 c.custom_var = 'foobar'
189
190
190 return c
191 return c
191 """
192 """
192 raise NotImplementedError('Needs implementation in view class')
193 raise NotImplementedError('Needs implementation in view class')
193
194
194
195
195 class RepoAppView(BaseAppView):
196 class RepoAppView(BaseAppView):
196
197
197 def __init__(self, context, request):
198 def __init__(self, context, request):
198 super(RepoAppView, self).__init__(context, request)
199 super(RepoAppView, self).__init__(context, request)
199 self.db_repo = request.db_repo
200 self.db_repo = request.db_repo
200 self.db_repo_name = self.db_repo.repo_name
201 self.db_repo_name = self.db_repo.repo_name
201 self.db_repo_pull_requests = ScmModel().get_pull_requests(self.db_repo)
202 self.db_repo_pull_requests = ScmModel().get_pull_requests(self.db_repo)
202
203
203 def _handle_missing_requirements(self, error):
204 def _handle_missing_requirements(self, error):
204 log.error(
205 log.error(
205 'Requirements are missing for repository %s: %s',
206 'Requirements are missing for repository %s: %s',
206 self.db_repo_name, safe_unicode(error))
207 self.db_repo_name, safe_unicode(error))
207
208
208 def _get_local_tmpl_context(self, include_app_defaults=True):
209 def _get_local_tmpl_context(self, include_app_defaults=True):
209 _ = self.request.translate
210 _ = self.request.translate
210 c = super(RepoAppView, self)._get_local_tmpl_context(
211 c = super(RepoAppView, self)._get_local_tmpl_context(
211 include_app_defaults=include_app_defaults)
212 include_app_defaults=include_app_defaults)
212
213
213 # register common vars for this type of view
214 # register common vars for this type of view
214 c.rhodecode_db_repo = self.db_repo
215 c.rhodecode_db_repo = self.db_repo
215 c.repo_name = self.db_repo_name
216 c.repo_name = self.db_repo_name
216 c.repository_pull_requests = self.db_repo_pull_requests
217 c.repository_pull_requests = self.db_repo_pull_requests
217 self.path_filter = PathFilter(None)
218 self.path_filter = PathFilter(None)
218
219
219 c.repository_requirements_missing = {}
220 c.repository_requirements_missing = {}
220 try:
221 try:
221 self.rhodecode_vcs_repo = self.db_repo.scm_instance()
222 self.rhodecode_vcs_repo = self.db_repo.scm_instance()
222 if self.rhodecode_vcs_repo:
223 if self.rhodecode_vcs_repo:
223 path_perms = self.rhodecode_vcs_repo.get_path_permissions(
224 path_perms = self.rhodecode_vcs_repo.get_path_permissions(
224 c.auth_user.username)
225 c.auth_user.username)
225 self.path_filter = PathFilter(path_perms)
226 self.path_filter = PathFilter(path_perms)
226 except RepositoryRequirementError as e:
227 except RepositoryRequirementError as e:
227 c.repository_requirements_missing = {'error': str(e)}
228 c.repository_requirements_missing = {'error': str(e)}
228 self._handle_missing_requirements(e)
229 self._handle_missing_requirements(e)
229 self.rhodecode_vcs_repo = None
230 self.rhodecode_vcs_repo = None
230
231
231 c.path_filter = self.path_filter # used by atom_feed_entry.mako
232 c.path_filter = self.path_filter # used by atom_feed_entry.mako
232
233
233 if self.rhodecode_vcs_repo is None:
234 if self.rhodecode_vcs_repo is None:
234 # unable to fetch this repo as vcs instance, report back to user
235 # unable to fetch this repo as vcs instance, report back to user
235 h.flash(_(
236 h.flash(_(
236 "The repository `%(repo_name)s` cannot be loaded in filesystem. "
237 "The repository `%(repo_name)s` cannot be loaded in filesystem. "
237 "Please check if it exist, or is not damaged.") %
238 "Please check if it exist, or is not damaged.") %
238 {'repo_name': c.repo_name},
239 {'repo_name': c.repo_name},
239 category='error', ignore_duplicate=True)
240 category='error', ignore_duplicate=True)
240 if c.repository_requirements_missing:
241 if c.repository_requirements_missing:
241 route = self.request.matched_route.name
242 route = self.request.matched_route.name
242 if route.startswith(('edit_repo', 'repo_summary')):
243 if route.startswith(('edit_repo', 'repo_summary')):
243 # allow summary and edit repo on missing requirements
244 # allow summary and edit repo on missing requirements
244 return c
245 return c
245
246
246 raise HTTPFound(
247 raise HTTPFound(
247 h.route_path('repo_summary', repo_name=self.db_repo_name))
248 h.route_path('repo_summary', repo_name=self.db_repo_name))
248
249
249 else: # redirect if we don't show missing requirements
250 else: # redirect if we don't show missing requirements
250 raise HTTPFound(h.route_path('home'))
251 raise HTTPFound(h.route_path('home'))
251
252
252 c.has_origin_repo_read_perm = False
253 c.has_origin_repo_read_perm = False
253 if self.db_repo.fork:
254 if self.db_repo.fork:
254 c.has_origin_repo_read_perm = h.HasRepoPermissionAny(
255 c.has_origin_repo_read_perm = h.HasRepoPermissionAny(
255 'repository.write', 'repository.read', 'repository.admin')(
256 'repository.write', 'repository.read', 'repository.admin')(
256 self.db_repo.fork.repo_name, 'summary fork link')
257 self.db_repo.fork.repo_name, 'summary fork link')
257
258
258 return c
259 return c
259
260
260 def _get_f_path_unchecked(self, matchdict, default=None):
261 def _get_f_path_unchecked(self, matchdict, default=None):
261 """
262 """
262 Should only be used by redirects, everything else should call _get_f_path
263 Should only be used by redirects, everything else should call _get_f_path
263 """
264 """
264 f_path = matchdict.get('f_path')
265 f_path = matchdict.get('f_path')
265 if f_path:
266 if f_path:
266 # fix for multiple initial slashes that causes errors for GIT
267 # fix for multiple initial slashes that causes errors for GIT
267 return f_path.lstrip('/')
268 return f_path.lstrip('/')
268
269
269 return default
270 return default
270
271
271 def _get_f_path(self, matchdict, default=None):
272 def _get_f_path(self, matchdict, default=None):
272 f_path_match = self._get_f_path_unchecked(matchdict, default)
273 f_path_match = self._get_f_path_unchecked(matchdict, default)
273 return self.path_filter.assert_path_permissions(f_path_match)
274 return self.path_filter.assert_path_permissions(f_path_match)
274
275
275 def _get_general_setting(self, target_repo, settings_key, default=False):
276 def _get_general_setting(self, target_repo, settings_key, default=False):
276 settings_model = VcsSettingsModel(repo=target_repo)
277 settings_model = VcsSettingsModel(repo=target_repo)
277 settings = settings_model.get_general_settings()
278 settings = settings_model.get_general_settings()
278 return settings.get(settings_key, default)
279 return settings.get(settings_key, default)
279
280
280
281
281 class PathFilter(object):
282 class PathFilter(object):
282
283
283 # Expects and instance of BasePathPermissionChecker or None
284 # Expects and instance of BasePathPermissionChecker or None
284 def __init__(self, permission_checker):
285 def __init__(self, permission_checker):
285 self.permission_checker = permission_checker
286 self.permission_checker = permission_checker
286
287
287 def assert_path_permissions(self, path):
288 def assert_path_permissions(self, path):
288 if path and self.permission_checker and not self.permission_checker.has_access(path):
289 if path and self.permission_checker and not self.permission_checker.has_access(path):
289 raise HTTPForbidden()
290 raise HTTPForbidden()
290 return path
291 return path
291
292
292 def filter_patchset(self, patchset):
293 def filter_patchset(self, patchset):
293 if not self.permission_checker or not patchset:
294 if not self.permission_checker or not patchset:
294 return patchset, False
295 return patchset, False
295 had_filtered = False
296 had_filtered = False
296 filtered_patchset = []
297 filtered_patchset = []
297 for patch in patchset:
298 for patch in patchset:
298 filename = patch.get('filename', None)
299 filename = patch.get('filename', None)
299 if not filename or self.permission_checker.has_access(filename):
300 if not filename or self.permission_checker.has_access(filename):
300 filtered_patchset.append(patch)
301 filtered_patchset.append(patch)
301 else:
302 else:
302 had_filtered = True
303 had_filtered = True
303 if had_filtered:
304 if had_filtered:
304 if isinstance(patchset, diffs.LimitedDiffContainer):
305 if isinstance(patchset, diffs.LimitedDiffContainer):
305 filtered_patchset = diffs.LimitedDiffContainer(patchset.diff_limit, patchset.cur_diff_size, filtered_patchset)
306 filtered_patchset = diffs.LimitedDiffContainer(patchset.diff_limit, patchset.cur_diff_size, filtered_patchset)
306 return filtered_patchset, True
307 return filtered_patchset, True
307 else:
308 else:
308 return patchset, False
309 return patchset, False
309
310
310 def render_patchset_filtered(self, diffset, patchset, source_ref=None, target_ref=None):
311 def render_patchset_filtered(self, diffset, patchset, source_ref=None, target_ref=None):
311 filtered_patchset, has_hidden_changes = self.filter_patchset(patchset)
312 filtered_patchset, has_hidden_changes = self.filter_patchset(patchset)
312 result = diffset.render_patchset(
313 result = diffset.render_patchset(
313 filtered_patchset, source_ref=source_ref, target_ref=target_ref)
314 filtered_patchset, source_ref=source_ref, target_ref=target_ref)
314 result.has_hidden_changes = has_hidden_changes
315 result.has_hidden_changes = has_hidden_changes
315 return result
316 return result
316
317
317 def get_raw_patch(self, diff_processor):
318 def get_raw_patch(self, diff_processor):
318 if self.permission_checker is None:
319 if self.permission_checker is None:
319 return diff_processor.as_raw()
320 return diff_processor.as_raw()
320 elif self.permission_checker.has_full_access:
321 elif self.permission_checker.has_full_access:
321 return diff_processor.as_raw()
322 return diff_processor.as_raw()
322 else:
323 else:
323 return '# Repository has user-specific filters, raw patch generation is disabled.'
324 return '# Repository has user-specific filters, raw patch generation is disabled.'
324
325
325 @property
326 @property
326 def is_enabled(self):
327 def is_enabled(self):
327 return self.permission_checker is not None
328 return self.permission_checker is not None
328
329
329
330
330 class RepoGroupAppView(BaseAppView):
331 class RepoGroupAppView(BaseAppView):
331 def __init__(self, context, request):
332 def __init__(self, context, request):
332 super(RepoGroupAppView, self).__init__(context, request)
333 super(RepoGroupAppView, self).__init__(context, request)
333 self.db_repo_group = request.db_repo_group
334 self.db_repo_group = request.db_repo_group
334 self.db_repo_group_name = self.db_repo_group.group_name
335 self.db_repo_group_name = self.db_repo_group.group_name
335
336
336 def _revoke_perms_on_yourself(self, form_result):
337 def _revoke_perms_on_yourself(self, form_result):
337 _updates = filter(lambda u: self._rhodecode_user.user_id == int(u[0]),
338 _updates = filter(lambda u: self._rhodecode_user.user_id == int(u[0]),
338 form_result['perm_updates'])
339 form_result['perm_updates'])
339 _additions = filter(lambda u: self._rhodecode_user.user_id == int(u[0]),
340 _additions = filter(lambda u: self._rhodecode_user.user_id == int(u[0]),
340 form_result['perm_additions'])
341 form_result['perm_additions'])
341 _deletions = filter(lambda u: self._rhodecode_user.user_id == int(u[0]),
342 _deletions = filter(lambda u: self._rhodecode_user.user_id == int(u[0]),
342 form_result['perm_deletions'])
343 form_result['perm_deletions'])
343 admin_perm = 'group.admin'
344 admin_perm = 'group.admin'
344 if _updates and _updates[0][1] != admin_perm or \
345 if _updates and _updates[0][1] != admin_perm or \
345 _additions and _additions[0][1] != admin_perm or \
346 _additions and _additions[0][1] != admin_perm or \
346 _deletions and _deletions[0][1] != admin_perm:
347 _deletions and _deletions[0][1] != admin_perm:
347 return True
348 return True
348 return False
349 return False
349
350
350
351
351 class UserGroupAppView(BaseAppView):
352 class UserGroupAppView(BaseAppView):
352 def __init__(self, context, request):
353 def __init__(self, context, request):
353 super(UserGroupAppView, self).__init__(context, request)
354 super(UserGroupAppView, self).__init__(context, request)
354 self.db_user_group = request.db_user_group
355 self.db_user_group = request.db_user_group
355 self.db_user_group_name = self.db_user_group.users_group_name
356 self.db_user_group_name = self.db_user_group.users_group_name
356
357
357
358
358 class UserAppView(BaseAppView):
359 class UserAppView(BaseAppView):
359 def __init__(self, context, request):
360 def __init__(self, context, request):
360 super(UserAppView, self).__init__(context, request)
361 super(UserAppView, self).__init__(context, request)
361 self.db_user = request.db_user
362 self.db_user = request.db_user
362 self.db_user_id = self.db_user.user_id
363 self.db_user_id = self.db_user.user_id
363
364
364 _ = self.request.translate
365 _ = self.request.translate
365 if not request.db_user_supports_default:
366 if not request.db_user_supports_default:
366 if self.db_user.username == User.DEFAULT_USER:
367 if self.db_user.username == User.DEFAULT_USER:
367 h.flash(_("Editing user `{}` is disabled.".format(
368 h.flash(_("Editing user `{}` is disabled.".format(
368 User.DEFAULT_USER)), category='warning')
369 User.DEFAULT_USER)), category='warning')
369 raise HTTPFound(h.route_path('users'))
370 raise HTTPFound(h.route_path('users'))
370
371
371
372
372 class DataGridAppView(object):
373 class DataGridAppView(object):
373 """
374 """
374 Common class to have re-usable grid rendering components
375 Common class to have re-usable grid rendering components
375 """
376 """
376
377
377 def _extract_ordering(self, request, column_map=None):
378 def _extract_ordering(self, request, column_map=None):
378 column_map = column_map or {}
379 column_map = column_map or {}
379 column_index = safe_int(request.GET.get('order[0][column]'))
380 column_index = safe_int(request.GET.get('order[0][column]'))
380 order_dir = request.GET.get(
381 order_dir = request.GET.get(
381 'order[0][dir]', 'desc')
382 'order[0][dir]', 'desc')
382 order_by = request.GET.get(
383 order_by = request.GET.get(
383 'columns[%s][data][sort]' % column_index, 'name_raw')
384 'columns[%s][data][sort]' % column_index, 'name_raw')
384
385
385 # translate datatable to DB columns
386 # translate datatable to DB columns
386 order_by = column_map.get(order_by) or order_by
387 order_by = column_map.get(order_by) or order_by
387
388
388 search_q = request.GET.get('search[value]')
389 search_q = request.GET.get('search[value]')
389 return search_q, order_by, order_dir
390 return search_q, order_by, order_dir
390
391
391 def _extract_chunk(self, request):
392 def _extract_chunk(self, request):
392 start = safe_int(request.GET.get('start'), 0)
393 start = safe_int(request.GET.get('start'), 0)
393 length = safe_int(request.GET.get('length'), 25)
394 length = safe_int(request.GET.get('length'), 25)
394 draw = safe_int(request.GET.get('draw'))
395 draw = safe_int(request.GET.get('draw'))
395 return draw, start, length
396 return draw, start, length
396
397
397 def _get_order_col(self, order_by, model):
398 def _get_order_col(self, order_by, model):
398 if isinstance(order_by, basestring):
399 if isinstance(order_by, compat.string_types):
399 try:
400 try:
400 return operator.attrgetter(order_by)(model)
401 return operator.attrgetter(order_by)(model)
401 except AttributeError:
402 except AttributeError:
402 return None
403 return None
403 else:
404 else:
404 return order_by
405 return order_by
405
406
406
407
407 class BaseReferencesView(RepoAppView):
408 class BaseReferencesView(RepoAppView):
408 """
409 """
409 Base for reference view for branches, tags and bookmarks.
410 Base for reference view for branches, tags and bookmarks.
410 """
411 """
411 def load_default_context(self):
412 def load_default_context(self):
412 c = self._get_local_tmpl_context()
413 c = self._get_local_tmpl_context()
413
414
414
415
415 return c
416 return c
416
417
417 def load_refs_context(self, ref_items, partials_template):
418 def load_refs_context(self, ref_items, partials_template):
418 _render = self.request.get_partial_renderer(partials_template)
419 _render = self.request.get_partial_renderer(partials_template)
419 pre_load = ["author", "date", "message"]
420 pre_load = ["author", "date", "message"]
420
421
421 is_svn = h.is_svn(self.rhodecode_vcs_repo)
422 is_svn = h.is_svn(self.rhodecode_vcs_repo)
422 is_hg = h.is_hg(self.rhodecode_vcs_repo)
423 is_hg = h.is_hg(self.rhodecode_vcs_repo)
423
424
424 format_ref_id = get_format_ref_id(self.rhodecode_vcs_repo)
425 format_ref_id = get_format_ref_id(self.rhodecode_vcs_repo)
425
426
426 closed_refs = {}
427 closed_refs = {}
427 if is_hg:
428 if is_hg:
428 closed_refs = self.rhodecode_vcs_repo.branches_closed
429 closed_refs = self.rhodecode_vcs_repo.branches_closed
429
430
430 data = []
431 data = []
431 for ref_name, commit_id in ref_items:
432 for ref_name, commit_id in ref_items:
432 commit = self.rhodecode_vcs_repo.get_commit(
433 commit = self.rhodecode_vcs_repo.get_commit(
433 commit_id=commit_id, pre_load=pre_load)
434 commit_id=commit_id, pre_load=pre_load)
434 closed = ref_name in closed_refs
435 closed = ref_name in closed_refs
435
436
436 # TODO: johbo: Unify generation of reference links
437 # TODO: johbo: Unify generation of reference links
437 use_commit_id = '/' in ref_name or is_svn
438 use_commit_id = '/' in ref_name or is_svn
438
439
439 if use_commit_id:
440 if use_commit_id:
440 files_url = h.route_path(
441 files_url = h.route_path(
441 'repo_files',
442 'repo_files',
442 repo_name=self.db_repo_name,
443 repo_name=self.db_repo_name,
443 f_path=ref_name if is_svn else '',
444 f_path=ref_name if is_svn else '',
444 commit_id=commit_id)
445 commit_id=commit_id)
445
446
446 else:
447 else:
447 files_url = h.route_path(
448 files_url = h.route_path(
448 'repo_files',
449 'repo_files',
449 repo_name=self.db_repo_name,
450 repo_name=self.db_repo_name,
450 f_path=ref_name if is_svn else '',
451 f_path=ref_name if is_svn else '',
451 commit_id=ref_name,
452 commit_id=ref_name,
452 _query=dict(at=ref_name))
453 _query=dict(at=ref_name))
453
454
454 data.append({
455 data.append({
455 "name": _render('name', ref_name, files_url, closed),
456 "name": _render('name', ref_name, files_url, closed),
456 "name_raw": ref_name,
457 "name_raw": ref_name,
457 "date": _render('date', commit.date),
458 "date": _render('date', commit.date),
458 "date_raw": datetime_to_time(commit.date),
459 "date_raw": datetime_to_time(commit.date),
459 "author": _render('author', commit.author),
460 "author": _render('author', commit.author),
460 "commit": _render(
461 "commit": _render(
461 'commit', commit.message, commit.raw_id, commit.idx),
462 'commit', commit.message, commit.raw_id, commit.idx),
462 "commit_raw": commit.idx,
463 "commit_raw": commit.idx,
463 "compare": _render(
464 "compare": _render(
464 'compare', format_ref_id(ref_name, commit.raw_id)),
465 'compare', format_ref_id(ref_name, commit.raw_id)),
465 })
466 })
466
467
467 return data
468 return data
468
469
469
470
class RepoRoutePredicate(object):
    """
    Route predicate that resolves ``repo_name`` from the route match into a
    database repository object and stores it on the request as ``db_repo``.
    Matches by repository name first, then by numeric repository id.
    """

    def __init__(self, val, config):
        self.val = val

    def text(self):
        return 'repo_route = %s' % self.val

    phash = text

    def __call__(self, info, request):
        # vcs protocol calls bypass the repository lookup entirely
        if hasattr(request, 'vcs_call'):
            return

        repo_name = info['match']['repo_name']
        model = repo.RepoModel()

        def _maybe_redirect_to_creating(route_info, db_repo):
            # we should skip delete view so we can actually "remove" repositories
            # if they get stuck in creating state.
            if route_info['route'].name in ['edit_repo_advanced_delete']:
                return
            if db_repo.repo_state in [repo.Repository.STATE_PENDING]:
                raise HTTPFound(request.route_path(
                    'repo_creating', repo_name=db_repo.repo_name))

        found = model.get_by_repo_name(repo_name, cache=False)
        if not found:
            # fall back to a lookup by numeric repository id
            found = model.get_repo_by_id(repo_name)

        if found:
            # register this as request object we can re-use later
            request.db_repo = found
            _maybe_redirect_to_creating(info, found)
            return True

        return False
515
516
516
517
class RepoForbidArchivedRoutePredicate(object):
    """
    Route predicate that blocks views which must not run against archived
    repositories: flashes a warning and redirects to the repository summary.
    """

    def __init__(self, val, config):
        self.val = val

    def text(self):
        return 'repo_forbid_archived = %s' % self.val

    phash = text

    def __call__(self, info, request):
        _ = request.translate
        db_repo = request.db_repo

        log.debug(
            '%s checking if archived flag for repo for %s',
            self.__class__.__name__, db_repo.repo_name)

        if not db_repo.archived:
            return True

        # archived repositories are read-only: warn the user and bounce
        # to the summary page instead of executing the view
        log.warning('Current view is not supported for archived repo:%s',
                    db_repo.repo_name)
        h.flash(
            h.literal(_('Action not supported for archived repository.')),
            category='warning')
        raise HTTPFound(request.route_path(
            'repo_summary', repo_name=db_repo.repo_name))
545
546
546
547
class RepoTypeRoutePredicate(object):
    """
    Route predicate restricting a route to a set of repository types.
    Without an explicit value all known types ('hg', 'git', 'svn') are
    accepted.
    """

    def __init__(self, val, config):
        self.val = val or ['hg', 'git', 'svn']

    def text(self):
        return 'repo_accepted_type = %s' % self.val

    phash = text

    def __call__(self, info, request):
        # vcs protocol calls are not subject to the repo type check
        if hasattr(request, 'vcs_call'):
            return

        db_repo = request.db_repo

        log.debug(
            '%s checking repo type for %s in %s',
            self.__class__.__name__, db_repo.repo_type, self.val)

        accepted = db_repo.repo_type in self.val
        if not accepted:
            log.warning('Current view is not supported for repo type:%s',
                        db_repo.repo_type)
        return accepted
573
574
574
575
class RepoGroupRoutePredicate(object):
    """
    Route predicate that resolves ``repo_group_name`` from the route match
    into a database repository group, stored on the request as
    ``db_repo_group``.
    """

    def __init__(self, val, config):
        self.val = val

    def text(self):
        return 'repo_group_route = %s' % self.val

    phash = text

    def __call__(self, info, request):
        # vcs protocol calls bypass the lookup entirely
        if hasattr(request, 'vcs_call'):
            return

        group_name = info['match']['repo_group_name']
        match = repo_group.RepoGroupModel().get_by_group_name(
            group_name, cache=False)

        if not match:
            return False

        # register this as request object we can re-use later
        request.db_repo_group = match
        return True
599
600
600
601
class UserGroupRoutePredicate(object):
    """
    Route predicate that resolves ``user_group_id`` from the route match
    into a database user group, stored on the request as ``db_user_group``.
    """

    def __init__(self, val, config):
        self.val = val

    def text(self):
        return 'user_group_route = %s' % self.val

    phash = text

    def __call__(self, info, request):
        # vcs protocol calls bypass the lookup entirely
        if hasattr(request, 'vcs_call'):
            return

        group_id = info['match']['user_group_id']
        match = user_group.UserGroup().get(group_id, cache=False)

        if not match:
            return False

        # register this as request object we can re-use later
        request.db_user_group = match
        return True
625
626
626
627
class UserRoutePredicateBase(object):
    """
    Shared implementation for user route predicates: resolves ``user_id``
    from the route match into a database user, stored on the request as
    ``db_user``. Subclasses set ``supports_default`` and a ``text`` label.
    """
    # overridden by subclasses; exposed on the request as
    # ``db_user_supports_default`` when the predicate matches
    supports_default = None

    def __init__(self, val, config):
        self.val = val

    def text(self):
        raise NotImplementedError()

    def __call__(self, info, request):
        # vcs protocol calls bypass the lookup entirely
        if hasattr(request, 'vcs_call'):
            return

        db_user = user.User().get(info['match']['user_id'], cache=False)

        if not db_user:
            return False

        # register this as request object we can re-use later
        request.db_user = db_user
        request.db_user_supports_default = self.supports_default
        return True
652
653
653
654
class UserRoutePredicate(UserRoutePredicateBase):
    """
    User route predicate for regular user routes.

    ``supports_default = False`` is forwarded to the request by the base
    class; presumably this marks routes where the special default user is
    not allowed — confirm against view-layer usage.
    """
    supports_default = False

    def text(self):
        # human readable predicate label used by pyramid introspection
        return 'user_route = %s' % self.val

    phash = text
661
662
662
663
class UserRouteWithDefaultPredicate(UserRoutePredicateBase):
    """
    User route predicate variant with ``supports_default = True``,
    forwarded to the request by the base class; presumably this marks
    routes that may also operate on the special default user — confirm
    against view-layer usage.
    """
    supports_default = True

    def text(self):
        # human readable predicate label used by pyramid introspection
        return 'user_with_default_route = %s' % self.val

    phash = text
670
671
671
672
def includeme(config):
    """
    Register all custom route predicates on the pyramid configurator.
    """
    predicates = [
        ('repo_route', RepoRoutePredicate),
        ('repo_accepted_types', RepoTypeRoutePredicate),
        ('repo_forbid_when_archived', RepoForbidArchivedRoutePredicate),
        ('repo_group_route', RepoGroupRoutePredicate),
        ('user_group_route', UserGroupRoutePredicate),
        ('user_route_with_default', UserRouteWithDefaultPredicate),
        ('user_route', UserRoutePredicate),
    ]
    for name, factory in predicates:
        config.add_route_predicate(name, factory)
@@ -1,90 +1,90 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2019 RhodeCode GmbH
3 # Copyright (C) 2016-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20 import os
21 import logging
21 import logging
22 import os
23 import shlex
22 import shlex
23 from pyramid import compat
24
24
25 # Do not use `from rhodecode import events` here, it will be overridden by the
25 # Do not use `from rhodecode import events` here, it will be overridden by the
26 # events module in this package due to pythons import mechanism.
26 # events module in this package due to pythons import mechanism.
27 from rhodecode.events import RepoGroupEvent
27 from rhodecode.events import RepoGroupEvent
28 from rhodecode.subscribers import AsyncSubprocessSubscriber
28 from rhodecode.subscribers import AsyncSubprocessSubscriber
29 from rhodecode.config.middleware import (
29 from rhodecode.config.middleware import (
30 _bool_setting, _string_setting, _int_setting)
30 _bool_setting, _string_setting, _int_setting)
31
31
32 from .events import ModDavSvnConfigChange
32 from .events import ModDavSvnConfigChange
33 from .subscribers import generate_config_subscriber
33 from .subscribers import generate_config_subscriber
34 from . import config_keys
34 from . import config_keys
35
35
36
36
37 log = logging.getLogger(__name__)
37 log = logging.getLogger(__name__)
38
38
39
39
def includeme(config):
    """
    Wire up the mod dav svn integration: sanitize its settings and register
    the config-generation / reload subscribers when enabled.
    """
    settings = config.registry.settings
    _sanitize_settings_and_apply_defaults(settings)

    if settings[config_keys.generate_config]:
        # Regenerate the Apache mod dav svn configuration whenever a
        # repository group changes.
        config.add_subscriber(generate_config_subscriber, RepoGroupEvent)

        # If a reload command is configured, execute it whenever the
        # generated configuration changes.
        cmd = shlex.split(settings[config_keys.reload_command])
        if cmd:
            timeout = settings[config_keys.reload_timeout] or None
            config.add_subscriber(
                AsyncSubprocessSubscriber(cmd=cmd, timeout=timeout),
                ModDavSvnConfigChange)
57
57
58
58
def _sanitize_settings_and_apply_defaults(settings):
    """
    Set defaults, convert to python types and validate settings.
    """
    _bool_setting(settings, config_keys.generate_config, 'false')
    _bool_setting(settings, config_keys.list_parent_path, 'true')
    _int_setting(settings, config_keys.reload_timeout, 10)

    # plain string settings, all case-sensitive
    for key, default in [
            (config_keys.config_file_path, ''),
            (config_keys.location_root, '/'),
            (config_keys.reload_command, ''),
            (config_keys.template, '')]:
        _string_setting(settings, key, default, lower=False)

    # Convert negative timeout values to zero.
    if settings[config_keys.reload_timeout] < 0:
        settings[config_keys.reload_timeout] = 0

    # Append path separator to location root.
    settings[config_keys.location_root] = _append_path_sep(
        settings[config_keys.location_root])

    # Validate settings: a config file path is mandatory when the
    # configuration generation is enabled.
    if settings[config_keys.generate_config]:
        assert len(settings[config_keys.config_file_path]) > 0
82
82
83
83
def _append_path_sep(path):
    """
    Append the path separator to *path* if it is missing.

    Non-string values (e.g. ``None``) are returned unchanged.
    """
    # `basestring` does not exist on Python 3; the pyramid compat shim
    # provides the correct string types for both interpreter versions.
    if isinstance(path, compat.string_types) and not path.endswith(os.path.sep):
        path += os.path.sep
    return path
@@ -1,339 +1,340 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2012-2019 RhodeCode GmbH
3 # Copyright (C) 2012-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 RhodeCode task modules, containing all task that suppose to be run
22 RhodeCode task modules, containing all task that suppose to be run
23 by celery daemon
23 by celery daemon
24 """
24 """
25
25
26 import os
26 import os
27 import time
27 import time
28
28
29 from pyramid import compat
29 from pyramid_mailer.mailer import Mailer
30 from pyramid_mailer.mailer import Mailer
30 from pyramid_mailer.message import Message
31 from pyramid_mailer.message import Message
31
32
32 import rhodecode
33 import rhodecode
33 from rhodecode.lib import audit_logger
34 from rhodecode.lib import audit_logger
34 from rhodecode.lib.celerylib import get_logger, async_task, RequestContextTask
35 from rhodecode.lib.celerylib import get_logger, async_task, RequestContextTask
35 from rhodecode.lib.hooks_base import log_create_repository
36 from rhodecode.lib.hooks_base import log_create_repository
36 from rhodecode.lib.utils2 import safe_int, str2bool
37 from rhodecode.lib.utils2 import safe_int, str2bool
37 from rhodecode.model.db import Session, IntegrityError, Repository, User, true
38 from rhodecode.model.db import Session, IntegrityError, Repository, User, true
38
39
39
40
@async_task(ignore_result=True, base=RequestContextTask)
def send_email(recipients, subject, body='', html_body='', email_config=None):
    """
    Sends an email with defined parameters from the .ini files.

    :param recipients: list of recipients, if this is empty the defined email
        address from field 'email_to' is used instead, plus all super-admin
        emails
    :param subject: subject of the mail
    :param body: plain text body of the mail
    :param html_body: html version of body
    :param email_config: optional mapping with smtp settings, defaults to
        the global ``rhodecode.CONFIG``
    :return: ``True`` when the mail was handed to the mailer, ``False`` on
        missing configuration or send failure
    """
    log = get_logger(send_email)

    email_config = email_config or rhodecode.CONFIG

    # without a configured smtp server there is nothing we can do
    mail_server = email_config.get('smtp_server') or None
    if mail_server is None:
        log.error("SMTP server information missing. Sending email failed. "
                  "Make sure that `smtp_server` variable is configured "
                  "inside the .ini file")
        return False

    subject = "%s %s" % (email_config.get('email_prefix', ''), subject)

    if recipients:
        # a comma separated string of recipients is accepted as well
        if isinstance(recipients, compat.string_types):
            recipients = recipients.split(',')
    else:
        # if recipients are not defined we send to email_config + all admins
        admins = []
        for u in User.query().filter(User.admin == true()).all():
            if u.email:
                admins.append(u.email)
        recipients = []
        config_email = email_config.get('email_to')
        if config_email:
            recipients += [config_email]
        recipients += admins

    # translate our LEGACY config into the one that pyramid_mailer supports
    email_conf = dict(
        host=mail_server,
        port=email_config.get('smtp_port', 25),
        username=email_config.get('smtp_username'),
        password=email_config.get('smtp_password'),

        tls=str2bool(email_config.get('smtp_use_tls')),
        ssl=str2bool(email_config.get('smtp_use_ssl')),

        # SSL key file
        # keyfile='',

        # SSL certificate file
        # certfile='',

        # Location of maildir
        # queue_path='',

        default_sender=email_config.get('app_email_from', 'RhodeCode'),

        debug=str2bool(email_config.get('smtp_debug')),
        # /usr/sbin/sendmail Sendmail executable
        # sendmail_app='',

        # {sendmail_app} -t -i -f {sender} Template for sendmail execution
        # sendmail_template='',
    )

    try:
        mailer = Mailer(**email_conf)

        message = Message(subject=subject,
                          sender=email_conf['default_sender'],
                          recipients=recipients,
                          body=body, html=html_body)
        # send synchronously inside this (already async) task
        mailer.send_immediately(message)

    except Exception:
        log.exception('Mail sending failed')
        return False
    return True
121
122
122
123
@async_task(ignore_result=True, base=RequestContextTask)
def create_repo(form_data, cur_user):
    """
    Creates a new repository from validated form data: the database record,
    the filesystem repository, audit log entry and initial commit caches.

    On any failure the partially created database record and filesystem
    repository are cleaned up and the exception is re-raised.

    :param form_data: validated form data describing the repository
    :param cur_user: user (or user identifier) that becomes the repo owner
    :return: ``True`` on success
    """
    from rhodecode.model.repo import RepoModel
    from rhodecode.model.user import UserModel
    from rhodecode.model.settings import SettingsModel

    log = get_logger(create_repo)

    cur_user = UserModel()._get_user(cur_user)
    owner = cur_user

    # unpack the validated form data
    repo_name = form_data['repo_name']
    repo_name_full = form_data['repo_name_full']
    repo_type = form_data['repo_type']
    description = form_data['repo_description']
    private = form_data['repo_private']
    clone_uri = form_data.get('clone_uri')
    repo_group = safe_int(form_data['repo_group'])
    landing_rev = form_data['repo_landing_rev']
    copy_fork_permissions = form_data.get('copy_permissions')
    copy_group_permissions = form_data.get('repo_copy_permissions')
    fork_of = form_data.get('fork_parent_id')
    state = form_data.get('repo_state', Repository.STATE_PENDING)

    # repo creation defaults, private and repo_type are filled in form
    defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
    enable_statistics = form_data.get(
        'enable_statistics', defs.get('repo_enable_statistics'))
    enable_locking = form_data.get(
        'enable_locking', defs.get('repo_enable_locking'))
    enable_downloads = form_data.get(
        'enable_downloads', defs.get('repo_enable_downloads'))

    try:
        # create and commit the database record first, then materialize it
        # on the filesystem
        RepoModel()._create_repo(
            repo_name=repo_name_full,
            repo_type=repo_type,
            description=description,
            owner=owner,
            private=private,
            clone_uri=clone_uri,
            repo_group=repo_group,
            landing_rev=landing_rev,
            fork_of=fork_of,
            copy_fork_permissions=copy_fork_permissions,
            copy_group_permissions=copy_group_permissions,
            enable_statistics=enable_statistics,
            enable_locking=enable_locking,
            enable_downloads=enable_downloads,
            state=state
        )
        Session().commit()

        # now create this repo on Filesystem
        RepoModel()._create_filesystem_repo(
            repo_name=repo_name,
            repo_type=repo_type,
            repo_group=RepoModel()._get_repo_group(repo_group),
            clone_uri=clone_uri,
        )
        repo = Repository.get_by_repo_name(repo_name_full)
        log_create_repository(created_by=owner.username, **repo.get_dict())

        # update repo commit caches initially
        repo.update_commit_cache()

        # set new created state
        repo.set_state(Repository.STATE_CREATED)
        repo_id = repo.repo_id
        repo_data = repo.get_api_data()

        audit_logger.store(
            'repo.create', action_data={'data': repo_data},
            user=cur_user,
            repo=audit_logger.RepoWrap(repo_name=repo_name, repo_id=repo_id))

        Session().commit()
    except Exception as e:
        log.warning('Exception occurred when creating repository, '
                    'doing cleanup...', exc_info=True)
        if isinstance(e, IntegrityError):
            Session().rollback()

        # rollback things manually !
        repo = Repository.get_by_repo_name(repo_name_full)
        if repo:
            Repository.delete(repo.repo_id)
            Session().commit()
            RepoModel()._delete_filesystem_repo(repo)
        log.info('Cleanup of repo %s finished', repo_name_full)
        raise

    return True
216
217
217
218
@async_task(ignore_result=True, base=RequestContextTask)
def create_repo_fork(form_data, cur_user):
    """
    Creates a fork of repository using internal VCS methods.

    :param form_data: validated form data carrying the fork parameters
        (repo_name, repo_name_full, repo_type, fork_parent_id, ...)
    :param cur_user: the user (object or identifier) performing the fork
    :returns: True on success; on any failure the partially created
        repository is cleaned up and the exception re-raised
    """
    # local imports — NOTE(review): presumably to avoid import cycles when
    # the celery worker loads this tasks module; confirm before hoisting
    from rhodecode.model.repo import RepoModel
    from rhodecode.model.user import UserModel

    log = get_logger(create_repo_fork)

    cur_user = UserModel()._get_user(cur_user)
    owner = cur_user

    repo_name = form_data['repo_name']  # fork in this case
    repo_name_full = form_data['repo_name_full']
    repo_type = form_data['repo_type']
    description = form_data['description']
    private = form_data['private']
    clone_uri = form_data.get('clone_uri')
    repo_group = safe_int(form_data['repo_group'])
    landing_rev = form_data['landing_rev']
    copy_fork_permissions = form_data.get('copy_permissions')
    fork_id = safe_int(form_data.get('fork_parent_id'))

    try:
        # create the DB record first; the on-disk clone happens below
        fork_of = RepoModel()._get_repo(fork_id)
        RepoModel()._create_repo(
            repo_name=repo_name_full,
            repo_type=repo_type,
            description=description,
            owner=owner,
            private=private,
            clone_uri=clone_uri,
            repo_group=repo_group,
            landing_rev=landing_rev,
            fork_of=fork_of,
            copy_fork_permissions=copy_fork_permissions
        )

        Session().commit()

        # the fork is cloned from the parent repository's on-disk path
        base_path = Repository.base_path()
        source_repo_path = os.path.join(base_path, fork_of.repo_name)

        # now create this repo on Filesystem
        RepoModel()._create_filesystem_repo(
            repo_name=repo_name,
            repo_type=repo_type,
            repo_group=RepoModel()._get_repo_group(repo_group),
            clone_uri=source_repo_path,
        )
        repo = Repository.get_by_repo_name(repo_name_full)
        log_create_repository(created_by=owner.username, **repo.get_dict())

        # update repo commit caches initially
        # NOTE(review): empty value for 'largefiles' appears to enable the
        # extension (hgrc style) so the cache sees largefile revisions — confirm
        config = repo._config
        config.set('extensions', 'largefiles', '')
        repo.update_commit_cache(config=config)

        # set new created state
        repo.set_state(Repository.STATE_CREATED)

        repo_id = repo.repo_id
        repo_data = repo.get_api_data()
        audit_logger.store(
            'repo.fork', action_data={'data': repo_data},
            user=cur_user,
            repo=audit_logger.RepoWrap(repo_name=repo_name, repo_id=repo_id))

        Session().commit()
    except Exception as e:
        log.warning('Exception occurred when forking repository, '
                    'doing cleanup...', exc_info=True)
        # an IntegrityError leaves the session in a failed state; roll it
        # back before issuing the cleanup queries below
        if isinstance(e, IntegrityError):
            Session().rollback()

        # rollback things manually !
        repo = Repository.get_by_repo_name(repo_name_full)
        if repo:
            Repository.delete(repo.repo_id)
            Session().commit()
            RepoModel()._delete_filesystem_repo(repo)
        log.info('Cleanup of repo %s finished', repo_name_full)
        raise

    return True
304
305
305
306
@async_task(ignore_result=True)
def repo_maintenance(repoid):
    """
    Run all registered maintenance tasks for the repository identified by
    ``repoid`` (numeric id or repo name). Does nothing when the repository
    cannot be resolved.
    """
    from rhodecode.lib import repo_maintenance as repo_maintenance_lib
    logger = get_logger(repo_maintenance)

    target_repo = Repository.get_by_id_or_repo_name(repoid)
    if not target_repo:
        logger.debug('Repo `%s` not found or without a clone_url', repoid)
        return

    runner = repo_maintenance_lib.RepoMaintenance()
    planned_tasks = runner.get_tasks_for_repo(target_repo)
    logger.debug('Executing %s tasks on repo `%s`', planned_tasks, repoid)
    results = runner.execute(target_repo)
    logger.debug('Got execution results %s', results)
319
320
320
321
@async_task(ignore_result=True)
def check_for_update():
    """
    Periodic task: ask the update server for the list of available
    versions and persist the newest one via UpdateModel.

    Best-effort: any failure (unreachable server, malformed payload,
    missing keys) is deliberately swallowed so the scheduler keeps
    running undisturbed.
    """
    from rhodecode.model.update import UpdateModel
    update_url = UpdateModel().get_update_url()
    # NOTE: previously rhodecode.__version__ was read into an unused
    # local here; the current-version comparison happens elsewhere.

    try:
        data = UpdateModel().get_update_data(update_url)
        latest = data['versions'][0]  # server returns newest first
        UpdateModel().store_version(latest['version'])
    except Exception:
        # deliberate best-effort: an unavailable update server must not
        # break background processing
        pass
333
334
334
335
@async_task(ignore_result=False)
def beat_check(*args, **kwargs):
    """
    Liveness probe for celery beat: log whatever it was called with and
    return the current unix timestamp so the round-trip can be verified.
    """
    logger = get_logger(beat_check)
    logger.info('Got args: %r and kwargs %r', args, kwargs)
    return time.time()
@@ -1,775 +1,776 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2011-2019 RhodeCode GmbH
3 # Copyright (C) 2011-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import logging
21 import logging
22 import difflib
22 import difflib
23 from itertools import groupby
23 from itertools import groupby
24
24
25 from pygments import lex
25 from pygments import lex
26 from pygments.formatters.html import _get_ttype_class as pygment_token_class
26 from pygments.formatters.html import _get_ttype_class as pygment_token_class
27 from pygments.lexers.special import TextLexer, Token
27 from pygments.lexers.special import TextLexer, Token
28 from pygments.lexers import get_lexer_by_name
28 from pygments.lexers import get_lexer_by_name
29 from pyramid import compat
29
30
30 from rhodecode.lib.helpers import (
31 from rhodecode.lib.helpers import (
31 get_lexer_for_filenode, html_escape, get_custom_lexer)
32 get_lexer_for_filenode, html_escape, get_custom_lexer)
32 from rhodecode.lib.utils2 import AttributeDict, StrictAttributeDict, safe_unicode
33 from rhodecode.lib.utils2 import AttributeDict, StrictAttributeDict, safe_unicode
33 from rhodecode.lib.vcs.nodes import FileNode
34 from rhodecode.lib.vcs.nodes import FileNode
34 from rhodecode.lib.vcs.exceptions import VCSError, NodeDoesNotExistError
35 from rhodecode.lib.vcs.exceptions import VCSError, NodeDoesNotExistError
35 from rhodecode.lib.diff_match_patch import diff_match_patch
36 from rhodecode.lib.diff_match_patch import diff_match_patch
36 from rhodecode.lib.diffs import LimitedDiffContainer, DEL_FILENODE, BIN_FILENODE
37 from rhodecode.lib.diffs import LimitedDiffContainer, DEL_FILENODE, BIN_FILENODE
37
38
38
39
39 plain_text_lexer = get_lexer_by_name(
40 plain_text_lexer = get_lexer_by_name(
40 'text', stripall=False, stripnl=False, ensurenl=False)
41 'text', stripall=False, stripnl=False, ensurenl=False)
41
42
42
43
43 log = logging.getLogger(__name__)
44 log = logging.getLogger(__name__)
44
45
45
46
def filenode_as_lines_tokens(filenode, lexer=None):
    """
    Tokenize a file node into a list of lines, each line being a list of
    (token_class, token_text) tuples.

    :param filenode: file node whose ``content`` is highlighted
    :param lexer: optional pygments lexer; when not supplied one is
        derived from the filenode itself
    """
    requested_lexer = lexer
    active_lexer = lexer or get_lexer_for_filenode(filenode)
    log.debug('Generating file node pygment tokens for %s, %s, org_lexer:%s',
              active_lexer, filenode, requested_lexer)
    token_stream = tokenize_string(filenode.content, active_lexer)
    return list(split_token_stream(token_stream))
55
56
56
57
def tokenize_string(content, lexer):
    """
    Use pygments to tokenize ``content`` with ``lexer``, yielding
    (token_class, token_text) pairs while preserving every original
    newline and all whitespace.
    """
    # make sure the lexer never trims or appends whitespace/newlines
    lexer.stripall = False
    lexer.stripnl = False
    lexer.ensurenl = False

    if isinstance(lexer, TextLexer):
        # plain text: skip the lexing machinery, the content is one token
        token_stream = [(Token.Text, content)]
    else:
        token_stream = lex(content, lexer)

    for ttype, text in token_stream:
        yield pygment_token_class(ttype), text
74
75
75
76
def split_token_stream(tokens):
    """
    Split a stream of (token_class, text) tuples on newlines, yielding one
    buffer (list of tuples) per line::

        split_token_stream([(TEXT, 'some\ntext'), (TEXT, 'more\n')])
        [(TEXT, 'some'), (TEXT, 'text'),
         (TEXT, 'more'), (TEXT, 'text')]
    """
    current_line = []
    for tclass, text in tokens:
        pieces = text.split('\n')
        # every piece except the last one terminates a line
        for piece in pieces[:-1]:
            current_line.append((tclass, piece))
            yield current_line
            current_line = []
        # the trailing piece continues on the (possibly empty) next line
        current_line.append((tclass, pieces[-1]))

    if current_line:
        yield current_line
97
98
98
99
def filenode_as_annotated_lines_tokens(filenode):
    """
    Group the tokenized lines of ``filenode`` by their blame annotation.

    Yields ``(annotation, [(line_no, tokens), ...])`` pairs in file order;
    consecutive lines sharing the same annotation are grouped together and
    lines without an annotation get ``None``, e.g.::

        (annotation1, [(1, tokens), (2, tokens)]),
        (annotation2, [(3, tokens)]),
        (None,        [(4, tokens)]),
        (annotation1, [(5, tokens), (6, tokens)])
    """
    commit_cache = {}  # resolve each commit id at most once

    def _cached_commit(commit_id, commit_getter):
        if commit_id not in commit_cache:
            commit_cache[commit_id] = commit_getter()
        return commit_cache[commit_id]

    line_annotations = {}
    for line_no, commit_id, commit_getter, _line_content in filenode.annotate:
        line_annotations[line_no] = _cached_commit(commit_id, commit_getter)

    numbered_lines = enumerate(filenode_as_lines_tokens(filenode), 1)
    annotated = ((line_annotations.get(line_no), line_no, tokens)
                 for line_no, tokens in numbered_lines)

    for annotation, group in groupby(annotated, lambda rec: rec[0]):
        yield (
            annotation,
            [(line_no, tokens) for _, line_no, tokens in group]
        )
148
149
149
150
def render_tokenstream(tokenstream):
    """
    Render a token stream (2- or 3-tuples, see rollup_tokenstream) to an
    HTML string of ``<span>`` elements, wrapping op-tagged runs in their
    ins/del tags and HTML-escaping all text.
    """
    html_parts = []
    for token_class, op_runs in rollup_tokenstream(tokenstream):

        opener = (u'<span class="%s">' % token_class) if token_class else u'<span>'
        html_parts.append(opener)

        for op_tag, token_text in op_runs:
            if op_tag:
                html_parts.append(u'<%s>' % op_tag)

            # TODO: dan: investigate showing hidden characters like space/nl/tab
            html_parts.append(html_escape(token_text))

            if op_tag:
                html_parts.append(u'</%s>' % op_tag)

        html_parts.append(u'</span>')

    return ''.join(html_parts)
180
181
181
182
def rollup_tokenstream(tokenstream):
    """
    Group a token stream of ('class', 'op', 'text') — or plain
    ('class', 'text'), in which case op becomes '' — tuples into

        [('class1', [('op1', 'text'), ('op2', 'text')]),
         ('class2', [('op3', 'text')])]

    so that rendering emits the minimal set of tags, e.g.

        <span class="A"><ins>he</ins>llo</span>
    instead of
        <span class="A"><ins>he</ins></span><span class="A">llo</span>

    >>> rollup_tokenstream([('classA', '', 'h'),
                            ('classA', 'del', 'ell'),
                            ('classA', '', 'o'),
                            ('classB', '', ' '),
                            ('classA', '', 'the'),
                            ('classA', '', 're')])
    [('classA', [('', 'h'), ('del', 'ell'), ('', 'o')],
     ('classB', [('', ' ')],
     ('classA', [('', 'there')]]
    """
    # normalize 2-tuples (class, text) to 3-tuples (class, '', text)
    if tokenstream and len(tokenstream[0]) == 2:
        tokenstream = ((cls, '', txt) for cls, txt in tokenstream)

    rolled = []
    for token_class, class_run in groupby(tokenstream, lambda tok: tok[0]):
        ops = [
            (token_op, ''.join(entry[2] for entry in op_run))
            for token_op, op_run in groupby(class_run, lambda tok: tok[1])
        ]
        rolled.append((token_class, ops))
    return rolled
235
236
236
237
def tokens_diff(old_tokens, new_tokens, use_diff_match_patch=True):
    """
    Converts a list of (token_class, token_text) tuples to a list of
    (token_class, token_op, token_text) tuples where token_op is one of
    ('ins', 'del', '')

    :param old_tokens: list of (token_class, token_text) tuples of old line
    :param new_tokens: list of (token_class, token_text) tuples of new line
    :param use_diff_match_patch: boolean, will use google's diff match patch
        library which has options to 'smooth' out the character by character
        differences making nicer ins/del blocks
    :return: (old_tokens_result, new_tokens_result, similarity) where the
        result lists are the op-annotated 3-tuples and similarity is the
        difflib ratio of the two raw text strings
    """

    old_tokens_result = []
    new_tokens_result = []

    # similarity of the raw (class-stripped) text decides whether we diff
    # at all, and is also returned to the caller
    similarity = difflib.SequenceMatcher(None,
        ''.join(token_text for token_class, token_text in old_tokens),
        ''.join(token_text for token_class, token_text in new_tokens)
    ).ratio()

    if similarity < 0.6:  # return, the blocks are too different
        for token_class, token_text in old_tokens:
            old_tokens_result.append((token_class, '', token_text))
        for token_class, token_text in new_tokens:
            new_tokens_result.append((token_class, '', token_text))
        return old_tokens_result, new_tokens_result, similarity

    # first pass: diff on whole token texts (not characters) so runs of
    # equal tokens stay untouched
    token_sequence_matcher = difflib.SequenceMatcher(None,
        [x[1] for x in old_tokens],
        [x[1] for x in new_tokens])

    for tag, o1, o2, n1, n2 in token_sequence_matcher.get_opcodes():
        # check the differences by token block types first to give a more
        # nicer "block" level replacement vs character diffs

        if tag == 'equal':
            for token_class, token_text in old_tokens[o1:o2]:
                old_tokens_result.append((token_class, '', token_text))
            for token_class, token_text in new_tokens[n1:n2]:
                new_tokens_result.append((token_class, '', token_text))
        elif tag == 'delete':
            for token_class, token_text in old_tokens[o1:o2]:
                old_tokens_result.append((token_class, 'del', token_text))
        elif tag == 'insert':
            for token_class, token_text in new_tokens[n1:n2]:
                new_tokens_result.append((token_class, 'ins', token_text))
        elif tag == 'replace':
            # if same type token blocks must be replaced, do a diff on the
            # characters in the token blocks to show individual changes

            # explode the replaced ranges into one (class, char) pair per
            # character so char-level opcodes can be mapped back to classes
            old_char_tokens = []
            new_char_tokens = []
            for token_class, token_text in old_tokens[o1:o2]:
                for char in token_text:
                    old_char_tokens.append((token_class, char))

            for token_class, token_text in new_tokens[n1:n2]:
                for char in token_text:
                    new_char_tokens.append((token_class, char))

            old_string = ''.join([token_text for
                token_class, token_text in old_char_tokens])
            new_string = ''.join([token_text for
                token_class, token_text in new_char_tokens])

            char_sequence = difflib.SequenceMatcher(
                None, old_string, new_string)
            copcodes = char_sequence.get_opcodes()
            obuffer, nbuffer = [], []

            if use_diff_match_patch:
                dmp = diff_match_patch()
                dmp.Diff_EditCost = 11  # TODO: dan: extract this to a setting
                reps = dmp.diff_main(old_string, new_string)
                dmp.diff_cleanupEfficiency(reps)

                # a/b track the consumed offsets into old/new char tokens;
                # op is 0 = equal, -1 = delete, 1 = insert
                a, b = 0, 0
                for op, rep in reps:
                    l = len(rep)
                    if op == 0:
                        for i, c in enumerate(rep):
                            obuffer.append((old_char_tokens[a+i][0], '', c))
                            nbuffer.append((new_char_tokens[b+i][0], '', c))
                        a += l
                        b += l
                    elif op == -1:
                        for i, c in enumerate(rep):
                            obuffer.append((old_char_tokens[a+i][0], 'del', c))
                        a += l
                    elif op == 1:
                        for i, c in enumerate(rep):
                            nbuffer.append((new_char_tokens[b+i][0], 'ins', c))
                        b += l
            else:
                # plain difflib char opcodes; 'replace' becomes del+ins
                for ctag, co1, co2, cn1, cn2 in copcodes:
                    if ctag == 'equal':
                        for token_class, token_text in old_char_tokens[co1:co2]:
                            obuffer.append((token_class, '', token_text))
                        for token_class, token_text in new_char_tokens[cn1:cn2]:
                            nbuffer.append((token_class, '', token_text))
                    elif ctag == 'delete':
                        for token_class, token_text in old_char_tokens[co1:co2]:
                            obuffer.append((token_class, 'del', token_text))
                    elif ctag == 'insert':
                        for token_class, token_text in new_char_tokens[cn1:cn2]:
                            nbuffer.append((token_class, 'ins', token_text))
                    elif ctag == 'replace':
                        for token_class, token_text in old_char_tokens[co1:co2]:
                            obuffer.append((token_class, 'del', token_text))
                        for token_class, token_text in new_char_tokens[cn1:cn2]:
                            nbuffer.append((token_class, 'ins', token_text))

            old_tokens_result.extend(obuffer)
            new_tokens_result.extend(nbuffer)

    return old_tokens_result, new_tokens_result, similarity
354
355
355
356
def diffset_node_getter(commit):
    """
    Build a file-node lookup bound to ``commit``.

    The returned callable takes a path and returns the matching node,
    or None when the path does not exist in the commit.
    """
    def get_node(fname):
        try:
            node = commit.get_node(fname)
        except NodeDoesNotExistError:
            node = None
        return node

    return get_node
364
365
365
366
366 class DiffSet(object):
367 class DiffSet(object):
367 """
368 """
368 An object for parsing the diff result from diffs.DiffProcessor and
369 An object for parsing the diff result from diffs.DiffProcessor and
369 adding highlighting, side by side/unified renderings and line diffs
370 adding highlighting, side by side/unified renderings and line diffs
370 """
371 """
371
372
372 HL_REAL = 'REAL' # highlights using original file, slow
373 HL_REAL = 'REAL' # highlights using original file, slow
373 HL_FAST = 'FAST' # highlights using just the line, fast but not correct
374 HL_FAST = 'FAST' # highlights using just the line, fast but not correct
374 # in the case of multiline code
375 # in the case of multiline code
375 HL_NONE = 'NONE' # no highlighting, fastest
376 HL_NONE = 'NONE' # no highlighting, fastest
376
377
377 def __init__(self, highlight_mode=HL_REAL, repo_name=None,
378 def __init__(self, highlight_mode=HL_REAL, repo_name=None,
378 source_repo_name=None,
379 source_repo_name=None,
379 source_node_getter=lambda filename: None,
380 source_node_getter=lambda filename: None,
380 target_repo_name=None,
381 target_repo_name=None,
381 target_node_getter=lambda filename: None,
382 target_node_getter=lambda filename: None,
382 source_nodes=None, target_nodes=None,
383 source_nodes=None, target_nodes=None,
383 # files over this size will use fast highlighting
384 # files over this size will use fast highlighting
384 max_file_size_limit=150 * 1024,
385 max_file_size_limit=150 * 1024,
385 ):
386 ):
386
387
387 self.highlight_mode = highlight_mode
388 self.highlight_mode = highlight_mode
388 self.highlighted_filenodes = {}
389 self.highlighted_filenodes = {}
389 self.source_node_getter = source_node_getter
390 self.source_node_getter = source_node_getter
390 self.target_node_getter = target_node_getter
391 self.target_node_getter = target_node_getter
391 self.source_nodes = source_nodes or {}
392 self.source_nodes = source_nodes or {}
392 self.target_nodes = target_nodes or {}
393 self.target_nodes = target_nodes or {}
393 self.repo_name = repo_name
394 self.repo_name = repo_name
394 self.target_repo_name = target_repo_name or repo_name
395 self.target_repo_name = target_repo_name or repo_name
395 self.source_repo_name = source_repo_name or repo_name
396 self.source_repo_name = source_repo_name or repo_name
396 self.max_file_size_limit = max_file_size_limit
397 self.max_file_size_limit = max_file_size_limit
397
398
398 def render_patchset(self, patchset, source_ref=None, target_ref=None):
399 def render_patchset(self, patchset, source_ref=None, target_ref=None):
399 diffset = AttributeDict(dict(
400 diffset = AttributeDict(dict(
400 lines_added=0,
401 lines_added=0,
401 lines_deleted=0,
402 lines_deleted=0,
402 changed_files=0,
403 changed_files=0,
403 files=[],
404 files=[],
404 file_stats={},
405 file_stats={},
405 limited_diff=isinstance(patchset, LimitedDiffContainer),
406 limited_diff=isinstance(patchset, LimitedDiffContainer),
406 repo_name=self.repo_name,
407 repo_name=self.repo_name,
407 target_repo_name=self.target_repo_name,
408 target_repo_name=self.target_repo_name,
408 source_repo_name=self.source_repo_name,
409 source_repo_name=self.source_repo_name,
409 source_ref=source_ref,
410 source_ref=source_ref,
410 target_ref=target_ref,
411 target_ref=target_ref,
411 ))
412 ))
412 for patch in patchset:
413 for patch in patchset:
413 diffset.file_stats[patch['filename']] = patch['stats']
414 diffset.file_stats[patch['filename']] = patch['stats']
414 filediff = self.render_patch(patch)
415 filediff = self.render_patch(patch)
415 filediff.diffset = StrictAttributeDict(dict(
416 filediff.diffset = StrictAttributeDict(dict(
416 source_ref=diffset.source_ref,
417 source_ref=diffset.source_ref,
417 target_ref=diffset.target_ref,
418 target_ref=diffset.target_ref,
418 repo_name=diffset.repo_name,
419 repo_name=diffset.repo_name,
419 source_repo_name=diffset.source_repo_name,
420 source_repo_name=diffset.source_repo_name,
420 target_repo_name=diffset.target_repo_name,
421 target_repo_name=diffset.target_repo_name,
421 ))
422 ))
422 diffset.files.append(filediff)
423 diffset.files.append(filediff)
423 diffset.changed_files += 1
424 diffset.changed_files += 1
424 if not patch['stats']['binary']:
425 if not patch['stats']['binary']:
425 diffset.lines_added += patch['stats']['added']
426 diffset.lines_added += patch['stats']['added']
426 diffset.lines_deleted += patch['stats']['deleted']
427 diffset.lines_deleted += patch['stats']['deleted']
427
428
428 return diffset
429 return diffset
429
430
430 _lexer_cache = {}
431 _lexer_cache = {}
431
432
432 def _get_lexer_for_filename(self, filename, filenode=None):
433 def _get_lexer_for_filename(self, filename, filenode=None):
433 # cached because we might need to call it twice for source/target
434 # cached because we might need to call it twice for source/target
434 if filename not in self._lexer_cache:
435 if filename not in self._lexer_cache:
435 if filenode:
436 if filenode:
436 lexer = filenode.lexer
437 lexer = filenode.lexer
437 extension = filenode.extension
438 extension = filenode.extension
438 else:
439 else:
439 lexer = FileNode.get_lexer(filename=filename)
440 lexer = FileNode.get_lexer(filename=filename)
440 extension = filename.split('.')[-1]
441 extension = filename.split('.')[-1]
441
442
442 lexer = get_custom_lexer(extension) or lexer
443 lexer = get_custom_lexer(extension) or lexer
443 self._lexer_cache[filename] = lexer
444 self._lexer_cache[filename] = lexer
444 return self._lexer_cache[filename]
445 return self._lexer_cache[filename]
445
446
446 def render_patch(self, patch):
447 def render_patch(self, patch):
447 log.debug('rendering diff for %r', patch['filename'])
448 log.debug('rendering diff for %r', patch['filename'])
448
449
449 source_filename = patch['original_filename']
450 source_filename = patch['original_filename']
450 target_filename = patch['filename']
451 target_filename = patch['filename']
451
452
452 source_lexer = plain_text_lexer
453 source_lexer = plain_text_lexer
453 target_lexer = plain_text_lexer
454 target_lexer = plain_text_lexer
454
455
455 if not patch['stats']['binary']:
456 if not patch['stats']['binary']:
456 node_hl_mode = self.HL_NONE if patch['chunks'] == [] else None
457 node_hl_mode = self.HL_NONE if patch['chunks'] == [] else None
457 hl_mode = node_hl_mode or self.highlight_mode
458 hl_mode = node_hl_mode or self.highlight_mode
458
459
459 if hl_mode == self.HL_REAL:
460 if hl_mode == self.HL_REAL:
460 if (source_filename and patch['operation'] in ('D', 'M')
461 if (source_filename and patch['operation'] in ('D', 'M')
461 and source_filename not in self.source_nodes):
462 and source_filename not in self.source_nodes):
462 self.source_nodes[source_filename] = (
463 self.source_nodes[source_filename] = (
463 self.source_node_getter(source_filename))
464 self.source_node_getter(source_filename))
464
465
465 if (target_filename and patch['operation'] in ('A', 'M')
466 if (target_filename and patch['operation'] in ('A', 'M')
466 and target_filename not in self.target_nodes):
467 and target_filename not in self.target_nodes):
467 self.target_nodes[target_filename] = (
468 self.target_nodes[target_filename] = (
468 self.target_node_getter(target_filename))
469 self.target_node_getter(target_filename))
469
470
470 elif hl_mode == self.HL_FAST:
471 elif hl_mode == self.HL_FAST:
471 source_lexer = self._get_lexer_for_filename(source_filename)
472 source_lexer = self._get_lexer_for_filename(source_filename)
472 target_lexer = self._get_lexer_for_filename(target_filename)
473 target_lexer = self._get_lexer_for_filename(target_filename)
473
474
474 source_file = self.source_nodes.get(source_filename, source_filename)
475 source_file = self.source_nodes.get(source_filename, source_filename)
475 target_file = self.target_nodes.get(target_filename, target_filename)
476 target_file = self.target_nodes.get(target_filename, target_filename)
476 raw_id_uid = ''
477 raw_id_uid = ''
477 if self.source_nodes.get(source_filename):
478 if self.source_nodes.get(source_filename):
478 raw_id_uid = self.source_nodes[source_filename].commit.raw_id
479 raw_id_uid = self.source_nodes[source_filename].commit.raw_id
479
480
480 if not raw_id_uid and self.target_nodes.get(target_filename):
481 if not raw_id_uid and self.target_nodes.get(target_filename):
481 # in case this is a new file we only have it in target
482 # in case this is a new file we only have it in target
482 raw_id_uid = self.target_nodes[target_filename].commit.raw_id
483 raw_id_uid = self.target_nodes[target_filename].commit.raw_id
483
484
484 source_filenode, target_filenode = None, None
485 source_filenode, target_filenode = None, None
485
486
486 # TODO: dan: FileNode.lexer works on the content of the file - which
487 # TODO: dan: FileNode.lexer works on the content of the file - which
487 # can be slow - issue #4289 explains a lexer clean up - which once
488 # can be slow - issue #4289 explains a lexer clean up - which once
488 # done can allow caching a lexer for a filenode to avoid the file lookup
489 # done can allow caching a lexer for a filenode to avoid the file lookup
489 if isinstance(source_file, FileNode):
490 if isinstance(source_file, FileNode):
490 source_filenode = source_file
491 source_filenode = source_file
491 #source_lexer = source_file.lexer
492 #source_lexer = source_file.lexer
492 source_lexer = self._get_lexer_for_filename(source_filename)
493 source_lexer = self._get_lexer_for_filename(source_filename)
493 source_file.lexer = source_lexer
494 source_file.lexer = source_lexer
494
495
495 if isinstance(target_file, FileNode):
496 if isinstance(target_file, FileNode):
496 target_filenode = target_file
497 target_filenode = target_file
497 #target_lexer = target_file.lexer
498 #target_lexer = target_file.lexer
498 target_lexer = self._get_lexer_for_filename(target_filename)
499 target_lexer = self._get_lexer_for_filename(target_filename)
499 target_file.lexer = target_lexer
500 target_file.lexer = target_lexer
500
501
501 source_file_path, target_file_path = None, None
502 source_file_path, target_file_path = None, None
502
503
503 if source_filename != '/dev/null':
504 if source_filename != '/dev/null':
504 source_file_path = source_filename
505 source_file_path = source_filename
505 if target_filename != '/dev/null':
506 if target_filename != '/dev/null':
506 target_file_path = target_filename
507 target_file_path = target_filename
507
508
508 source_file_type = source_lexer.name
509 source_file_type = source_lexer.name
509 target_file_type = target_lexer.name
510 target_file_type = target_lexer.name
510
511
511 filediff = AttributeDict({
512 filediff = AttributeDict({
512 'source_file_path': source_file_path,
513 'source_file_path': source_file_path,
513 'target_file_path': target_file_path,
514 'target_file_path': target_file_path,
514 'source_filenode': source_filenode,
515 'source_filenode': source_filenode,
515 'target_filenode': target_filenode,
516 'target_filenode': target_filenode,
516 'source_file_type': target_file_type,
517 'source_file_type': target_file_type,
517 'target_file_type': source_file_type,
518 'target_file_type': source_file_type,
518 'patch': {'filename': patch['filename'], 'stats': patch['stats']},
519 'patch': {'filename': patch['filename'], 'stats': patch['stats']},
519 'operation': patch['operation'],
520 'operation': patch['operation'],
520 'source_mode': patch['stats']['old_mode'],
521 'source_mode': patch['stats']['old_mode'],
521 'target_mode': patch['stats']['new_mode'],
522 'target_mode': patch['stats']['new_mode'],
522 'limited_diff': patch['is_limited_diff'],
523 'limited_diff': patch['is_limited_diff'],
523 'hunks': [],
524 'hunks': [],
524 'hunk_ops': None,
525 'hunk_ops': None,
525 'diffset': self,
526 'diffset': self,
526 'raw_id': raw_id_uid,
527 'raw_id': raw_id_uid,
527 })
528 })
528
529
529 file_chunks = patch['chunks'][1:]
530 file_chunks = patch['chunks'][1:]
530 for hunk in file_chunks:
531 for hunk in file_chunks:
531 hunkbit = self.parse_hunk(hunk, source_file, target_file)
532 hunkbit = self.parse_hunk(hunk, source_file, target_file)
532 hunkbit.source_file_path = source_file_path
533 hunkbit.source_file_path = source_file_path
533 hunkbit.target_file_path = target_file_path
534 hunkbit.target_file_path = target_file_path
534 filediff.hunks.append(hunkbit)
535 filediff.hunks.append(hunkbit)
535
536
536 # Simulate hunk on OPS type line which doesn't really contain any diff
537 # Simulate hunk on OPS type line which doesn't really contain any diff
537 # this allows commenting on those
538 # this allows commenting on those
538 if not file_chunks:
539 if not file_chunks:
539 actions = []
540 actions = []
540 for op_id, op_text in filediff.patch['stats']['ops'].items():
541 for op_id, op_text in filediff.patch['stats']['ops'].items():
541 if op_id == DEL_FILENODE:
542 if op_id == DEL_FILENODE:
542 actions.append(u'file was removed')
543 actions.append(u'file was removed')
543 elif op_id == BIN_FILENODE:
544 elif op_id == BIN_FILENODE:
544 actions.append(u'binary diff hidden')
545 actions.append(u'binary diff hidden')
545 else:
546 else:
546 actions.append(safe_unicode(op_text))
547 actions.append(safe_unicode(op_text))
547 action_line = u'NO CONTENT: ' + \
548 action_line = u'NO CONTENT: ' + \
548 u', '.join(actions) or u'UNDEFINED_ACTION'
549 u', '.join(actions) or u'UNDEFINED_ACTION'
549
550
550 hunk_ops = {'source_length': 0, 'source_start': 0,
551 hunk_ops = {'source_length': 0, 'source_start': 0,
551 'lines': [
552 'lines': [
552 {'new_lineno': 0, 'old_lineno': 1,
553 {'new_lineno': 0, 'old_lineno': 1,
553 'action': 'unmod-no-hl', 'line': action_line}
554 'action': 'unmod-no-hl', 'line': action_line}
554 ],
555 ],
555 'section_header': u'', 'target_start': 1, 'target_length': 1}
556 'section_header': u'', 'target_start': 1, 'target_length': 1}
556
557
557 hunkbit = self.parse_hunk(hunk_ops, source_file, target_file)
558 hunkbit = self.parse_hunk(hunk_ops, source_file, target_file)
558 hunkbit.source_file_path = source_file_path
559 hunkbit.source_file_path = source_file_path
559 hunkbit.target_file_path = target_file_path
560 hunkbit.target_file_path = target_file_path
560 filediff.hunk_ops = hunkbit
561 filediff.hunk_ops = hunkbit
561 return filediff
562 return filediff
562
563
563 def parse_hunk(self, hunk, source_file, target_file):
564 def parse_hunk(self, hunk, source_file, target_file):
564 result = AttributeDict(dict(
565 result = AttributeDict(dict(
565 source_start=hunk['source_start'],
566 source_start=hunk['source_start'],
566 source_length=hunk['source_length'],
567 source_length=hunk['source_length'],
567 target_start=hunk['target_start'],
568 target_start=hunk['target_start'],
568 target_length=hunk['target_length'],
569 target_length=hunk['target_length'],
569 section_header=hunk['section_header'],
570 section_header=hunk['section_header'],
570 lines=[],
571 lines=[],
571 ))
572 ))
572 before, after = [], []
573 before, after = [], []
573
574
574 for line in hunk['lines']:
575 for line in hunk['lines']:
575 if line['action'] in ['unmod', 'unmod-no-hl']:
576 if line['action'] in ['unmod', 'unmod-no-hl']:
576 no_hl = line['action'] == 'unmod-no-hl'
577 no_hl = line['action'] == 'unmod-no-hl'
577 result.lines.extend(
578 result.lines.extend(
578 self.parse_lines(before, after, source_file, target_file, no_hl=no_hl))
579 self.parse_lines(before, after, source_file, target_file, no_hl=no_hl))
579 after.append(line)
580 after.append(line)
580 before.append(line)
581 before.append(line)
581 elif line['action'] == 'add':
582 elif line['action'] == 'add':
582 after.append(line)
583 after.append(line)
583 elif line['action'] == 'del':
584 elif line['action'] == 'del':
584 before.append(line)
585 before.append(line)
585 elif line['action'] == 'old-no-nl':
586 elif line['action'] == 'old-no-nl':
586 before.append(line)
587 before.append(line)
587 elif line['action'] == 'new-no-nl':
588 elif line['action'] == 'new-no-nl':
588 after.append(line)
589 after.append(line)
589
590
590 all_actions = [x['action'] for x in after] + [x['action'] for x in before]
591 all_actions = [x['action'] for x in after] + [x['action'] for x in before]
591 no_hl = {x for x in all_actions} == {'unmod-no-hl'}
592 no_hl = {x for x in all_actions} == {'unmod-no-hl'}
592 result.lines.extend(
593 result.lines.extend(
593 self.parse_lines(before, after, source_file, target_file, no_hl=no_hl))
594 self.parse_lines(before, after, source_file, target_file, no_hl=no_hl))
594 # NOTE(marcink): we must keep list() call here so we can cache the result...
595 # NOTE(marcink): we must keep list() call here so we can cache the result...
595 result.unified = list(self.as_unified(result.lines))
596 result.unified = list(self.as_unified(result.lines))
596 result.sideside = result.lines
597 result.sideside = result.lines
597
598
598 return result
599 return result
599
600
600 def parse_lines(self, before_lines, after_lines, source_file, target_file,
601 def parse_lines(self, before_lines, after_lines, source_file, target_file,
601 no_hl=False):
602 no_hl=False):
602 # TODO: dan: investigate doing the diff comparison and fast highlighting
603 # TODO: dan: investigate doing the diff comparison and fast highlighting
603 # on the entire before and after buffered block lines rather than by
604 # on the entire before and after buffered block lines rather than by
604 # line, this means we can get better 'fast' highlighting if the context
605 # line, this means we can get better 'fast' highlighting if the context
605 # allows it - eg.
606 # allows it - eg.
606 # line 4: """
607 # line 4: """
607 # line 5: this gets highlighted as a string
608 # line 5: this gets highlighted as a string
608 # line 6: """
609 # line 6: """
609
610
610 lines = []
611 lines = []
611
612
612 before_newline = AttributeDict()
613 before_newline = AttributeDict()
613 after_newline = AttributeDict()
614 after_newline = AttributeDict()
614 if before_lines and before_lines[-1]['action'] == 'old-no-nl':
615 if before_lines and before_lines[-1]['action'] == 'old-no-nl':
615 before_newline_line = before_lines.pop(-1)
616 before_newline_line = before_lines.pop(-1)
616 before_newline.content = '\n {}'.format(
617 before_newline.content = '\n {}'.format(
617 render_tokenstream(
618 render_tokenstream(
618 [(x[0], '', x[1])
619 [(x[0], '', x[1])
619 for x in [('nonl', before_newline_line['line'])]]))
620 for x in [('nonl', before_newline_line['line'])]]))
620
621
621 if after_lines and after_lines[-1]['action'] == 'new-no-nl':
622 if after_lines and after_lines[-1]['action'] == 'new-no-nl':
622 after_newline_line = after_lines.pop(-1)
623 after_newline_line = after_lines.pop(-1)
623 after_newline.content = '\n {}'.format(
624 after_newline.content = '\n {}'.format(
624 render_tokenstream(
625 render_tokenstream(
625 [(x[0], '', x[1])
626 [(x[0], '', x[1])
626 for x in [('nonl', after_newline_line['line'])]]))
627 for x in [('nonl', after_newline_line['line'])]]))
627
628
628 while before_lines or after_lines:
629 while before_lines or after_lines:
629 before, after = None, None
630 before, after = None, None
630 before_tokens, after_tokens = None, None
631 before_tokens, after_tokens = None, None
631
632
632 if before_lines:
633 if before_lines:
633 before = before_lines.pop(0)
634 before = before_lines.pop(0)
634 if after_lines:
635 if after_lines:
635 after = after_lines.pop(0)
636 after = after_lines.pop(0)
636
637
637 original = AttributeDict()
638 original = AttributeDict()
638 modified = AttributeDict()
639 modified = AttributeDict()
639
640
640 if before:
641 if before:
641 if before['action'] == 'old-no-nl':
642 if before['action'] == 'old-no-nl':
642 before_tokens = [('nonl', before['line'])]
643 before_tokens = [('nonl', before['line'])]
643 else:
644 else:
644 before_tokens = self.get_line_tokens(
645 before_tokens = self.get_line_tokens(
645 line_text=before['line'], line_number=before['old_lineno'],
646 line_text=before['line'], line_number=before['old_lineno'],
646 input_file=source_file, no_hl=no_hl)
647 input_file=source_file, no_hl=no_hl)
647 original.lineno = before['old_lineno']
648 original.lineno = before['old_lineno']
648 original.content = before['line']
649 original.content = before['line']
649 original.action = self.action_to_op(before['action'])
650 original.action = self.action_to_op(before['action'])
650
651
651 original.get_comment_args = (
652 original.get_comment_args = (
652 source_file, 'o', before['old_lineno'])
653 source_file, 'o', before['old_lineno'])
653
654
654 if after:
655 if after:
655 if after['action'] == 'new-no-nl':
656 if after['action'] == 'new-no-nl':
656 after_tokens = [('nonl', after['line'])]
657 after_tokens = [('nonl', after['line'])]
657 else:
658 else:
658 after_tokens = self.get_line_tokens(
659 after_tokens = self.get_line_tokens(
659 line_text=after['line'], line_number=after['new_lineno'],
660 line_text=after['line'], line_number=after['new_lineno'],
660 input_file=target_file, no_hl=no_hl)
661 input_file=target_file, no_hl=no_hl)
661 modified.lineno = after['new_lineno']
662 modified.lineno = after['new_lineno']
662 modified.content = after['line']
663 modified.content = after['line']
663 modified.action = self.action_to_op(after['action'])
664 modified.action = self.action_to_op(after['action'])
664
665
665 modified.get_comment_args = (target_file, 'n', after['new_lineno'])
666 modified.get_comment_args = (target_file, 'n', after['new_lineno'])
666
667
667 # diff the lines
668 # diff the lines
668 if before_tokens and after_tokens:
669 if before_tokens and after_tokens:
669 o_tokens, m_tokens, similarity = tokens_diff(
670 o_tokens, m_tokens, similarity = tokens_diff(
670 before_tokens, after_tokens)
671 before_tokens, after_tokens)
671 original.content = render_tokenstream(o_tokens)
672 original.content = render_tokenstream(o_tokens)
672 modified.content = render_tokenstream(m_tokens)
673 modified.content = render_tokenstream(m_tokens)
673 elif before_tokens:
674 elif before_tokens:
674 original.content = render_tokenstream(
675 original.content = render_tokenstream(
675 [(x[0], '', x[1]) for x in before_tokens])
676 [(x[0], '', x[1]) for x in before_tokens])
676 elif after_tokens:
677 elif after_tokens:
677 modified.content = render_tokenstream(
678 modified.content = render_tokenstream(
678 [(x[0], '', x[1]) for x in after_tokens])
679 [(x[0], '', x[1]) for x in after_tokens])
679
680
680 if not before_lines and before_newline:
681 if not before_lines and before_newline:
681 original.content += before_newline.content
682 original.content += before_newline.content
682 before_newline = None
683 before_newline = None
683 if not after_lines and after_newline:
684 if not after_lines and after_newline:
684 modified.content += after_newline.content
685 modified.content += after_newline.content
685 after_newline = None
686 after_newline = None
686
687
687 lines.append(AttributeDict({
688 lines.append(AttributeDict({
688 'original': original,
689 'original': original,
689 'modified': modified,
690 'modified': modified,
690 }))
691 }))
691
692
692 return lines
693 return lines
693
694
694 def get_line_tokens(self, line_text, line_number, input_file=None, no_hl=False):
695 def get_line_tokens(self, line_text, line_number, input_file=None, no_hl=False):
695 filenode = None
696 filenode = None
696 filename = None
697 filename = None
697
698
698 if isinstance(input_file, basestring):
699 if isinstance(input_file, compat.string_types):
699 filename = input_file
700 filename = input_file
700 elif isinstance(input_file, FileNode):
701 elif isinstance(input_file, FileNode):
701 filenode = input_file
702 filenode = input_file
702 filename = input_file.unicode_path
703 filename = input_file.unicode_path
703
704
704 hl_mode = self.HL_NONE if no_hl else self.highlight_mode
705 hl_mode = self.HL_NONE if no_hl else self.highlight_mode
705 if hl_mode == self.HL_REAL and filenode:
706 if hl_mode == self.HL_REAL and filenode:
706 lexer = self._get_lexer_for_filename(filename)
707 lexer = self._get_lexer_for_filename(filename)
707 file_size_allowed = input_file.size < self.max_file_size_limit
708 file_size_allowed = input_file.size < self.max_file_size_limit
708 if line_number and file_size_allowed:
709 if line_number and file_size_allowed:
709 return self.get_tokenized_filenode_line(
710 return self.get_tokenized_filenode_line(
710 input_file, line_number, lexer)
711 input_file, line_number, lexer)
711
712
712 if hl_mode in (self.HL_REAL, self.HL_FAST) and filename:
713 if hl_mode in (self.HL_REAL, self.HL_FAST) and filename:
713 lexer = self._get_lexer_for_filename(filename)
714 lexer = self._get_lexer_for_filename(filename)
714 return list(tokenize_string(line_text, lexer))
715 return list(tokenize_string(line_text, lexer))
715
716
716 return list(tokenize_string(line_text, plain_text_lexer))
717 return list(tokenize_string(line_text, plain_text_lexer))
717
718
718 def get_tokenized_filenode_line(self, filenode, line_number, lexer=None):
719 def get_tokenized_filenode_line(self, filenode, line_number, lexer=None):
719
720
720 if filenode not in self.highlighted_filenodes:
721 if filenode not in self.highlighted_filenodes:
721 tokenized_lines = filenode_as_lines_tokens(filenode, lexer)
722 tokenized_lines = filenode_as_lines_tokens(filenode, lexer)
722 self.highlighted_filenodes[filenode] = tokenized_lines
723 self.highlighted_filenodes[filenode] = tokenized_lines
723 return self.highlighted_filenodes[filenode][line_number - 1]
724 return self.highlighted_filenodes[filenode][line_number - 1]
724
725
725 def action_to_op(self, action):
726 def action_to_op(self, action):
726 return {
727 return {
727 'add': '+',
728 'add': '+',
728 'del': '-',
729 'del': '-',
729 'unmod': ' ',
730 'unmod': ' ',
730 'unmod-no-hl': ' ',
731 'unmod-no-hl': ' ',
731 'old-no-nl': ' ',
732 'old-no-nl': ' ',
732 'new-no-nl': ' ',
733 'new-no-nl': ' ',
733 }.get(action, action)
734 }.get(action, action)
734
735
735 def as_unified(self, lines):
736 def as_unified(self, lines):
736 """
737 """
737 Return a generator that yields the lines of a diff in unified order
738 Return a generator that yields the lines of a diff in unified order
738 """
739 """
739 def generator():
740 def generator():
740 buf = []
741 buf = []
741 for line in lines:
742 for line in lines:
742
743
743 if buf and not line.original or line.original.action == ' ':
744 if buf and not line.original or line.original.action == ' ':
744 for b in buf:
745 for b in buf:
745 yield b
746 yield b
746 buf = []
747 buf = []
747
748
748 if line.original:
749 if line.original:
749 if line.original.action == ' ':
750 if line.original.action == ' ':
750 yield (line.original.lineno, line.modified.lineno,
751 yield (line.original.lineno, line.modified.lineno,
751 line.original.action, line.original.content,
752 line.original.action, line.original.content,
752 line.original.get_comment_args)
753 line.original.get_comment_args)
753 continue
754 continue
754
755
755 if line.original.action == '-':
756 if line.original.action == '-':
756 yield (line.original.lineno, None,
757 yield (line.original.lineno, None,
757 line.original.action, line.original.content,
758 line.original.action, line.original.content,
758 line.original.get_comment_args)
759 line.original.get_comment_args)
759
760
760 if line.modified.action == '+':
761 if line.modified.action == '+':
761 buf.append((
762 buf.append((
762 None, line.modified.lineno,
763 None, line.modified.lineno,
763 line.modified.action, line.modified.content,
764 line.modified.action, line.modified.content,
764 line.modified.get_comment_args))
765 line.modified.get_comment_args))
765 continue
766 continue
766
767
767 if line.modified:
768 if line.modified:
768 yield (None, line.modified.lineno,
769 yield (None, line.modified.lineno,
769 line.modified.action, line.modified.content,
770 line.modified.action, line.modified.content,
770 line.modified.get_comment_args)
771 line.modified.get_comment_args)
771
772
772 for b in buf:
773 for b in buf:
773 yield b
774 yield b
774
775
775 return generator()
776 return generator()
@@ -1,30 +1,32 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2019 RhodeCode GmbH
3 # Copyright (C) 2016-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 from pyramid import compat
22
21
23
22 def strip_whitespace(value):
24 def strip_whitespace(value):
23 """
25 """
24 Removes leading/trailing whitespace, newlines, and tabs from the value.
26 Removes leading/trailing whitespace, newlines, and tabs from the value.
25 Implements the `colander.interface.Preparer` interface.
27 Implements the `colander.interface.Preparer` interface.
26 """
28 """
27 if isinstance(value, basestring):
29 if isinstance(value, compat.string_types):
28 return value.strip(' \t\n\r')
30 return value.strip(' \t\n\r')
29 else:
31 else:
30 return value
32 return value
@@ -1,665 +1,666 b''
1 """
1 """
2 Schema module providing common schema operations.
2 Schema module providing common schema operations.
3 """
3 """
4 import warnings
4 import warnings
5
5
6 from UserDict import DictMixin
6 from UserDict import DictMixin
7
7
8 import sqlalchemy
8 import sqlalchemy
9
9
10 from sqlalchemy.schema import ForeignKeyConstraint
10 from sqlalchemy.schema import ForeignKeyConstraint
11 from sqlalchemy.schema import UniqueConstraint
11 from sqlalchemy.schema import UniqueConstraint
12 from pyramid import compat
12
13
13 from rhodecode.lib.dbmigrate.migrate.exceptions import *
14 from rhodecode.lib.dbmigrate.migrate.exceptions import *
14 from rhodecode.lib.dbmigrate.migrate.changeset import SQLA_07, SQLA_08
15 from rhodecode.lib.dbmigrate.migrate.changeset import SQLA_07, SQLA_08
15 from rhodecode.lib.dbmigrate.migrate.changeset import util
16 from rhodecode.lib.dbmigrate.migrate.changeset import util
16 from rhodecode.lib.dbmigrate.migrate.changeset.databases.visitor import (
17 from rhodecode.lib.dbmigrate.migrate.changeset.databases.visitor import (
17 get_engine_visitor, run_single_visitor)
18 get_engine_visitor, run_single_visitor)
18
19
19
20
20 __all__ = [
21 __all__ = [
21 'create_column',
22 'create_column',
22 'drop_column',
23 'drop_column',
23 'alter_column',
24 'alter_column',
24 'rename_table',
25 'rename_table',
25 'rename_index',
26 'rename_index',
26 'ChangesetTable',
27 'ChangesetTable',
27 'ChangesetColumn',
28 'ChangesetColumn',
28 'ChangesetIndex',
29 'ChangesetIndex',
29 'ChangesetDefaultClause',
30 'ChangesetDefaultClause',
30 'ColumnDelta',
31 'ColumnDelta',
31 ]
32 ]
32
33
33 def create_column(column, table=None, *p, **kw):
34 def create_column(column, table=None, *p, **kw):
34 """Create a column, given the table.
35 """Create a column, given the table.
35
36
36 API to :meth:`ChangesetColumn.create`.
37 API to :meth:`ChangesetColumn.create`.
37 """
38 """
38 if table is not None:
39 if table is not None:
39 return table.create_column(column, *p, **kw)
40 return table.create_column(column, *p, **kw)
40 return column.create(*p, **kw)
41 return column.create(*p, **kw)
41
42
42
43
43 def drop_column(column, table=None, *p, **kw):
44 def drop_column(column, table=None, *p, **kw):
44 """Drop a column, given the table.
45 """Drop a column, given the table.
45
46
46 API to :meth:`ChangesetColumn.drop`.
47 API to :meth:`ChangesetColumn.drop`.
47 """
48 """
48 if table is not None:
49 if table is not None:
49 return table.drop_column(column, *p, **kw)
50 return table.drop_column(column, *p, **kw)
50 return column.drop(*p, **kw)
51 return column.drop(*p, **kw)
51
52
52
53
53 def rename_table(table, name, engine=None, **kw):
54 def rename_table(table, name, engine=None, **kw):
54 """Rename a table.
55 """Rename a table.
55
56
56 If Table instance is given, engine is not used.
57 If Table instance is given, engine is not used.
57
58
58 API to :meth:`ChangesetTable.rename`.
59 API to :meth:`ChangesetTable.rename`.
59
60
60 :param table: Table to be renamed.
61 :param table: Table to be renamed.
61 :param name: New name for Table.
62 :param name: New name for Table.
62 :param engine: Engine instance.
63 :param engine: Engine instance.
63 :type table: string or Table instance
64 :type table: string or Table instance
64 :type name: string
65 :type name: string
65 :type engine: obj
66 :type engine: obj
66 """
67 """
67 table = _to_table(table, engine)
68 table = _to_table(table, engine)
68 table.rename(name, **kw)
69 table.rename(name, **kw)
69
70
70
71
71 def rename_index(index, name, table=None, engine=None, **kw):
72 def rename_index(index, name, table=None, engine=None, **kw):
72 """Rename an index.
73 """Rename an index.
73
74
74 If Index instance is given,
75 If Index instance is given,
75 table and engine are not used.
76 table and engine are not used.
76
77
77 API to :meth:`ChangesetIndex.rename`.
78 API to :meth:`ChangesetIndex.rename`.
78
79
79 :param index: Index to be renamed.
80 :param index: Index to be renamed.
80 :param name: New name for index.
81 :param name: New name for index.
81 :param table: Table to which Index is reffered.
82 :param table: Table to which Index is reffered.
82 :param engine: Engine instance.
83 :param engine: Engine instance.
83 :type index: string or Index instance
84 :type index: string or Index instance
84 :type name: string
85 :type name: string
85 :type table: string or Table instance
86 :type table: string or Table instance
86 :type engine: obj
87 :type engine: obj
87 """
88 """
88 index = _to_index(index, table, engine)
89 index = _to_index(index, table, engine)
89 index.rename(name, **kw)
90 index.rename(name, **kw)
90
91
91
92
92 def alter_column(*p, **k):
93 def alter_column(*p, **k):
93 """Alter a column.
94 """Alter a column.
94
95
95 This is a helper function that creates a :class:`ColumnDelta` and
96 This is a helper function that creates a :class:`ColumnDelta` and
96 runs it.
97 runs it.
97
98
98 :argument column:
99 :argument column:
99 The name of the column to be altered or a
100 The name of the column to be altered or a
100 :class:`ChangesetColumn` column representing it.
101 :class:`ChangesetColumn` column representing it.
101
102
102 :param table:
103 :param table:
103 A :class:`~sqlalchemy.schema.Table` or table name to
104 A :class:`~sqlalchemy.schema.Table` or table name to
104 for the table where the column will be changed.
105 for the table where the column will be changed.
105
106
106 :param engine:
107 :param engine:
107 The :class:`~sqlalchemy.engine.base.Engine` to use for table
108 The :class:`~sqlalchemy.engine.base.Engine` to use for table
108 reflection and schema alterations.
109 reflection and schema alterations.
109
110
110 :returns: A :class:`ColumnDelta` instance representing the change.
111 :returns: A :class:`ColumnDelta` instance representing the change.
111
112
112
113
113 """
114 """
114
115
115 if 'table' not in k and isinstance(p[0], sqlalchemy.Column):
116 if 'table' not in k and isinstance(p[0], sqlalchemy.Column):
116 k['table'] = p[0].table
117 k['table'] = p[0].table
117 if 'engine' not in k:
118 if 'engine' not in k:
118 k['engine'] = k['table'].bind
119 k['engine'] = k['table'].bind
119
120
120 # deprecation
121 # deprecation
121 if len(p) >= 2 and isinstance(p[1], sqlalchemy.Column):
122 if len(p) >= 2 and isinstance(p[1], sqlalchemy.Column):
122 warnings.warn(
123 warnings.warn(
123 "Passing a Column object to alter_column is deprecated."
124 "Passing a Column object to alter_column is deprecated."
124 " Just pass in keyword parameters instead.",
125 " Just pass in keyword parameters instead.",
125 MigrateDeprecationWarning
126 MigrateDeprecationWarning
126 )
127 )
127 engine = k['engine']
128 engine = k['engine']
128
129
129 # enough tests seem to break when metadata is always altered
130 # enough tests seem to break when metadata is always altered
130 # that this crutch has to be left in until they can be sorted
131 # that this crutch has to be left in until they can be sorted
131 # out
132 # out
132 k['alter_metadata']=True
133 k['alter_metadata']=True
133
134
134 delta = ColumnDelta(*p, **k)
135 delta = ColumnDelta(*p, **k)
135
136
136 visitorcallable = get_engine_visitor(engine, 'schemachanger')
137 visitorcallable = get_engine_visitor(engine, 'schemachanger')
137 engine._run_visitor(visitorcallable, delta)
138 engine._run_visitor(visitorcallable, delta)
138
139
139 return delta
140 return delta
140
141
141
142
142 def _to_table(table, engine=None):
143 def _to_table(table, engine=None):
143 """Return if instance of Table, else construct new with metadata"""
144 """Return if instance of Table, else construct new with metadata"""
144 if isinstance(table, sqlalchemy.Table):
145 if isinstance(table, sqlalchemy.Table):
145 return table
146 return table
146
147
147 # Given: table name, maybe an engine
148 # Given: table name, maybe an engine
148 meta = sqlalchemy.MetaData()
149 meta = sqlalchemy.MetaData()
149 if engine is not None:
150 if engine is not None:
150 meta.bind = engine
151 meta.bind = engine
151 return sqlalchemy.Table(table, meta)
152 return sqlalchemy.Table(table, meta)
152
153
153
154
154 def _to_index(index, table=None, engine=None):
155 def _to_index(index, table=None, engine=None):
155 """Return if instance of Index, else construct new with metadata"""
156 """Return if instance of Index, else construct new with metadata"""
156 if isinstance(index, sqlalchemy.Index):
157 if isinstance(index, sqlalchemy.Index):
157 return index
158 return index
158
159
159 # Given: index name; table name required
160 # Given: index name; table name required
160 table = _to_table(table, engine)
161 table = _to_table(table, engine)
161 ret = sqlalchemy.Index(index)
162 ret = sqlalchemy.Index(index)
162 ret.table = table
163 ret.table = table
163 return ret
164 return ret
164
165
165
166
166 class ColumnDelta(DictMixin, sqlalchemy.schema.SchemaItem):
167 class ColumnDelta(DictMixin, sqlalchemy.schema.SchemaItem):
167 """Extracts the differences between two columns/column-parameters
168 """Extracts the differences between two columns/column-parameters
168
169
169 May receive parameters arranged in several different ways:
170 May receive parameters arranged in several different ways:
170
171
171 * **current_column, new_column, \*p, \*\*kw**
172 * **current_column, new_column, \*p, \*\*kw**
172 Additional parameters can be specified to override column
173 Additional parameters can be specified to override column
173 differences.
174 differences.
174
175
175 * **current_column, \*p, \*\*kw**
176 * **current_column, \*p, \*\*kw**
176 Additional parameters alter current_column. Table name is extracted
177 Additional parameters alter current_column. Table name is extracted
177 from current_column object.
178 from current_column object.
178 Name is changed to current_column.name from current_name,
179 Name is changed to current_column.name from current_name,
179 if current_name is specified.
180 if current_name is specified.
180
181
181 * **current_col_name, \*p, \*\*kw**
182 * **current_col_name, \*p, \*\*kw**
182 Table kw must specified.
183 Table kw must specified.
183
184
184 :param table: Table at which current Column should be bound to.\
185 :param table: Table at which current Column should be bound to.\
185 If table name is given, reflection will be used.
186 If table name is given, reflection will be used.
186 :type table: string or Table instance
187 :type table: string or Table instance
187
188
188 :param metadata: A :class:`MetaData` instance to store
189 :param metadata: A :class:`MetaData` instance to store
189 reflected table names
190 reflected table names
190
191
191 :param engine: When reflecting tables, either engine or metadata must \
192 :param engine: When reflecting tables, either engine or metadata must \
192 be specified to acquire engine object.
193 be specified to acquire engine object.
193 :type engine: :class:`Engine` instance
194 :type engine: :class:`Engine` instance
194 :returns: :class:`ColumnDelta` instance provides interface for altered attributes to \
195 :returns: :class:`ColumnDelta` instance provides interface for altered attributes to \
195 `result_column` through :func:`dict` alike object.
196 `result_column` through :func:`dict` alike object.
196
197
197 * :class:`ColumnDelta`.result_column is altered column with new attributes
198 * :class:`ColumnDelta`.result_column is altered column with new attributes
198
199
199 * :class:`ColumnDelta`.current_name is current name of column in db
200 * :class:`ColumnDelta`.current_name is current name of column in db
200
201
201
202
202 """
203 """
203
204
204 # Column attributes that can be altered
205 # Column attributes that can be altered
205 diff_keys = ('name', 'type', 'primary_key', 'nullable',
206 diff_keys = ('name', 'type', 'primary_key', 'nullable',
206 'server_onupdate', 'server_default', 'autoincrement')
207 'server_onupdate', 'server_default', 'autoincrement')
207 diffs = dict()
208 diffs = dict()
208 __visit_name__ = 'column'
209 __visit_name__ = 'column'
209
210
210 def __init__(self, *p, **kw):
211 def __init__(self, *p, **kw):
211 # 'alter_metadata' is not a public api. It exists purely
212 # 'alter_metadata' is not a public api. It exists purely
212 # as a crutch until the tests that fail when 'alter_metadata'
213 # as a crutch until the tests that fail when 'alter_metadata'
213 # behaviour always happens can be sorted out
214 # behaviour always happens can be sorted out
214 self.alter_metadata = kw.pop("alter_metadata", False)
215 self.alter_metadata = kw.pop("alter_metadata", False)
215
216
216 self.meta = kw.pop("metadata", None)
217 self.meta = kw.pop("metadata", None)
217 self.engine = kw.pop("engine", None)
218 self.engine = kw.pop("engine", None)
218
219
219 # Things are initialized differently depending on how many column
220 # Things are initialized differently depending on how many column
220 # parameters are given. Figure out how many and call the appropriate
221 # parameters are given. Figure out how many and call the appropriate
221 # method.
222 # method.
222 if len(p) >= 1 and isinstance(p[0], sqlalchemy.Column):
223 if len(p) >= 1 and isinstance(p[0], sqlalchemy.Column):
223 # At least one column specified
224 # At least one column specified
224 if len(p) >= 2 and isinstance(p[1], sqlalchemy.Column):
225 if len(p) >= 2 and isinstance(p[1], sqlalchemy.Column):
225 # Two columns specified
226 # Two columns specified
226 diffs = self.compare_2_columns(*p, **kw)
227 diffs = self.compare_2_columns(*p, **kw)
227 else:
228 else:
228 # Exactly one column specified
229 # Exactly one column specified
229 diffs = self.compare_1_column(*p, **kw)
230 diffs = self.compare_1_column(*p, **kw)
230 else:
231 else:
231 # Zero columns specified
232 # Zero columns specified
232 if not len(p) or not isinstance(p[0], basestring):
233 if not len(p) or not isinstance(p[0], compat.string_types):
233 raise ValueError("First argument must be column name")
234 raise ValueError("First argument must be column name")
234 diffs = self.compare_parameters(*p, **kw)
235 diffs = self.compare_parameters(*p, **kw)
235
236
236 self.apply_diffs(diffs)
237 self.apply_diffs(diffs)
237
238
238 def __repr__(self):
239 def __repr__(self):
239 return '<ColumnDelta altermetadata=%r, %s>' % (
240 return '<ColumnDelta altermetadata=%r, %s>' % (
240 self.alter_metadata,
241 self.alter_metadata,
241 super(ColumnDelta, self).__repr__()
242 super(ColumnDelta, self).__repr__()
242 )
243 )
243
244
244 def __getitem__(self, key):
245 def __getitem__(self, key):
245 if key not in self.keys():
246 if key not in self.keys():
246 raise KeyError("No such diff key, available: %s" % self.diffs )
247 raise KeyError("No such diff key, available: %s" % self.diffs )
247 return getattr(self.result_column, key)
248 return getattr(self.result_column, key)
248
249
249 def __setitem__(self, key, value):
250 def __setitem__(self, key, value):
250 if key not in self.keys():
251 if key not in self.keys():
251 raise KeyError("No such diff key, available: %s" % self.diffs )
252 raise KeyError("No such diff key, available: %s" % self.diffs )
252 setattr(self.result_column, key, value)
253 setattr(self.result_column, key, value)
253
254
254 def __delitem__(self, key):
255 def __delitem__(self, key):
255 raise NotImplementedError
256 raise NotImplementedError
256
257
257 def __len__(self):
258 def __len__(self):
258 raise NotImplementedError
259 raise NotImplementedError
259
260
260 def __iter__(self):
261 def __iter__(self):
261 raise NotImplementedError
262 raise NotImplementedError
262
263
263 def keys(self):
264 def keys(self):
264 return self.diffs.keys()
265 return self.diffs.keys()
265
266
266 def compare_parameters(self, current_name, *p, **k):
267 def compare_parameters(self, current_name, *p, **k):
267 """Compares Column objects with reflection"""
268 """Compares Column objects with reflection"""
268 self.table = k.pop('table')
269 self.table = k.pop('table')
269 self.result_column = self._table.c.get(current_name)
270 self.result_column = self._table.c.get(current_name)
270 if len(p):
271 if len(p):
271 k = self._extract_parameters(p, k, self.result_column)
272 k = self._extract_parameters(p, k, self.result_column)
272 return k
273 return k
273
274
274 def compare_1_column(self, col, *p, **k):
275 def compare_1_column(self, col, *p, **k):
275 """Compares one Column object"""
276 """Compares one Column object"""
276 self.table = k.pop('table', None)
277 self.table = k.pop('table', None)
277 if self.table is None:
278 if self.table is None:
278 self.table = col.table
279 self.table = col.table
279 self.result_column = col
280 self.result_column = col
280 if len(p):
281 if len(p):
281 k = self._extract_parameters(p, k, self.result_column)
282 k = self._extract_parameters(p, k, self.result_column)
282 return k
283 return k
283
284
284 def compare_2_columns(self, old_col, new_col, *p, **k):
285 def compare_2_columns(self, old_col, new_col, *p, **k):
285 """Compares two Column objects"""
286 """Compares two Column objects"""
286 self.process_column(new_col)
287 self.process_column(new_col)
287 self.table = k.pop('table', None)
288 self.table = k.pop('table', None)
288 # we cannot use bool() on table in SA06
289 # we cannot use bool() on table in SA06
289 if self.table is None:
290 if self.table is None:
290 self.table = old_col.table
291 self.table = old_col.table
291 if self.table is None:
292 if self.table is None:
292 new_col.table
293 new_col.table
293 self.result_column = old_col
294 self.result_column = old_col
294
295
295 # set differences
296 # set differences
296 # leave out some stuff for later comp
297 # leave out some stuff for later comp
297 for key in (set(self.diff_keys) - set(('type',))):
298 for key in (set(self.diff_keys) - set(('type',))):
298 val = getattr(new_col, key, None)
299 val = getattr(new_col, key, None)
299 if getattr(self.result_column, key, None) != val:
300 if getattr(self.result_column, key, None) != val:
300 k.setdefault(key, val)
301 k.setdefault(key, val)
301
302
302 # inspect types
303 # inspect types
303 if not self.are_column_types_eq(self.result_column.type, new_col.type):
304 if not self.are_column_types_eq(self.result_column.type, new_col.type):
304 k.setdefault('type', new_col.type)
305 k.setdefault('type', new_col.type)
305
306
306 if len(p):
307 if len(p):
307 k = self._extract_parameters(p, k, self.result_column)
308 k = self._extract_parameters(p, k, self.result_column)
308 return k
309 return k
309
310
310 def apply_diffs(self, diffs):
311 def apply_diffs(self, diffs):
311 """Populate dict and column object with new values"""
312 """Populate dict and column object with new values"""
312 self.diffs = diffs
313 self.diffs = diffs
313 for key in self.diff_keys:
314 for key in self.diff_keys:
314 if key in diffs:
315 if key in diffs:
315 setattr(self.result_column, key, diffs[key])
316 setattr(self.result_column, key, diffs[key])
316
317
317 self.process_column(self.result_column)
318 self.process_column(self.result_column)
318
319
319 # create an instance of class type if not yet
320 # create an instance of class type if not yet
320 if 'type' in diffs and callable(self.result_column.type):
321 if 'type' in diffs and callable(self.result_column.type):
321 self.result_column.type = self.result_column.type()
322 self.result_column.type = self.result_column.type()
322
323
323 # add column to the table
324 # add column to the table
324 if self.table is not None and self.alter_metadata:
325 if self.table is not None and self.alter_metadata:
325 self.result_column.add_to_table(self.table)
326 self.result_column.add_to_table(self.table)
326
327
327 def are_column_types_eq(self, old_type, new_type):
328 def are_column_types_eq(self, old_type, new_type):
328 """Compares two types to be equal"""
329 """Compares two types to be equal"""
329 ret = old_type.__class__ == new_type.__class__
330 ret = old_type.__class__ == new_type.__class__
330
331
331 # String length is a special case
332 # String length is a special case
332 if ret and isinstance(new_type, sqlalchemy.types.String):
333 if ret and isinstance(new_type, sqlalchemy.types.String):
333 ret = (getattr(old_type, 'length', None) == \
334 ret = (getattr(old_type, 'length', None) == \
334 getattr(new_type, 'length', None))
335 getattr(new_type, 'length', None))
335 return ret
336 return ret
336
337
337 def _extract_parameters(self, p, k, column):
338 def _extract_parameters(self, p, k, column):
338 """Extracts data from p and modifies diffs"""
339 """Extracts data from p and modifies diffs"""
339 p = list(p)
340 p = list(p)
340 while len(p):
341 while len(p):
341 if isinstance(p[0], basestring):
342 if isinstance(p[0], compat.string_types):
342 k.setdefault('name', p.pop(0))
343 k.setdefault('name', p.pop(0))
343 elif isinstance(p[0], sqlalchemy.types.TypeEngine):
344 elif isinstance(p[0], sqlalchemy.types.TypeEngine):
344 k.setdefault('type', p.pop(0))
345 k.setdefault('type', p.pop(0))
345 elif callable(p[0]):
346 elif callable(p[0]):
346 p[0] = p[0]()
347 p[0] = p[0]()
347 else:
348 else:
348 break
349 break
349
350
350 if len(p):
351 if len(p):
351 new_col = column.copy_fixed()
352 new_col = column.copy_fixed()
352 new_col._init_items(*p)
353 new_col._init_items(*p)
353 k = self.compare_2_columns(column, new_col, **k)
354 k = self.compare_2_columns(column, new_col, **k)
354 return k
355 return k
355
356
356 def process_column(self, column):
357 def process_column(self, column):
357 """Processes default values for column"""
358 """Processes default values for column"""
358 # XXX: this is a snippet from SA processing of positional parameters
359 # XXX: this is a snippet from SA processing of positional parameters
359 toinit = list()
360 toinit = list()
360
361
361 if column.server_default is not None:
362 if column.server_default is not None:
362 if isinstance(column.server_default, sqlalchemy.FetchedValue):
363 if isinstance(column.server_default, sqlalchemy.FetchedValue):
363 toinit.append(column.server_default)
364 toinit.append(column.server_default)
364 else:
365 else:
365 toinit.append(sqlalchemy.DefaultClause(column.server_default))
366 toinit.append(sqlalchemy.DefaultClause(column.server_default))
366 if column.server_onupdate is not None:
367 if column.server_onupdate is not None:
367 if isinstance(column.server_onupdate, FetchedValue):
368 if isinstance(column.server_onupdate, FetchedValue):
368 toinit.append(column.server_default)
369 toinit.append(column.server_default)
369 else:
370 else:
370 toinit.append(sqlalchemy.DefaultClause(column.server_onupdate,
371 toinit.append(sqlalchemy.DefaultClause(column.server_onupdate,
371 for_update=True))
372 for_update=True))
372 if toinit:
373 if toinit:
373 column._init_items(*toinit)
374 column._init_items(*toinit)
374
375
375 def _get_table(self):
376 def _get_table(self):
376 return getattr(self, '_table', None)
377 return getattr(self, '_table', None)
377
378
378 def _set_table(self, table):
379 def _set_table(self, table):
379 if isinstance(table, basestring):
380 if isinstance(table, compat.string_types):
380 if self.alter_metadata:
381 if self.alter_metadata:
381 if not self.meta:
382 if not self.meta:
382 raise ValueError("metadata must be specified for table"
383 raise ValueError("metadata must be specified for table"
383 " reflection when using alter_metadata")
384 " reflection when using alter_metadata")
384 meta = self.meta
385 meta = self.meta
385 if self.engine:
386 if self.engine:
386 meta.bind = self.engine
387 meta.bind = self.engine
387 else:
388 else:
388 if not self.engine and not self.meta:
389 if not self.engine and not self.meta:
389 raise ValueError("engine or metadata must be specified"
390 raise ValueError("engine or metadata must be specified"
390 " to reflect tables")
391 " to reflect tables")
391 if not self.engine:
392 if not self.engine:
392 self.engine = self.meta.bind
393 self.engine = self.meta.bind
393 meta = sqlalchemy.MetaData(bind=self.engine)
394 meta = sqlalchemy.MetaData(bind=self.engine)
394 self._table = sqlalchemy.Table(table, meta, autoload=True)
395 self._table = sqlalchemy.Table(table, meta, autoload=True)
395 elif isinstance(table, sqlalchemy.Table):
396 elif isinstance(table, sqlalchemy.Table):
396 self._table = table
397 self._table = table
397 if not self.alter_metadata:
398 if not self.alter_metadata:
398 self._table.meta = sqlalchemy.MetaData(bind=self._table.bind)
399 self._table.meta = sqlalchemy.MetaData(bind=self._table.bind)
399 def _get_result_column(self):
400 def _get_result_column(self):
400 return getattr(self, '_result_column', None)
401 return getattr(self, '_result_column', None)
401
402
402 def _set_result_column(self, column):
403 def _set_result_column(self, column):
403 """Set Column to Table based on alter_metadata evaluation."""
404 """Set Column to Table based on alter_metadata evaluation."""
404 self.process_column(column)
405 self.process_column(column)
405 if not hasattr(self, 'current_name'):
406 if not hasattr(self, 'current_name'):
406 self.current_name = column.name
407 self.current_name = column.name
407 if self.alter_metadata:
408 if self.alter_metadata:
408 self._result_column = column
409 self._result_column = column
409 else:
410 else:
410 self._result_column = column.copy_fixed()
411 self._result_column = column.copy_fixed()
411
412
412 table = property(_get_table, _set_table)
413 table = property(_get_table, _set_table)
413 result_column = property(_get_result_column, _set_result_column)
414 result_column = property(_get_result_column, _set_result_column)
414
415
415
416
416 class ChangesetTable(object):
417 class ChangesetTable(object):
417 """Changeset extensions to SQLAlchemy tables."""
418 """Changeset extensions to SQLAlchemy tables."""
418
419
419 def create_column(self, column, *p, **kw):
420 def create_column(self, column, *p, **kw):
420 """Creates a column.
421 """Creates a column.
421
422
422 The column parameter may be a column definition or the name of
423 The column parameter may be a column definition or the name of
423 a column in this table.
424 a column in this table.
424
425
425 API to :meth:`ChangesetColumn.create`
426 API to :meth:`ChangesetColumn.create`
426
427
427 :param column: Column to be created
428 :param column: Column to be created
428 :type column: Column instance or string
429 :type column: Column instance or string
429 """
430 """
430 if not isinstance(column, sqlalchemy.Column):
431 if not isinstance(column, sqlalchemy.Column):
431 # It's a column name
432 # It's a column name
432 column = getattr(self.c, str(column))
433 column = getattr(self.c, str(column))
433 column.create(table=self, *p, **kw)
434 column.create(table=self, *p, **kw)
434
435
435 def drop_column(self, column, *p, **kw):
436 def drop_column(self, column, *p, **kw):
436 """Drop a column, given its name or definition.
437 """Drop a column, given its name or definition.
437
438
438 API to :meth:`ChangesetColumn.drop`
439 API to :meth:`ChangesetColumn.drop`
439
440
440 :param column: Column to be droped
441 :param column: Column to be droped
441 :type column: Column instance or string
442 :type column: Column instance or string
442 """
443 """
443 if not isinstance(column, sqlalchemy.Column):
444 if not isinstance(column, sqlalchemy.Column):
444 # It's a column name
445 # It's a column name
445 try:
446 try:
446 column = getattr(self.c, str(column))
447 column = getattr(self.c, str(column))
447 except AttributeError:
448 except AttributeError:
448 # That column isn't part of the table. We don't need
449 # That column isn't part of the table. We don't need
449 # its entire definition to drop the column, just its
450 # its entire definition to drop the column, just its
450 # name, so create a dummy column with the same name.
451 # name, so create a dummy column with the same name.
451 column = sqlalchemy.Column(str(column), sqlalchemy.Integer())
452 column = sqlalchemy.Column(str(column), sqlalchemy.Integer())
452 column.drop(table=self, *p, **kw)
453 column.drop(table=self, *p, **kw)
453
454
454 def rename(self, name, connection=None, **kwargs):
455 def rename(self, name, connection=None, **kwargs):
455 """Rename this table.
456 """Rename this table.
456
457
457 :param name: New name of the table.
458 :param name: New name of the table.
458 :type name: string
459 :type name: string
459 :param connection: reuse connection istead of creating new one.
460 :param connection: reuse connection istead of creating new one.
460 :type connection: :class:`sqlalchemy.engine.base.Connection` instance
461 :type connection: :class:`sqlalchemy.engine.base.Connection` instance
461 """
462 """
462 engine = self.bind
463 engine = self.bind
463 self.new_name = name
464 self.new_name = name
464 visitorcallable = get_engine_visitor(engine, 'schemachanger')
465 visitorcallable = get_engine_visitor(engine, 'schemachanger')
465 run_single_visitor(engine, visitorcallable, self, connection, **kwargs)
466 run_single_visitor(engine, visitorcallable, self, connection, **kwargs)
466
467
467 # Fix metadata registration
468 # Fix metadata registration
468 self.name = name
469 self.name = name
469 self.deregister()
470 self.deregister()
470 self._set_parent(self.metadata)
471 self._set_parent(self.metadata)
471
472
472 def _meta_key(self):
473 def _meta_key(self):
473 """Get the meta key for this table."""
474 """Get the meta key for this table."""
474 return sqlalchemy.schema._get_table_key(self.name, self.schema)
475 return sqlalchemy.schema._get_table_key(self.name, self.schema)
475
476
476 def deregister(self):
477 def deregister(self):
477 """Remove this table from its metadata"""
478 """Remove this table from its metadata"""
478 if SQLA_07:
479 if SQLA_07:
479 self.metadata._remove_table(self.name, self.schema)
480 self.metadata._remove_table(self.name, self.schema)
480 else:
481 else:
481 key = self._meta_key()
482 key = self._meta_key()
482 meta = self.metadata
483 meta = self.metadata
483 if key in meta.tables:
484 if key in meta.tables:
484 del meta.tables[key]
485 del meta.tables[key]
485
486
486
487
487 class ChangesetColumn(object):
488 class ChangesetColumn(object):
488 """Changeset extensions to SQLAlchemy columns."""
489 """Changeset extensions to SQLAlchemy columns."""
489
490
490 def alter(self, *p, **k):
491 def alter(self, *p, **k):
491 """Makes a call to :func:`alter_column` for the column this
492 """Makes a call to :func:`alter_column` for the column this
492 method is called on.
493 method is called on.
493 """
494 """
494 if 'table' not in k:
495 if 'table' not in k:
495 k['table'] = self.table
496 k['table'] = self.table
496 if 'engine' not in k:
497 if 'engine' not in k:
497 k['engine'] = k['table'].bind
498 k['engine'] = k['table'].bind
498 return alter_column(self, *p, **k)
499 return alter_column(self, *p, **k)
499
500
500 def create(self, table=None, index_name=None, unique_name=None,
501 def create(self, table=None, index_name=None, unique_name=None,
501 primary_key_name=None, populate_default=True, connection=None, **kwargs):
502 primary_key_name=None, populate_default=True, connection=None, **kwargs):
502 """Create this column in the database.
503 """Create this column in the database.
503
504
504 Assumes the given table exists. ``ALTER TABLE ADD COLUMN``,
505 Assumes the given table exists. ``ALTER TABLE ADD COLUMN``,
505 for most databases.
506 for most databases.
506
507
507 :param table: Table instance to create on.
508 :param table: Table instance to create on.
508 :param index_name: Creates :class:`ChangesetIndex` on this column.
509 :param index_name: Creates :class:`ChangesetIndex` on this column.
509 :param unique_name: Creates :class:\
510 :param unique_name: Creates :class:\
510 `~migrate.changeset.constraint.UniqueConstraint` on this column.
511 `~migrate.changeset.constraint.UniqueConstraint` on this column.
511 :param primary_key_name: Creates :class:\
512 :param primary_key_name: Creates :class:\
512 `~migrate.changeset.constraint.PrimaryKeyConstraint` on this column.
513 `~migrate.changeset.constraint.PrimaryKeyConstraint` on this column.
513 :param populate_default: If True, created column will be \
514 :param populate_default: If True, created column will be \
514 populated with defaults
515 populated with defaults
515 :param connection: reuse connection istead of creating new one.
516 :param connection: reuse connection istead of creating new one.
516 :type table: Table instance
517 :type table: Table instance
517 :type index_name: string
518 :type index_name: string
518 :type unique_name: string
519 :type unique_name: string
519 :type primary_key_name: string
520 :type primary_key_name: string
520 :type populate_default: bool
521 :type populate_default: bool
521 :type connection: :class:`sqlalchemy.engine.base.Connection` instance
522 :type connection: :class:`sqlalchemy.engine.base.Connection` instance
522
523
523 :returns: self
524 :returns: self
524 """
525 """
525 self.populate_default = populate_default
526 self.populate_default = populate_default
526 self.index_name = index_name
527 self.index_name = index_name
527 self.unique_name = unique_name
528 self.unique_name = unique_name
528 self.primary_key_name = primary_key_name
529 self.primary_key_name = primary_key_name
529 for cons in ('index_name', 'unique_name', 'primary_key_name'):
530 for cons in ('index_name', 'unique_name', 'primary_key_name'):
530 self._check_sanity_constraints(cons)
531 self._check_sanity_constraints(cons)
531
532
532 self.add_to_table(table)
533 self.add_to_table(table)
533 engine = self.table.bind
534 engine = self.table.bind
534 visitorcallable = get_engine_visitor(engine, 'columngenerator')
535 visitorcallable = get_engine_visitor(engine, 'columngenerator')
535 engine._run_visitor(visitorcallable, self, connection, **kwargs)
536 engine._run_visitor(visitorcallable, self, connection, **kwargs)
536
537
537 # TODO: reuse existing connection
538 # TODO: reuse existing connection
538 if self.populate_default and self.default is not None:
539 if self.populate_default and self.default is not None:
539 stmt = table.update().values({self: engine._execute_default(self.default)})
540 stmt = table.update().values({self: engine._execute_default(self.default)})
540 engine.execute(stmt)
541 engine.execute(stmt)
541
542
542 return self
543 return self
543
544
544 def drop(self, table=None, connection=None, **kwargs):
545 def drop(self, table=None, connection=None, **kwargs):
545 """Drop this column from the database, leaving its table intact.
546 """Drop this column from the database, leaving its table intact.
546
547
547 ``ALTER TABLE DROP COLUMN``, for most databases.
548 ``ALTER TABLE DROP COLUMN``, for most databases.
548
549
549 :param connection: reuse connection istead of creating new one.
550 :param connection: reuse connection istead of creating new one.
550 :type connection: :class:`sqlalchemy.engine.base.Connection` instance
551 :type connection: :class:`sqlalchemy.engine.base.Connection` instance
551 """
552 """
552 if table is not None:
553 if table is not None:
553 self.table = table
554 self.table = table
554 engine = self.table.bind
555 engine = self.table.bind
555 visitorcallable = get_engine_visitor(engine, 'columndropper')
556 visitorcallable = get_engine_visitor(engine, 'columndropper')
556 engine._run_visitor(visitorcallable, self, connection, **kwargs)
557 engine._run_visitor(visitorcallable, self, connection, **kwargs)
557 self.remove_from_table(self.table, unset_table=False)
558 self.remove_from_table(self.table, unset_table=False)
558 self.table = None
559 self.table = None
559 return self
560 return self
560
561
561 def add_to_table(self, table):
562 def add_to_table(self, table):
562 if table is not None and self.table is None:
563 if table is not None and self.table is None:
563 if SQLA_07:
564 if SQLA_07:
564 table.append_column(self)
565 table.append_column(self)
565 else:
566 else:
566 self._set_parent(table)
567 self._set_parent(table)
567
568
568 def _col_name_in_constraint(self,cons,name):
569 def _col_name_in_constraint(self,cons,name):
569 return False
570 return False
570
571
571 def remove_from_table(self, table, unset_table=True):
572 def remove_from_table(self, table, unset_table=True):
572 # TODO: remove primary keys, constraints, etc
573 # TODO: remove primary keys, constraints, etc
573 if unset_table:
574 if unset_table:
574 self.table = None
575 self.table = None
575
576
576 to_drop = set()
577 to_drop = set()
577 for index in table.indexes:
578 for index in table.indexes:
578 columns = []
579 columns = []
579 for col in index.columns:
580 for col in index.columns:
580 if col.name!=self.name:
581 if col.name!=self.name:
581 columns.append(col)
582 columns.append(col)
582 if columns:
583 if columns:
583 index.columns = columns
584 index.columns = columns
584 if SQLA_08:
585 if SQLA_08:
585 index.expressions = columns
586 index.expressions = columns
586 else:
587 else:
587 to_drop.add(index)
588 to_drop.add(index)
588 table.indexes = table.indexes - to_drop
589 table.indexes = table.indexes - to_drop
589
590
590 to_drop = set()
591 to_drop = set()
591 for cons in table.constraints:
592 for cons in table.constraints:
592 # TODO: deal with other types of constraint
593 # TODO: deal with other types of constraint
593 if isinstance(cons,(ForeignKeyConstraint,
594 if isinstance(cons,(ForeignKeyConstraint,
594 UniqueConstraint)):
595 UniqueConstraint)):
595 for col_name in cons.columns:
596 for col_name in cons.columns:
596 if not isinstance(col_name,basestring):
597 if not isinstance(col_name, compat.string_types):
597 col_name = col_name.name
598 col_name = col_name.name
598 if self.name==col_name:
599 if self.name==col_name:
599 to_drop.add(cons)
600 to_drop.add(cons)
600 table.constraints = table.constraints - to_drop
601 table.constraints = table.constraints - to_drop
601
602
602 if table.c.contains_column(self):
603 if table.c.contains_column(self):
603 if SQLA_07:
604 if SQLA_07:
604 table._columns.remove(self)
605 table._columns.remove(self)
605 else:
606 else:
606 table.c.remove(self)
607 table.c.remove(self)
607
608
608 # TODO: this is fixed in 0.6
609 # TODO: this is fixed in 0.6
609 def copy_fixed(self, **kw):
610 def copy_fixed(self, **kw):
610 """Create a copy of this ``Column``, with all attributes."""
611 """Create a copy of this ``Column``, with all attributes."""
611 q = util.safe_quote(self)
612 q = util.safe_quote(self)
612 return sqlalchemy.Column(self.name, self.type, self.default,
613 return sqlalchemy.Column(self.name, self.type, self.default,
613 key=self.key,
614 key=self.key,
614 primary_key=self.primary_key,
615 primary_key=self.primary_key,
615 nullable=self.nullable,
616 nullable=self.nullable,
616 quote=q,
617 quote=q,
617 index=self.index,
618 index=self.index,
618 unique=self.unique,
619 unique=self.unique,
619 onupdate=self.onupdate,
620 onupdate=self.onupdate,
620 autoincrement=self.autoincrement,
621 autoincrement=self.autoincrement,
621 server_default=self.server_default,
622 server_default=self.server_default,
622 server_onupdate=self.server_onupdate,
623 server_onupdate=self.server_onupdate,
623 *[c.copy(**kw) for c in self.constraints])
624 *[c.copy(**kw) for c in self.constraints])
624
625
625 def _check_sanity_constraints(self, name):
626 def _check_sanity_constraints(self, name):
626 """Check if constraints names are correct"""
627 """Check if constraints names are correct"""
627 obj = getattr(self, name)
628 obj = getattr(self, name)
628 if (getattr(self, name[:-5]) and not obj):
629 if (getattr(self, name[:-5]) and not obj):
629 raise InvalidConstraintError("Column.create() accepts index_name,"
630 raise InvalidConstraintError("Column.create() accepts index_name,"
630 " primary_key_name and unique_name to generate constraints")
631 " primary_key_name and unique_name to generate constraints")
631 if not isinstance(obj, basestring) and obj is not None:
632 if not isinstance(obj, compat.string_types) and obj is not None:
632 raise InvalidConstraintError(
633 raise InvalidConstraintError(
633 "%s argument for column must be constraint name" % name)
634 "%s argument for column must be constraint name" % name)
634
635
635
636
636 class ChangesetIndex(object):
637 class ChangesetIndex(object):
637 """Changeset extensions to SQLAlchemy Indexes."""
638 """Changeset extensions to SQLAlchemy Indexes."""
638
639
639 __visit_name__ = 'index'
640 __visit_name__ = 'index'
640
641
641 def rename(self, name, connection=None, **kwargs):
642 def rename(self, name, connection=None, **kwargs):
642 """Change the name of an index.
643 """Change the name of an index.
643
644
644 :param name: New name of the Index.
645 :param name: New name of the Index.
645 :type name: string
646 :type name: string
646 :param connection: reuse connection istead of creating new one.
647 :param connection: reuse connection istead of creating new one.
647 :type connection: :class:`sqlalchemy.engine.base.Connection` instance
648 :type connection: :class:`sqlalchemy.engine.base.Connection` instance
648 """
649 """
649 engine = self.table.bind
650 engine = self.table.bind
650 self.new_name = name
651 self.new_name = name
651 visitorcallable = get_engine_visitor(engine, 'schemachanger')
652 visitorcallable = get_engine_visitor(engine, 'schemachanger')
652 engine._run_visitor(visitorcallable, self, connection, **kwargs)
653 engine._run_visitor(visitorcallable, self, connection, **kwargs)
653 self.name = name
654 self.name = name
654
655
655
656
656 class ChangesetDefaultClause(object):
657 class ChangesetDefaultClause(object):
657 """Implements comparison between :class:`DefaultClause` instances"""
658 """Implements comparison between :class:`DefaultClause` instances"""
658
659
659 def __eq__(self, other):
660 def __eq__(self, other):
660 if isinstance(other, self.__class__):
661 if isinstance(other, self.__class__):
661 if self.arg == other.arg:
662 if self.arg == other.arg:
662 return True
663 return True
663
664
664 def __ne__(self, other):
665 def __ne__(self, other):
665 return not self.__eq__(other)
666 return not self.__eq__(other)
@@ -1,221 +1,222 b''
1 """
1 """
2 Database schema version management.
2 Database schema version management.
3 """
3 """
4 import sys
4 import sys
5 import logging
5 import logging
6
6
7 from sqlalchemy import (Table, Column, MetaData, String, Text, Integer,
7 from sqlalchemy import (Table, Column, MetaData, String, Text, Integer,
8 create_engine)
8 create_engine)
9 from sqlalchemy.sql import and_
9 from sqlalchemy.sql import and_
10 from sqlalchemy import exc as sa_exceptions
10 from sqlalchemy import exc as sa_exceptions
11 from sqlalchemy.sql import bindparam
11 from sqlalchemy.sql import bindparam
12 from pyramid import compat
12
13
13 from rhodecode.lib.dbmigrate.migrate import exceptions
14 from rhodecode.lib.dbmigrate.migrate import exceptions
14 from rhodecode.lib.dbmigrate.migrate.changeset import SQLA_07
15 from rhodecode.lib.dbmigrate.migrate.changeset import SQLA_07
15 from rhodecode.lib.dbmigrate.migrate.versioning import genmodel, schemadiff
16 from rhodecode.lib.dbmigrate.migrate.versioning import genmodel, schemadiff
16 from rhodecode.lib.dbmigrate.migrate.versioning.repository import Repository
17 from rhodecode.lib.dbmigrate.migrate.versioning.repository import Repository
17 from rhodecode.lib.dbmigrate.migrate.versioning.util import load_model
18 from rhodecode.lib.dbmigrate.migrate.versioning.util import load_model
18 from rhodecode.lib.dbmigrate.migrate.versioning.version import VerNum
19 from rhodecode.lib.dbmigrate.migrate.versioning.version import VerNum
19
20
20
21
21 log = logging.getLogger(__name__)
22 log = logging.getLogger(__name__)
22
23
23
24
24 class ControlledSchema(object):
25 class ControlledSchema(object):
25 """A database under version control"""
26 """A database under version control"""
26
27
27 def __init__(self, engine, repository):
28 def __init__(self, engine, repository):
28 if isinstance(repository, basestring):
29 if isinstance(repository, compat.string_types):
29 repository = Repository(repository)
30 repository = Repository(repository)
30 self.engine = engine
31 self.engine = engine
31 self.repository = repository
32 self.repository = repository
32 self.meta = MetaData(engine)
33 self.meta = MetaData(engine)
33 self.load()
34 self.load()
34
35
35 def __eq__(self, other):
36 def __eq__(self, other):
36 """Compare two schemas by repositories and versions"""
37 """Compare two schemas by repositories and versions"""
37 return (self.repository is other.repository \
38 return (self.repository is other.repository \
38 and self.version == other.version)
39 and self.version == other.version)
39
40
40 def load(self):
41 def load(self):
41 """Load controlled schema version info from DB"""
42 """Load controlled schema version info from DB"""
42 tname = self.repository.version_table
43 tname = self.repository.version_table
43 try:
44 try:
44 if not hasattr(self, 'table') or self.table is None:
45 if not hasattr(self, 'table') or self.table is None:
45 self.table = Table(tname, self.meta, autoload=True)
46 self.table = Table(tname, self.meta, autoload=True)
46
47
47 result = self.engine.execute(self.table.select(
48 result = self.engine.execute(self.table.select(
48 self.table.c.repository_id == str(self.repository.id)))
49 self.table.c.repository_id == str(self.repository.id)))
49
50
50 data = list(result)[0]
51 data = list(result)[0]
51 except:
52 except:
52 cls, exc, tb = sys.exc_info()
53 cls, exc, tb = sys.exc_info()
53 raise exceptions.DatabaseNotControlledError, exc.__str__(), tb
54 raise exceptions.DatabaseNotControlledError, exc.__str__(), tb
54
55
55 self.version = data['version']
56 self.version = data['version']
56 return data
57 return data
57
58
58 def drop(self):
59 def drop(self):
59 """
60 """
60 Remove version control from a database.
61 Remove version control from a database.
61 """
62 """
62 if SQLA_07:
63 if SQLA_07:
63 try:
64 try:
64 self.table.drop()
65 self.table.drop()
65 except sa_exceptions.DatabaseError:
66 except sa_exceptions.DatabaseError:
66 raise exceptions.DatabaseNotControlledError(str(self.table))
67 raise exceptions.DatabaseNotControlledError(str(self.table))
67 else:
68 else:
68 try:
69 try:
69 self.table.drop()
70 self.table.drop()
70 except (sa_exceptions.SQLError):
71 except (sa_exceptions.SQLError):
71 raise exceptions.DatabaseNotControlledError(str(self.table))
72 raise exceptions.DatabaseNotControlledError(str(self.table))
72
73
73 def changeset(self, version=None):
74 def changeset(self, version=None):
74 """API to Changeset creation.
75 """API to Changeset creation.
75
76
76 Uses self.version for start version and engine.name
77 Uses self.version for start version and engine.name
77 to get database name.
78 to get database name.
78 """
79 """
79 database = self.engine.name
80 database = self.engine.name
80 start_ver = self.version
81 start_ver = self.version
81 changeset = self.repository.changeset(database, start_ver, version)
82 changeset = self.repository.changeset(database, start_ver, version)
82 return changeset
83 return changeset
83
84
84 def runchange(self, ver, change, step):
85 def runchange(self, ver, change, step):
85 startver = ver
86 startver = ver
86 endver = ver + step
87 endver = ver + step
87 # Current database version must be correct! Don't run if corrupt!
88 # Current database version must be correct! Don't run if corrupt!
88 if self.version != startver:
89 if self.version != startver:
89 raise exceptions.InvalidVersionError("%s is not %s" % \
90 raise exceptions.InvalidVersionError("%s is not %s" % \
90 (self.version, startver))
91 (self.version, startver))
91 # Run the change
92 # Run the change
92 change.run(self.engine, step)
93 change.run(self.engine, step)
93
94
94 # Update/refresh database version
95 # Update/refresh database version
95 self.update_repository_table(startver, endver)
96 self.update_repository_table(startver, endver)
96 self.load()
97 self.load()
97
98
98 def update_repository_table(self, startver, endver):
99 def update_repository_table(self, startver, endver):
99 """Update version_table with new information"""
100 """Update version_table with new information"""
100 update = self.table.update(and_(self.table.c.version == int(startver),
101 update = self.table.update(and_(self.table.c.version == int(startver),
101 self.table.c.repository_id == str(self.repository.id)))
102 self.table.c.repository_id == str(self.repository.id)))
102 self.engine.execute(update, version=int(endver))
103 self.engine.execute(update, version=int(endver))
103
104
104 def upgrade(self, version=None):
105 def upgrade(self, version=None):
105 """
106 """
106 Upgrade (or downgrade) to a specified version, or latest version.
107 Upgrade (or downgrade) to a specified version, or latest version.
107 """
108 """
108 changeset = self.changeset(version)
109 changeset = self.changeset(version)
109 for ver, change in changeset:
110 for ver, change in changeset:
110 self.runchange(ver, change, changeset.step)
111 self.runchange(ver, change, changeset.step)
111
112
112 def update_db_from_model(self, model):
113 def update_db_from_model(self, model):
113 """
114 """
114 Modify the database to match the structure of the current Python model.
115 Modify the database to match the structure of the current Python model.
115 """
116 """
116 model = load_model(model)
117 model = load_model(model)
117
118
118 diff = schemadiff.getDiffOfModelAgainstDatabase(
119 diff = schemadiff.getDiffOfModelAgainstDatabase(
119 model, self.engine, excludeTables=[self.repository.version_table]
120 model, self.engine, excludeTables=[self.repository.version_table]
120 )
121 )
121 genmodel.ModelGenerator(diff,self.engine).runB2A()
122 genmodel.ModelGenerator(diff,self.engine).runB2A()
122
123
123 self.update_repository_table(self.version, int(self.repository.latest))
124 self.update_repository_table(self.version, int(self.repository.latest))
124
125
125 self.load()
126 self.load()
126
127
127 @classmethod
128 @classmethod
128 def create(cls, engine, repository, version=None):
129 def create(cls, engine, repository, version=None):
129 """
130 """
130 Declare a database to be under a repository's version control.
131 Declare a database to be under a repository's version control.
131
132
132 :raises: :exc:`DatabaseAlreadyControlledError`
133 :raises: :exc:`DatabaseAlreadyControlledError`
133 :returns: :class:`ControlledSchema`
134 :returns: :class:`ControlledSchema`
134 """
135 """
135 # Confirm that the version # is valid: positive, integer,
136 # Confirm that the version # is valid: positive, integer,
136 # exists in repos
137 # exists in repos
137 if isinstance(repository, basestring):
138 if isinstance(repository, compat.string_types):
138 repository = Repository(repository)
139 repository = Repository(repository)
139 version = cls._validate_version(repository, version)
140 version = cls._validate_version(repository, version)
140 table = cls._create_table_version(engine, repository, version)
141 table = cls._create_table_version(engine, repository, version)
141 # TODO: history table
142 # TODO: history table
142 # Load repository information and return
143 # Load repository information and return
143 return cls(engine, repository)
144 return cls(engine, repository)
144
145
145 @classmethod
146 @classmethod
146 def _validate_version(cls, repository, version):
147 def _validate_version(cls, repository, version):
147 """
148 """
148 Ensures this is a valid version number for this repository.
149 Ensures this is a valid version number for this repository.
149
150
150 :raises: :exc:`InvalidVersionError` if invalid
151 :raises: :exc:`InvalidVersionError` if invalid
151 :return: valid version number
152 :return: valid version number
152 """
153 """
153 if version is None:
154 if version is None:
154 version = 0
155 version = 0
155 try:
156 try:
156 version = VerNum(version) # raises valueerror
157 version = VerNum(version) # raises valueerror
157 if version < 0 or version > repository.latest:
158 if version < 0 or version > repository.latest:
158 raise ValueError()
159 raise ValueError()
159 except ValueError:
160 except ValueError:
160 raise exceptions.InvalidVersionError(version)
161 raise exceptions.InvalidVersionError(version)
161 return version
162 return version
162
163
163 @classmethod
164 @classmethod
164 def _create_table_version(cls, engine, repository, version):
165 def _create_table_version(cls, engine, repository, version):
165 """
166 """
166 Creates the versioning table in a database.
167 Creates the versioning table in a database.
167
168
168 :raises: :exc:`DatabaseAlreadyControlledError`
169 :raises: :exc:`DatabaseAlreadyControlledError`
169 """
170 """
170 # Create tables
171 # Create tables
171 tname = repository.version_table
172 tname = repository.version_table
172 meta = MetaData(engine)
173 meta = MetaData(engine)
173
174
174 table = Table(
175 table = Table(
175 tname, meta,
176 tname, meta,
176 Column('repository_id', String(250), primary_key=True),
177 Column('repository_id', String(250), primary_key=True),
177 Column('repository_path', Text),
178 Column('repository_path', Text),
178 Column('version', Integer), )
179 Column('version', Integer), )
179
180
180 # there can be multiple repositories/schemas in the same db
181 # there can be multiple repositories/schemas in the same db
181 if not table.exists():
182 if not table.exists():
182 table.create()
183 table.create()
183
184
184 # test for existing repository_id
185 # test for existing repository_id
185 s = table.select(table.c.repository_id == bindparam("repository_id"))
186 s = table.select(table.c.repository_id == bindparam("repository_id"))
186 result = engine.execute(s, repository_id=repository.id)
187 result = engine.execute(s, repository_id=repository.id)
187 if result.fetchone():
188 if result.fetchone():
188 raise exceptions.DatabaseAlreadyControlledError
189 raise exceptions.DatabaseAlreadyControlledError
189
190
190 # Insert data
191 # Insert data
191 engine.execute(table.insert().values(
192 engine.execute(table.insert().values(
192 repository_id=repository.id,
193 repository_id=repository.id,
193 repository_path=repository.path,
194 repository_path=repository.path,
194 version=int(version)))
195 version=int(version)))
195 return table
196 return table
196
197
197 @classmethod
198 @classmethod
198 def compare_model_to_db(cls, engine, model, repository):
199 def compare_model_to_db(cls, engine, model, repository):
199 """
200 """
200 Compare the current model against the current database.
201 Compare the current model against the current database.
201 """
202 """
202 if isinstance(repository, basestring):
203 if isinstance(repository, compat.string_types):
203 repository = Repository(repository)
204 repository = Repository(repository)
204 model = load_model(model)
205 model = load_model(model)
205
206
206 diff = schemadiff.getDiffOfModelAgainstDatabase(
207 diff = schemadiff.getDiffOfModelAgainstDatabase(
207 model, engine, excludeTables=[repository.version_table])
208 model, engine, excludeTables=[repository.version_table])
208 return diff
209 return diff
209
210
210 @classmethod
211 @classmethod
211 def create_model(cls, engine, repository, declarative=False):
212 def create_model(cls, engine, repository, declarative=False):
212 """
213 """
213 Dump the current database as a Python model.
214 Dump the current database as a Python model.
214 """
215 """
215 if isinstance(repository, basestring):
216 if isinstance(repository, compat.string_types):
216 repository = Repository(repository)
217 repository = Repository(repository)
217
218
218 diff = schemadiff.getDiffOfModelAgainstDatabase(
219 diff = schemadiff.getDiffOfModelAgainstDatabase(
219 MetaData(), engine, excludeTables=[repository.version_table]
220 MetaData(), engine, excludeTables=[repository.version_table]
220 )
221 )
221 return genmodel.ModelGenerator(diff, engine, declarative).genBDefinition()
222 return genmodel.ModelGenerator(diff, engine, declarative).genBDefinition()
@@ -1,159 +1,160 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2 # -*- coding: utf-8 -*-
2 # -*- coding: utf-8 -*-
3
3
4 import shutil
4 import shutil
5 import warnings
5 import warnings
6 import logging
6 import logging
7 import inspect
7 import inspect
8 from StringIO import StringIO
8 from StringIO import StringIO
9
9
10 from pyramid import compat
10 from rhodecode.lib.dbmigrate import migrate
11 from rhodecode.lib.dbmigrate import migrate
11 from rhodecode.lib.dbmigrate.migrate.versioning import genmodel, schemadiff
12 from rhodecode.lib.dbmigrate.migrate.versioning import genmodel, schemadiff
12 from rhodecode.lib.dbmigrate.migrate.versioning.config import operations
13 from rhodecode.lib.dbmigrate.migrate.versioning.config import operations
13 from rhodecode.lib.dbmigrate.migrate.versioning.template import Template
14 from rhodecode.lib.dbmigrate.migrate.versioning.template import Template
14 from rhodecode.lib.dbmigrate.migrate.versioning.script import base
15 from rhodecode.lib.dbmigrate.migrate.versioning.script import base
15 from rhodecode.lib.dbmigrate.migrate.versioning.util import import_path, load_model, with_engine
16 from rhodecode.lib.dbmigrate.migrate.versioning.util import import_path, load_model, with_engine
16 from rhodecode.lib.dbmigrate.migrate.exceptions import MigrateDeprecationWarning, InvalidScriptError, ScriptError
17 from rhodecode.lib.dbmigrate.migrate.exceptions import MigrateDeprecationWarning, InvalidScriptError, ScriptError
17
18
18 log = logging.getLogger(__name__)
19 log = logging.getLogger(__name__)
19 __all__ = ['PythonScript']
20 __all__ = ['PythonScript']
20
21
21
22
22 class PythonScript(base.BaseScript):
23 class PythonScript(base.BaseScript):
23 """Base for Python scripts"""
24 """Base for Python scripts"""
24
25
25 @classmethod
26 @classmethod
26 def create(cls, path, **opts):
27 def create(cls, path, **opts):
27 """Create an empty migration script at specified path
28 """Create an empty migration script at specified path
28
29
29 :returns: :class:`PythonScript instance <migrate.versioning.script.py.PythonScript>`"""
30 :returns: :class:`PythonScript instance <migrate.versioning.script.py.PythonScript>`"""
30 cls.require_notfound(path)
31 cls.require_notfound(path)
31
32
32 src = Template(opts.pop('templates_path', None)).get_script(theme=opts.pop('templates_theme', None))
33 src = Template(opts.pop('templates_path', None)).get_script(theme=opts.pop('templates_theme', None))
33 shutil.copy(src, path)
34 shutil.copy(src, path)
34
35
35 return cls(path)
36 return cls(path)
36
37
37 @classmethod
38 @classmethod
38 def make_update_script_for_model(cls, engine, oldmodel,
39 def make_update_script_for_model(cls, engine, oldmodel,
39 model, repository, **opts):
40 model, repository, **opts):
40 """Create a migration script based on difference between two SA models.
41 """Create a migration script based on difference between two SA models.
41
42
42 :param repository: path to migrate repository
43 :param repository: path to migrate repository
43 :param oldmodel: dotted.module.name:SAClass or SAClass object
44 :param oldmodel: dotted.module.name:SAClass or SAClass object
44 :param model: dotted.module.name:SAClass or SAClass object
45 :param model: dotted.module.name:SAClass or SAClass object
45 :param engine: SQLAlchemy engine
46 :param engine: SQLAlchemy engine
46 :type repository: string or :class:`Repository instance <migrate.versioning.repository.Repository>`
47 :type repository: string or :class:`Repository instance <migrate.versioning.repository.Repository>`
47 :type oldmodel: string or Class
48 :type oldmodel: string or Class
48 :type model: string or Class
49 :type model: string or Class
49 :type engine: Engine instance
50 :type engine: Engine instance
50 :returns: Upgrade / Downgrade script
51 :returns: Upgrade / Downgrade script
51 :rtype: string
52 :rtype: string
52 """
53 """
53
54
54 if isinstance(repository, basestring):
55 if isinstance(repository, compat.string_types):
55 # oh dear, an import cycle!
56 # oh dear, an import cycle!
56 from rhodecode.lib.dbmigrate.migrate.versioning.repository import Repository
57 from rhodecode.lib.dbmigrate.migrate.versioning.repository import Repository
57 repository = Repository(repository)
58 repository = Repository(repository)
58
59
59 oldmodel = load_model(oldmodel)
60 oldmodel = load_model(oldmodel)
60 model = load_model(model)
61 model = load_model(model)
61
62
62 # Compute differences.
63 # Compute differences.
63 diff = schemadiff.getDiffOfModelAgainstModel(
64 diff = schemadiff.getDiffOfModelAgainstModel(
64 model,
65 model,
65 oldmodel,
66 oldmodel,
66 excludeTables=[repository.version_table])
67 excludeTables=[repository.version_table])
67 # TODO: diff can be False (there is no difference?)
68 # TODO: diff can be False (there is no difference?)
68 decls, upgradeCommands, downgradeCommands = \
69 decls, upgradeCommands, downgradeCommands = \
69 genmodel.ModelGenerator(diff,engine).genB2AMigration()
70 genmodel.ModelGenerator(diff,engine).genB2AMigration()
70
71
71 # Store differences into file.
72 # Store differences into file.
72 src = Template(opts.pop('templates_path', None)).get_script(opts.pop('templates_theme', None))
73 src = Template(opts.pop('templates_path', None)).get_script(opts.pop('templates_theme', None))
73 with open(src) as f:
74 with open(src) as f:
74 contents = f.read()
75 contents = f.read()
75
76
76 # generate source
77 # generate source
77 search = 'def upgrade(migrate_engine):'
78 search = 'def upgrade(migrate_engine):'
78 contents = contents.replace(search, '\n\n'.join((decls, search)), 1)
79 contents = contents.replace(search, '\n\n'.join((decls, search)), 1)
79 if upgradeCommands:
80 if upgradeCommands:
80 contents = contents.replace(' pass', upgradeCommands, 1)
81 contents = contents.replace(' pass', upgradeCommands, 1)
81 if downgradeCommands:
82 if downgradeCommands:
82 contents = contents.replace(' pass', downgradeCommands, 1)
83 contents = contents.replace(' pass', downgradeCommands, 1)
83 return contents
84 return contents
84
85
85 @classmethod
86 @classmethod
86 def verify_module(cls, path):
87 def verify_module(cls, path):
87 """Ensure path is a valid script
88 """Ensure path is a valid script
88
89
89 :param path: Script location
90 :param path: Script location
90 :type path: string
91 :type path: string
91 :raises: :exc:`InvalidScriptError <migrate.exceptions.InvalidScriptError>`
92 :raises: :exc:`InvalidScriptError <migrate.exceptions.InvalidScriptError>`
92 :returns: Python module
93 :returns: Python module
93 """
94 """
94 # Try to import and get the upgrade() func
95 # Try to import and get the upgrade() func
95 module = import_path(path)
96 module = import_path(path)
96 try:
97 try:
97 assert callable(module.upgrade)
98 assert callable(module.upgrade)
98 except Exception as e:
99 except Exception as e:
99 raise InvalidScriptError(path + ': %s' % str(e))
100 raise InvalidScriptError(path + ': %s' % str(e))
100 return module
101 return module
101
102
102 def preview_sql(self, url, step, **args):
103 def preview_sql(self, url, step, **args):
103 """Mocks SQLAlchemy Engine to store all executed calls in a string
104 """Mocks SQLAlchemy Engine to store all executed calls in a string
104 and runs :meth:`PythonScript.run <migrate.versioning.script.py.PythonScript.run>`
105 and runs :meth:`PythonScript.run <migrate.versioning.script.py.PythonScript.run>`
105
106
106 :returns: SQL file
107 :returns: SQL file
107 """
108 """
108 buf = StringIO()
109 buf = StringIO()
109 args['engine_arg_strategy'] = 'mock'
110 args['engine_arg_strategy'] = 'mock'
110 args['engine_arg_executor'] = lambda s, p = '': buf.write(str(s) + p)
111 args['engine_arg_executor'] = lambda s, p = '': buf.write(str(s) + p)
111
112
112 @with_engine
113 @with_engine
113 def go(url, step, **kw):
114 def go(url, step, **kw):
114 engine = kw.pop('engine')
115 engine = kw.pop('engine')
115 self.run(engine, step)
116 self.run(engine, step)
116 return buf.getvalue()
117 return buf.getvalue()
117
118
118 return go(url, step, **args)
119 return go(url, step, **args)
119
120
120 def run(self, engine, step):
121 def run(self, engine, step):
121 """Core method of Script file.
122 """Core method of Script file.
122 Exectues :func:`update` or :func:`downgrade` functions
123 Exectues :func:`update` or :func:`downgrade` functions
123
124
124 :param engine: SQLAlchemy Engine
125 :param engine: SQLAlchemy Engine
125 :param step: Operation to run
126 :param step: Operation to run
126 :type engine: string
127 :type engine: string
127 :type step: int
128 :type step: int
128 """
129 """
129 if step > 0:
130 if step > 0:
130 op = 'upgrade'
131 op = 'upgrade'
131 elif step < 0:
132 elif step < 0:
132 op = 'downgrade'
133 op = 'downgrade'
133 else:
134 else:
134 raise ScriptError("%d is not a valid step" % step)
135 raise ScriptError("%d is not a valid step" % step)
135
136
136 funcname = base.operations[op]
137 funcname = base.operations[op]
137 script_func = self._func(funcname)
138 script_func = self._func(funcname)
138
139
139 # check for old way of using engine
140 # check for old way of using engine
140 if not inspect.getargspec(script_func)[0]:
141 if not inspect.getargspec(script_func)[0]:
141 raise TypeError("upgrade/downgrade functions must accept engine"
142 raise TypeError("upgrade/downgrade functions must accept engine"
142 " parameter (since version 0.5.4)")
143 " parameter (since version 0.5.4)")
143
144
144 script_func(engine)
145 script_func(engine)
145
146
146 @property
147 @property
147 def module(self):
148 def module(self):
148 """Calls :meth:`migrate.versioning.script.py.verify_module`
149 """Calls :meth:`migrate.versioning.script.py.verify_module`
149 and returns it.
150 and returns it.
150 """
151 """
151 if not hasattr(self, '_module'):
152 if not hasattr(self, '_module'):
152 self._module = self.verify_module(self.path)
153 self._module = self.verify_module(self.path)
153 return self._module
154 return self._module
154
155
155 def _func(self, funcname):
156 def _func(self, funcname):
156 if not hasattr(self.module, funcname):
157 if not hasattr(self.module, funcname):
157 msg = "Function '%s' is not defined in this script"
158 msg = "Function '%s' is not defined in this script"
158 raise ScriptError(msg % funcname)
159 raise ScriptError(msg % funcname)
159 return getattr(self.module, funcname)
160 return getattr(self.module, funcname)
@@ -1,179 +1,181 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2 # -*- coding: utf-8 -*-
2 # -*- coding: utf-8 -*-
3 """.. currentmodule:: migrate.versioning.util"""
3 """.. currentmodule:: migrate.versioning.util"""
4
4
5 import warnings
5 import warnings
6 import logging
6 import logging
7 from decorator import decorator
7 from decorator import decorator
8 from pkg_resources import EntryPoint
8 from pkg_resources import EntryPoint
9
9
10 from sqlalchemy import create_engine
10 from sqlalchemy import create_engine
11 from sqlalchemy.engine import Engine
11 from sqlalchemy.engine import Engine
12 from sqlalchemy.pool import StaticPool
12 from sqlalchemy.pool import StaticPool
13
13
14 from pyramid import compat
14 from rhodecode.lib.dbmigrate.migrate import exceptions
15 from rhodecode.lib.dbmigrate.migrate import exceptions
15 from rhodecode.lib.dbmigrate.migrate.versioning.util.keyedinstance import KeyedInstance
16 from rhodecode.lib.dbmigrate.migrate.versioning.util.keyedinstance import KeyedInstance
16 from rhodecode.lib.dbmigrate.migrate.versioning.util.importpath import import_path
17 from rhodecode.lib.dbmigrate.migrate.versioning.util.importpath import import_path
17
18
18
19
19 log = logging.getLogger(__name__)
20 log = logging.getLogger(__name__)
20
21
22
21 def load_model(dotted_name):
23 def load_model(dotted_name):
22 """Import module and use module-level variable".
24 """Import module and use module-level variable".
23
25
24 :param dotted_name: path to model in form of string: ``some.python.module:Class``
26 :param dotted_name: path to model in form of string: ``some.python.module:Class``
25
27
26 .. versionchanged:: 0.5.4
28 .. versionchanged:: 0.5.4
27
29
28 """
30 """
29 if isinstance(dotted_name, basestring):
31 if isinstance(dotted_name, compat.string_types):
30 if ':' not in dotted_name:
32 if ':' not in dotted_name:
31 # backwards compatibility
33 # backwards compatibility
32 warnings.warn('model should be in form of module.model:User '
34 warnings.warn('model should be in form of module.model:User '
33 'and not module.model.User', exceptions.MigrateDeprecationWarning)
35 'and not module.model.User', exceptions.MigrateDeprecationWarning)
34 dotted_name = ':'.join(dotted_name.rsplit('.', 1))
36 dotted_name = ':'.join(dotted_name.rsplit('.', 1))
35 return EntryPoint.parse('x=%s' % dotted_name).load(False)
37 return EntryPoint.parse('x=%s' % dotted_name).load(False)
36 else:
38 else:
37 # Assume it's already loaded.
39 # Assume it's already loaded.
38 return dotted_name
40 return dotted_name
39
41
40 def asbool(obj):
42 def asbool(obj):
41 """Do everything to use object as bool"""
43 """Do everything to use object as bool"""
42 if isinstance(obj, basestring):
44 if isinstance(obj, compat.string_types):
43 obj = obj.strip().lower()
45 obj = obj.strip().lower()
44 if obj in ['true', 'yes', 'on', 'y', 't', '1']:
46 if obj in ['true', 'yes', 'on', 'y', 't', '1']:
45 return True
47 return True
46 elif obj in ['false', 'no', 'off', 'n', 'f', '0']:
48 elif obj in ['false', 'no', 'off', 'n', 'f', '0']:
47 return False
49 return False
48 else:
50 else:
49 raise ValueError("String is not true/false: %r" % obj)
51 raise ValueError("String is not true/false: %r" % obj)
50 if obj in (True, False):
52 if obj in (True, False):
51 return bool(obj)
53 return bool(obj)
52 else:
54 else:
53 raise ValueError("String is not true/false: %r" % obj)
55 raise ValueError("String is not true/false: %r" % obj)
54
56
55 def guess_obj_type(obj):
57 def guess_obj_type(obj):
56 """Do everything to guess object type from string
58 """Do everything to guess object type from string
57
59
58 Tries to convert to `int`, `bool` and finally returns if not succeded.
60 Tries to convert to `int`, `bool` and finally returns if not succeded.
59
61
60 .. versionadded: 0.5.4
62 .. versionadded: 0.5.4
61 """
63 """
62
64
63 result = None
65 result = None
64
66
65 try:
67 try:
66 result = int(obj)
68 result = int(obj)
67 except:
69 except:
68 pass
70 pass
69
71
70 if result is None:
72 if result is None:
71 try:
73 try:
72 result = asbool(obj)
74 result = asbool(obj)
73 except:
75 except:
74 pass
76 pass
75
77
76 if result is not None:
78 if result is not None:
77 return result
79 return result
78 else:
80 else:
79 return obj
81 return obj
80
82
81 @decorator
83 @decorator
82 def catch_known_errors(f, *a, **kw):
84 def catch_known_errors(f, *a, **kw):
83 """Decorator that catches known api errors
85 """Decorator that catches known api errors
84
86
85 .. versionadded: 0.5.4
87 .. versionadded: 0.5.4
86 """
88 """
87
89
88 try:
90 try:
89 return f(*a, **kw)
91 return f(*a, **kw)
90 except exceptions.PathFoundError as e:
92 except exceptions.PathFoundError as e:
91 raise exceptions.KnownError("The path %s already exists" % e.args[0])
93 raise exceptions.KnownError("The path %s already exists" % e.args[0])
92
94
93 def construct_engine(engine, **opts):
95 def construct_engine(engine, **opts):
94 """.. versionadded:: 0.5.4
96 """.. versionadded:: 0.5.4
95
97
96 Constructs and returns SQLAlchemy engine.
98 Constructs and returns SQLAlchemy engine.
97
99
98 Currently, there are 2 ways to pass create_engine options to :mod:`migrate.versioning.api` functions:
100 Currently, there are 2 ways to pass create_engine options to :mod:`migrate.versioning.api` functions:
99
101
100 :param engine: connection string or a existing engine
102 :param engine: connection string or a existing engine
101 :param engine_dict: python dictionary of options to pass to `create_engine`
103 :param engine_dict: python dictionary of options to pass to `create_engine`
102 :param engine_arg_*: keyword parameters to pass to `create_engine` (evaluated with :func:`migrate.versioning.util.guess_obj_type`)
104 :param engine_arg_*: keyword parameters to pass to `create_engine` (evaluated with :func:`migrate.versioning.util.guess_obj_type`)
103 :type engine_dict: dict
105 :type engine_dict: dict
104 :type engine: string or Engine instance
106 :type engine: string or Engine instance
105 :type engine_arg_*: string
107 :type engine_arg_*: string
106 :returns: SQLAlchemy Engine
108 :returns: SQLAlchemy Engine
107
109
108 .. note::
110 .. note::
109
111
110 keyword parameters override ``engine_dict`` values.
112 keyword parameters override ``engine_dict`` values.
111
113
112 """
114 """
113 if isinstance(engine, Engine):
115 if isinstance(engine, Engine):
114 return engine
116 return engine
115 elif not isinstance(engine, basestring):
117 elif not isinstance(engine, compat.string_types):
116 raise ValueError("you need to pass either an existing engine or a database uri")
118 raise ValueError("you need to pass either an existing engine or a database uri")
117
119
118 # get options for create_engine
120 # get options for create_engine
119 if opts.get('engine_dict') and isinstance(opts['engine_dict'], dict):
121 if opts.get('engine_dict') and isinstance(opts['engine_dict'], dict):
120 kwargs = opts['engine_dict']
122 kwargs = opts['engine_dict']
121 else:
123 else:
122 kwargs = {}
124 kwargs = {}
123
125
124 # DEPRECATED: handle echo the old way
126 # DEPRECATED: handle echo the old way
125 echo = asbool(opts.get('echo', False))
127 echo = asbool(opts.get('echo', False))
126 if echo:
128 if echo:
127 warnings.warn('echo=True parameter is deprecated, pass '
129 warnings.warn('echo=True parameter is deprecated, pass '
128 'engine_arg_echo=True or engine_dict={"echo": True}',
130 'engine_arg_echo=True or engine_dict={"echo": True}',
129 exceptions.MigrateDeprecationWarning)
131 exceptions.MigrateDeprecationWarning)
130 kwargs['echo'] = echo
132 kwargs['echo'] = echo
131
133
132 # parse keyword arguments
134 # parse keyword arguments
133 for key, value in opts.iteritems():
135 for key, value in opts.iteritems():
134 if key.startswith('engine_arg_'):
136 if key.startswith('engine_arg_'):
135 kwargs[key[11:]] = guess_obj_type(value)
137 kwargs[key[11:]] = guess_obj_type(value)
136
138
137 log.debug('Constructing engine')
139 log.debug('Constructing engine')
138 # TODO: return create_engine(engine, poolclass=StaticPool, **kwargs)
140 # TODO: return create_engine(engine, poolclass=StaticPool, **kwargs)
139 # seems like 0.5.x branch does not work with engine.dispose and staticpool
141 # seems like 0.5.x branch does not work with engine.dispose and staticpool
140 return create_engine(engine, **kwargs)
142 return create_engine(engine, **kwargs)
141
143
142 @decorator
144 @decorator
143 def with_engine(f, *a, **kw):
145 def with_engine(f, *a, **kw):
144 """Decorator for :mod:`migrate.versioning.api` functions
146 """Decorator for :mod:`migrate.versioning.api` functions
145 to safely close resources after function usage.
147 to safely close resources after function usage.
146
148
147 Passes engine parameters to :func:`construct_engine` and
149 Passes engine parameters to :func:`construct_engine` and
148 resulting parameter is available as kw['engine'].
150 resulting parameter is available as kw['engine'].
149
151
150 Engine is disposed after wrapped function is executed.
152 Engine is disposed after wrapped function is executed.
151
153
152 .. versionadded: 0.6.0
154 .. versionadded: 0.6.0
153 """
155 """
154 url = a[0]
156 url = a[0]
155 engine = construct_engine(url, **kw)
157 engine = construct_engine(url, **kw)
156
158
157 try:
159 try:
158 kw['engine'] = engine
160 kw['engine'] = engine
159 return f(*a, **kw)
161 return f(*a, **kw)
160 finally:
162 finally:
161 if isinstance(engine, Engine) and engine is not url:
163 if isinstance(engine, Engine) and engine is not url:
162 log.debug('Disposing SQLAlchemy engine %s', engine)
164 log.debug('Disposing SQLAlchemy engine %s', engine)
163 engine.dispose()
165 engine.dispose()
164
166
165
167
166 class Memoize:
168 class Memoize:
167 """Memoize(fn) - an instance which acts like fn but memoizes its arguments
169 """Memoize(fn) - an instance which acts like fn but memoizes its arguments
168 Will only work on functions with non-mutable arguments
170 Will only work on functions with non-mutable arguments
169
171
170 ActiveState Code 52201
172 ActiveState Code 52201
171 """
173 """
172 def __init__(self, fn):
174 def __init__(self, fn):
173 self.fn = fn
175 self.fn = fn
174 self.memo = {}
176 self.memo = {}
175
177
176 def __call__(self, *args):
178 def __call__(self, *args):
177 if args not in self.memo:
179 if args not in self.memo:
178 self.memo[args] = self.fn(*args)
180 self.memo[args] = self.fn(*args)
179 return self.memo[args]
181 return self.memo[args]
@@ -1,1043 +1,1044 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import os
21 import os
22 import logging
22 import logging
23 import datetime
23 import datetime
24 import traceback
24 import traceback
25 from datetime import date
25 from datetime import date
26
26
27 from sqlalchemy import *
27 from sqlalchemy import *
28 from sqlalchemy.ext.hybrid import hybrid_property
28 from sqlalchemy.ext.hybrid import hybrid_property
29 from sqlalchemy.orm import relationship, joinedload, class_mapper, validates
29 from sqlalchemy.orm import relationship, joinedload, class_mapper, validates
30 from beaker.cache import cache_region, region_invalidate
30 from beaker.cache import cache_region, region_invalidate
31 from pyramid import compat
31
32
32 from rhodecode.lib.vcs import get_backend
33 from rhodecode.lib.vcs import get_backend
33 from rhodecode.lib.vcs.utils.helpers import get_scm
34 from rhodecode.lib.vcs.utils.helpers import get_scm
34 from rhodecode.lib.vcs.exceptions import VCSError
35 from rhodecode.lib.vcs.exceptions import VCSError
35 from zope.cachedescriptors.property import Lazy as LazyProperty
36 from zope.cachedescriptors.property import Lazy as LazyProperty
36 from rhodecode.lib.auth import generate_auth_token
37 from rhodecode.lib.auth import generate_auth_token
37 from rhodecode.lib.utils2 import str2bool, safe_str, get_commit_safe, safe_unicode
38 from rhodecode.lib.utils2 import str2bool, safe_str, get_commit_safe, safe_unicode
38 from rhodecode.lib.exceptions import UserGroupAssignedException
39 from rhodecode.lib.exceptions import UserGroupAssignedException
39 from rhodecode.lib.ext_json import json
40 from rhodecode.lib.ext_json import json
40
41
41 from rhodecode.model.meta import Base, Session
42 from rhodecode.model.meta import Base, Session
42 from rhodecode.lib.caching_query import FromCache
43 from rhodecode.lib.caching_query import FromCache
43
44
44
45
45 log = logging.getLogger(__name__)
46 log = logging.getLogger(__name__)
46
47
47 #==============================================================================
48 #==============================================================================
48 # BASE CLASSES
49 # BASE CLASSES
49 #==============================================================================
50 #==============================================================================
50
51
51 class ModelSerializer(json.JSONEncoder):
52 class ModelSerializer(json.JSONEncoder):
52 """
53 """
53 Simple Serializer for JSON,
54 Simple Serializer for JSON,
54
55
55 usage::
56 usage::
56
57
57 to make object customized for serialization implement a __json__
58 to make object customized for serialization implement a __json__
58 method that will return a dict for serialization into json
59 method that will return a dict for serialization into json
59
60
60 example::
61 example::
61
62
62 class Task(object):
63 class Task(object):
63
64
64 def __init__(self, name, value):
65 def __init__(self, name, value):
65 self.name = name
66 self.name = name
66 self.value = value
67 self.value = value
67
68
68 def __json__(self):
69 def __json__(self):
69 return dict(name=self.name,
70 return dict(name=self.name,
70 value=self.value)
71 value=self.value)
71
72
72 """
73 """
73
74
74 def default(self, obj):
75 def default(self, obj):
75
76
76 if hasattr(obj, '__json__'):
77 if hasattr(obj, '__json__'):
77 return obj.__json__()
78 return obj.__json__()
78 else:
79 else:
79 return json.JSONEncoder.default(self, obj)
80 return json.JSONEncoder.default(self, obj)
80
81
81 class BaseModel(object):
82 class BaseModel(object):
82 """Base Model for all classess
83 """Base Model for all classess
83
84
84 """
85 """
85
86
86 @classmethod
87 @classmethod
87 def _get_keys(cls):
88 def _get_keys(cls):
88 """return column names for this model """
89 """return column names for this model """
89 return class_mapper(cls).c.keys()
90 return class_mapper(cls).c.keys()
90
91
91 def get_dict(self):
92 def get_dict(self):
92 """return dict with keys and values corresponding
93 """return dict with keys and values corresponding
93 to this model data """
94 to this model data """
94
95
95 d = {}
96 d = {}
96 for k in self._get_keys():
97 for k in self._get_keys():
97 d[k] = getattr(self, k)
98 d[k] = getattr(self, k)
98 return d
99 return d
99
100
100 def get_appstruct(self):
101 def get_appstruct(self):
101 """return list with keys and values tupples corresponding
102 """return list with keys and values tupples corresponding
102 to this model data """
103 to this model data """
103
104
104 l = []
105 l = []
105 for k in self._get_keys():
106 for k in self._get_keys():
106 l.append((k, getattr(self, k),))
107 l.append((k, getattr(self, k),))
107 return l
108 return l
108
109
109 def populate_obj(self, populate_dict):
110 def populate_obj(self, populate_dict):
110 """populate model with data from given populate_dict"""
111 """populate model with data from given populate_dict"""
111
112
112 for k in self._get_keys():
113 for k in self._get_keys():
113 if k in populate_dict:
114 if k in populate_dict:
114 setattr(self, k, populate_dict[k])
115 setattr(self, k, populate_dict[k])
115
116
116 @classmethod
117 @classmethod
117 def query(cls):
118 def query(cls):
118 return Session.query(cls)
119 return Session.query(cls)
119
120
120 @classmethod
121 @classmethod
121 def get(cls, id_):
122 def get(cls, id_):
122 if id_:
123 if id_:
123 return cls.query().get(id_)
124 return cls.query().get(id_)
124
125
125 @classmethod
126 @classmethod
126 def getAll(cls):
127 def getAll(cls):
127 return cls.query().all()
128 return cls.query().all()
128
129
129 @classmethod
130 @classmethod
130 def delete(cls, id_):
131 def delete(cls, id_):
131 obj = cls.query().get(id_)
132 obj = cls.query().get(id_)
132 Session.delete(obj)
133 Session.delete(obj)
133 Session.commit()
134 Session.commit()
134
135
135
136
136 class RhodeCodeSetting(Base, BaseModel):
137 class RhodeCodeSetting(Base, BaseModel):
137 __tablename__ = 'rhodecode_settings'
138 __tablename__ = 'rhodecode_settings'
138 __table_args__ = (UniqueConstraint('app_settings_name'), {'extend_existing':True})
139 __table_args__ = (UniqueConstraint('app_settings_name'), {'extend_existing':True})
139 app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
140 app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
140 app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
141 app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
141 _app_settings_value = Column("app_settings_value", String(255), nullable=True, unique=None, default=None)
142 _app_settings_value = Column("app_settings_value", String(255), nullable=True, unique=None, default=None)
142
143
143 def __init__(self, k='', v=''):
144 def __init__(self, k='', v=''):
144 self.app_settings_name = k
145 self.app_settings_name = k
145 self.app_settings_value = v
146 self.app_settings_value = v
146
147
147
148
148 @validates('_app_settings_value')
149 @validates('_app_settings_value')
149 def validate_settings_value(self, key, val):
150 def validate_settings_value(self, key, val):
150 assert type(val) == unicode
151 assert type(val) == unicode
151 return val
152 return val
152
153
153 @hybrid_property
154 @hybrid_property
154 def app_settings_value(self):
155 def app_settings_value(self):
155 v = self._app_settings_value
156 v = self._app_settings_value
156 if v == 'ldap_active':
157 if v == 'ldap_active':
157 v = str2bool(v)
158 v = str2bool(v)
158 return v
159 return v
159
160
160 @app_settings_value.setter
161 @app_settings_value.setter
161 def app_settings_value(self, val):
162 def app_settings_value(self, val):
162 """
163 """
163 Setter that will always make sure we use unicode in app_settings_value
164 Setter that will always make sure we use unicode in app_settings_value
164
165
165 :param val:
166 :param val:
166 """
167 """
167 self._app_settings_value = safe_unicode(val)
168 self._app_settings_value = safe_unicode(val)
168
169
169 def __repr__(self):
170 def __repr__(self):
170 return "<%s('%s:%s')>" % (self.__class__.__name__,
171 return "<%s('%s:%s')>" % (self.__class__.__name__,
171 self.app_settings_name, self.app_settings_value)
172 self.app_settings_name, self.app_settings_value)
172
173
173
174
174 @classmethod
175 @classmethod
175 def get_by_name(cls, ldap_key):
176 def get_by_name(cls, ldap_key):
176 return cls.query()\
177 return cls.query()\
177 .filter(cls.app_settings_name == ldap_key).scalar()
178 .filter(cls.app_settings_name == ldap_key).scalar()
178
179
179 @classmethod
180 @classmethod
180 def get_app_settings(cls, cache=False):
181 def get_app_settings(cls, cache=False):
181
182
182 ret = cls.query()
183 ret = cls.query()
183
184
184 if cache:
185 if cache:
185 ret = ret.options(FromCache("sql_cache_short", "get_hg_settings"))
186 ret = ret.options(FromCache("sql_cache_short", "get_hg_settings"))
186
187
187 if not ret:
188 if not ret:
188 raise Exception('Could not get application settings !')
189 raise Exception('Could not get application settings !')
189 settings = {}
190 settings = {}
190 for each in ret:
191 for each in ret:
191 settings['rhodecode_' + each.app_settings_name] = \
192 settings['rhodecode_' + each.app_settings_name] = \
192 each.app_settings_value
193 each.app_settings_value
193
194
194 return settings
195 return settings
195
196
196 @classmethod
197 @classmethod
197 def get_ldap_settings(cls, cache=False):
198 def get_ldap_settings(cls, cache=False):
198 ret = cls.query()\
199 ret = cls.query()\
199 .filter(cls.app_settings_name.startswith('ldap_')).all()
200 .filter(cls.app_settings_name.startswith('ldap_')).all()
200 fd = {}
201 fd = {}
201 for row in ret:
202 for row in ret:
202 fd.update({row.app_settings_name:row.app_settings_value})
203 fd.update({row.app_settings_name:row.app_settings_value})
203
204
204 return fd
205 return fd
205
206
206
207
207 class RhodeCodeUi(Base, BaseModel):
208 class RhodeCodeUi(Base, BaseModel):
208 __tablename__ = 'rhodecode_ui'
209 __tablename__ = 'rhodecode_ui'
209 __table_args__ = (UniqueConstraint('ui_key'), {'extend_existing':True})
210 __table_args__ = (UniqueConstraint('ui_key'), {'extend_existing':True})
210
211
211 HOOK_REPO_SIZE = 'changegroup.repo_size'
212 HOOK_REPO_SIZE = 'changegroup.repo_size'
212 HOOK_PUSH = 'pretxnchangegroup.push_logger'
213 HOOK_PUSH = 'pretxnchangegroup.push_logger'
213 HOOK_PULL = 'preoutgoing.pull_logger'
214 HOOK_PULL = 'preoutgoing.pull_logger'
214
215
215 ui_id = Column("ui_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
216 ui_id = Column("ui_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
216 ui_section = Column("ui_section", String(255), nullable=True, unique=None, default=None)
217 ui_section = Column("ui_section", String(255), nullable=True, unique=None, default=None)
217 ui_key = Column("ui_key", String(255), nullable=True, unique=None, default=None)
218 ui_key = Column("ui_key", String(255), nullable=True, unique=None, default=None)
218 ui_value = Column("ui_value", String(255), nullable=True, unique=None, default=None)
219 ui_value = Column("ui_value", String(255), nullable=True, unique=None, default=None)
219 ui_active = Column("ui_active", Boolean(), nullable=True, unique=None, default=True)
220 ui_active = Column("ui_active", Boolean(), nullable=True, unique=None, default=True)
220
221
221
222
222 @classmethod
223 @classmethod
223 def get_by_key(cls, key):
224 def get_by_key(cls, key):
224 return cls.query().filter(cls.ui_key == key)
225 return cls.query().filter(cls.ui_key == key)
225
226
226
227
227 @classmethod
228 @classmethod
228 def get_builtin_hooks(cls):
229 def get_builtin_hooks(cls):
229 q = cls.query()
230 q = cls.query()
230 q = q.filter(cls.ui_key.in_([cls.HOOK_REPO_SIZE,
231 q = q.filter(cls.ui_key.in_([cls.HOOK_REPO_SIZE,
231 cls.HOOK_PUSH, cls.HOOK_PULL]))
232 cls.HOOK_PUSH, cls.HOOK_PULL]))
232 return q.all()
233 return q.all()
233
234
234 @classmethod
235 @classmethod
235 def get_custom_hooks(cls):
236 def get_custom_hooks(cls):
236 q = cls.query()
237 q = cls.query()
237 q = q.filter(~cls.ui_key.in_([cls.HOOK_REPO_SIZE,
238 q = q.filter(~cls.ui_key.in_([cls.HOOK_REPO_SIZE,
238 cls.HOOK_PUSH, cls.HOOK_PULL]))
239 cls.HOOK_PUSH, cls.HOOK_PULL]))
239 q = q.filter(cls.ui_section == 'hooks')
240 q = q.filter(cls.ui_section == 'hooks')
240 return q.all()
241 return q.all()
241
242
242 @classmethod
243 @classmethod
243 def create_or_update_hook(cls, key, val):
244 def create_or_update_hook(cls, key, val):
244 new_ui = cls.get_by_key(key).scalar() or cls()
245 new_ui = cls.get_by_key(key).scalar() or cls()
245 new_ui.ui_section = 'hooks'
246 new_ui.ui_section = 'hooks'
246 new_ui.ui_active = True
247 new_ui.ui_active = True
247 new_ui.ui_key = key
248 new_ui.ui_key = key
248 new_ui.ui_value = val
249 new_ui.ui_value = val
249
250
250 Session.add(new_ui)
251 Session.add(new_ui)
251 Session.commit()
252 Session.commit()
252
253
253
254
254 class User(Base, BaseModel):
255 class User(Base, BaseModel):
255 __tablename__ = 'users'
256 __tablename__ = 'users'
256 __table_args__ = (UniqueConstraint('username'), UniqueConstraint('email'), {'extend_existing':True})
257 __table_args__ = (UniqueConstraint('username'), UniqueConstraint('email'), {'extend_existing':True})
257 user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
258 user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
258 username = Column("username", String(255), nullable=True, unique=None, default=None)
259 username = Column("username", String(255), nullable=True, unique=None, default=None)
259 password = Column("password", String(255), nullable=True, unique=None, default=None)
260 password = Column("password", String(255), nullable=True, unique=None, default=None)
260 active = Column("active", Boolean(), nullable=True, unique=None, default=None)
261 active = Column("active", Boolean(), nullable=True, unique=None, default=None)
261 admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
262 admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
262 name = Column("name", String(255), nullable=True, unique=None, default=None)
263 name = Column("name", String(255), nullable=True, unique=None, default=None)
263 lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
264 lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
264 email = Column("email", String(255), nullable=True, unique=None, default=None)
265 email = Column("email", String(255), nullable=True, unique=None, default=None)
265 last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
266 last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
266 ldap_dn = Column("ldap_dn", String(255), nullable=True, unique=None, default=None)
267 ldap_dn = Column("ldap_dn", String(255), nullable=True, unique=None, default=None)
267 api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
268 api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
268
269
269 user_log = relationship('UserLog', cascade='all')
270 user_log = relationship('UserLog', cascade='all')
270 user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')
271 user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')
271
272
272 repositories = relationship('Repository')
273 repositories = relationship('Repository')
273 user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
274 user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
274 repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
275 repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
275
276
276 group_member = relationship('UserGroupMember', cascade='all')
277 group_member = relationship('UserGroupMember', cascade='all')
277
278
278 @property
279 @property
279 def full_contact(self):
280 def full_contact(self):
280 return '%s %s <%s>' % (self.name, self.lastname, self.email)
281 return '%s %s <%s>' % (self.name, self.lastname, self.email)
281
282
282 @property
283 @property
283 def short_contact(self):
284 def short_contact(self):
284 return '%s %s' % (self.name, self.lastname)
285 return '%s %s' % (self.name, self.lastname)
285
286
286 @property
287 @property
287 def is_admin(self):
288 def is_admin(self):
288 return self.admin
289 return self.admin
289
290
290 def __repr__(self):
291 def __repr__(self):
291 try:
292 try:
292 return "<%s('id:%s:%s')>" % (self.__class__.__name__,
293 return "<%s('id:%s:%s')>" % (self.__class__.__name__,
293 self.user_id, self.username)
294 self.user_id, self.username)
294 except:
295 except:
295 return self.__class__.__name__
296 return self.__class__.__name__
296
297
297 @classmethod
298 @classmethod
298 def get_by_username(cls, username, case_insensitive=False):
299 def get_by_username(cls, username, case_insensitive=False):
299 if case_insensitive:
300 if case_insensitive:
300 return Session.query(cls).filter(cls.username.ilike(username)).scalar()
301 return Session.query(cls).filter(cls.username.ilike(username)).scalar()
301 else:
302 else:
302 return Session.query(cls).filter(cls.username == username).scalar()
303 return Session.query(cls).filter(cls.username == username).scalar()
303
304
304 @classmethod
305 @classmethod
305 def get_by_auth_token(cls, auth_token):
306 def get_by_auth_token(cls, auth_token):
306 return cls.query().filter(cls.api_key == auth_token).one()
307 return cls.query().filter(cls.api_key == auth_token).one()
307
308
308 def update_lastlogin(self):
309 def update_lastlogin(self):
309 """Update user lastlogin"""
310 """Update user lastlogin"""
310
311
311 self.last_login = datetime.datetime.now()
312 self.last_login = datetime.datetime.now()
312 Session.add(self)
313 Session.add(self)
313 Session.commit()
314 Session.commit()
314 log.debug('updated user %s lastlogin', self.username)
315 log.debug('updated user %s lastlogin', self.username)
315
316
316 @classmethod
317 @classmethod
317 def create(cls, form_data):
318 def create(cls, form_data):
318 from rhodecode.lib.auth import get_crypt_password
319 from rhodecode.lib.auth import get_crypt_password
319
320
320 try:
321 try:
321 new_user = cls()
322 new_user = cls()
322 for k, v in form_data.items():
323 for k, v in form_data.items():
323 if k == 'password':
324 if k == 'password':
324 v = get_crypt_password(v)
325 v = get_crypt_password(v)
325 setattr(new_user, k, v)
326 setattr(new_user, k, v)
326
327
327 new_user.api_key = generate_auth_token(form_data['username'])
328 new_user.api_key = generate_auth_token(form_data['username'])
328 Session.add(new_user)
329 Session.add(new_user)
329 Session.commit()
330 Session.commit()
330 return new_user
331 return new_user
331 except:
332 except:
332 log.error(traceback.format_exc())
333 log.error(traceback.format_exc())
333 Session.rollback()
334 Session.rollback()
334 raise
335 raise
335
336
class UserLog(Base, BaseModel):
    """Audit-log row recording one user action on one repository."""
    __tablename__ = 'user_logs'
    __table_args__ = {'extend_existing': True}

    user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
    repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
    user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
    action = Column("action", String(1200000), nullable=True, unique=None, default=None)
    action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)

    user = relationship('User')
    repository = relationship('Repository')

    @property
    def action_as_day(self):
        """The calendar day (a ``datetime.date``) on which the action happened."""
        when = self.action_date.timetuple()
        return date(*when[:3])
353
354
354
355
class UserGroup(Base, BaseModel):
    """A named group of users, used to assign permissions collectively.

    Provides cached lookup helpers plus create/update/delete operations
    that commit or roll back the session themselves.
    """
    __tablename__ = 'users_groups'
    __table_args__ = {'extend_existing': True}

    users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
    users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)

    members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")

    def __repr__(self):
        return '<userGroup(%s)>' % (self.users_group_name)

    @classmethod
    def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
        """Look a group up by name.

        :param group_name: name to match
        :param cache: when True, route the query through the short SQL cache
        :param case_insensitive: when True, match with ``ilike``
        :return: the matching group or None
        """
        if case_insensitive:
            gr = cls.query()\
                .filter(cls.users_group_name.ilike(group_name))
        else:
            gr = cls.query()\
                .filter(cls.users_group_name == group_name)
        if cache:
            gr = gr.options(FromCache("sql_cache_short",
                                      "get_user_%s" % group_name))
        return gr.scalar()

    @classmethod
    def get(cls, users_group_id, cache=False):
        """Fetch a group by primary key, optionally via the short SQL cache."""
        users_group = cls.query()
        if cache:
            users_group = users_group.options(
                FromCache("sql_cache_short",
                          "get_users_group_%s" % users_group_id))
        return users_group.get(users_group_id)

    @classmethod
    def create(cls, form_data):
        """Create a new group from *form_data*; roll back and re-raise on error."""
        try:
            new_user_group = cls()
            for k, v in form_data.items():
                setattr(new_user_group, k, v)

            Session.add(new_user_group)
            Session.commit()
            return new_user_group
        except Exception:
            # was a bare ``except:`` -- narrowed so SystemExit and
            # KeyboardInterrupt are not swallowed
            log.error(traceback.format_exc())
            Session.rollback()
            raise

    @classmethod
    def update(cls, users_group_id, form_data):
        """Update a group's attributes (including its member list) from
        *form_data*; roll back and re-raise on error."""
        try:
            users_group = cls.get(users_group_id, cache=False)

            for k, v in form_data.items():
                if k == 'users_group_members':
                    users_group.members = []
                    Session.flush()
                    members_list = []
                    if v:
                        # a single member may arrive as a bare string
                        v = [v] if isinstance(v, compat.string_types) else v
                        for u_id in set(v):
                            member = UserGroupMember(users_group_id, u_id)
                            members_list.append(member)
                    setattr(users_group, 'members', members_list)
                setattr(users_group, k, v)

            Session.add(users_group)
            Session.commit()
        except Exception:
            # was a bare ``except:``
            log.error(traceback.format_exc())
            Session.rollback()
            raise

    @classmethod
    def delete(cls, user_group_id):
        """Delete a group unless it still grants permissions on a repository.

        :raises UserGroupAssignedException: when the group still has
            repository permission assignments
        """
        try:
            # refuse to delete a group that is still assigned to a repo
            assigned_groups = UserGroupRepoToPerm.query()\
                .filter(UserGroupRepoToPerm.users_group_id ==
                        user_group_id).all()

            if assigned_groups:
                raise UserGroupAssignedException(
                    'UserGroup assigned to %s' % assigned_groups)

            users_group = cls.get(user_group_id, cache=False)
            Session.delete(users_group)
            Session.commit()
        except Exception:
            # was a bare ``except:``
            log.error(traceback.format_exc())
            Session.rollback()
            raise
450
451
class UserGroupMember(Base, BaseModel):
    """Association row linking one user to one user group."""
    __tablename__ = 'users_groups_members'
    __table_args__ = {'extend_existing': True}

    users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)

    user = relationship('User', lazy='joined')
    users_group = relationship('UserGroup')

    def __init__(self, gr_id='', u_id=''):
        self.users_group_id = gr_id
        self.user_id = u_id

    @staticmethod
    def add_user_to_group(group, user):
        """Create, persist and return a membership of *user* in *group*."""
        membership = UserGroupMember()
        membership.users_group = group
        membership.user = user
        Session.add(membership)
        Session.commit()
        return membership
474
475
class Repository(Base, BaseModel):
    """ORM model of a single repository plus its filesystem/SCM bindings."""
    __tablename__ = 'repositories'
    __table_args__ = (UniqueConstraint('repo_name'), {'extend_existing': True},)

    repo_id = Column("repo_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    repo_name = Column("repo_name", String(255), nullable=False, unique=True, default=None)
    clone_uri = Column("clone_uri", String(255), nullable=True, unique=False, default=None)
    repo_type = Column("repo_type", String(255), nullable=False, unique=False, default='hg')
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
    private = Column("private", Boolean(), nullable=True, unique=None, default=None)
    enable_statistics = Column("statistics", Boolean(), nullable=True, unique=None, default=True)
    enable_downloads = Column("downloads", Boolean(), nullable=True, unique=None, default=True)
    description = Column("description", String(10000), nullable=True, unique=None, default=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)

    fork_id = Column("fork_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=False, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=False, default=None)

    user = relationship('User')
    fork = relationship('Repository', remote_side=repo_id)
    group = relationship('RepoGroup')
    repo_to_perm = relationship('UserRepoToPerm', cascade='all', order_by='UserRepoToPerm.repo_to_perm_id')
    users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
    stats = relationship('Statistics', cascade='all', uselist=False)

    followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id', cascade='all')

    logs = relationship('UserLog', cascade='all')

    def __repr__(self):
        return "<%s('%s:%s')>" % (self.__class__.__name__,
                                  self.repo_id, self.repo_name)

    @classmethod
    def url_sep(cls):
        """Separator used between group segments inside repository names."""
        return '/'

    @classmethod
    def get_by_repo_name(cls, repo_name):
        """Return the repository named *repo_name*, eager-loading its
        fork, owner and group relations.  Raises when no row matches."""
        q = Session.query(cls).filter(cls.repo_name == repo_name)
        q = (q.options(joinedload(Repository.fork))
              .options(joinedload(Repository.user))
              .options(joinedload(Repository.group)))
        return q.one()

    @classmethod
    def get_repo_forks(cls, repo_id):
        """Query for all repositories forked from *repo_id*."""
        return cls.query().filter(Repository.fork_id == repo_id)

    @classmethod
    def base_path(cls):
        """Return the filesystem base path under which all repos are stored."""
        q = Session.query(RhodeCodeUi).filter(RhodeCodeUi.ui_key ==
                                              cls.url_sep())
        # NOTE(review): the result of q.options(...) is discarded, so the
        # FromCache option most likely never takes effect -- verify intent
        q.options(FromCache("sql_cache_short", "repository_repo_path"))
        return q.one().ui_value

    @property
    def just_name(self):
        """Repository name with any group path prefix stripped."""
        return self.repo_name.split(Repository.url_sep())[-1]

    @property
    def groups_with_parents(self):
        """All groups above this repository, outermost group first."""
        if self.group is None:
            return []
        chain = [self.group]
        current = self.group
        while True:
            parent = getattr(current, 'parent_group', None)
            current = current.parent_group
            if parent is None:
                break
            chain.insert(0, parent)
        return chain

    @property
    def groups_and_repo(self):
        """Tuple of (ancestor groups, bare repository name)."""
        return self.groups_with_parents, self.just_name

    @LazyProperty
    def repo_path(self):
        """Filesystem base path for repositories (cached per instance)."""
        q = Session.query(RhodeCodeUi).filter(RhodeCodeUi.ui_key ==
                                              Repository.url_sep())
        # NOTE(review): return value of q.options(...) is discarded here as
        # well -- the cache option likely has no effect; verify intent
        q.options(FromCache("sql_cache_short", "repository_repo_path"))
        return q.one().ui_value

    @property
    def repo_full_path(self):
        """Absolute filesystem path of this repository."""
        # repo_name is stored with '/' separators; convert it into a
        # platform path rooted at the configured base path
        segments = [self.repo_path]
        segments += self.repo_name.split(Repository.url_sep())
        return os.path.join(*segments)

    def get_new_name(self, repo_name):
        """Return the full repository name for *repo_name* under this
        repository's current group.

        :param repo_name: new bare (last-segment) name
        """
        path_prefix = self.group.full_path_splitted if self.group else []
        return Repository.url_sep().join(path_prefix + [repo_name])

    @property
    def _config(self):
        """Database-backed config object (session not cleared)."""
        from rhodecode.lib.utils import make_db_config
        return make_db_config(clear_session=False)

    @classmethod
    def is_valid(cls, repo_name):
        """True when *repo_name* points at a valid on-disk repository."""
        from rhodecode.lib.utils import is_valid_repo
        return is_valid_repo(repo_name, cls.base_path())

    #==========================================================================
    # SCM PROPERTIES
    #==========================================================================

    def get_commit(self, rev):
        """Safely fetch commit *rev* from the SCM instance."""
        return get_commit_safe(self.scm_instance, rev)

    @property
    def tip(self):
        """The repository's tip commit."""
        return self.get_commit('tip')

    @property
    def author(self):
        """Author of the tip commit."""
        return self.tip.author

    @property
    def last_change(self):
        """Timestamp of the repository's last change, per the SCM backend."""
        return self.scm_instance.last_change

    #==========================================================================
    # SCM CACHE INSTANCE
    #==========================================================================

    @property
    def invalidate(self):
        """Current cache-invalidation record for this repository."""
        return CacheInvalidation.invalidate(self.repo_name)

    def set_invalidate(self):
        """Mark this repository's cache as needing invalidation."""
        CacheInvalidation.set_invalidate(self.repo_name)

    @LazyProperty
    def scm_instance(self):
        # cached on first access via LazyProperty
        return self.__get_instance()

    @property
    def scm_instance_cached(self):
        return self.__get_instance()

    def __get_instance(self):
        """Instantiate the VCS backend for this repository's on-disk path.

        Returns None (after logging) when the path is not a recognized
        repository on the filesystem.
        """
        repo_full_path = self.repo_full_path

        try:
            alias = get_scm(repo_full_path)[0]
            log.debug('Creating instance of %s repository', alias)
            backend = get_backend(alias)
        except VCSError:
            log.error(traceback.format_exc())
            log.error('Perhaps this repository is in db and not in '
                      'filesystem run rescan repositories with '
                      '"destroy old data " option from admin panel')
            return

        if alias == 'hg':
            # Mercurial needs a bytes path and a db-backed config
            repo = backend(safe_str(repo_full_path), create=False,
                           config=self._config)
        else:
            repo = backend(repo_full_path, create=False)

        return repo
677
678
678
679
class Group(Base, BaseModel):
    """Repository group; groups nest via ``group_parent_id``."""
    __tablename__ = 'groups'
    __table_args__ = (UniqueConstraint('group_name', 'group_parent_id'),
                      CheckConstraint('group_id != group_parent_id'), {'extend_existing': True},)
    __mapper_args__ = {'order_by': 'group_name'}

    group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
    group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
    group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)

    parent_group = relationship('Group', remote_side=group_id)

    def __init__(self, group_name='', parent_group=None):
        self.group_name = group_name
        self.parent_group = parent_group

    def __repr__(self):
        return "<%s('%s:%s')>" % (self.__class__.__name__, self.group_id,
                                  self.group_name)

    @classmethod
    def url_sep(cls):
        """Separator between nested group name segments."""
        return '/'

    @classmethod
    def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
        """Look a group up by its full name.

        :param cache: when True, route the query through the short SQL cache
        :param case_insensitive: when True, match with ``ilike``
        """
        if case_insensitive:
            gr = cls.query().filter(cls.group_name.ilike(group_name))
        else:
            gr = cls.query().filter(cls.group_name == group_name)
        if cache:
            gr = gr.options(FromCache("sql_cache_short",
                                      "get_group_%s" % group_name))
        return gr.scalar()

    @property
    def parents(self):
        """Chain of ancestor groups, outermost first, capped at 5 levels."""
        parents_recursion_limit = 5
        if self.parent_group is None:
            return []
        chain = [self.parent_group]
        current = self.parent_group
        hops = 0
        while True:
            hops += 1
            parent = getattr(current, 'parent_group', None)
            current = current.parent_group
            if parent is None:
                break
            if hops == parents_recursion_limit:
                # guards against accidental infinite loops in bad data
                log.error('group nested more than %s',
                          parents_recursion_limit)
                break
            chain.insert(0, parent)
        return chain

    @property
    def children(self):
        """Query for the direct subgroups of this group."""
        return Group.query().filter(Group.parent_group == self)

    @property
    def name(self):
        """Last segment of the full group name."""
        return self.group_name.split(Group.url_sep())[-1]

    @property
    def full_path(self):
        return self.group_name

    @property
    def full_path_splitted(self):
        """Full group name split into its path segments."""
        return self.group_name.split(Group.url_sep())

    @property
    def repositories(self):
        """Query for the repositories directly inside this group."""
        return Repository.query().filter(Repository.group == self)

    @property
    def repositories_recursive_count(self):
        """Number of repositories in this group and all of its subgroups."""
        def _count_below(group):
            # one-line purpose: sum repo counts over all descendants
            total = 0
            for child in group.children:
                total += child.repositories.count()
                total += _count_below(child)
            return total

        return self.repositories.count() + _count_below(self)

    def get_new_name(self, group_name):
        """Return the full group name for *group_name* under this group's
        parent.

        :param group_name: new bare (last-segment) name
        """
        prefix = (self.parent_group.full_path_splitted if
                  self.parent_group else [])
        return Group.url_sep().join(prefix + [group_name])
784
785
785
786
class Permission(Base, BaseModel):
    """Named permission that can be granted to users or user groups."""
    __tablename__ = 'permissions'
    __table_args__ = {'extend_existing': True}

    permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
    permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)

    def __repr__(self):
        return "<%s('%s:%s')>" % (self.__class__.__name__,
                                  self.permission_id, self.permission_name)

    @classmethod
    def get_by_key(cls, key):
        """Return the permission whose name equals *key*, or None."""
        return cls.query().filter(cls.permission_name == key).scalar()
800
801
class UserRepoToPerm(Base, BaseModel):
    """Grant of one permission on one repository to one user."""
    __tablename__ = 'repo_to_perm'
    __table_args__ = (UniqueConstraint('user_id', 'repository_id'), {'extend_existing': True})

    repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    permission = relationship('Permission')
    repository = relationship('Repository')
812
813
class UserToPerm(Base, BaseModel):
    """Association row granting a single user a global (application-wide)
    permission.

    ``grant_perm``/``revoke_perm`` are best-effort: failures are logged and
    rolled back rather than propagated.
    """
    __tablename__ = 'user_to_perm'
    # at most one row per (user, permission) pair
    __table_args__ = (UniqueConstraint('user_id', 'permission_id'), {'extend_existing':True})
    user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    permission = relationship('Permission')

    @classmethod
    def has_perm(cls, user_id, perm):
        """Return True if *user_id* has been granted *perm*.

        :param user_id: numeric user id
        :param perm: a :class:`Permission` instance (anything else raises)
        """
        if not isinstance(perm, Permission):
            raise Exception('perm needs to be an instance of Permission class')

        return cls.query().filter(cls.user_id == user_id)\
            .filter(cls.permission == perm).scalar() is not None

    @classmethod
    def grant_perm(cls, user_id, perm):
        """Grant *perm* to *user_id*; on DB error log and roll back.

        :param perm: a :class:`Permission` instance (anything else raises)
        """
        if not isinstance(perm, Permission):
            raise Exception('perm needs to be an instance of Permission class')

        new = cls()
        new.user_id = user_id
        new.permission = perm
        try:
            Session.add(new)
            Session.commit()
        except Exception:
            # was a bare ``except:`` which also swallowed SystemExit /
            # KeyboardInterrupt; narrowed and logged, keeping the
            # best-effort rollback semantics (matches CacheInvalidation)
            log.error(traceback.format_exc())
            Session.rollback()

    @classmethod
    def revoke_perm(cls, user_id, perm):
        """Remove *perm* from *user_id*; on DB error log and roll back.

        :param perm: a :class:`Permission` instance (anything else raises)
        """
        if not isinstance(perm, Permission):
            raise Exception('perm needs to be an instance of Permission class')

        try:
            cls.query().filter(cls.user_id == user_id) \
                .filter(cls.permission == perm).delete()
            Session.commit()
        except Exception:
            # was a bare ``except:`` — see grant_perm
            log.error(traceback.format_exc())
            Session.rollback()
857
858
class UserGroupRepoToPerm(Base, BaseModel):
    """Association row granting a user group a permission on one repository."""
    __tablename__ = 'users_group_repo_to_perm'
    # at most one row per (repository, group, permission) triple
    __table_args__ = (UniqueConstraint('repository_id', 'users_group_id', 'permission_id'), {'extend_existing':True})
    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
    repository = relationship('Repository')

    def __repr__(self):
        return '<userGroup:%s => %s >' % (self.users_group, self.repository)
872
873
class UserGroupToPerm(Base, BaseModel):
    """Association row granting a user group a global (application-wide)
    permission.

    ``grant_perm``/``revoke_perm`` are best-effort: failures are logged and
    rolled back rather than propagated.
    """
    __tablename__ = 'users_group_to_perm'
    __table_args__ = {'extend_existing':True}
    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')

    @classmethod
    def has_perm(cls, users_group_id, perm):
        """Return True if the group *users_group_id* has been granted *perm*.

        :param perm: a :class:`Permission` instance (anything else raises)
        """
        if not isinstance(perm, Permission):
            raise Exception('perm needs to be an instance of Permission class')

        return cls.query().filter(cls.users_group_id ==
                                  users_group_id)\
            .filter(cls.permission == perm)\
            .scalar() is not None

    @classmethod
    def grant_perm(cls, users_group_id, perm):
        """Grant *perm* to the group; on DB error log and roll back.

        :param perm: a :class:`Permission` instance (anything else raises)
        """
        if not isinstance(perm, Permission):
            raise Exception('perm needs to be an instance of Permission class')

        new = cls()
        new.users_group_id = users_group_id
        new.permission = perm
        try:
            Session.add(new)
            Session.commit()
        except Exception:
            # was a bare ``except:`` which also swallowed SystemExit /
            # KeyboardInterrupt; narrowed and logged, keeping the
            # best-effort rollback semantics (matches CacheInvalidation)
            log.error(traceback.format_exc())
            Session.rollback()

    @classmethod
    def revoke_perm(cls, users_group_id, perm):
        """Remove *perm* from the group; on DB error log and roll back.

        :param perm: a :class:`Permission` instance (anything else raises)
        """
        if not isinstance(perm, Permission):
            raise Exception('perm needs to be an instance of Permission class')

        try:
            cls.query().filter(cls.users_group_id == users_group_id) \
                .filter(cls.permission == perm).delete()
            Session.commit()
        except Exception:
            # was a bare ``except:`` — see grant_perm
            log.error(traceback.format_exc())
            Session.rollback()
920
921
921
922
class UserRepoGroupToPerm(Base, BaseModel):
    """Association row granting a user a permission on one repository group."""
    __tablename__ = 'group_to_perm'
    # at most one row per (group, permission) pair
    __table_args__ = (UniqueConstraint('group_id', 'permission_id'), {'extend_existing':True})

    group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    permission = relationship('Permission')
    group = relationship('RepoGroup')
934
935
class Statistics(Base, BaseModel):
    """Cached statistics (commit activity, languages) for one repository.

    The Large­Binary columns hold serialized JSON blobs, per the inline
    comments; ``stat_on_revision`` records the revision the stats were
    computed at.
    """
    __tablename__ = 'statistics'
    # one statistics row per repository
    __table_args__ = (UniqueConstraint('repository_id'), {'extend_existing':True})
    stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
    stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
    commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)#JSON data
    commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)#JSON data
    languages = Column("languages", LargeBinary(1000000), nullable=False)#JSON data

    repository = relationship('Repository', single_parent=True)
946
947
class UserFollowing(Base, BaseModel):
    """Record of one user following either a repository or another user.

    Exactly one of ``follows_repo_id`` / ``follows_user_id`` is expected to
    be set per row (both columns are nullable).
    """
    __tablename__ = 'user_followings'
    # a user may follow a given repository or user only once
    __table_args__ = (UniqueConstraint('user_id', 'follows_repository_id'),
                      UniqueConstraint('user_id', 'follows_user_id')
                      , {'extend_existing':True})

    user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
    follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)

    # disambiguated joins: one relationship for the follower, one for the followed user
    user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')

    follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
    follows_repository = relationship('Repository', order_by='Repository.repo_name')


    @classmethod
    def get_repo_followers(cls, repo_id):
        """Return a query of all UserFollowing rows for repository *repo_id*."""
        return cls.query().filter(cls.follows_repo_id == repo_id)
968
969
class CacheInvalidation(Base, BaseModel):
    """Tracks validity of named cache keys.

    ``cache_active == False`` marks a key as stale (needs regeneration);
    ``set_valid`` flips it back to active once the cache is rebuilt.
    """
    __tablename__ = 'cache_invalidation'
    __table_args__ = (UniqueConstraint('cache_key'), {'extend_existing':True})
    cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
    cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
    cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)


    def __init__(self, cache_key, cache_args=''):
        # new entries start inactive, i.e. in need of (re)generation
        self.cache_key = cache_key
        self.cache_args = cache_args
        self.cache_active = False

    def __repr__(self):
        return "<%s('%s:%s')>" % (self.__class__.__name__,
                                  self.cache_id, self.cache_key)

    @classmethod
    def invalidate(cls, key):
        """
        Returns Invalidation object if this given key should be invalidated
        None otherwise. `cache_active = False` means that this cache
        state is not valid and needs to be invalidated

        :param key:
        """
        return cls.query()\
            .filter(CacheInvalidation.cache_key == key)\
            .filter(CacheInvalidation.cache_active == False)\
            .scalar()

    @classmethod
    def set_invalidate(cls, key):
        """
        Mark this Cache key for invalidation

        :param key:
        """

        log.debug('marking %s for invalidation', key)
        inv_obj = Session.query(cls)\
            .filter(cls.cache_key == key).scalar()
        if inv_obj:
            inv_obj.cache_active = False
        else:
            # no row yet for this key -> create one (starts inactive)
            log.debug('cache key not found in invalidation db -> creating one')
            inv_obj = CacheInvalidation(key)

        try:
            Session.add(inv_obj)
            Session.commit()
        except Exception:
            log.error(traceback.format_exc())
            Session.rollback()

    @classmethod
    def set_valid(cls, key):
        """
        Mark this cache key as active and currently cached

        :param key:

        NOTE(review): unlike set_invalidate, a missing row is not handled —
        scalar() returns None and the attribute assignment below would raise
        AttributeError; presumably callers only invoke this for known keys.
        """
        inv_obj = Session.query(CacheInvalidation)\
            .filter(CacheInvalidation.cache_key == key).scalar()
        inv_obj.cache_active = True
        Session.add(inv_obj)
        Session.commit()
1037
1038
class DbMigrateVersion(Base, BaseModel):
    """Schema-migration bookkeeping table (current migrate version per repo)."""
    __tablename__ = 'db_migrate_version'
    __table_args__ = {'extend_existing':True}
    repository_id = Column('repository_id', String(250), primary_key=True)
    repository_path = Column('repository_path', Text)
    version = Column('version', Integer)
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
General Comments 0
You need to be logged in to leave comments. Login now